2021-01-24 04:55:11 +00:00
|
|
|
/*
|
|
|
|
Mangadex@Home
|
|
|
|
Copyright (c) 2020, MangaDex Network
|
|
|
|
This file is part of MangaDex@Home.
|
|
|
|
|
|
|
|
MangaDex@Home is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
MangaDex@Home is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
|
|
|
along with this MangaDex@Home. If not, see <http://www.gnu.org/licenses/>.
|
2021-01-25 02:25:49 +00:00
|
|
|
*/
|
2021-01-24 04:55:11 +00:00
|
|
|
package mdnet.server
|
|
|
|
|
|
|
|
import com.fasterxml.jackson.core.JsonProcessingException
|
|
|
|
import com.fasterxml.jackson.databind.DeserializationFeature
|
|
|
|
import com.fasterxml.jackson.databind.ObjectMapper
|
|
|
|
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
|
|
|
|
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
|
|
|
|
import com.fasterxml.jackson.module.kotlin.readValue
|
|
|
|
import io.micrometer.core.instrument.FunctionCounter
|
|
|
|
import io.micrometer.core.instrument.Timer
|
|
|
|
import io.micrometer.prometheus.PrometheusMeterRegistry
|
|
|
|
import mdnet.Constants
|
|
|
|
import mdnet.cache.CachingInputStream
|
|
|
|
import mdnet.cache.Image
|
|
|
|
import mdnet.cache.ImageMetadata
|
|
|
|
import mdnet.cache.ImageStorage
|
|
|
|
import mdnet.data.Statistics
|
|
|
|
import mdnet.data.Token
|
|
|
|
import mdnet.logging.info
|
|
|
|
import mdnet.logging.trace
|
|
|
|
import mdnet.logging.warn
|
|
|
|
import mdnet.metrics.GeoIpMetricsFilterBuilder
|
|
|
|
import mdnet.metrics.PostTransactionLabeler
|
|
|
|
import mdnet.netty.Netty
|
|
|
|
import mdnet.security.TweetNaclFast
|
|
|
|
import mdnet.settings.MetricsSettings
|
|
|
|
import mdnet.settings.RemoteSettings
|
|
|
|
import mdnet.settings.ServerSettings
|
|
|
|
import org.http4k.core.*
|
|
|
|
import org.http4k.filter.CachingFilters
|
|
|
|
import org.http4k.filter.ClientFilters
|
|
|
|
import org.http4k.filter.MicrometerMetrics
|
|
|
|
import org.http4k.filter.ServerFilters
|
|
|
|
import org.http4k.lens.LensFailure
|
|
|
|
import org.http4k.lens.Path
|
|
|
|
import org.http4k.routing.bind
|
|
|
|
import org.http4k.routing.routes
|
|
|
|
import org.http4k.server.Http4kServer
|
|
|
|
import org.http4k.server.asServer
|
|
|
|
import org.slf4j.LoggerFactory
|
|
|
|
import java.io.BufferedInputStream
|
|
|
|
import java.io.BufferedOutputStream
|
|
|
|
import java.io.InputStream
|
|
|
|
import java.time.Clock
|
|
|
|
import java.time.OffsetDateTime
|
|
|
|
import java.util.*
|
|
|
|
import java.util.concurrent.Executors
|
|
|
|
|
|
|
|
// Logger shared by the image server, filters, and token verifier in this file.
private val LOGGER = LoggerFactory.getLogger(ImageServer::class.java)

// JSON mapper used to deserialize security tokens (see tokenVerifier).
// Unknown fields are ignored so token payloads can gain fields without
// breaking this client; JavaTimeModule is registered for the token's
// OffsetDateTime `expires` field.
private val JACKSON: ObjectMapper = jacksonObjectMapper()
    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    .registerModule(JavaTimeModule())
|
|
|
|
|
|
|
|
/**
 * Serves manga images from a local cache, falling back to (and populating
 * the cache from) the MangaDex upstream on a miss.
 *
 * @param storage backing image cache used for lookups and writes
 * @param upstream handler used to fetch images that are not in [storage]
 * @param registry metrics registry the cache-lookup timer is registered with
 */
class ImageServer(
    private val storage: ImageStorage,
    private val upstream: HttpHandler,
    registry: PrometheusMeterRegistry
) {
    // Pool used by CachingInputStream to copy upstream bytes into the cache
    // off the client-response path.
    private val executor = Executors.newCachedThreadPool()

    // Times storage.loadImage() calls and publishes latency percentiles.
    private val cacheLookupTimer = Timer
        .builder("cache_lookup")
        .publishPercentiles(0.5, 0.75, 0.9, 0.99)
        .register(registry)

    // This is part of the ImageServer, and it expects `chapterHash` and `fileName` path segments.
    /**
     * Builds the image-serving handler for either the data-saver (reduced) or
     * full-quality namespace. Looks the image up in the cache first; on a hit
     * it is served locally, otherwise the request is proxied upstream.
     */
    fun handler(dataSaver: Boolean): HttpHandler = baseHandler().then { request ->
        val chapterHash = Path.of("chapterHash")(request)
        val fileName = Path.of("fileName")(request)

        // Upstream-facing URI rebuilt from the path segments only — any token
        // segment from the original request is dropped.
        val sanitizedUri = if (dataSaver) {
            "/data-saver"
        } else {
            "/data"
        } + "/$chapterHash/$fileName"

        // Cache key: hex MD5 of the (namespace-prefixed) chapter/file pair.
        // md5Bytes/printHexString are defined elsewhere in this module.
        val imageId = if (dataSaver) {
            md5Bytes("saver$chapterHash.$fileName")
        } else {
            md5Bytes("$chapterHash.$fileName")
        }.let {
            printHexString(it)
        }

        val image: Image? = cacheLookupTimer.recordCallable { storage.loadImage(imageId) }

        if (image != null) {
            request.handleCacheHit(sanitizedUri, image)
        } else {
            request.handleCacheMiss(sanitizedUri, imageId)
        }
    }

    /**
     * Serves an image found in the local cache. Because cached files never
     * change, any `If-Modified-Since` header is answered with 304 without
     * comparing dates.
     */
    private fun Request.handleCacheHit(sanitizedUri: String, image: Image): Response {
        // our files never change, so it's safe to use the browser cache
        return if (this.header("If-Modified-Since") != null) {
            LOGGER.info { "Request for $sanitizedUri cached by browser" }

            val lastModified = image.data.lastModified

            Response(Status.NOT_MODIFIED)
                .header("Last-Modified", lastModified)
        } else {
            LOGGER.info { "Request for $sanitizedUri is being served" }

            respondWithImage(
                BufferedInputStream(image.stream),
                image.data.size, image.data.contentType, image.data.lastModified,
                true
            )
        }
    }

    /**
     * Fetches the image from upstream and streams it to the client. When the
     * upstream response carries the headers needed for caching, the stream is
     * simultaneously teed into [storage]; otherwise it is passed through
     * uncached.
     */
    private fun Request.handleCacheMiss(sanitizedUri: String, imageId: String): Response {
        val mdResponse = upstream(Request(Method.GET, sanitizedUri))

        if (mdResponse.status != Status.OK) {
            LOGGER.warn { "Upstream query for $sanitizedUri errored with status ${mdResponse.status}" }

            mdResponse.close()
            return Response(mdResponse.status)
        }

        // NOTE(review): upstream is assumed to always send Content-Type on a
        // 200 — confirm; a missing header here would throw an NPE.
        val contentType = mdResponse.header("Content-Type")!!
        val contentLength = mdResponse.header("Content-Length")?.toInt()
        val lastModified = mdResponse.header("Last-Modified")

        if (!contentType.isImageMimetype()) {
            LOGGER.warn { "Upstream query for $sanitizedUri returned bad mimetype $contentType" }
            mdResponse.close()
            return Response(Status.INTERNAL_SERVER_ERROR)
        }

        // bad upstream responses mean we can't cache, so bail
        if (contentLength == null || lastModified == null) {
            LOGGER.info { "Request for $sanitizedUri is being served due to upstream issues" }
            return respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
        }

        LOGGER.trace { "Upstream query for $sanitizedUri succeeded" }

        val writer = storage.storeImage(imageId, ImageMetadata(contentType, lastModified, contentLength))

        // A null writer means that this file is being written to
        // concurrently so we skip the cache process
        return if (writer != null) {
            LOGGER.info { "Request for $sanitizedUri is being cached and served" }

            // Tee: bytes go to the client and, via `executor`, into the cache
            // writer. The trailing lambda runs when the copy finishes and
            // commits (or aborts) the cache entry based on the byte count.
            val tee = CachingInputStream(
                mdResponse.body.stream,
                executor, BufferedOutputStream(writer.stream),
            ) {
                try {
                    if (writer.commit(contentLength)) {
                        LOGGER.info { "Cache download for $sanitizedUri committed" }
                    } else {
                        LOGGER.info { "Cache download for $sanitizedUri aborted" }
                    }
                } catch (e: Exception) {
                    LOGGER.warn(e) { "Cache go/no go for $sanitizedUri failed" }
                }
            }
            respondWithImage(tee, contentLength, contentType, lastModified, false)
        } else {
            LOGGER.info { "Request for $sanitizedUri is being served" }
            respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
        }
    }

    /**
     * Builds a 200 response streaming [input] to the client.
     *
     * @param length body size if known; when null the response is chunked
     * @param lastModified echoed as `Last-Modified` when present
     * @param cached reported via the `X-Cache` header as HIT or MISS
     */
    private fun respondWithImage(input: InputStream, length: Int?, type: String, lastModified: String?, cached: Boolean): Response =
        Response(Status.OK)
            .header("Content-Type", type)
            .header("X-Content-Type-Options", "nosniff")
            .let {
                if (length != null) {
                    it.body(input, length.toLong()).header("Content-Length", length.toString())
                } else {
                    it.body(input).header("Transfer-Encoding", "chunked")
                }
            }
            .let {
                if (lastModified != null) {
                    it.header("Last-Modified", lastModified)
                } else {
                    it
                }
            }
            .header("X-Cache", if (cached) "HIT" else "MISS")

    companion object {
        /**
         * Common filter for every image response: max-age browser caching
         * plus CORS/timing headers restricted to mangadex.org.
         */
        private fun baseHandler(): Filter =
            CachingFilters.Response.MaxAge(Clock.systemUTC(), Constants.MAX_AGE_CACHE)
                .then { next: HttpHandler ->
                    { request: Request ->
                        val response = next(request)
                        response.header("access-control-allow-origin", "https://mangadex.org")
                            .header("access-control-allow-headers", "*")
                            .header("access-control-allow-methods", "GET")
                            .header("timing-allow-origin", "https://mangadex.org")
                    }
                }
    }
}
|
|
|
|
|
|
|
|
// True when this Content-Type value denotes an image. Uses a locale-independent
// case-insensitive comparison: the previous `toLowerCase()` lower-cased with the
// default locale, which mis-handles 'I' under e.g. the Turkish locale (and the
// no-arg form is deprecated in current Kotlin).
private fun String.isImageMimetype() = this.startsWith("image/", ignoreCase = true)
|
|
|
|
|
|
|
|
/**
 * Wires up and returns the complete MangaDex@Home HTTP server: tokenized and
 * plain image routes, the Prometheus scrape endpoint, metrics/geo-IP filters,
 * and the TLS-enabled Netty backend.
 *
 * @param storage local image cache backing the [ImageServer]
 * @param remoteSettings settings pushed by the MangaDex backend (image server
 *   URI, token key, TLS material)
 * @param serverSettings local server configuration passed to the Netty backend
 * @param metricsSettings geo-IP metrics configuration
 * @param statistics mutable counters; `bytesSent` is exported as the
 *   `client_sent_bytes` Prometheus counter
 * @param registry Prometheus registry shared by all metrics filters
 * @param client underlying HTTP client used to reach the upstream image server
 */
fun getServer(
    storage: ImageStorage,
    remoteSettings: RemoteSettings,
    serverSettings: ServerSettings,
    metricsSettings: MetricsSettings,
    statistics: Statistics,
    registry: PrometheusMeterRegistry,
    client: HttpHandler
): Http4kServer {
    // Upstream pipeline: rebase requests onto the image server, time them,
    // then hand off to the real client.
    val upstream =
        ClientFilters.SetBaseUriFrom(remoteSettings.imageServer)
            .then(ClientFilters.MicrometerMetrics.RequestTimer(registry))
            .then(client)

    val imageServer = ImageServer(
        storage = storage,
        upstream = upstream,
        registry = registry
    )

    // Export total bytes sent to clients as a monotonically-read counter.
    FunctionCounter.builder(
        "client_sent_bytes",
        statistics,
        { it.bytesSent.get().toDouble() }
    ).register(registry)

    // Token checks are skipped when disabled globally or for two hard-coded
    // chapter hashes (presumably MangaDex health-check/test chapters — confirm).
    val verifier = tokenVerifier(
        tokenKey = remoteSettings.tokenKey,
        shouldVerify = { chapter, _ ->
            !remoteSettings.disableTokens && !(chapter == "1b682e7b24ae7dbdc5064eeeb8e8e353" || chapter == "8172a46adc798f4f4ace6663322a383e")
        }
    )

    // Filter order matters: request timing/logging first, then common headers,
    // then the catch-all error hider, then routing.
    return timeRequest()
        .then(addCommonHeaders())
        .then(catchAllHideDetails())
        .then(
            routes(
                "/{token}/data/{chapterHash}/{fileName}" bind Method.GET to verifier.then(
                    imageServer.handler(
                        dataSaver = false,
                    )
                ),
                "/{token}/data-saver/{chapterHash}/{fileName}" bind Method.GET to verifier.then(
                    imageServer.handler(
                        dataSaver = true,
                    )
                ),
                // Token-less routes still pass through the verifier; whether a
                // token is required is decided by `shouldVerify` per chapter.
                "/data/{chapterHash}/{fileName}" bind Method.GET to verifier.then(
                    imageServer.handler(
                        dataSaver = false,
                    )
                ),
                "/data-saver/{chapterHash}/{fileName}" bind Method.GET to verifier.then(
                    imageServer.handler(
                        dataSaver = true,
                    )
                ),
                "/prometheus" bind Method.GET to {
                    Response(Status.OK).body(registry.scrape())
                }
            ).withFilter(
                ServerFilters.MicrometerMetrics.RequestTimer(registry, labeler = PostTransactionLabeler())
            ).withFilter(
                GeoIpMetricsFilterBuilder(metricsSettings.enableGeoip, metricsSettings.geoipLicenseKey, registry).build()
            )
        )
        // NOTE(review): tls is asserted non-null here — the remote is expected
        // to have supplied TLS material before the server starts; confirm.
        .asServer(Netty(remoteSettings.tls!!, serverSettings, statistics))
}
|
|
|
|
|
|
|
|
/**
 * Filter that logs the arrival of each request and its time-to-first-byte,
 * with the leading token path segment masked as `{token}` in the logged URI.
 */
fun timeRequest(): Filter = Filter { handler: HttpHandler ->
    { req: Request ->
        val maskedPath = req.uri.path.replaceBefore("/data", "/{token}")
        LOGGER.info { "Request for $maskedPath received" }

        val begin = System.currentTimeMillis()
        val result = handler(req)
        val elapsed = System.currentTimeMillis() - begin

        LOGGER.info { "Request for $maskedPath completed (TTFB) in ${elapsed}ms" }
        result
    }
}
|
|
|
|
|
|
|
|
/**
 * Filter that validates the NaCl-sealed security token carried in the
 * `{token}` path segment before letting an image request through.
 *
 * Token wire format: URL-safe base64 of `nonce(24 bytes) || secret-box
 * ciphertext`, where the box opens to a JSON [Token] with an `expires`
 * timestamp and the chapter `hash` it grants access to.
 *
 * @param tokenKey shared secret-box key distributed by the backend
 * @param shouldVerify predicate over (chapterHash, fileName); when it returns
 *   false the request is passed through without any token check
 */
fun tokenVerifier(tokenKey: ByteArray, shouldVerify: (String, String) -> Boolean): Filter {
    val box = TweetNaclFast.SecretBox(tokenKey)

    return Filter { next ->
        // Labeled lambda so validation failures can short-circuit with an
        // error response via return@then.
        then@{
            val chapterHash = Path.of("chapterHash")(it)
            val fileName = Path.of("fileName")(it)

            if (shouldVerify(chapterHash, fileName)) {
                // Token value is masked in every log line.
                val cleanedUri = it.uri.path.replaceBefore("/data", "/{token}")

                // Extract and base64-decode the token segment; missing or
                // malformed tokens are rejected with 403.
                val tokenArr = try {
                    val toDecode = try {
                        Path.of("token")(it)
                    } catch (e: LensFailure) {
                        LOGGER.info(e) { "Request for $cleanedUri rejected for missing token" }
                        return@then Response(Status.FORBIDDEN).body("Token is missing")
                    }
                    Base64.getUrlDecoder().decode(toDecode)
                } catch (e: IllegalArgumentException) {
                    LOGGER.info(e) { "Request for $cleanedUri rejected for non-base64 token" }
                    return@then Response(Status.FORBIDDEN).body("Token is invalid base64")
                }
                // Must at least contain the 24-byte nonce.
                if (tokenArr.size < 24) {
                    LOGGER.info { "Request for $cleanedUri rejected for invalid token" }
                    return@then Response(Status.FORBIDDEN)
                }
                // Open the secret box (ciphertext after the 24-byte nonce) and
                // parse the plaintext JSON. box.open returns null on forgery;
                // the return@then inside .apply aborts before readValue runs.
                val token = try {
                    JACKSON.readValue<Token>(
                        box.open(tokenArr.sliceArray(24 until tokenArr.size), tokenArr.sliceArray(0 until 24)).apply {
                            if (this == null) {
                                LOGGER.info { "Request for $cleanedUri rejected for invalid token" }
                                return@then Response(Status.FORBIDDEN)
                            }
                        }
                    )
                } catch (e: JsonProcessingException) {
                    LOGGER.info(e) { "Request for $cleanedUri rejected for invalid token" }
                    return@then Response(Status.FORBIDDEN).body("Token is invalid")
                }

                if (OffsetDateTime.now().isAfter(token.expires)) {
                    LOGGER.info { "Request for $cleanedUri rejected for expired token" }
                    return@then Response(Status.GONE).body("Token has expired")
                }

                // A token is only valid for the chapter it was minted for.
                if (token.hash != chapterHash) {
                    LOGGER.info { "Request for $cleanedUri rejected for inapplicable token" }
                    return@then Response(Status.FORBIDDEN).body("Token is inapplicable for the image")
                }
            }

            return@then next(it)
        }
    }
}
|