Add Last-Modified for easy caching

commit 902798d868 (parent 4238717460)
@@ -7,7 +7,7 @@ plugins {
 }
 
 group = 'com.mangadex'
-version = '1.0.0-rc2'
+version = '1.0.0-rc3'
 mainClassName = 'mdnet.base.MangadexClient'
 
 repositories {
@@ -37,7 +37,7 @@ public class MangadexClient {
         this.statistics = new AtomicReference<>();
 
         try {
-            cache = DiskLruCache.open(new File("cache"), 1, 2,
+            cache = DiskLruCache.open(new File("cache"), 2, 3,
                     clientSettings.getMaxCacheSizeMib() * 1024 * 1024 /* MiB to bytes */);
         } catch (IOException e) {
             MangadexClient.dieWithError(e);
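Note on the DiskLruCache.open() change above: the second argument is the cache's appVersion and the third is valueCount, the number of values stored per key. Bumping appVersion from 1 to 2 makes existing journals read as outdated (so the old two-value cache is wiped, matching the new log message below), and valueCount 3 makes room for the Last-Modified timestamp alongside the image bytes and the Content-Type. A minimal Kotlin sketch of the resulting slot layout, assuming the upstream DiskLruCache Editor API (commit() is assumed from that API; the key, size, and sample values are illustrative and encryption is omitted):

    import java.io.File
    import mdnet.cache.DiskLruCache

    fun cacheRoundTrip() {
        // appVersion = 2, valueCount = 3:
        //   slot 0 = (encrypted) image bytes, slot 1 = Content-Type, slot 2 = Last-Modified
        val cache = DiskLruCache.open(File("cache"), 2, 3, 1024L * 1024 * 1024)

        cache.edit("0a1b2c3d")?.apply {
            setString(1, "image/png")
            setString(2, "Wed, 10 Jun 2020 00:00:00 GMT")
            newOutputStream(0).use { it.write(ByteArray(0)) } // the real client wraps this in a CipherOutputStream
            commit()
        }

        cache.get("0a1b2c3d")?.let { snapshot ->
            val contentType = snapshot.getString(1)
            val lastModified = snapshot.getString(2)
            snapshot.close()
            println("$contentType, last modified $lastModified")
        }
    }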
src/main/java/mdnet/cache/DiskLruCache.java (vendored, 15 lines changed)
@@ -228,9 +228,9 @@ public final class DiskLruCache implements Closeable {
                 cache.readJournal();
                 cache.processJournal();
                 return cache;
-            } catch (IOException journalIsCorrupt) {
+            } catch (IOException e) {
                 if (LOGGER.isWarnEnabled()) {
-                    LOGGER.warn("DiskLruCache " + directory + " is corrupt - removing", journalIsCorrupt);
+                    LOGGER.warn("DiskLruCache " + directory + " is corrupt/outdated - removing");
                 }
                 cache.delete();
             }
@@ -600,7 +600,7 @@ public final class DiskLruCache implements Closeable {
         }
 
         redundantOpCount++;
-        journalWriter.append(REMOVE + ' ' + key + '\n');
+        journalWriter.append(REMOVE).append(' ').append(key).append('\n');
         lruEntries.remove(key);
 
         if (journalRebuildRequired()) {
@@ -694,10 +694,15 @@ public final class DiskLruCache implements Closeable {
             return ins[index];
         }
 
-        /** Returns the string value for {@code index}. */
+        /**
+         * Returns the string value for {@code index}. This consumes the InputStream!
+         */
         public String getString(int index) throws IOException {
-            try (InputStream in = getInputStream(index)) {
+            InputStream in = getInputStream(index);
+            try {
                 return IOUtils.toString(in, StandardCharsets.UTF_8);
+            } finally {
+                Util.closeQuietly(in);
             }
         }
 
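The Javadoc warning added above matters because a Snapshot hands out one InputStream per value slot: getString() now reads that stream to the end and closes it. A small sketch of the safe ordering, assuming the upstream DiskLruCache.Snapshot type and a snapshot obtained from cache.get(...) (the function name is only illustrative):

    import java.io.InputStream
    import mdnet.cache.DiskLruCache

    fun readSnapshot(snapshot: DiskLruCache.Snapshot): InputStream {
        val contentType = snapshot.getString(1)   // reads and closes the slot-1 stream
        val lastModified = snapshot.getString(2)  // reads and closes the slot-2 stream
        println("$contentType, last modified $lastModified")
        // calling getString(1) again here would throw: that stream is already closed
        return snapshot.getInputStream(0)         // slot 0 is untouched and can be streamed to the client
    }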
@@ -14,6 +14,7 @@ import org.http4k.core.Request
 import org.http4k.core.Response
 import org.http4k.core.Status
 import org.http4k.core.then
+import org.http4k.filter.CachingFilters
 import org.http4k.filter.MaxAgeTtl
 import org.http4k.filter.ServerFilters
 import org.http4k.lens.Path
@@ -55,21 +56,38 @@ fun getServer(cache: DiskLruCache, serverSettings: ServerSettings, clientSetting
             statistics.get().requestsServed.incrementAndGet()
 
             // Netty doesn't do Content-Length or Content-Type, so we have the pleasure of doing that ourselves
-            fun respond(input: InputStream, length: String, type: String): Response =
+            fun respondWithImage(input: InputStream, length: String, type: String, lastModified: String): Response =
                 Response(Status.OK).header("Content-Length", length)
                     .header("Content-Type", type)
                     .header("X-Content-Type-Options", "nosniff")
+                    .header("Last-Modified", lastModified)
+                    .header("Cache-Control", listOf("public", MaxAgeTtl(Constants.MAX_AGE_CACHE).toHeaderValue()).joinToString(", "))
+                    .header("Timing-Allow-Origin", "https://mangadex.org")
                     .body(input, length.toLong())
 
             val snapshot = cache.get(cacheId)
             if (snapshot != null) {
                 statistics.get().cacheHits.incrementAndGet()
-                if (LOGGER.isTraceEnabled) {
-                    LOGGER.trace("Request for $chapterHash/$fileName hit cache")
-                }
 
-                respond(CipherInputStream(snapshot.getInputStream(0), getRc4(cacheId)),
-                    snapshot.getLength(0).toString(), snapshot.getString(1))
+                // our files never change, so it's safe to use the browser cache
+                if (request.header("If-Modified-Since") != null) {
+                    if (LOGGER.isTraceEnabled) {
+                        LOGGER.trace("Request for $chapterHash/$fileName cached by browser")
+                    }
+
+                    val lastModified = snapshot.getString(2)
+                    snapshot.close()
+
+                    Response(Status.NOT_MODIFIED)
+                        .header("Last-Modified", lastModified)
+                } else {
+                    if (LOGGER.isTraceEnabled) {
+                        LOGGER.trace("Request for $chapterHash/$fileName hit cache")
+                    }
+
+                    respondWithImage(CipherInputStream(snapshot.getInputStream(0), getRc4(cacheId)),
+                        snapshot.getLength(0).toString(), snapshot.getString(1), snapshot.getString(2))
+                }
             } else {
                 statistics.get().cacheMisses.incrementAndGet()
                 if (LOGGER.isTraceEnabled) {
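Taken together, the handler changes above implement standard HTTP revalidation: every image response now carries Last-Modified (plus Cache-Control and Timing-Allow-Origin), and because a cached image never changes, any request arriving with an If-Modified-Since header can be answered with 304 Not Modified without re-sending the body. A stripped-down http4k sketch of that flow (the handler name, max-age value, and sample data are illustrative; the real code also decrypts the cached body and sets Content-Length and Content-Type):

    import org.http4k.core.HttpHandler
    import org.http4k.core.Method
    import org.http4k.core.Request
    import org.http4k.core.Response
    import org.http4k.core.Status

    fun imageHandler(lastModified: String, body: String): HttpHandler = { request ->
        if (request.header("If-Modified-Since") != null) {
            // the image behind this URL never changes, so no timestamp comparison is needed
            Response(Status.NOT_MODIFIED).header("Last-Modified", lastModified)
        } else {
            Response(Status.OK)
                .header("Last-Modified", lastModified)
                .header("Cache-Control", "public, max-age=604800")
                .body(body)
        }
    }

    fun main() {
        val handler = imageHandler("Wed, 10 Jun 2020 00:00:00 GMT", "image-bytes")
        val first = handler(Request(Method.GET, "/data/abc/1.png"))
        val revalidation = handler(
            Request(Method.GET, "/data/abc/1.png")
                .header("If-Modified-Since", first.header("Last-Modified")!!)
        )
        println(revalidation.status) // 304 Not Modified
    }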
@@ -89,6 +107,8 @@ fun getServer(cache: DiskLruCache, serverSettings: ServerSettings, clientSetting
 
             val editor = cache.edit(cacheId)
 
+            val lastModified = HTTP_TIME_FORMATTER.format(ZonedDateTime.now(ZoneOffset.UTC))
+
             // A null editor means that this file is being written to
             // concurrently so we skip the cache process
             if (editor != null) {
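HTTP_TIME_FORMATTER above is defined elsewhere in the file; for the Last-Modified and Date headers to be usable by caches it has to emit the RFC 1123 date format. A stand-in sketch assuming it is java.time's built-in RFC 1123 formatter:

    import java.time.ZoneOffset
    import java.time.ZonedDateTime
    import java.time.format.DateTimeFormatter

    // assumed definition; produces e.g. "Wed, 10 Jun 2020 12:34:56 GMT"
    val HTTP_TIME_FORMATTER: DateTimeFormatter = DateTimeFormatter.RFC_1123_DATE_TIME

    fun main() {
        println(HTTP_TIME_FORMATTER.format(ZonedDateTime.now(ZoneOffset.UTC)))
    }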
@@ -96,6 +116,7 @@ fun getServer(cache: DiskLruCache, serverSettings: ServerSettings, clientSetting
                     LOGGER.trace("Request for $chapterHash/$fileName is being cached and served")
                 }
                 editor.setString(1, contentType)
+                editor.setString(2, lastModified)
 
                 val tee = CachingInputStream(mdResponse.body.stream,
                     executor, CipherOutputStream(editor.newOutputStream(0), getRc4(cacheId))) {
@@ -115,18 +136,20 @@ fun getServer(cache: DiskLruCache, serverSettings: ServerSettings, clientSetting
                         editor.abort()
                     }
                 }
-                respond(tee, contentLength, contentType)
+                respondWithImage(tee, contentLength, contentType, lastModified)
             } else {
                 if (LOGGER.isTraceEnabled) {
                     LOGGER.trace("Request for $chapterHash/$fileName is being served")
                 }
 
-                respond(mdResponse.body.stream, contentLength, contentType)
+                respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified)
             }
         }
     }
 }
 
+    CachingFilters
+
     return catchAllHideDetails()
         .then(ServerFilters.CatchLensFailure)
         .then(addCommonHeaders())
@@ -152,8 +175,6 @@ private fun addCommonHeaders(): Filter {
         val response = next(request)
         response.header("Date", HTTP_TIME_FORMATTER.format(ZonedDateTime.now(ZoneOffset.UTC)))
             .header("Server", "Mangadex@Home Node")
-            .header("Cache-Control", listOf("public", MaxAgeTtl(Constants.MAX_AGE_CACHE).toHeaderValue()).joinToString(", "))
-            .header("Timing-Allow-Origin", "https://mangadex.org")
         }
     }
 }
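With this last hunk the Cache-Control and Timing-Allow-Origin headers move off the catch-all filter and onto the image responses themselves, so the common-headers filter keeps only headers that apply to every response. A minimal sketch of that filter shape in http4k (simplified and renamed to avoid implying it is the real private fun; the real filter also stamps Date using HTTP_TIME_FORMATTER):

    import org.http4k.core.Filter
    import org.http4k.core.Method
    import org.http4k.core.Request
    import org.http4k.core.Response
    import org.http4k.core.Status
    import org.http4k.core.then

    fun commonHeaders(): Filter = Filter { next ->
        { request ->
            // only headers every response should carry stay here
            next(request).header("Server", "Mangadex@Home Node")
        }
    }

    fun main() {
        val app = commonHeaders().then { _: Request -> Response(Status.OK).body("ok") }
        println(app(Request(Method.GET, "/")).header("Server")) // Mangadex@Home Node
    }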