mirror of https://gitlab.com/mangadex-pub/mangadex_at_home.git
synced 2024-01-19 02:48:37 +00:00

Merge branch 'update-and-fix' into 'master'

Update deps and fix

See merge request mangadex-pub/mangadex_at_home!100

commit acda98ae55
CHANGELOG.md (11 changes)

@@ -17,6 +17,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Security
 
+## [2.0.3] - 2022-02-17
+
+### Changed
+- [2022-02-17] Updated dependencies [@carbotaniuman].
+
+### Fixed
+- [2022-02-17] Fix possible race condition in DB handling code [@carbotaniuman].
+- [2022-02-17] Missing ISO code no longer fails request [@carbotaniuman].
+
 ## [2.0.2] - 2022-02-16
 
 ### Removed
 - [2022-02-16] Remove TLS 1.0 and 1.1 support [@carbotaniuman].

@@ -403,7 +411,8 @@ This release contains many breaking changes! Of note are the changes to the cach
 ### Fixed
 - [2020-06-11] Tweaked logging configuration to reduce log file sizes by [@carbotaniuman].
 
-[Unreleased]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.2...HEAD
+[Unreleased]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.3...HEAD
+[2.0.3]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.2...2.0.3
 [2.0.2]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.1...2.0.2
 [2.0.1]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0...2.0.1
 [2.0.0]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0-rc14...2.0.0

build.gradle (30 changes)

@@ -1,12 +1,12 @@
 plugins {
     id "jacoco"
     id "java"
-    id "org.jetbrains.kotlin.jvm" version "1.5.31"
-    id "org.jetbrains.kotlin.kapt" version "1.5.31"
+    id "org.jetbrains.kotlin.jvm" version "1.6.0"
+    id "org.jetbrains.kotlin.kapt" version "1.6.0"
     id "application"
     id "com.github.johnrengelman.shadow" version "7.0.0"
     id "com.diffplug.spotless" version "5.8.2"
-    id "net.afanasev.sekret" version "0.1.0"
+    id "net.afanasev.sekret" version "0.1.1-RC3"
     id "com.palantir.git-version" version "0.12.3"
 }
 

@@ -28,22 +28,22 @@ configurations {
 dependencies {
     implementation "org.jetbrains.kotlin:kotlin-reflect"
 
-    compileOnly group: "net.afanasev", name: "sekret-annotation", version: "0.1.0"
+    compileOnly group: "net.afanasev", name: "sekret-annotation", version: "0.1.1-RC3"
 
-    implementation group: "commons-io", name: "commons-io", version: "2.8.0"
-    implementation group: "org.apache.commons", name: "commons-compress", version: "1.20"
+    implementation group: "commons-io", name: "commons-io", version: "2.11.0"
+    implementation group: "org.apache.commons", name: "commons-compress", version: "1.21"
     implementation group: "ch.qos.logback", name: "logback-classic", version: "1.3.0-alpha4"
 
-    implementation group: "io.micrometer", name: "micrometer-registry-prometheus", version: "1.6.2"
+    implementation group: "io.micrometer", name: "micrometer-registry-prometheus", version: "1.8.3"
     implementation group: "com.maxmind.geoip2", name: "geoip2", version: "2.15.0"
 
-    implementation platform(group: "org.http4k", name: "http4k-bom", version: "4.3.5.4")
-    implementation platform(group: "com.fasterxml.jackson", name: "jackson-bom", version: "2.12.1")
-    implementation platform(group: "io.netty", name: "netty-bom", version: "4.1.60.Final")
+    implementation platform(group: "org.http4k", name: "http4k-bom", version: "4.19.3.0")
+    implementation platform(group: "com.fasterxml.jackson", name: "jackson-bom", version: "2.13.1")
+    implementation platform(group: "io.netty", name: "netty-bom", version: "4.1.74.Final")
 
     implementation group: "org.http4k", name: "http4k-core"
     implementation group: "org.http4k", name: "http4k-resilience4j"
-    implementation group: "io.github.resilience4j", name: "resilience4j-micrometer", version: "1.6.1"
+    implementation group: "io.github.resilience4j", name: "resilience4j-micrometer", version: "1.7.1"
     implementation group: "org.http4k", name: "http4k-format-jackson"
     implementation group: "com.fasterxml.jackson.dataformat", name: "jackson-dataformat-yaml"
     implementation group: "com.fasterxml.jackson.datatype", name: "jackson-datatype-jsr310"

@@ -52,11 +52,11 @@ dependencies {
     implementation group: "org.http4k", name: "http4k-server-netty"
     implementation group: "io.netty", name: "netty-codec-haproxy"
     implementation group: "io.netty", name: "netty-transport-native-epoll", classifier: "linux-x86_64"
-    implementation group: "io.netty.incubator", name: "netty-incubator-transport-native-io_uring", version: "0.0.3.Final", classifier: "linux-x86_64"
+    implementation group: "io.netty.incubator", name: "netty-incubator-transport-native-io_uring", version: "0.0.11.Final", classifier: "linux-x86_64"
     testImplementation group: "org.http4k", name: "http4k-testing-kotest"
-    runtimeOnly group: "io.netty", name: "netty-tcnative-boringssl-static", version: "2.0.36.Final"
+    runtimeOnly group: "io.netty", name: "netty-tcnative-boringssl-static", version: "2.0.48.Final"
 
-    implementation group: "com.zaxxer", name: "HikariCP", version: "4.0.2"
+    implementation group: "com.zaxxer", name: "HikariCP", version: "4.0.3"
     implementation group: "org.xerial", name: "sqlite-jdbc", version: "3.34.0"
     implementation "org.ktorm:ktorm-core:$ktorm_version"
     implementation "org.ktorm:ktorm-jackson:$ktorm_version"

@@ -66,7 +66,7 @@ dependencies {
 
     testImplementation "io.kotest:kotest-runner-junit5:$kotest_version"
     testImplementation "io.kotest:kotest-assertions-core:$kotest_version"
-    testImplementation "io.mockk:mockk:1.10.4"
+    testImplementation "io.mockk:mockk:1.12.2"
 }
 
 tasks.withType(Test) {

gradle.properties

@@ -1,4 +1,3 @@
-http_4k_version=4.3.0.0
-kotest_version=4.6.0
-ktorm_version=3.3.0
-picocli_version=4.6.1
+kotest_version=5.1.0
+ktorm_version=3.4.1
+picocli_version=4.6.3

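The removal of http_4k_version goes hand in hand with the http4k-bom platform bumped in build.gradle above: once the BOM pins the http4k modules, no shared version property is needed. A minimal sketch of that pattern in Gradle's Kotlin DSL, for illustration only (the project itself uses the Groovy DSL; only the coordinates are taken from the diff):

// build.gradle.kts - illustrative sketch, not the project's actual build script
dependencies {
    // The BOM pins the version of every http4k module it covers...
    implementation(platform("org.http4k:http4k-bom:4.19.3.0"))
    // ...so the individual modules are declared without versions and stay in lockstep.
    implementation("org.http4k:http4k-core")
    implementation("org.http4k:http4k-server-netty")
    implementation("org.http4k:http4k-format-jackson")
}
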
src/main/kotlin/mdnet/cache/ImageStorage.kt (4 changes, vendored)

@@ -62,7 +62,7 @@ class ImageStorage(
 ) : AutoCloseable {
     private val tempCacheDirectory = cacheDirectory.resolve("tmp")
 
-    private val evictor: ScheduledExecutorService = Executors.newScheduledThreadPool(2)
+    private val evictor: ScheduledExecutorService = Executors.newScheduledThreadPool(1)
     private val queue = LinkedBlockingQueue<String>(1000)
 
     /**

@@ -113,7 +113,7 @@ class ImageStorage(
                     LOGGER.warn(e) { "Error updating LRU $this" }
                 }
            },
-            30, 30, TimeUnit.SECONDS
+            15, 30, TimeUnit.SECONDS
        )
 
        if (autoPrune) {

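The two ImageStorage hunks adjust the background evictor: the scheduled pool shrinks from two threads to one, and the LRU-update task now starts after 15 seconds instead of 30 (the 30-second period is unchanged). A minimal, self-contained sketch of that scheduling pattern, assuming the trailing `15, 30, TimeUnit.SECONDS` arguments belong to a scheduleAtFixedRate call; the class and the touchEntries callback are illustrative names, not the project's API:

import java.util.concurrent.Executors
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.TimeUnit

// Illustrative sketch only: a single-threaded evictor draining a bounded queue of
// recently accessed cache keys on a fixed schedule, as in the hunks above.
class LruToucherSketch(private val touchEntries: (List<String>) -> Unit) : AutoCloseable {
    private val evictor = Executors.newScheduledThreadPool(1)
    private val queue = LinkedBlockingQueue<String>(1000)

    init {
        // Initial delay 15 s, period 30 s - mirroring the `15, 30, TimeUnit.SECONDS` arguments.
        evictor.scheduleAtFixedRate({
            val batch = mutableListOf<String>()
            queue.drainTo(batch)
            if (batch.isNotEmpty()) {
                try {
                    touchEntries(batch)
                } catch (e: Exception) {
                    // In the spirit of the "Error updating LRU" warning: swallow so the schedule keeps running.
                }
            }
        }, 15, 30, TimeUnit.SECONDS)
    }

    // Record an access; silently drops the key if the queue is already full.
    fun markAccessed(key: String) {
        queue.offer(key)
    }

    override fun close() {
        evictor.shutdown()
    }
}

A single-threaded pool also means scheduled maintenance tasks cannot overlap, which is presumably related to the changelog's "possible race condition in DB handling code" entry.
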
@@ -62,8 +62,11 @@ class GeoIpMetricsFilter(
             val inetAddress = InetAddress.getByName(sourceIp)
             if (!inetAddress.isLoopbackAddress && !inetAddress.isAnyLocalAddress) {
                 val country = databaseReader!!.country(inetAddress)
-                recordCountry(country.country.isoCode)
+
+                if (country.country.isoCode != null) {
+                    recordCountry(country.country.isoCode)
+                }
             }
         } catch (e: GeoIp2Exception) {
             // do not disclose ip here, for privacy of logs
             LOGGER.warn { "Cannot resolve the country of the request's IP!" }

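This is the changelog's "Missing ISO code no longer fails request" fix: with the MaxMind GeoIP2 API, a lookup can succeed yet come back without a country ISO code, and the previously unguarded recordCountry(country.country.isoCode) call then failed the request. A minimal sketch of the guarded lookup, assuming the standard com.maxmind.geoip2.DatabaseReader API; recordCountry stands in for the filter's metrics hook:

import com.maxmind.geoip2.DatabaseReader
import com.maxmind.geoip2.exception.GeoIp2Exception
import java.io.File
import java.net.InetAddress

// Illustrative sketch only - not the project's GeoIpMetricsFilter.
fun recordCountryOf(reader: DatabaseReader, sourceIp: String, recordCountry: (String) -> Unit) {
    try {
        val inetAddress = InetAddress.getByName(sourceIp)
        if (!inetAddress.isLoopbackAddress && !inetAddress.isAnyLocalAddress) {
            // `isoCode` is a nullable platform type: the database may know the IP but not its country.
            val isoCode = reader.country(inetAddress).country.isoCode
            if (isoCode != null) {
                recordCountry(isoCode)
            }
        }
    } catch (e: GeoIp2Exception) {
        // As in the original: log nothing that identifies the IP, for privacy of logs.
    }
}

// Opening the reader from a downloaded GeoIP2/GeoLite2 country database file.
fun openReader(databaseFile: File): DatabaseReader = DatabaseReader.Builder(databaseFile).build()
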
@@ -189,7 +189,7 @@ class Netty(
             object : ChannelInboundHandlerAdapter() {
                 override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
                     if (msg is ByteBuf) {
-                        // Since the builtin `HAProxyMessageDecoder` will break non Proxy Protocol requests
+                        // Since the builtin `HAProxyMessageDecoder` will break non Proxy-Protocol requests
                         // we need to use its detection capabilities to only add it when needed.
                         val result: ProtocolDetectionResult<HAProxyProtocolVersion> = HAProxyMessageDecoder.detectProtocol(msg)
                         if (result.state() == ProtocolDetectionState.DETECTED) {

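The comment fixed here describes the surrounding logic: HAProxyMessageDecoder would break connections that do not start with a PROXY protocol header, so the pipeline first peeks at the initial bytes with the decoder's static detectProtocol and only installs the decoder when the header is actually present. A rough sketch of that detect-then-install pattern (handler and pipeline names are illustrative, not the project's):

import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.ProtocolDetectionState
import io.netty.handler.codec.haproxy.HAProxyMessageDecoder

// Illustrative sketch only - a one-shot probe placed at the front of the pipeline.
class OptionalHaProxyDetector : ChannelInboundHandlerAdapter() {
    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
        if (msg is ByteBuf) {
            // detectProtocol() only peeks at the buffer, so the bytes can still be passed on afterwards.
            val result = HAProxyMessageDecoder.detectProtocol(msg)
            if (result.state() == ProtocolDetectionState.DETECTED) {
                // PROXY protocol header present: splice the real decoder in right after this probe.
                ctx.pipeline().addAfter(ctx.name(), "haProxyDecoder", HAProxyMessageDecoder())
            }
            // Either way this probe is done (a fuller version would also handle
            // ProtocolDetectionState.NEEDS_MORE_DATA by waiting for more bytes).
            ctx.pipeline().remove(this)
        }
        ctx.fireChannelRead(msg)
    }
}
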
@@ -250,7 +250,7 @@ class Netty(
             "setForwardHeader",
             object : SimpleChannelInboundHandler<FullHttpRequest>(false) {
                 override fun channelRead0(ctx: ChannelHandlerContext, request: FullHttpRequest) {
-                    // The geo location code already supports the `Forwarded header so setting
+                    // The geo-location code already supports the `Forwarded` header so setting
                     // it is the easiest way to introduce the original IP downstream.
                     if ((ctx as AttributeMap).hasAttr(HAPROXY_SOURCE)) {
                         val addr = (ctx as AttributeMap).attr(HAPROXY_SOURCE).get()

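The handler around this comment bridges the PROXY protocol information into HTTP: an earlier handler stashes the real client address in a channel attribute (HAPROXY_SOURCE in the diff), and this one rewrites the request's Forwarded header (RFC 7239) so the geo-location code downstream sees the original IP instead of the proxy's. A rough sketch under those assumptions; the attribute key, its value type, and the handler name are illustrative:

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.handler.codec.http.FullHttpRequest
import io.netty.util.AttributeKey
import java.net.InetSocketAddress

// Illustrative sketch only - not the project's actual handler or attribute key.
val HAPROXY_SOURCE: AttributeKey<InetSocketAddress> = AttributeKey.valueOf("haproxy-source")

class SetForwardHeaderSketch : SimpleChannelInboundHandler<FullHttpRequest>(false) {
    override fun channelRead0(ctx: ChannelHandlerContext, request: FullHttpRequest) {
        if (ctx.channel().hasAttr(HAPROXY_SOURCE)) {
            val addr = ctx.channel().attr(HAPROXY_SOURCE).get()
            val ip = addr.address?.hostAddress ?: addr.hostString
            // RFC 7239 form, e.g. `Forwarded: for=192.0.2.60`; downstream code reads this header.
            request.headers().set("Forwarded", "for=$ip")
        }
        // autoRelease is false above, so the request is handed on untouched rather than released here.
        ctx.fireChannelRead(request)
    }
}
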