Add Panda Chaika Extension (#3801)
* Add Panda Chaika Extension - Add "Panda Chaika" extension from panda.chaika.moe * Add Support for Zip64 - Add support for Zip64 type of .zip ( large zip [ size/pages ] ) -> For Example: https://panda.chaika.moe/archive/49406/ - Use Little Endian for All signatures - Apply AwkwardPeak7's suggestions * Fix null Genres? * Fix "null" genre if there's no genre * Fix mistakes caused by previous commit Sorry... * Improve description readability - Sorry for the commits spam - Make manga description more readable * Fix Broken Filters, Apply Suggestions * Apply suggestions - Apply AwkwardPeak's suggestions
This commit is contained in:
parent
4d764ef0a9
commit
2dbf798f0c
|
@ -0,0 +1,8 @@
|
||||||
|
// Extension metadata consumed by the shared build logic in common.gradle.
ext {
    extName = 'PandaChaika'
    extClass = '.PandaChaikaFactory'
    extVersionCode = 1
    isNsfw = true
}

apply from: "$rootDir/common.gradle"
|
Binary file not shown.
After Width: | Height: | Size: 3.9 KiB |
Binary file not shown.
After Width: | Height: | Size: 2.1 KiB |
Binary file not shown.
After Width: | Height: | Size: 5.7 KiB |
Binary file not shown.
After Width: | Height: | Size: 11 KiB |
Binary file not shown.
After Width: | Height: | Size: 17 KiB |
|
@ -0,0 +1,253 @@
|
||||||
|
package eu.kanade.tachiyomi.extension.all.pandachaika
|
||||||
|
|
||||||
|
import eu.kanade.tachiyomi.network.GET
|
||||||
|
import eu.kanade.tachiyomi.source.model.FilterList
|
||||||
|
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||||
|
import eu.kanade.tachiyomi.source.model.Page
|
||||||
|
import eu.kanade.tachiyomi.source.model.SChapter
|
||||||
|
import eu.kanade.tachiyomi.source.model.SManga
|
||||||
|
import eu.kanade.tachiyomi.source.online.HttpSource
|
||||||
|
import kotlinx.serialization.decodeFromString
|
||||||
|
import kotlinx.serialization.encodeToString
|
||||||
|
import kotlinx.serialization.json.Json
|
||||||
|
import okhttp3.HttpUrl.Companion.toHttpUrl
|
||||||
|
import okhttp3.Interceptor
|
||||||
|
import okhttp3.MediaType.Companion.toMediaType
|
||||||
|
import okhttp3.Protocol
|
||||||
|
import okhttp3.Request
|
||||||
|
import okhttp3.Response
|
||||||
|
import okhttp3.ResponseBody.Companion.toResponseBody
|
||||||
|
import rx.Observable
|
||||||
|
import uy.kohesive.injekt.injectLazy
|
||||||
|
import java.lang.String.CASE_INSENSITIVE_ORDER
|
||||||
|
import java.math.BigInteger
|
||||||
|
|
||||||
|
class PandaChaika(
    override val lang: String = "all",
    private val searchLang: String = "",
) : HttpSource() {

    override val name = "PandaChaika"

    override val baseUrl = "https://panda.chaika.moe"

    private val baseSearchUrl = "$baseUrl/search"

    override val supportsLatest = true

    // In-archive images are served through a loopback pseudo-URL handled by imageInterceptor.
    override val client = network.cloudflareClient
        .newBuilder()
        .addInterceptor(::imageInterceptor)
        .build()

    private val json: Json by injectLazy()

    // Popular

    override fun popularMangaRequest(page: Int): Request {
        return GET("$baseSearchUrl/?tags=$searchLang&sort=rating&apply=&json=&page=$page", headers)
    }

    override fun popularMangaParse(response: Response): MangasPage = searchMangaParse(response)

    override fun latestUpdatesParse(response: Response): MangasPage = searchMangaParse(response)

    // Latest

    override fun latestUpdatesRequest(page: Int): Request {
        return GET("$baseSearchUrl/?tags=$searchLang&sort=public_date&apply=&json=&page=$page", headers)
    }

    /**
     * Parses a page-count filter expression ("20", ">20", ">=20", "<20",
     * "<=20", "=20", "=>20", "=<20") into an inclusive (min, max) range
     * clamped to [minPages, maxPages]. Returns the full range when the
     * expression contains no digits.
     */
    private fun parsePageRange(query: String, minPages: Int = 1, maxPages: Int = 9999): Pair<Int, Int> {
        val num = query.filter(Char::isDigit).toIntOrNull() ?: -1
        fun limitedNum(number: Int = num): Int = number.coerceIn(minPages, maxPages)

        if (num < 0) return minPages to maxPages
        return when (query.firstOrNull()) {
            // "<n" excludes n itself, so the inclusive upper bound is n - 1.
            '<' -> 1 to if (query[1] == '=') limitedNum() else limitedNum(num - 1)
            // ">n" excludes n itself, so the inclusive lower bound is n + 1.
            '>' -> limitedNum(if (query[1] == '=') num else num + 1) to maxPages
            '=' -> when (query[1]) {
                '>' -> limitedNum() to maxPages
                // "=<n" means at most n pages.
                '<' -> 1 to limitedNum()
                else -> limitedNum() to limitedNum()
            }
            else -> limitedNum() to limitedNum()
        }
    }

    override fun searchMangaParse(response: Response): MangasPage {
        val library = response.parseAs<ArchiveResponse>()

        val mangas = library.archives.map(LongArchive::toSManga)

        return MangasPage(mangas, library.has_next)
    }

    override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
        val url = baseSearchUrl.toHttpUrl().newBuilder().apply {
            val tags = mutableListOf<String>()
            var reason = ""
            var uploader = ""
            var pagesMin = 1
            var pagesMax = 9999

            tags.add(searchLang)

            filters.forEach { filter ->
                when (filter) {
                    is SortFilter -> {
                        addQueryParameter("sort", filter.getValue())
                        addQueryParameter("asc_desc", if (filter.state!!.ascending) "asc" else "desc")
                    }

                    is SelectFilter -> {
                        // "All" maps to an empty category parameter (no filtering).
                        addQueryParameter("category", filter.vals[filter.state].replace("All", ""))
                    }

                    is PageFilter -> {
                        if (filter.state.isNotBlank()) {
                            val (min, max) = parsePageRange(filter.state)
                            pagesMin = min
                            pagesMax = max
                        }
                    }

                    is TextFilter -> {
                        if (filter.state.isNotEmpty()) {
                            when (filter.type) {
                                "reason" -> reason = filter.state
                                "uploader" -> uploader = filter.state
                                else -> {
                                    // Comma-separated tags; a leading dash excludes the tag.
                                    filter.state.split(",").filter(String::isNotBlank).map { tag ->
                                        val trimmed = tag.trim()
                                        tags.add(
                                            buildString {
                                                if (trimmed.startsWith('-')) append("-")
                                                append(filter.type)
                                                if (filter.type.isNotBlank()) append(":")
                                                append(trimmed.lowercase().removePrefix("-"))
                                            },
                                        )
                                    }
                                }
                            }
                        }
                    }

                    else -> {}
                }
            }

            addQueryParameter("title", query)
            addQueryParameter("tags", tags.joinToString())
            addQueryParameter("filecount_from", pagesMin.toString())
            addQueryParameter("filecount_to", pagesMax.toString())
            addQueryParameter("reason", reason)
            addQueryParameter("uploader", uploader)
            addQueryParameter("page", page.toString())
            addQueryParameter("apply", "")
            addQueryParameter("json", "")
        }.build()

        return GET(url, headers)
    }

    override fun chapterListRequest(manga: SManga): Request {
        return GET("$baseUrl/api?archive=${manga.url}", headers)
    }

    override fun getFilterList() = getFilters()

    // Details

    override fun fetchMangaDetails(manga: SManga): Observable<SManga> {
        // Details are fully populated at list time; avoid an extra network request.
        return Observable.just(manga.apply { initialized = true })
    }

    // Chapters

    override fun chapterListParse(response: Response): List<SChapter> {
        val archive = response.parseAs<Archive>()

        // One archive == one chapter.
        return listOf(
            SChapter.create().apply {
                name = "Chapter"
                url = archive.download.substringBefore("/download/")
                date_upload = archive.posted * 1000
            },
        )
    }

    override fun getMangaUrl(manga: SManga) = "$baseUrl/archive/${manga.url}"

    override fun getChapterUrl(chapter: SChapter) = "$baseUrl${chapter.url}"

    // Pages

    override fun fetchPageList(chapter: SChapter): Observable<List<Page>> {
        fun List<String>.sort() = this.sortedWith(compareBy(CASE_INSENSITIVE_ORDER) { it })

        val url = "$baseUrl${chapter.url}/download/"
        val (fileType, contentLength) = getZipType(url)

        val remoteZip = ZipHandler(url, client, headers, fileType, contentLength).populate()
        val fileListing = remoteZip.files().sort()

        // The zip directory metadata is smuggled to the interceptor via the URL fragment.
        val files = remoteZip.toJson()
        return Observable.just(
            fileListing.mapIndexed { index, filename ->
                Page(index, imageUrl = "https://127.0.0.1/#$filename&$files")
            },
        )
    }

    /**
     * Issues a HEAD request to learn the archive's byte size and decides
     * between plain "zip" and "zip64" (required for archives whose size
     * exceeds Int.MAX_VALUE).
     */
    private fun getZipType(url: String): Pair<String, BigInteger> {
        val request = Request.Builder()
            .url(url)
            .headers(headers)
            .method("HEAD", null)
            .build()

        // use {} closes the response so the connection is not leaked.
        val contentLength = client.newCall(request).execute().use { response ->
            response.header("content-length")
                ?: throw Exception("Could not get Content-Length of URL")
        }.toBigInteger()

        return (if (contentLength > Int.MAX_VALUE.toBigInteger()) "zip64" else "zip") to contentLength
    }

    // Handles "https://127.0.0.1/#<filename>&<zip-json>" pseudo-URLs by
    // range-fetching that single file out of the remote archive; everything
    // else passes through untouched.
    private fun imageInterceptor(chain: Interceptor.Chain): Response {
        val url = chain.request().url.toString()
        return if (url.startsWith("https://127.0.0.1/#")) {
            val fragment = url.toHttpUrl().fragment!!
            val remoteZip = fragment.substringAfter("&").parseAs<Zip>()
            val filename = fragment.substringBefore("&")

            val byteArray = remoteZip.fetch(filename, client)
            var type = filename.substringAfterLast('.').lowercase()
            type = if (type == "jpg") "jpeg" else type

            Response.Builder().body(byteArray.toResponseBody("image/$type".toMediaType()))
                .request(chain.request())
                .protocol(Protocol.HTTP_1_0)
                .code(200)
                .message("")
                .build()
        } else {
            chain.proceed(chain.request())
        }
    }

    private inline fun <reified T> Response.parseAs(): T {
        return json.decodeFromString(body.string())
    }

    private inline fun <reified T> String.parseAs(): T {
        return json.decodeFromString(this)
    }

    private fun Zip.toJson(): String {
        return json.encodeToString(this)
    }

    override fun imageUrlParse(response: Response): String = throw UnsupportedOperationException()

    override fun pageListParse(response: Response): List<Page> = throw UnsupportedOperationException()

    override fun mangaDetailsParse(response: Response): SManga = throw UnsupportedOperationException()
}
|
|
@ -0,0 +1,102 @@
|
||||||
|
package eu.kanade.tachiyomi.extension.all.pandachaika
|
||||||
|
|
||||||
|
import eu.kanade.tachiyomi.source.model.SManga
|
||||||
|
import eu.kanade.tachiyomi.source.model.UpdateStrategy
|
||||||
|
import kotlinx.serialization.Serializable
|
||||||
|
import java.text.SimpleDateFormat
|
||||||
|
import java.util.Date
|
||||||
|
import java.util.Locale
|
||||||
|
|
||||||
|
// Human-readable date format used in manga descriptions.
val dateReformat = SimpleDateFormat("EEEE, d MMM yyyy HH:mm (z)", Locale.ENGLISH)

/**
 * Selects tags in the [include] namespace (e.g. "artist" matches
 * "artist:john_doe"), skipping any namespace listed in [exclude], and joins
 * their title-cased values with ", ".
 *
 * Fix: an empty [include] now matches every non-excluded tag. Previously the
 * predicate required a literal ":" prefix ("$include:" == ":"), so the
 * exclude-only call used for the "others" genre could never match a normal
 * "namespace:value" tag.
 */
fun filterTags(include: String = "", exclude: List<String> = emptyList(), tags: List<String>): String {
    return tags.filter { (include.isEmpty() || it.startsWith("$include:")) && exclude.none { substring -> it.startsWith("$substring:") } }
        .joinToString {
            it.substringAfter(":").replace("_", " ").split(" ").joinToString(" ") { s ->
                s.replaceFirstChar { sr ->
                    if (sr.isLowerCase()) sr.titlecase(Locale.getDefault()) else sr.toString()
                }
            }
        }
}
|
||||||
|
/**
 * Formats a byte count for display ("2.00 KB", "200.00 MB", ...).
 * Uses Locale.ENGLISH explicitly so the decimal separator is always "."
 * instead of depending on the device's default locale.
 * Note: values under 1024 are printed as the raw Double (e.g. "500.0 B").
 */
fun getReadableSize(bytes: Double): String {
    return when {
        bytes >= 300 * 1024 * 1024 -> "${"%.2f".format(Locale.ENGLISH, bytes / (1024.0 * 1024.0 * 1024.0))} GB"
        bytes >= 100 * 1024 -> "${"%.2f".format(Locale.ENGLISH, bytes / (1024.0 * 1024.0))} MB"
        bytes >= 1024 -> "${"%.2f".format(Locale.ENGLISH, bytes / (1024.0))} KB"
        else -> "$bytes B"
    }
}
|
||||||
|
|
||||||
|
// Minimal archive payload from /api?archive=<id>: the download path and the
// posted timestamp in epoch seconds.
@Serializable
class Archive(
    val download: String,
    val posted: Long,
)
|
||||||
|
|
||||||
|
/**
 * Full archive entry as returned by the JSON search endpoint; converted to an
 * [SManga] for the library list.
 */
@Serializable
class LongArchive(
    private val thumbnail: String,
    private val title: String,
    private val id: Int,
    private val posted: Long?,
    private val public_date: Long?,
    private val filecount: Int,
    private val filesize: Double,
    private val tags: List<String>,
    private val title_jpn: String?,
    private val uploader: String,
) {
    fun toSManga() = SManga.create().apply {
        val groups = filterTags("group", tags = tags)
        val artists = filterTags("artist", tags = tags)
        val publishers = filterTags("publisher", tags = tags)
        val male = filterTags("male", tags = tags)
        val female = filterTags("female", tags = tags)
        val others = filterTags(exclude = listOf("female", "male", "artist", "publisher", "group", "parody"), tags = tags)
        val parodies = filterTags("parody", tags = tags)

        url = id.toString()
        title = this@LongArchive.title
        thumbnail_url = thumbnail
        author = groups.ifEmpty { artists }
        artist = artists
        // Drop empty categories so the genre string has no stray ", , " runs.
        genre = listOf(male, female, others).filter(String::isNotBlank).joinToString()
        description = buildString {
            append("Uploader: ", uploader.ifEmpty { "Anonymous" }, "\n")
            publishers.takeIf { it.isNotBlank() }?.let {
                append("Publishers: ", it, "\n\n")
            }
            parodies.takeIf { it.isNotBlank() }?.let {
                append("Parodies: ", it, "\n\n")
            }
            male.takeIf { it.isNotBlank() }?.let {
                append("Male tags: ", it, "\n\n")
            }
            female.takeIf { it.isNotBlank() }?.let {
                append("Female tags: ", it, "\n\n")
            }
            others.takeIf { it.isNotBlank() }?.let {
                append("Other tags: ", it, "\n\n")
            }

            title_jpn?.let { append("Japanese Title: ", it, "\n") }
            append("Pages: ", filecount, "\n")
            append("File Size: ", getReadableSize(filesize), "\n")

            // Null-safe date lines instead of `!!` inside an empty catch block.
            public_date?.let { append("Public Date: ", dateReformat.format(Date(it * 1000)), "\n") }
            posted?.let { append("Posted: ", dateReformat.format(Date(it * 1000)), "\n") }
        }
        status = SManga.COMPLETED
        update_strategy = UpdateStrategy.ONLY_FETCH_ONCE
        initialized = true
    }
}
|
||||||
|
|
||||||
|
// One page of search results plus the pagination flag.
@Serializable
class ArchiveResponse(
    val archives: List<LongArchive>,
    val has_next: Boolean,
)
|
|
@ -0,0 +1,29 @@
|
||||||
|
package eu.kanade.tachiyomi.extension.all.pandachaika
|
||||||
|
|
||||||
|
import eu.kanade.tachiyomi.source.Source
|
||||||
|
import eu.kanade.tachiyomi.source.SourceFactory
|
||||||
|
|
||||||
|
class PandaChaikaFactory : SourceFactory {
    // An unfiltered "all" source first, then one source per supported language,
    // each pairing an ISO language code with the site's language tag.
    override fun createSources(): List<Source> {
        val languages = listOf(
            "en" to "english",
            "zh" to "chinese",
            "ko" to "korean",
            "es" to "spanish",
            "ru" to "russian",
            "pt" to "portuguese",
            "fr" to "french",
            "th" to "thai",
            "vi" to "vietnamese",
            "ja" to "japanese",
            "id" to "indonesian",
            "ar" to "arabic",
            "uk" to "ukrainian",
            "tr" to "turkish",
            "cs" to "czech",
            "tl" to "tagalog",
            "fi" to "finnish",
            "jv" to "javanese",
            "el" to "greek",
        )
        return listOf(PandaChaika()) + languages.map { (code, tag) -> PandaChaika(code, tag) }
    }
}
|
|
@ -0,0 +1,62 @@
|
||||||
|
package eu.kanade.tachiyomi.extension.all.pandachaika
|
||||||
|
|
||||||
|
import eu.kanade.tachiyomi.source.model.Filter
|
||||||
|
import eu.kanade.tachiyomi.source.model.Filter.Sort.Selection
|
||||||
|
import eu.kanade.tachiyomi.source.model.FilterList
|
||||||
|
|
||||||
|
/**
 * Builds the search filter list: sort order, category, namespaced tag text
 * filters, uploader/reason, and a page-count expression.
 */
fun getFilters(): FilterList {
    return FilterList(
        SortFilter("Sort by", Selection(0, false), getSortsList),
        SelectFilter("Types", getTypes),
        Filter.Separator(),
        Filter.Header("Separate tags with commas (,)"),
        Filter.Header("Prepend with dash (-) to exclude"),
        Filter.Header("Use 'Male Tags' or 'Female Tags' for specific categories. 'Tags' searches all categories."),
        TextFilter("Tags", ""),
        TextFilter("Male Tags", "male"),
        TextFilter("Female Tags", "female"),
        TextFilter("Artists", "artist"),
        TextFilter("Parodies", "parody"),
        Filter.Separator(),
        TextFilter("Reason", "reason"),
        // Fix: type was "reason", which routed the Uploader filter into the
        // `reason` query parameter in searchMangaRequest.
        TextFilter("Uploader", "uploader"),
        Filter.Separator(),
        Filter.Header("Filter by pages, for example: (>20)"),
        PageFilter("Pages"),
    )
}
|
||||||
|
|
||||||
|
// Free-text page-count filter; its value is parsed by parsePageRange (e.g. ">20").
internal open class PageFilter(name: String) : Filter.Text(name)
|
||||||
|
|
||||||
|
// Free-text filter; `type` is the tag namespace ("male", "artist", ...) or a
// dedicated query field ("reason"), with "" meaning un-namespaced tags.
internal open class TextFilter(name: String, val type: String) : Filter.Text(name)
|
||||||
|
|
||||||
|
// Single-choice filter; `vals` keeps the option order used when building the query.
// (Dropped the no-op `.map { it }` before toTypedArray().)
internal open class SelectFilter(name: String, val vals: List<String>, state: Int = 0) :
    Filter.Select<String>(name, vals.toTypedArray(), state)
|
||||||
|
|
||||||
|
// Sort filter over (label, query-value) pairs; labels are shown in the UI.
internal open class SortFilter(name: String, selection: Selection, private val vals: List<Pair<String, String>>) :
    Filter.Sort(name, vals.map { it.first }.toTypedArray(), selection) {
    // `state` is non-null here: a default Selection is always supplied at construction.
    fun getValue() = vals[state!!.index].second
}
|
||||||
|
|
||||||
|
// Archive categories accepted by the site's `category` query parameter;
// "All" is translated to an empty value (no filtering) at request time.
private val getTypes = listOf(
    "All",
    "Doujinshi",
    "Manga",
    "Image Set",
    "Artist CG",
    "Game CG",
    "Western",
    "Non-H",
    "Misc",
)
|
||||||
|
|
||||||
|
// Display label to `sort` query value, in UI order.
private val getSortsList: List<Pair<String, String>> = listOf(
    "Public Date" to "public_date",
    "Posted Date" to "posted_date",
    "Title" to "title",
    "Japanese Title" to "title_jpn",
    "Rating" to "rating",
    "Images" to "images",
    "File Size" to "size",
    "Category" to "category",
)
|
|
@ -0,0 +1,287 @@
|
||||||
|
package eu.kanade.tachiyomi.extension.all.pandachaika
|
||||||
|
|
||||||
|
import eu.kanade.tachiyomi.extension.all.pandachaika.ZipParser.inflateRaw
|
||||||
|
import eu.kanade.tachiyomi.extension.all.pandachaika.ZipParser.parseAllCDs
|
||||||
|
import eu.kanade.tachiyomi.extension.all.pandachaika.ZipParser.parseEOCD
|
||||||
|
import eu.kanade.tachiyomi.extension.all.pandachaika.ZipParser.parseEOCD64
|
||||||
|
import eu.kanade.tachiyomi.extension.all.pandachaika.ZipParser.parseLocalFile
|
||||||
|
import eu.kanade.tachiyomi.network.GET
|
||||||
|
import kotlinx.serialization.Serializable
|
||||||
|
import okhttp3.Headers
|
||||||
|
import okhttp3.OkHttpClient
|
||||||
|
import java.io.ByteArrayOutputStream
|
||||||
|
import java.math.BigInteger
|
||||||
|
import java.nio.ByteBuffer
|
||||||
|
import java.nio.ByteOrder.LITTLE_ENDIAN
|
||||||
|
import java.util.zip.Inflater
|
||||||
|
import kotlin.text.Charsets.UTF_8
|
||||||
|
|
||||||
|
// ZIP record signatures as little-endian int32 values (PKWARE APPNOTE §4.3).
const val CENTRAL_DIRECTORY_FILE_HEADER_SIGNATURE = 0x02014b50
const val END_OF_CENTRAL_DIRECTORY_SIGNATURE = 0x06054b50
const val END_OF_CENTRAL_DIRECTORY_64_SIGNATURE = 0x06064b50
const val LOCAL_FILE_HEADER_SIGNATURE = 0x04034b50
|
||||||
|
|
||||||
|
// Size and starting byte offset of the archive's central directory, as read
// from the EOCD (or zip64 EOCD) record.
class EndOfCentralDirectory(
    val centralDirectoryByteSize: BigInteger,
    val centralDirectoryByteOffset: BigInteger,
)
|
||||||
|
|
||||||
|
/**
 * One central-directory entry: the record's own byte [length], the entry's
 * [compressedSize], the byte offset of its local file header, and its [filename].
 */
@Serializable
class CentralDirectoryRecord(
    val length: Int,
    val compressedSize: Int,
    val localFileHeaderRelativeOffset: Int,
    val filename: String,
)
|
||||||
|
|
||||||
|
// A parsed local file header: the entry's raw (possibly deflated) bytes and
// its compression method (0 = stored).
class LocalFileHeader(
    val compressedData: ByteArray,
    val compressionMethod: Int,
)
|
||||||
|
|
||||||
|
/**
 * A remote ZIP archive described by its [url] and central-directory listing.
 * Serializable so it can round-trip through the image pseudo-URL fragment.
 */
@Serializable
class Zip(
    private val url: String,
    private val centralDirectoryRecords: List<CentralDirectoryRecord>,
) {
    /** Filenames of every entry listed in the central directory. */
    fun files(): List<String> = centralDirectoryRecords.map { it.filename }

    /**
     * Downloads and decompresses a single entry via an HTTP Range request,
     * without fetching the rest of the archive.
     */
    fun fetch(path: String, client: OkHttpClient): ByteArray {
        val entry = centralDirectoryRecords.find { it.filename == path }
            ?: throw Exception("File not found in ZIP: $path")

        // Generous allowance for the local file header that precedes the data.
        val maxLocalFileHeaderSize = 256 + 32 + 30 + 100

        val rangeEnd = entry.localFileHeaderRelativeOffset + entry.compressedSize + maxLocalFileHeaderSize
        val rangeHeaders = Headers.Builder()
            .set("Range", "bytes=${entry.localFileHeaderRelativeOffset}-$rangeEnd")
            .build()

        val response = client.newCall(GET(url, rangeHeaders)).execute()
        val rawBytes = response.body.byteStream().use { it.readBytes() }

        val localFile = parseLocalFile(rawBytes, entry.compressedSize)
            ?: throw Exception("Failed to parse local file header in ZIP")

        // Method 0 is "stored" (no compression); anything else is treated as raw deflate.
        return if (localFile.compressionMethod == 0) {
            localFile.compressedData
        } else {
            inflateRaw(localFile.compressedData)
        }
    }
}
|
||||||
|
|
||||||
|
/**
 * Reads a remote ZIP's directory metadata over HTTP Range requests:
 * first the end-of-central-directory record from the archive tail, then the
 * central directory itself.
 */
class ZipHandler(
    private val url: String,
    private val client: OkHttpClient,
    private val additionalHeaders: Headers = Headers.Builder().build(),
    private val zipType: String = "zip",
    private val contentLength: BigInteger,
) {
    /** Fetches and parses the directory metadata, returning a [Zip] handle. */
    fun populate(): Zip {
        val eocd = fetchEndOfCentralDirectory(contentLength, zipType)
        return Zip(url, fetchCentralDirectoryRecords(eocd))
    }

    // Fetches the tail of the archive and locates the EOCD (or zip64 EOCD) record.
    private fun fetchEndOfCentralDirectory(zipByteLength: BigInteger, zipType: String): EndOfCentralDirectory {
        val maxEocdBytes = 128.toBigInteger()
        val searchStart = maxOf(0.toBigInteger(), zipByteLength - maxEocdBytes)

        val rangeHeaders = additionalHeaders
            .newBuilder()
            .set("Range", "bytes=$searchStart-$zipByteLength")
            .build()

        val response = client.newCall(GET(url, rangeHeaders)).execute()

        if (!response.isSuccessful) {
            throw Exception("Could not fetch ZIP: HTTP status ${response.code}")
        }

        val tailBytes = response.body.byteStream().use { it.readBytes() }

        if (tailBytes.isEmpty()) throw Exception("Could not get Range request to start looking for EOCD")

        return (if (zipType == "zip64") parseEOCD64(tailBytes) else parseEOCD(tailBytes))
            ?: throw Exception("Could not get EOCD record of the ZIP")
    }

    // Fetches the byte range holding the central directory and parses its records.
    private fun fetchCentralDirectoryRecords(endOfCentralDirectory: EndOfCentralDirectory): List<CentralDirectoryRecord> {
        val directoryEnd = endOfCentralDirectory.centralDirectoryByteOffset +
            endOfCentralDirectory.centralDirectoryByteSize

        val rangeHeaders = Headers.Builder()
            .set("Range", "bytes=${endOfCentralDirectory.centralDirectoryByteOffset}-$directoryEnd")
            .build()

        val response = client.newCall(GET(url, rangeHeaders)).execute()
        val directoryBytes = response.body.byteStream().use { it.readBytes() }

        return parseAllCDs(directoryBytes)
    }
}
|
||||||
|
|
||||||
|
/**
 * Pure byte-level parsers for the ZIP structures this extension needs.
 * All multi-byte fields are read little-endian, per the ZIP specification.
 */
object ZipParser {

    /**
     * Scans [buffer] for central-directory file headers and parses each one,
     * stopping at the end-of-central-directory signature.
     */
    fun parseAllCDs(buffer: ByteArray): List<CentralDirectoryRecord> {
        val cds = ArrayList<CentralDirectoryRecord>()
        val view = ByteBuffer.wrap(buffer).order(LITTLE_ENDIAN)

        var i = 0
        while (i <= buffer.size - 4) {
            val signature = view.getInt(i)
            if (signature == CENTRAL_DIRECTORY_FILE_HEADER_SIGNATURE) {
                val cd = parseCD(buffer.sliceArray(i until buffer.size))
                if (cd != null) {
                    cds.add(cd)
                    // Jump past this record (the loop's i++ supplies the final step).
                    i += cd.length - 1
                    continue
                }
            } else if (signature == END_OF_CENTRAL_DIRECTORY_SIGNATURE) {
                break
            }
            i++
        }

        return cds
    }

    /** Parses the first central-directory record found in [buffer], or null. */
    fun parseCD(buffer: ByteArray): CentralDirectoryRecord? {
        val minCdLength = 46
        val view = ByteBuffer.wrap(buffer).order(LITTLE_ENDIAN)

        for (i in 0..buffer.size - minCdLength) {
            if (view.getInt(i) == CENTRAL_DIRECTORY_FILE_HEADER_SIGNATURE) {
                // Mask to unsigned: these fields are uint16 in the ZIP format
                // (consistent with parseLocalFile; avoids negative lengths).
                val filenameLength = view.getShort(i + 28).toInt() and 0xFFFF
                val extraFieldLength = view.getShort(i + 30).toInt() and 0xFFFF
                val fileCommentLength = view.getShort(i + 32).toInt() and 0xFFFF

                return CentralDirectoryRecord(
                    length = 46 + filenameLength + extraFieldLength + fileCommentLength,
                    compressedSize = view.getInt(i + 20),
                    localFileHeaderRelativeOffset = view.getInt(i + 42),
                    filename = buffer.sliceArray(i + 46 until i + 46 + filenameLength).toString(UTF_8),
                )
            }
        }
        return null
    }

    /** Locates and parses the classic EOCD record in [buffer], or null. */
    fun parseEOCD(buffer: ByteArray): EndOfCentralDirectory? {
        val minEocdLength = 22
        val view = ByteBuffer.wrap(buffer).order(LITTLE_ENDIAN)

        for (i in 0 until buffer.size - minEocdLength + 1) {
            if (view.getInt(i) == END_OF_CENTRAL_DIRECTORY_SIGNATURE) {
                return EndOfCentralDirectory(
                    centralDirectoryByteSize = view.getInt(i + 12).toBigInteger(),
                    centralDirectoryByteOffset = view.getInt(i + 16).toBigInteger(),
                )
            }
        }
        return null
    }

    /** Locates and parses the zip64 EOCD record in [buffer], or null. */
    fun parseEOCD64(buffer: ByteArray): EndOfCentralDirectory? {
        val minEocdLength = 56
        val view = ByteBuffer.wrap(buffer).order(LITTLE_ENDIAN)

        for (i in 0 until buffer.size - minEocdLength + 1) {
            if (view.getInt(i) == END_OF_CENTRAL_DIRECTORY_64_SIGNATURE) {
                return EndOfCentralDirectory(
                    centralDirectoryByteSize = view.getLong(i + 40).toBigInteger(),
                    centralDirectoryByteOffset = view.getLong(i + 48).toBigInteger(),
                )
            }
        }
        return null
    }

    /**
     * Parses the first local file header in [buffer] and extracts the entry's
     * compressed bytes. When the header uses a data descriptor (bit 3 set),
     * its size fields are zero, so [compressedSizeOverride] (taken from the
     * central directory) is used instead.
     */
    fun parseLocalFile(buffer: ByteArray, compressedSizeOverride: Int = 0): LocalFileHeader? {
        val minLocalFileLength = 30

        val view = ByteBuffer.wrap(buffer).order(LITTLE_ENDIAN)

        for (i in 0..buffer.size - minLocalFileLength) {
            if (view.getInt(i) == LOCAL_FILE_HEADER_SIGNATURE) {
                val filenameLength = view.getShort(i + 26).toInt() and 0xFFFF
                val extraFieldLength = view.getShort(i + 28).toInt() and 0xFFFF

                val bitflags = view.getShort(i + 6).toInt() and 0xFFFF
                val hasDataDescriptor = (bitflags shr 3) and 1 != 0

                val headerEndOffset = i + 30 + filenameLength + extraFieldLength
                val regularCompressedSize = view.getInt(i + 18)

                val compressedData = if (hasDataDescriptor) {
                    buffer.copyOfRange(
                        headerEndOffset,
                        headerEndOffset + compressedSizeOverride,
                    )
                } else {
                    buffer.copyOfRange(
                        headerEndOffset,
                        headerEndOffset + regularCompressedSize,
                    )
                }

                return LocalFileHeader(
                    compressedData = compressedData,
                    // uint16 field, masked for consistency with the other reads.
                    compressionMethod = view.getShort(i + 8).toInt() and 0xFFFF,
                )
            }
        }

        return null
    }

    /**
     * Inflates raw (headerless) deflate data, as stored in ZIP entries.
     * @throws Exception wrapping any DataFormatException from the inflater.
     */
    fun inflateRaw(compressedData: ByteArray): ByteArray {
        val inflater = Inflater(true)
        inflater.setInput(compressedData)

        val buffer = ByteArray(8192)
        val output = ByteArrayOutputStream()

        try {
            while (!inflater.finished()) {
                val count = inflater.inflate(buffer)
                if (count > 0) {
                    output.write(buffer, 0, count)
                }
            }
        } catch (e: Exception) {
            throw Exception("Invalid compressed data format: ${e.message}", e)
        } finally {
            inflater.end()
            output.close()
        }

        return output.toByteArray()
    }
}
|
Loading…
Reference in New Issue