Run default Android Studio formatter on code
(cherry picked from commit 3ecc883944cba6102fb68940023d90ef959c8b9c)
commit c85825f3c7
parent 319571edf3
@@ -51,6 +51,7 @@ class ChapterCache(private val context: Context) {
     /** Cache class used for cache management. */
     // --> EH
     private var diskCache = setupDiskCache(prefs.eh_cacheSize().getOrDefault().toLong())
+
     init {
         prefs.eh_cacheSize().asObservable().skip(1).subscribe {
             // Save old cache for destruction later
@@ -1,505 +0,0 @@
-package eu.kanade.tachiyomi.data.track.myanimelist
-
-import android.net.Uri
-import eu.kanade.tachiyomi.data.database.models.Track
-import eu.kanade.tachiyomi.data.track.TrackManager
-import eu.kanade.tachiyomi.data.track.model.TrackSearch
-import eu.kanade.tachiyomi.network.GET
-import eu.kanade.tachiyomi.network.POST
-import eu.kanade.tachiyomi.network.asObservable
-import eu.kanade.tachiyomi.network.asObservableSuccess
-import eu.kanade.tachiyomi.util.lang.toCalendar
-import eu.kanade.tachiyomi.util.selectInt
-import eu.kanade.tachiyomi.util.selectText
-import java.io.BufferedReader
-import java.io.InputStreamReader
-import java.text.SimpleDateFormat
-import java.util.Calendar
-import java.util.GregorianCalendar
-import java.util.Locale
-import java.util.zip.GZIPInputStream
-import okhttp3.FormBody
-import okhttp3.MediaType.Companion.toMediaTypeOrNull
-import okhttp3.OkHttpClient
-import okhttp3.RequestBody
-import okhttp3.RequestBody.Companion.toRequestBody
-import okhttp3.Response
-import org.json.JSONObject
-import org.jsoup.Jsoup
-import org.jsoup.nodes.Document
-import org.jsoup.nodes.Element
-import org.jsoup.parser.Parser
-import rx.Observable
-
-class MyAnimeListApi(private val client: OkHttpClient, interceptor: MyAnimeListInterceptor) {
-
-    private val authClient = client.newBuilder().addInterceptor(interceptor).build()
-
-    fun search(query: String): Observable<List<TrackSearch>> {
-        return if (query.startsWith(PREFIX_MY)) {
-            val realQuery = query.removePrefix(PREFIX_MY)
-            getList()
-                .flatMap { Observable.from(it) }
-                .filter { it.title.contains(realQuery, true) }
-                .toList()
-        } else {
-            client.newCall(GET(searchUrl(query)))
-                .asObservable()
-                .flatMap { response ->
-                    Observable.from(
-                        Jsoup.parse(response.consumeBody())
-                            .select("div.js-categories-seasonal.js-block-list.list")
-                            .select("table").select("tbody")
-                            .select("tr").drop(1)
-                    )
-                }
-                .filter { row ->
-                    row.select(TD)[2].text() != "Novel"
-                }
-                .map { row ->
-                    TrackSearch.create(TrackManager.MYANIMELIST).apply {
-                        title = row.searchTitle()
-                        media_id = row.searchMediaId()
-                        total_chapters = row.searchTotalChapters()
-                        summary = row.searchSummary()
-                        cover_url = row.searchCoverUrl()
-                        tracking_url = mangaUrl(media_id)
-                        publishing_status = row.searchPublishingStatus()
-                        publishing_type = row.searchPublishingType()
-                        start_date = row.searchStartDate()
-                    }
-                }
-                .toList()
-        }
-    }
-
-    fun addLibManga(track: Track): Observable<Track> {
-        return Observable.defer {
-            authClient.newCall(POST(url = addUrl(), body = mangaPostPayload(track)))
-                .asObservableSuccess()
-                .map { track }
-        }
-    }
-
-    fun updateLibManga(track: Track): Observable<Track> {
-        return Observable.defer {
-            // Get track data
-            val response = authClient.newCall(GET(url = editPageUrl(track.media_id))).execute()
-            val editData = response.use {
-                val page = Jsoup.parse(it.consumeBody())
-
-                // Extract track data from MAL page
-                extractDataFromEditPage(page).apply {
-                    // Apply changes to the just fetched data
-                    copyPersonalFrom(track)
-                }
-            }
-
-            // Update remote
-            authClient.newCall(POST(url = editPageUrl(track.media_id), body = mangaEditPostBody(editData)))
-                .asObservableSuccess()
-                .map {
-                    track
-                }
-        }
-    }
-
-    fun findLibManga(track: Track): Observable<Track?> {
-        return authClient.newCall(GET(url = editPageUrl(track.media_id)))
-            .asObservable()
-            .map { response ->
-                var libTrack: Track? = null
-                response.use {
-                    if (it.priorResponse?.isRedirect != true) {
-                        val trackForm = Jsoup.parse(it.consumeBody())
-
-                        libTrack = Track.create(TrackManager.MYANIMELIST).apply {
-                            last_chapter_read = trackForm.select("#add_manga_num_read_chapters").`val`().toInt()
-                            total_chapters = trackForm.select("#totalChap").text().toInt()
-                            status = trackForm.select("#add_manga_status > option[selected]").`val`().toInt()
-                            score = trackForm.select("#add_manga_score > option[selected]").`val`().toFloatOrNull()
-                                ?: 0f
-                            started_reading_date = trackForm.searchDatePicker("#add_manga_start_date")
-                            finished_reading_date = trackForm.searchDatePicker("#add_manga_finish_date")
-                        }
-                    }
-                }
-                libTrack
-            }
-    }
-
-    fun getLibManga(track: Track): Observable<Track> {
-        return findLibManga(track)
-            .map { it ?: throw Exception("Could not find manga") }
-    }
-
-    fun login(username: String, password: String): String {
-        val csrf = getSessionInfo()
-
-        login(username, password, csrf)
-
-        return csrf
-    }
-
-    private fun getSessionInfo(): String {
-        val response = client.newCall(GET(loginUrl())).execute()
-
-        return Jsoup.parse(response.consumeBody())
-            .select("meta[name=csrf_token]")
-            .attr("content")
-    }
-
-    private fun login(username: String, password: String, csrf: String) {
-        val response = client.newCall(POST(url = loginUrl(), body = loginPostBody(username, password, csrf))).execute()
-
-        response.use {
-            if (response.priorResponse?.code != 302) throw Exception("Authentication error")
-        }
-    }
-
-    private fun getList(): Observable<List<TrackSearch>> {
-        return getListUrl()
-            .flatMap { url ->
-                getListXml(url)
-            }
-            .flatMap { doc ->
-                Observable.from(doc.select("manga"))
-            }
-            .map {
-                TrackSearch.create(TrackManager.MYANIMELIST).apply {
-                    title = it.selectText("manga_title")!!
-                    media_id = it.selectInt("manga_mangadb_id")
-                    last_chapter_read = it.selectInt("my_read_chapters")
-                    status = getStatus(it.selectText("my_status")!!)
-                    score = it.selectInt("my_score").toFloat()
-                    total_chapters = it.selectInt("manga_chapters")
-                    tracking_url = mangaUrl(media_id)
-                    started_reading_date = it.searchDateXml("my_start_date")
-                    finished_reading_date = it.searchDateXml("my_finish_date")
-                }
-            }
-            .toList()
-    }
-
-    private fun getListUrl(): Observable<String> {
-        return authClient.newCall(POST(url = exportListUrl(), body = exportPostBody()))
-            .asObservable()
-            .map { response ->
-                baseUrl + Jsoup.parse(response.consumeBody())
-                    .select("div.goodresult")
-                    .select("a")
-                    .attr("href")
-            }
-    }
-
-    private fun getListXml(url: String): Observable<Document> {
-        return authClient.newCall(GET(url))
-            .asObservable()
-            .map { response ->
-                Jsoup.parse(response.consumeXmlBody(), "", Parser.xmlParser())
-            }
-    }
-
-    private fun Response.consumeBody(): String? {
-        use {
-            if (it.code != 200) throw Exception("HTTP error ${it.code}")
-            return it.body?.string()
-        }
-    }
-
-    private fun Response.consumeXmlBody(): String? {
-        use { res ->
-            if (res.code != 200) throw Exception("Export list error")
-            BufferedReader(InputStreamReader(GZIPInputStream(res.body?.source()?.inputStream()))).use { reader ->
-                val sb = StringBuilder()
-                reader.forEachLine { line ->
-                    sb.append(line)
-                }
-                return sb.toString()
-            }
-        }
-    }
-
-    private fun extractDataFromEditPage(page: Document): MyAnimeListEditData {
-        val tables = page.select("form#main-form table")
-
-        return MyAnimeListEditData(
-            entry_id = tables[0].select("input[name=entry_id]").`val`(), // Always 0
-            manga_id = tables[0].select("#manga_id").`val`(),
-            status = tables[0].select("#add_manga_status > option[selected]").`val`(),
-            num_read_volumes = tables[0].select("#add_manga_num_read_volumes").`val`(),
-            last_completed_vol = tables[0].select("input[name=last_completed_vol]").`val`(), // Always empty
-            num_read_chapters = tables[0].select("#add_manga_num_read_chapters").`val`(),
-            score = tables[0].select("#add_manga_score > option[selected]").`val`(),
-            start_date_month = tables[0].select("#add_manga_start_date_month > option[selected]").`val`(),
-            start_date_day = tables[0].select("#add_manga_start_date_day > option[selected]").`val`(),
-            start_date_year = tables[0].select("#add_manga_start_date_year > option[selected]").`val`(),
-            finish_date_month = tables[0].select("#add_manga_finish_date_month > option[selected]").`val`(),
-            finish_date_day = tables[0].select("#add_manga_finish_date_day > option[selected]").`val`(),
-            finish_date_year = tables[0].select("#add_manga_finish_date_year > option[selected]").`val`(),
-            tags = tables[1].select("#add_manga_tags").`val`(),
-            priority = tables[1].select("#add_manga_priority > option[selected]").`val`(),
-            storage_type = tables[1].select("#add_manga_storage_type > option[selected]").`val`(),
-            num_retail_volumes = tables[1].select("#add_manga_num_retail_volumes").`val`(),
-            num_read_times = tables[1].select("#add_manga_num_read_times").`val`(),
-            reread_value = tables[1].select("#add_manga_reread_value > option[selected]").`val`(),
-            comments = tables[1].select("#add_manga_comments").`val`(),
-            is_asked_to_discuss = tables[1].select("#add_manga_is_asked_to_discuss > option[selected]").`val`(),
-            sns_post_type = tables[1].select("#add_manga_sns_post_type > option[selected]").`val`()
-        )
-    }
-
-    companion object {
-        const val CSRF = "csrf_token"
-
-        private const val baseUrl = "https://myanimelist.net"
-        private const val baseMangaUrl = "$baseUrl/manga/"
-        private const val baseModifyListUrl = "$baseUrl/ownlist/manga"
-        private const val PREFIX_MY = "my:"
-        private const val TD = "td"
-
-        private fun mangaUrl(remoteId: Int) = baseMangaUrl + remoteId
-
-        private fun loginUrl() = Uri.parse(baseUrl).buildUpon()
-            .appendPath("login.php")
-            .toString()
-
-        private fun searchUrl(query: String): String {
-            val col = "c[]"
-            return Uri.parse(baseUrl).buildUpon()
-                .appendPath("manga.php")
-                .appendQueryParameter("q", query)
-                .appendQueryParameter(col, "a")
-                .appendQueryParameter(col, "b")
-                .appendQueryParameter(col, "c")
-                .appendQueryParameter(col, "d")
-                .appendQueryParameter(col, "e")
-                .appendQueryParameter(col, "g")
-                .toString()
-        }
-
-        private fun exportListUrl() = Uri.parse(baseUrl).buildUpon()
-            .appendPath("panel.php")
-            .appendQueryParameter("go", "export")
-            .toString()
-
-        private fun editPageUrl(mediaId: Int) = Uri.parse(baseModifyListUrl).buildUpon()
-            .appendPath(mediaId.toString())
-            .appendPath("edit")
-            .toString()
-
-        private fun addUrl() = Uri.parse(baseModifyListUrl).buildUpon()
-            .appendPath("add.json")
-            .toString()
-
-        private fun loginPostBody(username: String, password: String, csrf: String): RequestBody {
-            return FormBody.Builder()
-                .add("user_name", username)
-                .add("password", password)
-                .add("cookie", "1")
-                .add("sublogin", "Login")
-                .add("submit", "1")
-                .add(CSRF, csrf)
-                .build()
-        }
-
-        private fun exportPostBody(): RequestBody {
-            return FormBody.Builder()
-                .add("type", "2")
-                .add("subexport", "Export My List")
-                .build()
-        }
-
-        private fun mangaPostPayload(track: Track): RequestBody {
-            val body = JSONObject()
-                .put("manga_id", track.media_id)
-                .put("status", track.status)
-                .put("score", track.score)
-                .put("num_read_chapters", track.last_chapter_read)
-
-            return body.toString().toRequestBody("application/json; charset=utf-8".toMediaTypeOrNull())
-        }
-
-        private fun mangaEditPostBody(track: MyAnimeListEditData): RequestBody {
-            return FormBody.Builder()
-                .add("entry_id", track.entry_id)
-                .add("manga_id", track.manga_id)
-                .add("add_manga[status]", track.status)
-                .add("add_manga[num_read_volumes]", track.num_read_volumes)
-                .add("last_completed_vol", track.last_completed_vol)
-                .add("add_manga[num_read_chapters]", track.num_read_chapters)
-                .add("add_manga[score]", track.score)
-                .add("add_manga[start_date][month]", track.start_date_month)
-                .add("add_manga[start_date][day]", track.start_date_day)
-                .add("add_manga[start_date][year]", track.start_date_year)
-                .add("add_manga[finish_date][month]", track.finish_date_month)
-                .add("add_manga[finish_date][day]", track.finish_date_day)
-                .add("add_manga[finish_date][year]", track.finish_date_year)
-                .add("add_manga[tags]", track.tags)
-                .add("add_manga[priority]", track.priority)
-                .add("add_manga[storage_type]", track.storage_type)
-                .add("add_manga[num_retail_volumes]", track.num_retail_volumes)
-                .add("add_manga[num_read_times]", track.num_read_times)
-                .add("add_manga[reread_value]", track.reread_value)
-                .add("add_manga[comments]", track.comments)
-                .add("add_manga[is_asked_to_discuss]", track.is_asked_to_discuss)
-                .add("add_manga[sns_post_type]", track.sns_post_type)
-                .add("submitIt", track.submitIt)
-                .build()
-        }
-
-        private fun Element.searchDateXml(field: String): Long {
-            val text = selectText(field, "0000-00-00")!!
-            // MAL sets the data to 0000-00-00 when date is invalid or missing
-            if (text == "0000-00-00") {
-                return 0L
-            }
-
-            return SimpleDateFormat("yyyy-MM-dd", Locale.US).parse(text)?.time ?: 0L
-        }
-
-        private fun Element.searchDatePicker(id: String): Long {
-            val month = select(id + "_month > option[selected]").`val`().toIntOrNull()
-            val day = select(id + "_day > option[selected]").`val`().toIntOrNull()
-            val year = select(id + "_year > option[selected]").`val`().toIntOrNull()
-            if (year == null || month == null || day == null) {
-                return 0L
-            }
-
-            return GregorianCalendar(year, month - 1, day).timeInMillis
-        }
-
-        private fun Element.searchTitle() = select("strong").text()!!
-
-        private fun Element.searchTotalChapters() = if (select(TD)[4].text() == "-") 0 else select(TD)[4].text().toInt()
-
-        private fun Element.searchCoverUrl() = select("img")
-            .attr("data-src")
-            .split("\\?")[0]
-            .replace("/r/50x70/", "/")
-
-        private fun Element.searchMediaId() = select("div.picSurround")
-            .select("a").attr("id")
-            .replace("sarea", "")
-            .toInt()
-
-        private fun Element.searchSummary() = select("div.pt4")
-            .first()
-            .ownText()!!
-
-        private fun Element.searchPublishingStatus() = if (select(TD).last().text() == "-") "Publishing" else "Finished"
-
-        private fun Element.searchPublishingType() = select(TD)[2].text()!!
-
-        private fun Element.searchStartDate() = select(TD)[6].text()!!
-
-        private fun getStatus(status: String) = when (status) {
-            "Reading" -> 1
-            "Completed" -> 2
-            "On-Hold" -> 3
-            "Dropped" -> 4
-            "Plan to Read" -> 6
-            else -> 1
-        }
-    }
-
-    private class MyAnimeListEditData(
-        // entry_id
-        var entry_id: String,
-
-        // manga_id
-        var manga_id: String,
-
-        // add_manga[status]
-        var status: String,
-
-        // add_manga[num_read_volumes]
-        var num_read_volumes: String,
-
-        // last_completed_vol
-        var last_completed_vol: String,
-
-        // add_manga[num_read_chapters]
-        var num_read_chapters: String,
-
-        // add_manga[score]
-        var score: String,
-
-        // add_manga[start_date][month]
-        var start_date_month: String, // [1-12]
-
-        // add_manga[start_date][day]
-        var start_date_day: String,
-
-        // add_manga[start_date][year]
-        var start_date_year: String,
-
-        // add_manga[finish_date][month]
-        var finish_date_month: String, // [1-12]
-
-        // add_manga[finish_date][day]
-        var finish_date_day: String,
-
-        // add_manga[finish_date][year]
-        var finish_date_year: String,
-
-        // add_manga[tags]
-        var tags: String,
-
-        // add_manga[priority]
-        var priority: String,
-
-        // add_manga[storage_type]
-        var storage_type: String,
-
-        // add_manga[num_retail_volumes]
-        var num_retail_volumes: String,
-
-        // add_manga[num_read_times]
-        var num_read_times: String,
-
-        // add_manga[reread_value]
-        var reread_value: String,
-
-        // add_manga[comments]
-        var comments: String,
-
-        // add_manga[is_asked_to_discuss]
-        var is_asked_to_discuss: String,
-
-        // add_manga[sns_post_type]
-        var sns_post_type: String,
-
-        // submitIt
-        val submitIt: String = "0"
-    ) {
-        fun copyPersonalFrom(track: Track) {
-            num_read_chapters = track.last_chapter_read.toString()
-            val numScore = track.score.toInt()
-            if (numScore in 1..9) {
-                score = numScore.toString()
-            }
-            status = track.status.toString()
-            if (track.started_reading_date == 0L) {
-                start_date_month = ""
-                start_date_day = ""
-                start_date_year = ""
-            }
-            if (track.finished_reading_date == 0L) {
-                finish_date_month = ""
-                finish_date_day = ""
-                finish_date_year = ""
-            }
-            track.started_reading_date.toCalendar()?.let { cal ->
-                start_date_month = (cal[Calendar.MONTH] + 1).toString()
-                start_date_day = cal[Calendar.DAY_OF_MONTH].toString()
-                start_date_year = cal[Calendar.YEAR].toString()
-            }
-            track.finished_reading_date.toCalendar()?.let { cal ->
-                finish_date_month = (cal[Calendar.MONTH] + 1).toString()
-                finish_date_day = cal[Calendar.DAY_OF_MONTH].toString()
-                finish_date_year = cal[Calendar.YEAR].toString()
-            }
-        }
-    }
-}
@@ -36,7 +36,8 @@ interface LewdSource<M : RaisedSearchMetadata, I> : CatalogueSource {
      */
     private fun newMetaInstance() = metaClass.constructors.find {
         it.parameters.isEmpty()
-    }?.call() ?: error("Could not find no-args constructor for meta class: ${metaClass.qualifiedName}!")
+    }?.call()
+        ?: error("Could not find no-args constructor for meta class: ${metaClass.qualifiedName}!")

     /**
      * Parses metadata from the input and then copies it into the manga
@@ -81,8 +81,7 @@ class EHentai(override val id: Long,
      */
     data class ParsedManga(val fav: Int, val manga: Manga)

-    fun extendedGenericMangaParse(doc: Document)
-            = with(doc) {
+    fun extendedGenericMangaParse(doc: Document) = with(doc) {
         // Parse mangas (supports compact + extended layout)
         val parsedMangas = select(".itg > tbody > tr").filter {
             // Do not parse header and ads
@@ -126,13 +125,11 @@ class EHentai(override val id: Long,
     /**
      * Parse a list of galleries
      */
-    fun genericMangaParse(response: Response)
-            = extendedGenericMangaParse(response.asJsoup()).let {
+    fun genericMangaParse(response: Response) = extendedGenericMangaParse(response.asJsoup()).let {
         MangasPage(it.first.map { it.manga }, it.second)
     }

-    override fun fetchChapterList(manga: SManga)
-            = fetchChapterList(manga) {}
+    override fun fetchChapterList(manga: SManga) = fetchChapterList(manga) {}

     fun fetchChapterList(manga: SManga, throttleFunc: () -> Unit): Observable<List<SChapter>> {
         return Single.fromCallable {
@@ -209,8 +206,7 @@ class EHentai(override val id: Long,
         }.toObservable()
     }

-    override fun fetchPageList(chapter: SChapter)
-            = fetchChapterPage(chapter, baseUrl + chapter.url).map {
+    override fun fetchPageList(chapter: SChapter) = fetchChapterPage(chapter, baseUrl + chapter.url).map {
         it.mapIndexed { i, s ->
             Page(i, s)
         }
@@ -230,17 +226,17 @@ class EHentai(override val id: Long,
             }
         }
     }
+
-    private fun parseChapterPage(response: Element)
-            = with(response) {
+    private fun parseChapterPage(response: Element) = with(response) {
         select(".gdtm a").map {
             Pair(it.child(0).attr("alt").toInt(), it.attr("href"))
         }.sortedBy(Pair<Int, String>::first).map { it.second }
     }

     private fun chapterPageCall(np: String) = client.newCall(chapterPageRequest(np)).asObservableSuccess()
     private fun chapterPageRequest(np: String) = exGet(np, null, headers)

-    private fun nextPageUrl(element: Element): String?
-            = element.select("a[onclick=return false]").last()?.let {
+    private fun nextPageUrl(element: Element): String? = element.select("a[onclick=return false]").last()?.let {
         return if (it.text() == ">") it.attr("href") else null
     }
+
@@ -290,8 +286,7 @@ class EHentai(override val id: Long,
         }
     }

-    override fun searchMangaRequest(page: Int, query: String, filters: FilterList)
-            = throw UnsupportedOperationException()
+    override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException()

     override fun latestUpdatesRequest(page: Int) = exGet(baseUrl, page)

@@ -299,8 +294,7 @@ class EHentai(override val id: Long,
     override fun searchMangaParse(response: Response) = genericMangaParse(response)
     override fun latestUpdatesParse(response: Response) = genericMangaParse(response)

-    fun exGet(url: String, page: Int? = null, additionalHeaders: Headers? = null, cache: Boolean = true)
-            = GET(page?.let {
+    fun exGet(url: String, page: Int? = null, additionalHeaders: Headers? = null, cache: Boolean = true) = GET(page?.let {
         addParam(url, "page", Integer.toString(page - 1))
     } ?: url, additionalHeaders?.let {
         val headers = headers.newBuilder()
@@ -458,11 +452,9 @@ class EHentai(override val id: Long,
         }
     }

-    override fun chapterListParse(response: Response)
-            = throw UnsupportedOperationException("Unused method was called somehow!")
+    override fun chapterListParse(response: Response) = throw UnsupportedOperationException("Unused method was called somehow!")

-    override fun pageListParse(response: Response)
-            = throw UnsupportedOperationException("Unused method was called somehow!")
+    override fun pageListParse(response: Response) = throw UnsupportedOperationException("Unused method was called somehow!")

     override fun fetchImageUrl(page: Page): Observable<String> {
         return client.newCall(imageUrlRequest(page))
@@ -550,15 +542,12 @@ class EHentai(override val id: Long,
         return cookies
     }

-    fun cookiesHeader(sp: Int = spPref().getOrDefault())
-            = buildCookies(rawCookies(sp))
+    fun cookiesHeader(sp: Int = spPref().getOrDefault()) = buildCookies(rawCookies(sp))

     //Headers
-    override fun headersBuilder()
-            = super.headersBuilder().add("Cookie", cookiesHeader())!!
+    override fun headersBuilder() = super.headersBuilder().add("Cookie", cookiesHeader())!!

-    fun addParam(url: String, param: String, value: String)
-            = Uri.parse(url)
+    fun addParam(url: String, param: String, value: String) = Uri.parse(url)
         .buildUpon()
         .appendQueryParameter(param, value)
         .toString()
@@ -630,6 +619,7 @@ class EHentai(override val id: Long,
             }
         }
     }
+
     class MinPagesOption : PageOption("Minimum Pages", "f_spf")
     class MaxPagesOption : PageOption("Maximum Pages", "f_spt")

@@ -745,8 +735,7 @@ class EHentai(override val id: Long,
         "e8e"
     )

-    fun buildCookies(cookies: Map<String, String>)
-            = cookies.entries.joinToString(separator = "; ") {
+    fun buildCookies(cookies: Map<String, String>) = cookies.entries.joinToString(separator = "; ") {
         "${URLEncoder.encode(it.key, "UTF-8")}=${URLEncoder.encode(it.value, "UTF-8")}"
     }

@@ -178,8 +178,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo
      * @param query the search query.
      * @param filters the list of filters to apply.
      */
-    override fun searchMangaRequest(page: Int, query: String, filters: FilterList)
-            = throw UnsupportedOperationException()
+    override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException()

     override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> {
         return urlImportFetchSearchManga(query) {
@@ -103,11 +103,9 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata
         return Observable.just(nhGet(uri.toString(), page))
     }

-    override fun searchMangaRequest(page: Int, query: String, filters: FilterList)
-            = throw UnsupportedOperationException()
+    override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException()

-    override fun searchMangaParse(response: Response)
-            = parseResultPage(response)
+    override fun searchMangaParse(response: Response) = parseResultPage(response)

     override fun latestUpdatesRequest(page: Int): Request {
         val uri = Uri.parse(baseUrl).buildUpon()
@@ -115,8 +113,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata
         return nhGet(uri.toString(), page)
     }

-    override fun latestUpdatesParse(response: Response)
-            = parseResultPage(response)
+    override fun latestUpdatesParse(response: Response) = parseResultPage(response)

     override fun mangaDetailsParse(response: Response) = throw UnsupportedOperationException()

@@ -136,8 +133,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata
         }
     }

-    override fun mangaDetailsRequest(manga: SManga)
-            = nhGet(baseUrl + manga.url)
+    override fun mangaDetailsRequest(manga: SManga) = nhGet(baseUrl + manga.url)

     fun parseResultPage(response: Response): MangasPage {
         val doc = response.asJsoup()
@@ -215,15 +211,13 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata
             .toSingle()
     }

-    override fun fetchChapterList(manga: SManga)
-            = Observable.just(listOf(SChapter.create().apply {
+    override fun fetchChapterList(manga: SManga) = Observable.just(listOf(SChapter.create().apply {
         url = manga.url
         name = "Chapter"
         chapter_number = 1f
     }))

-    override fun fetchPageList(chapter: SChapter)
-            = getOrLoadMetadata(chapter.mangaId, NHentaiSearchMetadata.nhUrlToId(chapter.url)).map { metadata ->
+    override fun fetchPageList(chapter: SChapter) = getOrLoadMetadata(chapter.mangaId, NHentaiSearchMetadata.nhUrlToId(chapter.url)).map { metadata ->
         if (metadata.mediaId == null) emptyList()
         else
             metadata.pageImageTypes.mapIndexed { index, s ->
@@ -264,6 +258,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata
     val appName by lazy {
         context.getString(R.string.app_name)
     }
+
     fun nhGet(url: String, tag: Any? = null) = GET(url)
         .newBuilder()
         .header("User-Agent",
@@ -197,8 +197,7 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour
         }
     }

-    override fun mangaDetailsParse(document: Document): SManga
-            = throw UnsupportedOperationException()
+    override fun mangaDetailsParse(document: Document): SManga = throw UnsupportedOperationException()

     override fun latestUpdatesRequest(page: Int): Request {
         val num = when (lang) {
@@ -226,16 +225,15 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour

         try {
             date_upload = DATE_FORMAT.parse(element.getElementsByClass("chapterDate").first().text().trim()).time
-        } catch(ignored: Exception) {}
+        } catch (ignored: Exception) {
+        }
     }

-    override fun pageListParse(document: Document)
-            = document.getElementById("pageSelect").getElementsByTag("option").map {
+    override fun pageListParse(document: Document) = document.getElementById("pageSelect").getElementsByTag("option").map {
         Page(it.attr("data-page").toInt() - 1, baseUrl + it.attr("value"))
     }

-    override fun imageUrlParse(document: Document)
-            = "http:" + document.getElementById("mainImg").attr("src")!!
+    override fun imageUrlParse(document: Document) = "http:" + document.getElementById("mainImg").attr("src")!!

     override fun getFilterList() = FilterList(
         AuthorFilter(),
@@ -161,11 +161,9 @@ class EightMuses: HttpSource(),
         throw UnsupportedOperationException("Should not be called!")
     }

-    override fun fetchLatestUpdates(page: Int)
-            = fetchListing(latestUpdatesRequest(page), false)
+    override fun fetchLatestUpdates(page: Int) = fetchListing(latestUpdatesRequest(page), false)

-    override fun fetchPopularManga(page: Int)
-            = fetchListing(popularMangaRequest(page), false) // TODO Dig
+    override fun fetchPopularManga(page: Int) = fetchListing(popularMangaRequest(page), false) // TODO Dig

     override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> {
         return urlImportFetchSearchManga(query) {
@@ -281,6 +279,7 @@ class EightMuses: HttpSource(),
     }

     data class SelfContents(val albums: List<Element>, val images: List<Element>)
+
     private fun parseSelf(doc: Document): SelfContents {
         // Parse self
         val gc = doc.select(".gallery .c-tile")
@@ -74,8 +74,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm
      *
      * @param page the page number to retrieve.
      */
-    override fun popularMangaRequest(page: Int)
-            = GET("$baseUrl/browse/title/rank/DESC/$page", headers)
+    override fun popularMangaRequest(page: Int) = GET("$baseUrl/browse/title/rank/DESC/$page", headers)

     private fun parseListing(response: Response): MangasPage {
         val doc = response.asJsoup()
@@ -125,8 +124,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm
      * @param query the search query.
      * @param filters the list of filters to apply.
      */
-    override fun searchMangaRequest(page: Int, query: String, filters: FilterList)
-            = throw UnsupportedOperationException("Should not be called!")
+    override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException("Should not be called!")

     private fun fetchSearchMangaInternal(page: Int, query: String, filters: FilterList): Observable<MangasPage> {
         return RxJavaInterop.toV1Single(GlobalScope.async(Dispatchers.IO) {
@@ -377,7 +375,8 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm
         val basePath = listOf("data") + response.request.url.pathSegments
         val scripts = doc.getElementsByTag("script").map { it.data() }
         for (script in scripts) {
-            val totalPages = TOTAL_PAGES_REGEX.find(script)?.groupValues?.getOrNull(1)?.toIntOrNull() ?: continue
+            val totalPages = TOTAL_PAGES_REGEX.find(script)?.groupValues?.getOrNull(1)?.toIntOrNull()
+                ?: continue
             val pageList = PAGE_LIST_REGEX.find(script)?.groupValues?.getOrNull(1) ?: continue

             return jsonParser.parse(pageList).array.take(totalPages).map {
@@ -956,6 +955,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm
         ).mapValues { it.value.sorted() }

         private val TAGS_AS_MARKDOWN = ALL_TAGS.map { (ns, values) ->
-            "#### $ns\n" + values.map { "- $it" }.joinToString("\n") }.joinToString("\n\n")
+            "#### $ns\n" + values.map { "- $it" }.joinToString("\n")
+        }.joinToString("\n\n")
     }
 }
@@ -34,6 +34,7 @@ LewdSource<TsuminoSearchMetadata, Document>, UrlImportableSource {
         urlImportFetchSearchManga(query) {
             super.fetchSearchManga(page, query, filters)
         }
+
     override fun mapUrlToMangaUrl(uri: Uri): String? {
         val lcFirstPathSegment = uri.pathSegments.firstOrNull()?.toLowerCase() ?: return null
         if (lcFirstPathSegment != "read" && lcFirstPathSegment != "book" && lcFirstPathSegment != "entry")
@@ -116,6 +117,7 @@ LewdSource<TsuminoSearchMetadata, Document>, UrlImportableSource {
             }
         }
     }
+
     override val matchingHosts = listOf(
         "www.tsumino.com",
         "tsumino.com"
@@ -36,7 +36,9 @@ class LibraryCategoryAdapter(view: LibraryCategoryView) :
     // Keep compatibility as searchText field was replaced when we upgraded FlexibleAdapter
     var searchText
         get() = getFilter(String::class.java) ?: ""
-        set(value) { setFilter(value) }
+        set(value) {
+            setFilter(value)
+        }
     // EXH <--

     /**