webtoons: remove search filter and add mixed search (#362)
parent 83e1f8226c
commit b16600fa64
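Summary of the change (taken from the diff below): the "Type" search filter (Webtoon / Discover) is removed. searchMangaRequest now always queries searchType=WEBTOON, and a new searchMangaParse override additionally fetches the searchType=CHALLENGE (Discover) results, merging both lists into a single unpaginated MangasPage; searchMangaNextPageSelector therefore returns null. extVersionCode is bumped from 3 to 4 and extVersionSuffix from 2 to 3.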
@@ -5,8 +5,8 @@ ext {
     appName = 'Tachiyomi: Webtoons'
     pkgNameSuffix = "en.webtoons"
     extClass = '.Webtoons'
-    extVersionCode = 3
-    extVersionSuffix = 2
+    extVersionCode = 4
+    extVersionSuffix = 3
     libVersion = '1.2'
 }
 
@@ -3,16 +3,15 @@ package eu.kanade.tachiyomi.extension.en.webtoons
 import eu.kanade.tachiyomi.network.GET
 import eu.kanade.tachiyomi.source.model.*
 import eu.kanade.tachiyomi.source.online.ParsedHttpSource
+import eu.kanade.tachiyomi.util.asJsoup
 import okhttp3.HttpUrl
 import okhttp3.Request
+import okhttp3.Response
 import org.jsoup.nodes.Document
 import org.jsoup.nodes.Element
 import java.text.SimpleDateFormat
 import java.util.*
 
-/**
- * Todo Cover -> crop right //possible?
- */
 class Webtoons : ParsedHttpSource() {
 
     override val name = "Webtoons.com"
@@ -81,14 +80,25 @@ class Webtoons : ParsedHttpSource() {
     override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
         val url = HttpUrl.parse("$baseUrl/search?keyword=$query").newBuilder()
-
-        (if (filters.isEmpty()) getFilterList() else filters).forEach { filter ->
-            when (filter) {
-                is Type -> url.addQueryParameter("searchType", arrayOf("WEBTOON", "CHALLENGE")[filter.state])
-            }
-        }
-
-        url.addQueryParameter("page", page.toString())
+        url.addQueryParameter("searchType", "WEBTOON")
         return GET(url.toString(), headers)
     }
 
+    override fun searchMangaParse(response: Response): MangasPage {
+        val query = response.request().url().queryParameter("keyword")
+        val toonDocument = response.asJsoup()
+        val discDocument = client.newCall(GET("$baseUrl/search?keyword=$query&searchType=CHALLENGE", headers)).execute().asJsoup()
+
+        val elements = mutableListOf<Element>().apply {
+            addAll(toonDocument.select(searchMangaSelector()))
+            addAll(discDocument.select(searchMangaSelector()))
+        }
+
+        val mangas = elements.map { element ->
+            searchMangaFromElement(element)
+        }
+
+        return MangasPage(mangas, false)
+    }
+
     override fun searchMangaSelector() = "#content > div.card_wrap.search li"
@@ -102,7 +112,7 @@ class Webtoons : ParsedHttpSource() {
         return manga
     }
 
-    override fun searchMangaNextPageSelector() = "div.paginate > a[href=#] + a"
+    override fun searchMangaNextPageSelector() = null
 
     override fun mangaDetailsParse(document: Document): SManga {
         val detailElement = document.select("#content > div.cont_box > div.detail_header > div.info")
@@ -147,8 +157,4 @@ class Webtoons : ParsedHttpSource() {
     override fun pageListParse(document: Document) = document.select("div#_imageList > img").mapIndexed { i, element -> Page(i, "", element.attr("data-url")) }
 
     override fun imageUrlParse(document: Document) = document.select("img").first().attr("src")
-
-    private class Type : Filter.Select<String>("Type", arrayOf("Webtoon (default)", "Discover"))
-
-    override fun getFilterList() = FilterList(Type())
 }
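The mixed search added above is simply the same Jsoup selection run against two documents (the WEBTOON results and the CHALLENGE/Discover results) and concatenated into one list. Below is a minimal, self-contained sketch of that merge pattern, for illustration only and not part of the commit; the HTML snippets and the "li.item" selector are made up stand-ins for the real pages and searchMangaSelector().

import org.jsoup.Jsoup
import org.jsoup.nodes.Element

fun main() {
    // Stand-ins for the two search responses; the real extension parses
    // response.asJsoup() plus a second client.newCall(...) request instead.
    val webtoonHtml = """<ul><li class="item">Webtoon result</li></ul>"""
    val challengeHtml = """<ul><li class="item">Discover result</li></ul>"""
    val selector = "li.item" // stand-in for searchMangaSelector()

    val toonDocument = Jsoup.parse(webtoonHtml)
    val discDocument = Jsoup.parse(challengeHtml)

    // Run the same selector over both documents and concatenate the matches,
    // mirroring the mutableListOf<Element>().apply { addAll(...) } block above.
    val elements = mutableListOf<Element>().apply {
        addAll(toonDocument.select(selector))
        addAll(discDocument.select(selector))
    }

    // The merged list is returned as a single page, which is why the extension
    // now returns MangasPage(mangas, false) and a null next-page selector.
    elements.forEach { println(it.text()) }
}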