WNACG: change URL and clean up (#13707)
parent 31f81e7fc0
commit 370d8f1931
@@ -5,7 +5,7 @@ ext {
    extName = 'WNACG'
    pkgNameSuffix = 'zh.wnacg'
    extClass = '.wnacg'
    extVersionCode = 6
    extVersionCode = 7
    isNsfw = true
}

@@ -1,15 +1,12 @@
package eu.kanade.tachiyomi.extension.zh.wnacg

import eu.kanade.tachiyomi.network.GET
import eu.kanade.tachiyomi.network.asObservableSuccess
import eu.kanade.tachiyomi.source.model.Filter
import eu.kanade.tachiyomi.source.model.FilterList
import eu.kanade.tachiyomi.source.model.MangasPage
import eu.kanade.tachiyomi.source.model.Page
import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
import eu.kanade.tachiyomi.util.asJsoup
import okhttp3.Headers
import okhttp3.Request
import okhttp3.Response
@@ -17,16 +14,17 @@ import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import rx.Observable

// URL can be found at https://www.wnacglink.top/
class wnacg : ParsedHttpSource() {
    override val name = "紳士漫畫"
    override val baseUrl = "https://www.wnacg.org"
    override val baseUrl = "https://www.wnacg.top"
    override val lang = "zh"
    override val supportsLatest = false

    override fun popularMangaSelector() = "div.pic_box"
    override fun popularMangaSelector() = ".gallary_item"
    override fun latestUpdatesSelector() = throw Exception("Not used")
    override fun searchMangaSelector() = popularMangaSelector()
    override fun chapterListSelector() = "div.f_left > a"
    override fun chapterListSelector() = throw UnsupportedOperationException()

    override fun popularMangaNextPageSelector() = "span.thispage + a"
    override fun latestUpdatesNextPageSelector() = throw Exception("Not used")
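The selector change above (from div.pic_box to .gallary_item) moves matching from the thumbnail box to the whole listing entry, so the link, title, and cover can all be read from one element. Below is a minimal editor's sketch (not part of this commit) of how the new selectors are expected to behave, using an invented simplification of the listing markup rather than real wnacg.top HTML:

import org.jsoup.Jsoup

fun main() {
    // invented markup: one gallery entry shaped the way the new selectors expect
    val html = """
        <div class="gallary_item">
          <div class="pic_box"><a href="/photos-index-aid-1.html"><img src="//img.example/cover.jpg"></a></div>
          <div class="title"><a href="/photos-index-aid-1.html">Sample title</a></div>
        </div>
    """.trimIndent()
    val item = Jsoup.parse(html, "https://www.wnacg.top/").selectFirst(".gallary_item")!!
    val link = item.selectFirst(".title > a")!!   // same selector the new mangaFromElement uses below
    println(link.attr("href"))                    // /photos-index-aid-1.html
    println(link.text())                          // Sample title
}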
@@ -38,119 +36,78 @@ class wnacg : ParsedHttpSource() {

    override fun latestUpdatesRequest(page: Int) = throw Exception("Not used")

    override fun fetchSearchManga(
        page: Int,
        query: String,
        filters: FilterList
    ): Observable<MangasPage> {
        // ps: the site doesn't support category search and sorting
        var req: Request? = null
        if (query.isNotBlank()) {
            req = this.searchMangaRequest(page, query, filters)
        } else if (filters.isNotEmpty()) {
    override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
        if (query.isBlank()) {
            filters.forEach { filter ->
                if (filter is CategoryFilter) {
                    req = GET("$baseUrl/" + filter.toUriPart().format(page))
                    return GET("$baseUrl/" + filter.toUriPart().format(page), headers)
                }
            }
            return popularMangaRequest(page)
        }
        if (req != null) {
            return client.newCall(req!!)
                .asObservableSuccess()
                .map { response -> queryParse(response) }
        }
        return super.fetchSearchManga(page, query, filters)
    }

    override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
        return GET("$baseUrl/search/index.php?q=$query&p=$page", headers)
    }

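A small editor's sketch (not part of this commit) of the request URLs the rewritten searchMangaRequest should produce. CategoryFilter.toUriPart() is defined in the Filters section, which is truncated below, so the template string used here is a hypothetical stand-in that only illustrates the .format(page) call:

fun main() {
    val baseUrl = "https://www.wnacg.top"
    val page = 2

    // text search: q carries the query, p the page number
    println("$baseUrl/search/index.php?q=doujin&p=$page")

    // category browsing: toUriPart() is assumed to return a path template with a
    // page placeholder that String.format fills in (the value below is invented)
    val uriPart = "albums-index-page-%d-cate-5.html"
    println("$baseUrl/" + uriPart.format(page))
}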
    override fun mangaDetailsRequest(manga: SManga) = GET(baseUrl + manga.url, headers)
    override fun chapterListRequest(manga: SManga) = mangaDetailsRequest(manga)

    override fun pageListRequest(chapter: SChapter) = GET(baseUrl + chapter.url, headers)
    override fun headersBuilder(): Headers.Builder = super.headersBuilder()
        .set("referer", baseUrl)
        .set("sec-fetch-mode", "no-cors")
        .set("sec-fetch-site", "cross-site")
        .set(
            "User-Agent",
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36"
        )

    override fun popularMangaFromElement(element: Element) = mangaFromElement(element)
    override fun latestUpdatesFromElement(element: Element) = throw Exception("Not used")
    override fun searchMangaFromElement(element: Element) = mangaFromElement(element)

    private fun mangaFromElement(element: Element): SManga {
        val link = element.selectFirst(".title > a")
        val manga = SManga.create()
        manga.setUrlWithoutDomain(element.select("a").first().attr("href"))
        manga.title = element.select("a").attr("title").trim().replace(Regex("<[^<>]*>"), "")
        manga.thumbnail_url = "https://" + element.select("img").attr("src").replace("//", "")
        manga.url = link.attr("href")
        manga.title = link.text()
        manga.thumbnail_url = element.selectFirst("img").absUrl("src")
        // maybe the local cache keeps the old source (url) from updating, but the image does update on the detail page.
        // ps. a fresh install loads the images normally.

        return manga
    }

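The thumbnail handling above moves from manual string surgery to Jsoup's absUrl, which resolves the protocol-relative src attribute against the base URI. A minimal editor's sketch (not part of this commit) comparing the two, with an invented img element:

import org.jsoup.Jsoup

fun main() {
    val doc = Jsoup.parse("""<img src="//img.example/cover.jpg">""", "https://www.wnacg.top/")
    val img = doc.selectFirst("img")!!

    // old approach: prepend the scheme and strip the leading "//" by hand
    println("https://" + img.attr("src").replace("//", ""))

    // new approach: let Jsoup resolve the URL; this also stays correct if src ever becomes absolute
    println(img.absUrl("src"))
}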
    private fun queryParse(response: Response): MangasPage {
        val document = response.asJsoup()
        val mangas = document.select(searchMangaSelector())
            .map { element -> searchMangaFromElement(element) }
        val nextPage = document.select(searchMangaNextPageSelector()).first() != null
        return MangasPage(mangas, nextPage)
    }

    override fun chapterListParse(response: Response): List<SChapter> {
        val document = response.asJsoup()
        val chapters = mutableListOf<SChapter>()
        // create one chapter, since each entry is a single book
        chapters.add(createChapter("1", document.baseUri()))
        return chapters
    }

    private fun createChapter(pageNumber: String, mangaUrl: String): SChapter {
        val chapter = SChapter.create()
        chapter.setUrlWithoutDomain(mangaUrl)
        chapter.name = "Ch. $pageNumber"
        return chapter
    override fun fetchChapterList(manga: SManga): Observable<List<SChapter>> {
        val chapter = SChapter.create().apply {
            url = manga.url
            name = "Ch. 1"
        }
        return Observable.just(listOf(chapter))
    }

    override fun mangaDetailsParse(document: Document): SManga {
        val manga = SManga.create()
        manga.title = document.select("h2")?.text()?.trim() ?: "Unknown"
        manga.artist = document.select("div.uwuinfo p")?.first()?.text()?.trim() ?: "Unknown"
        manga.author = document.select("div.uwuinfo p")?.first()?.text()?.trim() ?: "Unknown"
        manga.title = document.selectFirst("h2")?.text() ?: "Unknown"
        manga.artist = document.selectFirst("div.uwuinfo p")?.text() ?: "Unknown"
        manga.author = document.selectFirst("div.uwuinfo p")?.text() ?: "Unknown"
        manga.thumbnail_url =
            "https://" + document.select("div.uwthumb img").first().attr("src").replace("//", "")
            "https://" + document.selectFirst("div.uwthumb img").attr("src").replace("//", "")
        manga.description =
            document.select("div.asTBcell p")?.first()?.html()?.replace("<br>", "\n")
            document.selectFirst("div.asTBcell p")?.html()?.replace("<br>", "\n")

        manga.status = SManga.COMPLETED
        return manga
    }

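The details parser above replaces the select(...)?.first() chains with selectFirst plus an elvis fallback. select() never returns null and text() on an empty Elements list is an empty string, so the old "Unknown" fallbacks could never trigger; selectFirst() does return null when nothing matches. A minimal editor's sketch (not part of this commit) against an invented, empty page:

import org.jsoup.Jsoup

fun main() {
    val document = Jsoup.parse("<html><body><p>no h2 here</p></body></html>")

    // old style: prints an empty line, the fallback is dead code
    println(document.select("h2")?.text()?.trim() ?: "Unknown")

    // new style: selectFirst returns null, so the elvis operator supplies the fallback
    println(document.selectFirst("h2")?.text() ?: "Unknown")
}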
    override fun pageListRequest(chapter: SChapter) =
        GET(baseUrl + chapter.url.replace("-index-", "-gallery-"), headers)

    override fun pageListParse(document: Document): List<Page> {
        val regex = "\\/\\/\\S*(jpg|png)".toRegex()
        val slideaid = client.newCall(
            GET(
                baseUrl + document.select("a.btn:containsOwn(下拉閱讀)").attr("href"),
                headers
            )
        ).execute().asJsoup()
        throw UnsupportedOperationException()
    }

    override fun pageListParse(response: Response): List<Page> {
        val regex = """//\S*(jpg|png)""".toRegex()
        val galleryaid =
            client.newCall(GET(baseUrl + slideaid.select("script[src$=html]").attr("src"), headers))
                .execute().asJsoup().toString()
        val matchresult = regex.findAll(galleryaid).map { it.value }.toList()
        val pages = mutableListOf<Page>()
        for (i in matchresult.indices) {
            pages.add(Page(i, "", "https:" + matchresult[i]))
            response.body!!.string()
        return regex.findAll(galleryaid).mapIndexedTo(ArrayList()) { index, match ->
            Page(index, imageUrl = "https:" + match.value)
        }
        return pages
    }

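The rewritten page list logic above requests the gallery page directly (the new pageListRequest swaps "-index-" for "-gallery-" in the chapter URL), reads the response body as a string, and pulls every protocol-relative jpg/png URL out of it with the //\S*(jpg|png) regex. An editor's sketch (not part of this commit) with an invented chapter URL and an invented excerpt of the gallery script:

fun main() {
    // the gallery request URL is derived from the chapter URL (the slug below is invented)
    println("/photos-index-aid-123.html".replace("-index-", "-gallery-"))

    // invented excerpt of what the gallery page body might contain
    val body = """
        var imglist = [
            { url: "//img.example/data/0001.jpg" },
            { url: "//img.example/data/0002.png" }
        ];
    """.trimIndent()

    val regex = """//\S*(jpg|png)""".toRegex()
    regex.findAll(body).forEachIndexed { index, match ->
        // the extension prefixes "https:" to form an absolute image URL
        println("page $index -> https:" + match.value)
    }
}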
    override fun chapterFromElement(element: Element) = throw Exception("Not used")
    override fun imageUrlRequest(page: Page) = throw Exception("Not used")
    override fun imageUrlParse(document: Document) = throw Exception("Not used")

    // >>> Filters >>>