[RU]UniComics External Search (#11731)
* [RU]UniComics External Search
* error404
* reduce amount of duplicate searches
* empty chapters name filler
* more reduce duplicate
* only end
* more exception
* clean
* message bot spam
* for easy debug
* Open captcha Yandex search engine
* message spam change
* more exception link reformat
* optimize url format
* -section
This commit is contained in:
parent cd8eda18db
commit a53b7b3c1e
build.gradle
@@ -1,3 +1,4 @@
 apply plugin: 'com.android.application'
 apply plugin: 'kotlin-android'
 
@@ -5,7 +6,11 @@ ext {
     extName = 'UniComics'
     pkgNameSuffix = 'ru.unicomics'
     extClass = '.UniComics'
-    extVersionCode = 1
+    extVersionCode = 2
 }
 
+dependencies {
+    implementation project(':lib-ratelimit')
+}
+
 apply from: "$rootDir/common.gradle"
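The new lib-ratelimit dependency exists to provide the RateLimitInterceptor that UniComics.kt wires into its OkHttp client further down. A minimal sketch of that wiring, assuming the usual lib-ratelimit constructor (permits, period, unit) with the period defaulting to one second; the explicit arguments below only spell out what RateLimitInterceptor(3) is taken to mean here:

import eu.kanade.tachiyomi.lib.ratelimit.RateLimitInterceptor
import java.util.concurrent.TimeUnit
import okhttp3.OkHttpClient

// Hedged sketch: RateLimitInterceptor(3) is read as "at most 3 requests per second",
// written out with the assumed one-second period made explicit.
val throttledClient: OkHttpClient = OkHttpClient.Builder()
    .addNetworkInterceptor(RateLimitInterceptor(3, 1, TimeUnit.SECONDS))
    .build()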
UniComics.kt
@@ -1,5 +1,6 @@
 package eu.kanade.tachiyomi.extension.ru.unicomics
 
+import eu.kanade.tachiyomi.lib.ratelimit.RateLimitInterceptor
 import eu.kanade.tachiyomi.network.GET
 import eu.kanade.tachiyomi.network.asObservableSuccess
 import eu.kanade.tachiyomi.source.model.FilterList
@@ -10,37 +11,97 @@ import eu.kanade.tachiyomi.source.model.SManga
 import eu.kanade.tachiyomi.source.online.ParsedHttpSource
 import eu.kanade.tachiyomi.util.asJsoup
 import okhttp3.Headers
 import okhttp3.OkHttpClient
 import okhttp3.Request
 import okhttp3.Response
 import org.jsoup.nodes.Document
 import org.jsoup.nodes.Element
 import rx.Observable
+import java.util.concurrent.TimeUnit
 
 class UniComics : ParsedHttpSource() {
 
     override val name = "UniComics"
 
-    override val baseUrl = "https://unicomics.ru"
+    private val baseDefaultUrl = "https://unicomics.ru"
+    override var baseUrl = baseDefaultUrl
 
     override val lang = "ru"
 
     override val supportsLatest = true
 
+    override val client: OkHttpClient = network.client.newBuilder()
+        .connectTimeout(10, TimeUnit.SECONDS)
+        .readTimeout(30, TimeUnit.SECONDS)
+        .addNetworkInterceptor(RateLimitInterceptor(3))
+        .build()
+
     override fun headersBuilder(): Headers.Builder = Headers.Builder()
-        .add("Referer", baseUrl)
+        .add("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36 Edg/100.0.1185.50")
+        .add("Referer", baseDefaultUrl)
 
     override fun popularMangaRequest(page: Int): Request =
-        GET("$baseUrl/comics/series/page/$page", headers)
+        GET("$baseDefaultUrl/comics/series/page/$page", headers)
 
     override fun latestUpdatesRequest(page: Int): Request =
-        GET("$baseUrl/comics/online/page/$page", headers)
+        GET("$baseDefaultUrl/comics/online/page/$page", headers)
 
+    override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request =
+        GET("https://yandex.ru/search/site/?frame=1&lr=172&searchid=1959358&topdoc=xdm_e=$baseDefaultUrl&xdm_c=default5044&xdm_p=1&v=2.0&web=0&text=$query&p=$page", headers)
+
+    override fun searchMangaSelector() =
+        ".b-serp-item__content:has(.b-serp-url__item:contains(/comics/):not(:contains(/comics/events)):not(:contains(/comics/publishers)):not(:contains(/page/))):has(.b-serp-item__title-link:not(:contains(Комиксы читать онлайн бесплатно)))"
+
+    override fun searchMangaNextPageSelector() = ".b-pager__next"
+
+    override fun searchMangaFromElement(element: Element): SManga {
+        return SManga.create().apply {
+            element.select("a.b-serp-item__title-link").first().let {
+                val originUrl = it.attr("href")
+                val urlString =
+                    "/characters$|/creators$".toRegex().replace(
+                        "/page$".toRegex().replace(
+                            "/[0-9]+/?$".toRegex().replace(
+                                originUrl.substringAfter(PATH_URL).substringAfter(PATH_online).substringAfter(PATH_issue), ""
+                            ),
+                            ""
+                        ),
+                        ""
+                    )
+                val issueNumber = "-[0-9]+/?$".toRegex()
+                setUrlWithoutDomain(
+                    if (issueNumber.containsMatchIn(urlString) && (originUrl.contains(PATH_online) || originUrl.contains(PATH_issue)))
+                        issueNumber.replace(urlString, "")
+                    else urlString
+                )
+
+                title = it.text().substringBefore(" (").substringBefore(" №")
+            }
+        }
+    }
+
+    override fun searchMangaParse(response: Response): MangasPage {
+        val document = response.asJsoup()
+        if (document.select(".CheckboxCaptcha").isNotEmpty() && baseUrl == baseDefaultUrl) {
+            baseUrl = document.location()
+            throw Exception("Пройдите капчу в WebView(слишком много запросов)")
+        } else if (baseUrl != baseDefaultUrl) {
+            baseUrl = baseDefaultUrl
+        }
+
+        var hasNextPage = false
+
+        val mangas = document.select(searchMangaSelector()).map { element ->
+            searchMangaFromElement(element)
+        }
+        val nextSearchPage = document.select(searchMangaNextPageSelector())
+        if (nextSearchPage.isNotEmpty()) {
+            hasNextPage = true
+        }
+        return MangasPage(mangas.distinctBy { it.url }, hasNextPage)
+    }
+
-    override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request = throw UnsupportedOperationException("Поиск только через браузер. Открывается в приложении через «СЕРИИ»(не отдельная глава)")
-
-    override fun searchMangaFromElement(element: Element): SManga = throw UnsupportedOperationException("Not used")
-
-    override fun searchMangaSelector() = throw UnsupportedOperationException("Not used")
-
     private fun searchMangaByIdRequest(id: String): Request {
-        return GET("$baseUrl$PATH_URL$id", headers)
+        return GET("$baseDefaultUrl$PATH_URL$id", headers)
     }
 
     override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> {
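The nested Regex.replace calls in searchMangaFromElement above read most easily inside-out: strip the /comics/series/, /comics/online/ or /comics/issue/ prefix, drop trailing page-number, /page, /characters and /creators segments, and finally drop a trailing issue number when the hit points at an online or issue page. A flattened sketch of the same normalization; the sample URL and slug are invented for illustration:

// Flattened restatement of the URL clean-up in searchMangaFromElement (illustration only).
fun seriesSlugFromSearchHit(originUrl: String): String {
    val stripped = originUrl
        .substringAfter("/comics/series/")   // PATH_URL
        .substringAfter("/comics/online/")   // PATH_online
        .substringAfter("/comics/issue/")    // PATH_issue
    val cleaned = stripped
        .replace("/[0-9]+/?$".toRegex(), "")               // trailing page-number segment
        .replace("/page$".toRegex(), "")                   // trailing /page segment
        .replace("/characters$|/creators$".toRegex(), "")  // characters/creators subpages
    // For online/issue links a trailing "-<number>" is the issue number, not part of the series slug.
    val issueNumber = "-[0-9]+/?$".toRegex()
    return if (issueNumber.containsMatchIn(cleaned) &&
        (originUrl.contains("/comics/online/") || originUrl.contains("/comics/issue/"))
    ) issueNumber.replace(cleaned, "") else cleaned
}

// Hypothetical Yandex hit: https://unicomics.ru/comics/online/the-amazing-spider-man-1
// seriesSlugFromSearchHit(...) == "the-amazing-spider-man", which mangaDetailsRequest later
// turns back into https://unicomics.ru/comics/series/the-amazing-spider-man.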
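Besides parsing results, searchMangaParse above reacts to Yandex rate limiting: it points the now-mutable baseUrl at the captcha page so the captcha can be opened and solved in WebView, throws, and restores baseDefaultUrl on the next successful search. It also de-duplicates results, since several issues of one series resolve to the same series URL. A condensed, commented restatement of that flow:

// Condensed restatement of searchMangaParse, with the reasoning as comments.
override fun searchMangaParse(response: Response): MangasPage {
    val document = response.asJsoup()
    if (document.select(".CheckboxCaptcha").isNotEmpty() && baseUrl == baseDefaultUrl) {
        baseUrl = document.location()    // WebView now opens the Yandex captcha page
        throw Exception("Пройдите капчу в WebView(слишком много запросов)")
    } else if (baseUrl != baseDefaultUrl) {
        baseUrl = baseDefaultUrl         // captcha passed, restore the normal base URL
    }
    val mangas = document.select(searchMangaSelector()).map { searchMangaFromElement(it) }
    val hasNextPage = document.select(searchMangaNextPageSelector()).isNotEmpty()
    // distinctBy collapses several issues of the same series into a single entry.
    return MangasPage(mangas.distinctBy { it.url }, hasNextPage)
}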
@@ -72,7 +133,7 @@ class UniComics : ParsedHttpSource() {
         element.select("a").first().let {
             manga.setUrlWithoutDomain(it.attr("href").substringAfter(PATH_URL))
         }
-        manga.title = element.select(".list_title_en").first().text()
+        manga.title = element.select(".list_title").first().text()
         return manga
     }
 
     override fun popularMangaParse(response: Response): MangasPage {
@@ -91,16 +152,13 @@
 
     override fun latestUpdatesNextPageSelector() = popularMangaNextPageSelector()
 
-    override fun searchMangaNextPageSelector() = popularMangaNextPageSelector()
-
     override fun mangaDetailsRequest(manga: SManga): Request {
-        return GET(baseUrl + PATH_URL + manga.url, headers)
+        return GET(baseDefaultUrl + PATH_URL + manga.url, headers)
     }
 
     override fun mangaDetailsParse(document: Document): SManga = SManga.create().apply {
         val infoElement = document.select(".block.left.common").first()
         url = document.location().substringAfter(PATH_URL)
-        title = infoElement.select("h2").first().text()
+        title = infoElement.select("h1").first().text()
         thumbnail_url = infoElement.select("img").first().attr("src")
         description = infoElement.select("p").last()?.text()
         author = infoElement.select("tr:contains(Издательство)").text()
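One detail in mangaDetailsParse above: the author field is filled from the table row labelled Издательство (publisher), presumably because the series pages list a publisher rather than individual authors. The annotated line, with the translation as a comment:

// "Издательство" means "publisher"; the value is stored in SManga.author,
// presumably because the site exposes no separate author field.
author = infoElement.select("tr:contains(Издательство)").text()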
@@ -108,7 +166,7 @@
     }
 
     override fun fetchChapterList(manga: SManga): Observable<List<SChapter>> {
-        val document = client.newCall(GET(baseUrl + PATH_URL + manga.url, headers)).execute().asJsoup()
+        val document = client.newCall(GET(baseDefaultUrl + PATH_URL + manga.url, headers)).execute().asJsoup()
         val pages = mutableListOf(1)
         val dataStrArray = document.toString()
             .substringAfter("new Paginator(")
@@ -119,16 +177,13 @@
         }
         return Observable.just(
             pages.flatMap { page ->
-                chapterListParse(client.newCall(chapterPageListRequest(manga, page)).execute(), manga)
+                chapterListParse(client.newCall(chapterPageListRequest(manga, page)).execute())
             }.reversed()
         )
     }
-    private fun chapterListParse(response: Response, manga: SManga): List<SChapter> {
-        val document = response.asJsoup()
-        return document.select(chapterListSelector()).map { chapterFromElement(it) }
-    }
 
     private fun chapterPageListRequest(manga: SManga, page: Int): Request {
-        return GET("$baseUrl$PATH_URL${manga.url}/page/$page", headers)
+        return GET("$baseDefaultUrl$PATH_URL${manga.url}/page/$page", headers)
     }
 
     override fun chapterListSelector() = "div.right_comics"
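fetchChapterList above walks every page of the series listing: extra page numbers are scraped from the site's "new Paginator(...)" script, each page is fetched through chapterPageListRequest, and the flattened result is reversed; the manga-specific chapterListParse overload is dropped in favour of the standard ParsedHttpSource one. A commented sketch of the crawl loop, with the Paginator parsing elided as in the hunk above:

// Sketch of the page-by-page chapter crawl; the Paginator parsing that fills `pages` is omitted.
override fun fetchChapterList(manga: SManga): Observable<List<SChapter>> {
    val document = client.newCall(GET(baseDefaultUrl + PATH_URL + manga.url, headers)).execute().asJsoup()
    val pages = mutableListOf(1)                 // page 1 always exists
    // ... additional page numbers are read from the "new Paginator(" call in `document` ...
    return Observable.just(
        pages.flatMap { page ->
            // one request per listing page, parsed with the regular chapterListParse
            chapterListParse(client.newCall(chapterPageListRequest(manga, page)).execute())
        }.reversed()                             // presumably so the newest chapter ends up first
    )
}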
@@ -137,16 +192,22 @@
         val urlElement = element.select(".button.online a").first()
         val chapter = SChapter.create()
         element.select(".list_title").first().text().let {
-            chapter.name = it
+            if (it.contains(" №")) {
+                chapter.name = it.substringAfterLast(" ")
+                chapter.chapter_number = it.substringAfter(" №").toFloatOrNull() ?: -1f
+            } else {
+                chapter.name = "$it Сингл"
+                chapter.chapter_number = 0f
+            }
         }
         chapter.setUrlWithoutDomain(urlElement.attr("href"))
         return chapter
     }
 
     override fun pageListRequest(chapter: SChapter): Request {
         return GET(baseDefaultUrl + chapter.url, headers)
     }
 
     override fun pageListParse(document: Document): List<Page> {
         val dataStrArray = document.toString()
             .substringAfter("new Paginator(")
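The new naming logic in chapterFromElement splits the .list_title text on " №": the last word becomes the chapter name and the trailing number becomes chapter_number, while titles without an issue marker get a Сингл (single, i.e. one-shot) suffix and chapter number 0. A small worked sketch with invented titles:

// Worked example of the chapter naming rule above; the sample titles are invented.
fun nameAndNumber(listTitle: String): Pair<String, Float> =
    if (listTitle.contains(" №")) {
        // "Невероятные Люди Икс №7"  ->  name "№7", chapter_number 7.0
        listTitle.substringAfterLast(" ") to (listTitle.substringAfter(" №").toFloatOrNull() ?: -1f)
    } else {
        // "Старик Логан Annual"  ->  name "Старик Логан Annual Сингл", chapter_number 0.0
        "$listTitle Сингл" to 0f
    }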
@@ -163,5 +224,7 @@
     companion object {
         const val PREFIX_SLUG_SEARCH = "slug:"
         private const val PATH_URL = "/comics/series/"
+        private const val PATH_online = "/comics/online/"
+        private const val PATH_issue = "/comics/issue/"
     }
 }