Delegate HBrowse

parent 0fe8990f99
commit e346d95b0e
@@ -42,7 +42,7 @@ android {
        minSdkVersion AndroidConfig.minSdk
        targetSdkVersion AndroidConfig.targetSdk
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
        versionCode 3
        versionCode 4
        versionName "1.1.0"

        buildConfigField "String", "COMMIT_COUNT", "\"${getCommitCount()}\""

@@ -0,0 +1,32 @@
package eu.kanade.tachiyomi.data.database.resolvers

import android.content.ContentValues
import com.pushtorefresh.storio.sqlite.StorIOSQLite
import com.pushtorefresh.storio.sqlite.operations.put.PutResolver
import com.pushtorefresh.storio.sqlite.operations.put.PutResult
import com.pushtorefresh.storio.sqlite.queries.UpdateQuery
import eu.kanade.tachiyomi.data.database.inTransactionReturn
import eu.kanade.tachiyomi.data.database.models.Manga
import eu.kanade.tachiyomi.data.database.tables.MangaTable

// [EXH]
class MangaUrlPutResolver : PutResolver<Manga>() {

    override fun performPut(db: StorIOSQLite, manga: Manga) = db.inTransactionReturn {
        val updateQuery = mapToUpdateQuery(manga)
        val contentValues = mapToContentValues(manga)

        val numberOfRowsUpdated = db.lowLevel().update(updateQuery, contentValues)
        PutResult.newUpdateResult(numberOfRowsUpdated, updateQuery.table())
    }

    fun mapToUpdateQuery(manga: Manga) = UpdateQuery.builder()
        .table(MangaTable.TABLE)
        .where("${MangaTable.COL_ID} = ?")
        .whereArgs(manga.id)
        .build()

    fun mapToContentValues(manga: Manga) = ContentValues(1).apply {
        put(MangaTable.COL_URL, manga.url)
    }
}
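This resolver writes only the url column, so a bulk put does not rewrite every Manga field. A minimal usage sketch, mirroring the migration later in this commit (db and mangaToFix are placeholders):

    // Update only the url column of the given manga rows, using the resolver above.
    db.db.put()
        .objects(mangaToFix)
        .withPutResolver(MangaUrlPutResolver())
        .prepare()
        .executeAsBlocking()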

@@ -21,7 +21,6 @@ import eu.kanade.tachiyomi.util.system.toast
import exh.EH_SOURCE_ID
import exh.EIGHTMUSES_SOURCE_ID
import exh.EXH_SOURCE_ID
import exh.HBROWSE_SOURCE_ID
import exh.HITOMI_SOURCE_ID
import exh.MERGED_SOURCE_ID
import exh.NHENTAI_SOURCE_ID
@@ -89,7 +88,6 @@ class ExtensionManager(
    NHENTAI_SOURCE_ID -> context.getDrawable(R.mipmap.ic_nhentai_source)
    HITOMI_SOURCE_ID -> context.getDrawable(R.mipmap.ic_hitomi_source)
    EIGHTMUSES_SOURCE_ID -> context.getDrawable(R.mipmap.ic_8muses_source)
    HBROWSE_SOURCE_ID -> context.getDrawable(R.mipmap.ic_hbrowse_source)
    MERGED_SOURCE_ID -> context.getDrawable(R.mipmap.ic_merged_source)
    else -> null
}

@@ -141,7 +141,6 @@ open class SourceManager(private val context: Context) {
    exSrcs += NHentai(context)
    exSrcs += Hitomi(context)
    exSrcs += EightMuses(context)
    exSrcs += HBrowse(context)
    return exSrcs
}
// SY <--
@@ -200,7 +199,13 @@ open class SourceManager(private val context: Context) {
    "eu.kanade.tachiyomi.extension.all.mangadex",
    MangaDex::class,
    true
)*/
)*/,
DelegatedSource(
    "HBrowse",
    1401584337232758222,
    "eu.kanade.tachiyomi.extension.en.hbrowse.HBrowse",
    HBrowse::class
)
).associateBy { it.originalSourceQualifiedClassName }

var currentDelegatedSources = mutableMapOf<String, DelegatedSource>()
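With this entry, HBrowse is no longer constructed directly in the internal source list; it is resolved by delegation when the matching extension loads. A hedged sketch of how such a lookup might be applied (the wiring inside SourceManager is not shown in this hunk; extensionSource and effectiveSource are illustrative names):

    // Illustrative only: wrap a freshly loaded extension source in its delegate,
    // keyed by the extension's fully qualified class name.
    val delegated = currentDelegatedSources[extensionSource::class.qualifiedName]
    val effectiveSource: Source =
        if (delegated != null && extensionSource is HttpSource) HBrowse(extensionSource, context)
        else extensionSource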

@@ -2,330 +2,56 @@ package eu.kanade.tachiyomi.source.online.english

import android.content.Context
import android.net.Uri
import com.github.salomonbrys.kotson.array
import com.github.salomonbrys.kotson.string
import com.google.gson.JsonParser
import eu.kanade.tachiyomi.network.GET
import eu.kanade.tachiyomi.network.POST
import eu.kanade.tachiyomi.network.asObservable
import androidx.core.net.toUri
import eu.kanade.tachiyomi.network.asObservableSuccess
import eu.kanade.tachiyomi.source.model.Filter
import eu.kanade.tachiyomi.source.model.FilterList
import eu.kanade.tachiyomi.source.model.MangasPage
import eu.kanade.tachiyomi.source.model.Page
import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import eu.kanade.tachiyomi.source.online.HttpSource
import eu.kanade.tachiyomi.source.online.LewdSource
import eu.kanade.tachiyomi.source.online.UrlImportableSource
import eu.kanade.tachiyomi.ui.manga.MangaController
import eu.kanade.tachiyomi.util.asJsoup
import exh.HBROWSE_SOURCE_ID
import exh.metadata.metadata.HBrowseSearchMetadata
import exh.metadata.metadata.base.RaisedTag
import exh.search.Namespace
import exh.search.SearchEngine
import exh.search.Text
import exh.source.DelegatedHttpSource
import exh.ui.metadata.adapters.HBrowseDescriptionAdapter
import exh.util.await
import exh.util.dropBlank
import exh.util.urlImportFetchSearchManga
import hu.akarnokd.rxjava.interop.RxJavaInterop
import info.debatty.java.stringsimilarity.Levenshtein
import kotlin.math.ceil
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.async
import kotlinx.coroutines.rx2.asSingle
import okhttp3.CookieJar
import okhttp3.FormBody
import okhttp3.Headers
import okhttp3.Response
import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import rx.Observable
import rx.schedulers.Schedulers

class HBrowse(val context: Context) : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlImportableSource {
    /**
     * An ISO 639-1 compliant language code (two letters in lower case).
     */
    override val lang: String = "en"
    /**
     * Base url of the website without the trailing slash, like: http://mysite.com
     */
    override val baseUrl = HBrowseSearchMetadata.BASE_URL

    override val name: String = "HBrowse"

    override val supportsLatest = true

class HBrowse(delegate: HttpSource, val context: Context) :
    DelegatedHttpSource(delegate),
    LewdSource<HBrowseSearchMetadata, Document>,
    UrlImportableSource {
    override val metaClass = HBrowseSearchMetadata::class
    override val lang = "en"

    override val id: Long = HBROWSE_SOURCE_ID
    // Support direct URL importing
    override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> =
        urlImportFetchSearchManga(context, query) {
            super.fetchSearchManga(page, query, filters)
        }

    override fun headersBuilder() = Headers.Builder()
        .add("Cookie", BASE_COOKIES)

    private val clientWithoutCookies = client.newBuilder()
        .cookieJar(CookieJar.NO_COOKIES)
        .build()

    private val nonRedirectingClientWithoutCookies = clientWithoutCookies.newBuilder()
        .followRedirects(false)
        .build()

    private val searchEngine = SearchEngine()

    /**
     * Returns the request for the popular manga given the page.
     *
     * @param page the page number to retrieve.
     */
    override fun popularMangaRequest(page: Int) = GET("$baseUrl/browse/title/rank/DESC/$page", headers)

    private fun parseListing(response: Response): MangasPage {
        val doc = response.asJsoup()
        val main = doc.selectFirst("#main")
        val items = main.select(".thumbTable > tbody")
        val manga = items.map { mangaEle ->
            SManga.create().apply {
                val thumbElement = mangaEle.selectFirst(".thumbImg")
                url = "/" + thumbElement.parent().attr("href").split("/").dropBlank().first()
                title = thumbElement.parent().attr("title").substringAfter('\'').substringBeforeLast('\'')
                thumbnail_url = baseUrl + thumbElement.attr("src")
    override fun fetchMangaDetails(manga: SManga): Observable<SManga> {
        return client.newCall(mangaDetailsRequest(manga))
            .asObservableSuccess()
            .flatMap {
                parseToManga(manga, it.asJsoup()).andThen(Observable.just(manga))
            }
    }

        val hasNextPage = doc.selectFirst("#main > p > a[title~=jump]:nth-last-child(1)") != null
        return MangasPage(
            manga,
            hasNextPage
        )
    }

    /**
     * Returns an observable containing a page with a list of manga. Normally it's not needed to
     * override this method.
     *
     * @param page the page number to retrieve.
     * @param query the search query.
     * @param filters the list of filters to apply.
     */
    override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> {
        return urlImportFetchSearchManga(context, query) {
            fetchSearchMangaInternal(page, query, filters)
        }
    }

    /**
     * Parses the response from the site and returns a [MangasPage] object.
     *
     * @param response the response from the site.
     */
    override fun popularMangaParse(response: Response) = parseListing(response)

    /**
     * Returns the request for the search manga given the page.
     *
     * @param page the page number to retrieve.
     * @param query the search query.
     * @param filters the list of filters to apply.
     */
    override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException("Should not be called!")

    private fun fetchSearchMangaInternal(page: Int, query: String, filters: FilterList): Observable<MangasPage> {
        return RxJavaInterop.toV1Single(
            GlobalScope.async(Dispatchers.IO) {
                val modeFilter = filters.filterIsInstance<ModeFilter>().firstOrNull()
                val sortFilter = filters.filterIsInstance<SortFilter>().firstOrNull()

                var base: String? = null
                var isSortFilter = false
                // <NS, VALUE, EXCLUDED>
                var tagQuery: List<Triple<String, String, Boolean>>? = null

                if (sortFilter != null) {
                    sortFilter.state?.let { state ->
                        if (query.isNotBlank()) {
                            throw IllegalArgumentException("Cannot use sorting while text/tag search is active!")
                        }

                        isSortFilter = true
                        base = "/browse/title/${SortFilter.SORT_OPTIONS[state.index].first}/${if (state.ascending) "ASC" else "DESC"}"
                    }
                }

                if (base == null) {
                    base = if (modeFilter != null && modeFilter.state == 1) {
                        tagQuery = searchEngine.parseQuery(query, false).map {
                            when (it) {
                                is Text -> {
                                    var minDist = Int.MAX_VALUE.toDouble()
                                    // ns, value
                                    var minContent: Pair<String, String> = "" to ""
                                    for (ns in ALL_TAGS) {
                                        val (v, d) = ns.value.nearest(it.rawTextOnly(), minDist)
                                        if (d < minDist) {
                                            minDist = d
                                            minContent = ns.key to v
                                        }
                                    }
                                    minContent
                                }
                                is Namespace -> {
                                    // Map ns aliases
                                    val mappedNs = NS_MAPPINGS[it.namespace] ?: it.namespace

                                    var key = mappedNs
                                    if (!ALL_TAGS.containsKey(key)) key = ALL_TAGS.keys.sorted().nearest(mappedNs).first

                                    // Find nearest NS
                                    val nsContents = ALL_TAGS[key]

                                    key to nsContents!!.nearest(it.tag?.rawTextOnly() ?: "").first
                                }
                                else -> error("Unknown type!")
                            }.let { p ->
                                Triple(p.first, p.second, it.excluded)
                            }
                        }

                        "/result"
                    } else {
                        "/search"
                    }
                }

                base += "/$page"

                if (isSortFilter) {
                    parseListing(
                        client.newCall(GET(baseUrl + base, headers))
                            .asObservableSuccess()
                            .toSingle()
                            .await(Schedulers.io())
                    )
                } else {
                    val body = if (tagQuery != null) {
                        FormBody.Builder()
                            .add("type", "advance")
                            .apply {
                                tagQuery.forEach {
                                    add(it.first + "_" + it.second, if (it.third) "n" else "y")
                                }
                            }
                    } else {
                        FormBody.Builder()
                            .add("type", "search")
                            .add("needle", query)
                    }
                    val processRequest = POST(
                        "$baseUrl/content/process.php",
                        headers,
                        body = body.build()
                    )
                    val processResponse = nonRedirectingClientWithoutCookies.newCall(processRequest)
                        .asObservable()
                        .toSingle()
                        .await(Schedulers.io())

                    if (!processResponse.isRedirect) {
                        throw IllegalStateException("Unexpected process response code!")
                    }

                    val sessId = processResponse.headers("Set-Cookie").find {
                        it.startsWith("PHPSESSID")
                    } ?: throw IllegalStateException("Missing server session cookie!")

                    val response = clientWithoutCookies.newCall(
                        GET(
                            baseUrl + base,
                            headersBuilder()
                                .set("Cookie", BASE_COOKIES + " " + sessId.substringBefore(';'))
                                .build()
                        )
                    )
                        .asObservableSuccess()
                        .toSingle()
                        .await(Schedulers.io())

                    val doc = response.asJsoup()
                    val manga = doc.select(".browseDescription").map {
                        SManga.create().apply {
                            val first = it.child(0)
                            url = first.attr("href")
                            title = first.attr("title").substringAfter('\'').removeSuffix("'").replace('_', ' ')
                            thumbnail_url = HBrowseSearchMetadata.guessThumbnailUrl(url.substring(1))
                        }
                    }
                    val hasNextPage = doc.selectFirst("#main > p > a[title~=jump]:nth-last-child(1)") != null
                    MangasPage(
                        manga,
                        hasNextPage
                    )
                }
            }.asSingle(GlobalScope.coroutineContext)
        ).toObservable()
    }

    // Collection must be sorted and cannot be empty
    private fun List<String>.nearest(string: String, maxDist: Double = Int.MAX_VALUE.toDouble()): Pair<String, Double> {
        val idx = binarySearch(string)
        return if (idx < 0) {
            val l = Levenshtein()
            var minSoFar = maxDist
            var minIndexSoFar = 0
            forEachIndexed { index, s ->
                val d = l.distance(string, s, ceil(minSoFar).toInt())
                if (d < minSoFar) {
                    minSoFar = d
                    minIndexSoFar = index
                }
            }
            get(minIndexSoFar) to minSoFar
        } else {
            get(idx) to 0.0
        }
    }
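    // A hedged illustration of how nearest() behaves (values are made up, not from the site):
    // an exact hit returns distance 0.0 via binary search, otherwise the Levenshtein scan
    // picks the closest entry, e.g.
    //   listOf("action", "adventure", "romance").nearest("romanse")  // -> "romance" to 1.0
    //   listOf("action", "adventure", "romance").nearest("action")   // -> "action" to 0.0
    // This is what lets misspelled namespaces and tag values still resolve against ALL_TAGS.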

    /**
     * Parses the response from the site and returns a [MangasPage] object.
     *
     * @param response the response from the site.
     */
    override fun searchMangaParse(response: Response) = parseListing(response)

    /**
     * Returns the request for latest manga given the page.
     *
     * @param page the page number to retrieve.
     */
    override fun latestUpdatesRequest(page: Int) = GET("$baseUrl/browse/title/date/DESC/$page", headers)

    /**
     * Parses the response from the site and returns a [MangasPage] object.
     *
     * @param response the response from the site.
     */
    override fun latestUpdatesParse(response: Response) = parseListing(response)

    /**
     * Parses the response from the site and returns the details of a manga.
     *
     * @param response the response from the site.
     */
    override fun mangaDetailsParse(response: Response): SManga {
        throw UnsupportedOperationException("Should not be called!")
    }

    override fun parseIntoMetadata(metadata: HBrowseSearchMetadata, input: Document) {
        val tables = parseIntoTables(input)
        with(metadata) {
            hbId = Uri.parse(input.location()).pathSegments.first().toLong()
            val uri = input.location().toUri()
            hbId = uri.pathSegments[1].toLong()

            hbUrlExtra = uri.pathSegments[2]

            tags.clear()
            (tables[""]!! + tables["categories"]!!).forEach { (k, v) ->
            ((tables[""] ?: error("")) + (tables["categories"] ?: error(""))).forEach { (k, v) ->
                when (val lowercaseNs = k.toLowerCase()) {
                    "title" -> title = v.text()
                    "length" -> length = v.text().substringBefore(" ").toInt()
@@ -343,35 +69,6 @@ class HBrowse(val context: Context) : HttpSource(), LewdSource<HBrowseSearchMeta
            }
        }

    /**
     * Returns an observable with the updated details for a manga. Normally it's not needed to
     * override this method.
     *
     * @param manga the manga to be updated.
     */
    override fun fetchMangaDetails(manga: SManga): Observable<SManga> {
        return client.newCall(mangaDetailsRequest(manga))
            .asObservableSuccess()
            .flatMap {
                parseToManga(manga, it.asJsoup()).andThen(Observable.just(manga))
            }
    }

    /**
     * Parses the response from the site and returns a list of chapters.
     *
     * @param response the response from the site.
     */
    override fun chapterListParse(response: Response): List<SChapter> {
        return parseIntoTables(response.asJsoup())["read manga online"]?.map { (key, value) ->
            SChapter.create().apply {
                url = value.selectFirst(".listLink").attr("href")

                name = key
            }
        } ?: emptyList()
    }

    private fun parseIntoTables(doc: Document): Map<String, Map<String, Element>> {
        return doc.select("#main > .listTable").map { ele ->
            val tableName = ele.previousElementSibling()?.text()?.toLowerCase() ?: ""
@@ -381,93 +78,6 @@ class HBrowse(val context: Context) : HttpSource(), LewdSource<HBrowseSearchMeta
        }.toMap()
    }

    /**
     * Parses the response from the site and returns a list of pages.
     *
     * @param response the response from the site.
     */
    override fun pageListParse(response: Response): List<Page> {
        val doc = response.asJsoup()
        val basePath = listOf("data") + response.request.url.pathSegments
        val scripts = doc.getElementsByTag("script").map { it.data() }
        for (script in scripts) {
            val totalPages = TOTAL_PAGES_REGEX.find(script)?.groupValues?.getOrNull(1)?.toIntOrNull()
                ?: continue
            val pageList = PAGE_LIST_REGEX.find(script)?.groupValues?.getOrNull(1) ?: continue

            return JsonParser.parseString(pageList).array.take(totalPages).map {
                it.string
            }.mapIndexed { index, pageName ->
                Page(
                    index,
                    pageName,
                    "$baseUrl/${basePath.joinToString("/")}/$pageName"
                )
            }
        }

        return emptyList()
    }
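    // Hedged sketch of the kind of inline script pageListParse() expects; the exact variable
    // layout on the live site may differ:
    //   var totalPages = 3;
    //   var list = ["zzz001.jpg","zzz002.jpg","zzz003.jpg",""];
    // TOTAL_PAGES_REGEX (in the companion object below) captures the count, PAGE_LIST_REGEX
    // captures the JSON array, and the first totalPages entries become Page objects rooted at
    // "$baseUrl/data/<request path>/<pageName>".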

    class HelpFilter : Filter.HelpDialog(
        "Usage instructions",
        markdown =
            """
                ### Modes
                There are three available filter modes:
                - Text search
                - Tag search
                - Sort mode

                You can only use a single mode at a time. Switch between the text and tag search modes using the dropdown menu. Switch to sorting mode by selecting a sorting option.

                ### Text search
                Search for galleries by title, artist or origin.

                ### Tag search
                Search for galleries by tag (e.g. search for a specific genre, type, setting, etc). Uses nhentai/e-hentai syntax. Refer to the "Search" section on [this page](https://nhentai.net/info/) for more information.

                ### Sort mode
                View a list of all galleries sorted by a specific parameter. Exit sorting mode by resetting the filters using the reset button near the bottom of the screen.

                ### Tag list
            """.trimIndent() + "\n$TAGS_AS_MARKDOWN"
    )
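    // Example of how a tag-mode query could end up in the "advance" form that
    // fetchSearchMangaInternal() POSTs to /content/process.php (the query syntax shown is the
    // nhentai/e-hentai style mentioned above; the exact parse is up to SearchEngine):
    //   query: genre:romance -type:yaoi
    //   form:  type=advance, genre_romance=y, type_yaoi=n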

    class ModeFilter : Filter.Select<String>(
        "Mode",
        arrayOf(
            "Text search",
            "Tag search"
        )
    )

    class SortFilter : Filter.Sort("Sort", SORT_OPTIONS.map { it.second }.toTypedArray()) {
        companion object {
            // internal to display
            val SORT_OPTIONS = listOf(
                "length" to "Length",
                "date" to "Date added",
                "rank" to "Rank"
            )
        }
    }

    override fun getFilterList() = FilterList(
        HelpFilter(),
        ModeFilter(),
        SortFilter()
    )

    /**
     * Parses the response from the site and returns the absolute url to the source image.
     *
     * @param response the response from the site.
     */
    override fun imageUrlParse(response: Response): String {
        throw UnsupportedOperationException("Should not be called!")
    }

    override val matchingHosts = listOf(
        "www.hbrowse.com",
        "hbrowse.com"
@@ -480,507 +90,4 @@ class HBrowse(val context: Context) : HttpSource(), LewdSource<HBrowseSearchMeta
    override fun getDescriptionAdapter(controller: MangaController): HBrowseDescriptionAdapter {
        return HBrowseDescriptionAdapter(controller)
    }

    companion object {
        private val PAGE_LIST_REGEX = Regex("list *= *(\\[.*]);")
        private val TOTAL_PAGES_REGEX = Regex("totalPages *= *([0-9]*);")

        private const val BASE_COOKIES = "thumbnails=1;"

        private val NS_MAPPINGS = mapOf(
            "set" to "setting",
            "loc" to "setting",
            "location" to "setting",
            "fet" to "fetish",
            "relation" to "relationship",
            "male" to "malebody",
            "female" to "femalebody",
            "pos" to "position"
        )

        private val ALL_TAGS = mapOf(
            "genre" to listOf(
                "action", "adventure", "anime", "bizarre", "comedy", "drama", "fantasy", "gore",
                "historic", "horror", "medieval", "modern", "myth", "psychological", "romance",
                "school_life", "scifi", "supernatural", "video_game", "visual_novel"
            ),
            "type" to listOf(
                "anthology", "bestiality", "dandere", "deredere", "deviant", "fully_colored",
                "furry", "futanari", "gender_bender", "guro", "harem", "incest", "kuudere",
                "lolicon", "long_story", "netorare", "non-con", "partly_colored", "reverse_harem",
                "ryona", "short_story", "shotacon", "transgender", "tsundere", "uncensored",
                "vanilla", "yandere", "yaoi", "yuri"
            ),
            "setting" to listOf(
                "amusement_park", "attic", "automobile", "balcony", "basement", "bath", "beach",
                "bedroom", "cabin", "castle", "cave", "church", "classroom", "deck", "dining_room",
                "doctors", "dojo", "doorway", "dream", "dressing_room", "dungeon", "elevator",
                "festival", "gym", "haunted_building", "hospital", "hotel", "hot_springs",
                "kitchen", "laboratory", "library", "living_room", "locker_room", "mansion",
                "office", "other", "outdoor", "outer_space", "park", "pool", "prison", "public",
                "restaurant", "restroom", "roof", "sauna", "school", "school_nurses_office",
                "shower", "shrine", "storage_room", "store", "street", "teachers_lounge",
                "theater", "tight_space", "toilet", "train", "transit", "virtual_reality",
                "warehouse", "wilderness"
            ),
            "fetish" to listOf(
                "androphobia", "apron", "assertive_girl", "bikini", "bloomers", "breast_expansion",
                "business_suit", "chastity_device", "chinese_dress", "christmas", "collar",
                "corset", "cosplay_(female)", "cosplay_(male)", "crossdressing_(female)",
                "crossdressing_(male)", "eye_patch", "food", "giantess", "glasses",
                "gothic_lolita", "gyaru", "gynophobia", "high_heels", "hot_pants", "impregnation",
                "kemonomimi", "kimono", "knee_high_socks", "lab_coat", "latex", "leotard",
                "lingerie", "maid_outfit", "mother_and_daughter", "none", "nonhuman_girl",
                "olfactophilia", "pregnant", "rich_girl", "school_swimsuit", "shy_girl", "sisters",
                "sleeping_girl", "sporty", "stockings", "strapon", "student_uniform", "swimsuit",
                "tanned", "tattoo", "time_stop", "twins_(coed)", "twins_(female)", "twins_(male)",
                "uniform", "wedding_dress"
            ),
            "role" to listOf(
                "alien", "android", "angel", "athlete", "bride", "bunnygirl", "cheerleader",
                "delinquent", "demon", "doctor", "dominatrix", "escort", "foreigner", "ghost",
                "housewife", "idol", "magical_girl", "maid", "mamono", "massagist", "miko",
                "mythical_being", "neet", "nekomimi", "newlywed", "ninja", "normal", "nun",
                "nurse", "office_lady", "other", "police", "priest", "princess", "queen",
                "school_nurse", "scientist", "sorcerer", "student", "succubus", "teacher",
                "tomboy", "tutor", "waitress", "warrior", "witch"
            ),
            "relationship" to listOf(
                "acquaintance", "anothers_daughter", "anothers_girlfriend", "anothers_mother",
                "anothers_sister", "anothers_wife", "aunt", "babysitter", "childhood_friend",
                "classmate", "cousin", "customer", "daughter", "daughter-in-law", "employee",
                "employer", "enemy", "fiance", "friend", "friends_daughter", "friends_girlfriend",
                "friends_mother", "friends_sister", "friends_wife", "girlfriend", "landlord",
                "manager", "master", "mother", "mother-in-law", "neighbor", "niece", "none",
                "older_sister", "patient", "pet", "physician", "relative", "relatives_friend",
                "relatives_girlfriend", "relatives_wife", "servant", "server", "sister-in-law",
                "slave", "stepdaughter", "stepmother", "stepsister", "stranger", "student",
                "teacher", "tutee", "tutor", "twin", "underclassman", "upperclassman", "wife",
                "workmate", "younger_sister"
            ),
            "maleBody" to listOf(
                "adult", "animal", "animal_ears", "bald", "beard", "dark_skin", "elderly",
                "exaggerated_penis", "fat", "furry", "goatee", "hairy", "half_animal", "horns",
                "large_penis", "long_hair", "middle_age", "monster", "muscular", "mustache",
                "none", "short", "short_hair", "skinny", "small_penis", "tail", "tall", "tanned",
                "tan_line", "teenager", "wings", "young"
            ),
            "femaleBody" to listOf(
                "adult", "animal_ears", "bald", "big_butt", "chubby", "dark_skin", "elderly",
                "elf_ears", "exaggerated_breasts", "fat", "furry", "hairy", "hair_bun",
                "half_animal", "halo", "hime_cut", "horns", "large_breasts", "long_hair",
                "middle_age", "monster_girl", "muscular", "none", "pigtails", "ponytail", "short",
                "short_hair", "skinny", "small_breasts", "tail", "tall", "tanned", "tan_line",
                "teenager", "twintails", "wings", "young"
            ),
            "grouping" to listOf(
                "foursome_(1_female)", "foursome_(1_male)", "foursome_(mixed)",
                "foursome_(only_female)", "one_on_one", "one_on_one_(2_females)",
                "one_on_one_(2_males)", "orgy_(1_female)", "orgy_(1_male)", "orgy_(mainly_female)",
                "orgy_(mainly_male)", "orgy_(mixed)", "orgy_(only_female)", "orgy_(only_male)",
                "solo_(female)", "solo_(male)", "threesome_(1_female)", "threesome_(1_male)",
                "threesome_(only_female)", "threesome_(only_male)"
            ),
            "scene" to listOf(
                "adultery", "ahegao", "anal_(female)", "anal_(male)", "aphrodisiac", "armpit_sex",
                "asphyxiation", "blackmail", "blowjob", "bondage", "breast_feeding",
                "breast_sucking", "bukkake", "cheating_(female)", "cheating_(male)", "chikan",
                "clothed_sex", "consensual", "cunnilingus", "defloration", "discipline",
                "dominance", "double_penetration", "drunk", "enema", "exhibitionism",
                "facesitting", "fingering_(female)", "fingering_(male)", "fisting", "footjob",
                "grinding", "groping", "handjob", "humiliation", "hypnosis", "intercrural",
                "interracial_sex", "interspecies_sex", "lactation", "lotion", "masochism",
                "masturbation", "mind_break", "nonhuman", "orgy", "paizuri", "phone_sex", "props",
                "rape", "reverse_rape", "rimjob", "sadism", "scat", "sex_toys", "spanking",
                "squirt", "submission", "sumata", "swingers", "tentacles", "voyeurism",
                "watersports", "x-ray_blowjob", "x-ray_sex"
            ),
            "position" to listOf(
                "69", "acrobat", "arch", "bodyguard", "butterfly", "cowgirl", "dancer",
                "deck_chair", "deep_stick", "doggy", "drill", "ex_sex", "jockey", "lap_dance",
                "leg_glider", "lotus", "mastery", "missionary", "none", "other", "pile_driver",
                "prison_guard", "reverse_piggyback", "rodeo", "spoons", "standing", "teaspoons",
                "unusual", "victory"
            )
        ).mapValues { it.value.sorted() }

        private val TAGS_AS_MARKDOWN = ALL_TAGS.map { (ns, values) ->
            "#### $ns\n" + values.map { "- $it" }.joinToString("\n")
        }.joinToString("\n\n")
    }
}

@@ -50,7 +50,6 @@ import exh.EH_SOURCE_ID
import exh.EIGHTMUSES_SOURCE_ID
import exh.EXHMigrations
import exh.EXH_SOURCE_ID
import exh.HBROWSE_SOURCE_ID
import exh.HITOMI_SOURCE_ID
import exh.NHENTAI_SOURCE_ID
import exh.PERV_EDEN_EN_SOURCE_ID
@@ -235,9 +234,6 @@ class MainActivity : BaseActivity<MainActivityBinding>() {
    if (EIGHTMUSES_SOURCE_ID !in BlacklistedSources.HIDDEN_SOURCES) {
        BlacklistedSources.HIDDEN_SOURCES += EIGHTMUSES_SOURCE_ID
    }
    if (HBROWSE_SOURCE_ID !in BlacklistedSources.HIDDEN_SOURCES) {
        BlacklistedSources.HIDDEN_SOURCES += HBROWSE_SOURCE_ID
    }
}
// SY -->

@@ -35,7 +35,6 @@ import eu.kanade.tachiyomi.util.system.toast
import exh.EH_SOURCE_ID
import exh.EIGHTMUSES_SOURCE_ID
import exh.EXH_SOURCE_ID
import exh.HBROWSE_SOURCE_ID
import exh.HITOMI_SOURCE_ID
import exh.NHENTAI_SOURCE_ID
import exh.PERV_EDEN_EN_SOURCE_ID
@@ -175,9 +174,6 @@ class SettingsAdvancedController : SettingsController() {
    if (EIGHTMUSES_SOURCE_ID !in BlacklistedSources.HIDDEN_SOURCES) {
        BlacklistedSources.HIDDEN_SOURCES += EIGHTMUSES_SOURCE_ID
    }
    if (HBROWSE_SOURCE_ID !in BlacklistedSources.HIDDEN_SOURCES) {
        BlacklistedSources.HIDDEN_SOURCES += HBROWSE_SOURCE_ID
    }
} else {
    if (EH_SOURCE_ID in BlacklistedSources.HIDDEN_SOURCES) {
        BlacklistedSources.HIDDEN_SOURCES -= EH_SOURCE_ID
@@ -200,9 +196,6 @@ class SettingsAdvancedController : SettingsController() {
    if (EIGHTMUSES_SOURCE_ID in BlacklistedSources.HIDDEN_SOURCES) {
        BlacklistedSources.HIDDEN_SOURCES -= EIGHTMUSES_SOURCE_ID
    }
    if (HBROWSE_SOURCE_ID in BlacklistedSources.HIDDEN_SOURCES) {
        BlacklistedSources.HIDDEN_SOURCES -= HBROWSE_SOURCE_ID
    }
}
true
}

@@ -2,6 +2,7 @@ package exh

import eu.kanade.tachiyomi.source.Source
import eu.kanade.tachiyomi.source.SourceManager
import eu.kanade.tachiyomi.source.online.english.HBrowse
import eu.kanade.tachiyomi.source.online.english.HentaiCafe
import eu.kanade.tachiyomi.source.online.english.Pururin
import eu.kanade.tachiyomi.source.online.english.Tsumino
@@ -22,13 +23,14 @@ val PURURIN_SOURCE_ID = delegatedSourceId<Pururin>()
val TSUMINO_SOURCE_ID = delegatedSourceId<Tsumino>()
const val HITOMI_SOURCE_ID = LEWD_SOURCE_SERIES + 10
const val EIGHTMUSES_SOURCE_ID = LEWD_SOURCE_SERIES + 11
const val HBROWSE_SOURCE_ID = LEWD_SOURCE_SERIES + 12
val HBROWSE_SOURCE_ID = delegatedSourceId<HBrowse>()
const val MERGED_SOURCE_ID = LEWD_SOURCE_SERIES + 69

private val DELEGATED_LEWD_SOURCES = listOf(
    HentaiCafe::class,
    Pururin::class,
    Tsumino::class
    Tsumino::class,
    HBrowse::class
)

val LIBRARY_UPDATE_EXCLUDED_SOURCES = listOf(

@@ -2,6 +2,8 @@ package exh

import android.content.Context
import com.elvishew.xlog.XLog
import com.pushtorefresh.storio.sqlite.queries.Query
import com.pushtorefresh.storio.sqlite.queries.RawQuery
import eu.kanade.tachiyomi.BuildConfig
import eu.kanade.tachiyomi.data.backup.models.DHistory
import eu.kanade.tachiyomi.data.database.DatabaseHelper
@@ -9,6 +11,8 @@ import eu.kanade.tachiyomi.data.database.models.Chapter
import eu.kanade.tachiyomi.data.database.models.Manga
import eu.kanade.tachiyomi.data.database.models.MangaImpl
import eu.kanade.tachiyomi.data.database.models.Track
import eu.kanade.tachiyomi.data.database.resolvers.MangaUrlPutResolver
import eu.kanade.tachiyomi.data.database.tables.MangaTable
import eu.kanade.tachiyomi.data.library.LibraryUpdateJob
import eu.kanade.tachiyomi.data.preference.PreferencesHelper
import eu.kanade.tachiyomi.data.updater.UpdaterJob
@@ -43,6 +47,44 @@ object EXHMigrations {
            LibraryUpdateJob.setupTask(context)
            return false
        }
        if (oldVersion < 4) {
            db.inTransaction {
                // Migrate HBrowse source IDs
                db.lowLevel().executeSQL(
                    RawQuery.builder()
                        .query(
                            """
                            UPDATE ${MangaTable.TABLE}
                            SET ${MangaTable.COL_SOURCE} = $HBROWSE_SOURCE_ID
                            WHERE ${MangaTable.COL_SOURCE} = 6912
                            """.trimIndent()
                        )
                        .affectsTables(MangaTable.TABLE)
                        .build()
                )
                // Migrate HBrowse URLs
                val hBrowseManga = db.db.get()
                    .listOfObjects(Manga::class.java)
                    .withQuery(
                        Query.builder()
                            .table(MangaTable.TABLE)
                            .where("${MangaTable.COL_SOURCE} = $HBROWSE_SOURCE_ID")
                            .build()
                    )
                    .prepare()
                    .executeAsBlocking()
                hBrowseManga.forEach {
                    it.url = it.url + "/c00001"
                }

                db.db.put()
                    .objects(hBrowseManga)
                    // Extremely slow without the resolver :/
                    .withPutResolver(MangaUrlPutResolver())
                    .prepare()
                    .executeAsBlocking()
            }
        }
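        // For example (illustrative values): a library entry stored as "/14257" by the old
        // standalone HBrowse source becomes "/14257/c00001", matching the "/$hbId/$hbUrlExtra"
        // URL shape the delegated source now emits (see HBrowseSearchMetadata.copyTo below).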

        // if (oldVersion < 1) { }
        // do stuff here when releasing changed crap
@@ -70,6 +112,10 @@ object EXHMigrations {
            manga.source = TSUMINO_SOURCE_ID!!
        }

        if (manga.source == 6912L) {
            manga.source = HBROWSE_SOURCE_ID!!
        }

        // Migrate nhentai URLs
        if (manga.source == NHENTAI_SOURCE_ID) {
            manga.url = getUrlWithoutDomain(manga.url)

@@ -9,13 +9,17 @@ import exh.metadata.metadata.base.RaisedSearchMetadata
class HBrowseSearchMetadata : RaisedSearchMetadata() {
    var hbId: Long? = null

    var hbUrlExtra: String? = null

    var thumbnail: String? = null

    var title: String? by titleDelegate(TITLE_TYPE_MAIN)

    // Length in pages
    var length: Int? = null

    override fun copyTo(manga: SManga) {
        manga.url = "/$hbId"
        manga.url = "/$hbId/$hbUrlExtra"

        title?.let {
            manga.title = it
@@ -44,6 +48,8 @@ class HBrowseSearchMetadata : RaisedSearchMetadata() {
    override fun getExtraInfoPairs(context: Context): List<Pair<String, String>> {
        val pairs = mutableListOf<Pair<String, String>>()
        hbId?.let { pairs += Pair(context.getString(R.string.id), it.toString()) }
        hbUrlExtra?.let { pairs += Pair(context.getString(R.string.id), it.toString()) }
        thumbnail?.let { pairs += Pair(context.getString(R.string.thumbnail_url), it.toString()) }
        title?.let { pairs += Pair(context.getString(R.string.title), it) }
        length?.let { pairs += Pair(context.getString(R.string.page_count), it.toString()) }
        return pairs