Fix repo build
AGP 4.1.0 automatically aligns output APKs, so we just do a zipalign check rather than actually trying to align it. cf. https://issuetracker.google.com/issues/162117652
parent 1b3412993d
commit 8aa9b5ef1d
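The script change below replaces the zipalign step with a verification-only pass followed by a plain copy, since APKs produced by AGP 4.1.0 are already aligned. A minimal sketch of the resulting release flow, using the TOOLS, APK, APKDEST, and keystore variables exactly as they appear in the scripts below (in the real script the signing step only runs outside pull requests):

    # Verify alignment only (-c); AGP 4.1.0 has already aligned the APK.
    ${TOOLS}/zipalign -c -v -p 4 "$APK"

    # The APK is fine as-is, so just copy it to its destination and sign it there.
    cp "$APK" "$APKDEST"
    ${TOOLS}/apksigner sign --ks "$STORE_PATH" --ks-key-alias "$STORE_ALIAS" \
        --ks-pass env:STORE_PASS --key-pass env:KEY_PASS "$APKDEST"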
@@ -1,5 +1,4 @@
 #!/bin/bash
-
 set -e
 
 ./gradlew --no-daemon clean assembleRelease
@@ -18,7 +17,8 @@ for APK in ${APKS[@]}; do
     APKNAME="${BASENAME%%+(-release*)}.apk"
     APKDEST="$DEST/$APKNAME"
 
-    ${TOOLS}/zipalign -v -p 4 $APK $APKDEST
+    ${TOOLS}/zipalign -c -v -p 4 $APK
+    cp $APK $APKDEST
     if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then
         ${TOOLS}/apksigner sign --ks $STORE_PATH --ks-key-alias $STORE_ALIAS --ks-pass env:STORE_PASS --key-pass env:KEY_PASS $APKDEST
     fi
@@ -1,5 +1,4 @@
 #!/bin/bash
-
 set -e
 
 TOOLS="$(ls -d ${ANDROID_HOME}/build-tools/* | tail -1)"
@@ -6,7 +6,6 @@ import android.support.v7.preference.EditTextPreference
 import android.support.v7.preference.PreferenceScreen
 import android.widget.Toast
 import com.github.salomonbrys.kotson.get
-import com.google.gson.JsonParser
 import eu.kanade.tachiyomi.annotations.Nsfw
 import eu.kanade.tachiyomi.extension.BuildConfig
 import eu.kanade.tachiyomi.network.GET
@@ -19,7 +18,6 @@ import eu.kanade.tachiyomi.source.model.SChapter
 import eu.kanade.tachiyomi.source.model.SManga
 import okhttp3.Request
 import okhttp3.Response
-import org.json.JSONObject
 import org.jsoup.nodes.Document
 import org.jsoup.nodes.Element
 import uy.kohesive.injekt.Injekt
File diff suppressed because one or more lines are too long
@@ -251,7 +251,6 @@ abstract class WPMangaStream(
 
         val imageList = JSONArray(imageListRegex.find(docString)!!.destructured.toList()[0])
 
-
         for (i in 0 until imageList.length()) {
             pages.add(Page(i, "", imageList.getString(i)))
         }
@@ -10,9 +10,6 @@ import eu.kanade.tachiyomi.source.model.SChapter
 import eu.kanade.tachiyomi.source.model.SManga
 import eu.kanade.tachiyomi.source.online.HttpSource
 import eu.kanade.tachiyomi.util.asJsoup
-import java.io.IOException
-import java.nio.charset.Charset
-import java.util.Calendar
 import okhttp3.CacheControl
 import okhttp3.Call
 import okhttp3.Callback
@@ -25,6 +22,9 @@ import org.jsoup.nodes.Document
 import org.jsoup.nodes.Element
 import org.jsoup.nodes.Node
 import org.jsoup.nodes.TextNode
+import java.io.IOException
+import java.nio.charset.Charset
+import java.util.Calendar
 
 class MangaDoom : HttpSource() {
 
@@ -43,9 +43,12 @@ class MangaDoom : HttpSource() {
     override fun popularMangaParse(response: Response): MangasPage {
         val document = response.asJsoup()
 
-        return MangasPage(document.select(popularMangaSelector).map {
-            mangaFromMangaListElement(it)
-        }, paginationHasNext(document))
+        return MangasPage(
+            document.select(popularMangaSelector).map {
+                mangaFromMangaListElement(it)
+            },
+            paginationHasNext(document)
+        )
     }
 
     // latest
@@ -71,8 +74,10 @@ class MangaDoom : HttpSource() {
 
         val mangaUpdates = document.select("div.manga_updates > dl > div.manga-cover > a")
 
-        return MangasPage(mangaUpdates.map { mangaFromMangaTitleElement(it) },
-            paginationHasNext(document))
+        return MangasPage(
+            mangaUpdates.map { mangaFromMangaTitleElement(it) },
+            paginationHasNext(document)
+        )
     }
 
     /**
@@ -110,8 +115,10 @@ class MangaDoom : HttpSource() {
             this.artist = dlElement.select("dt:containsOwn(Artist:) + dd > a")
                 .first()?.ownText().takeIf { it != "-" }
 
-            this.status = when (dlElement.select("dt:containsOwn(Status:) + dd")
-                .first().ownText()) {
+            this.status = when (
+                dlElement.select("dt:containsOwn(Status:) + dd")
+                    .first().ownText()
+            ) {
                 "Ongoing" -> SManga.ONGOING
                 "Completed" -> SManga.COMPLETED
                 else -> SManga.UNKNOWN
@@ -348,8 +355,11 @@ class MangaDoom : HttpSource() {
             }
         }
 
-        return POST(baseUrl + underlyingSearchMangaPath,
-            searchHeaders, requestBodyBuilder.build())
+        return POST(
+            baseUrl + underlyingSearchMangaPath,
+            searchHeaders,
+            requestBodyBuilder.build()
+        )
     }
 
     private val searchResultSelector = "div.row"
@@ -357,9 +367,12 @@ class MangaDoom : HttpSource() {
     override fun searchMangaParse(response: Response): MangasPage {
         val document = response.asJsoup()
 
-        return MangasPage(document.select(searchResultSelector).map {
-            mangaFromMangaListElement(it)
-        }, false)
+        return MangasPage(
+            document.select(searchResultSelector).map {
+                mangaFromMangaListElement(it)
+            },
+            false
+        )
     }
 
     // filters
@@ -373,13 +386,17 @@ class MangaDoom : HttpSource() {
         genreManager.getGenreGroupFilterOrPlaceholder()
     )
 
-    private class TypeFilter : FormBodySelectFilter("Type", "type",
+    private class TypeFilter : FormBodySelectFilter(
+        "Type",
+        "type",
         arrayOf(
             Pair("japanese", "Japanese Manga"),
             Pair("korean", "Korean Manhwa"),
             Pair("chinese", "Chinese Manhua"),
             Pair("all", "All")
-        ), 3)
+        ),
+        3
+    )
 
     private class AuthorTextFilter : Filter.Text("Author"), FormBodyFilter {
         override fun addToFormParameters(formParameters: MutableMap<String, String>) {
@@ -393,12 +410,16 @@ class MangaDoom : HttpSource() {
         }
     }
 
-    private class StatusFilter : FormBodySelectFilter("Status", "status",
+    private class StatusFilter : FormBodySelectFilter(
+        "Status",
+        "status",
         arrayOf(
             Pair("ongoing", "Ongoing"),
             Pair("completed", "Completed"),
             Pair("both", "Both")
-        ), 2)
+        ),
+        2
+    )
 
     /**
      * GenreFilter aren't hard coded into this extension, instead it relies on asynchronous-fetching
@@ -439,8 +460,10 @@ class MangaDoom : HttpSource() {
      * timestamp with the current time
      */
    private fun contentUpToDate(compareTimestamp: Long?): Boolean =
-        (compareTimestamp != null &&
-            (System.currentTimeMillis() - compareTimestamp < 15 * 60 * 1000))
+        (
+            compareTimestamp != null &&
+                (System.currentTimeMillis() - compareTimestamp < 15 * 60 * 1000)
+        )
 
     /**
      * Used to generate a GenreGroupFilter from cached Pair objects or (if the cached pairs are
@@ -448,9 +471,11 @@ class MangaDoom : HttpSource() {
      */
     private fun callForGenreGroup(): GenreGroupFilter? {
         fun genreContentListToGenreGroup(genreFiltersContent: List<Pair<String, String>>) =
-            GenreGroupFilter(genreFiltersContent.map { singleGenreContent ->
-                GenreFilter(singleGenreContent.first, singleGenreContent.second)
-            })
+            GenreGroupFilter(
+                genreFiltersContent.map { singleGenreContent ->
+                    GenreFilter(singleGenreContent.first, singleGenreContent.second)
+                }
+            )
 
         val genreGroupFromVar = genreFiltersContent?.let { genreList ->
             genreContentListToGenreGroup(genreList)
@@ -477,30 +502,43 @@ class MangaDoom : HttpSource() {
             val document = genreResponse.asJsoup()
 
             return document.select("ul.manga-cat > li").map {
-                Pair(it.select("span.fa").first().attr("data-id"),
-                    it.ownText())
+                Pair(
+                    it.select("span.fa").first().attr("data-id"),
+                    it.ownText()
+                )
             }
         }
 
         val genreResponse = client
-            .newCall(GET(url = baseUrl + advancedSearchPagePath,
-                cache = CacheControl.FORCE_CACHE)).execute()
+            .newCall(
+                GET(
+                    url = baseUrl + advancedSearchPagePath,
+                    cache = CacheControl.FORCE_CACHE
+                )
+            ).execute()
 
         return if (genreResponse.code() == 200 &&
-            contentUpToDate(genreResponse.receivedResponseAtMillis())) {
-            responseToGenreFilterContentPair(genreResponse)
+            contentUpToDate(genreResponse.receivedResponseAtMillis())
+        ) {
+            responseToGenreFilterContentPair(genreResponse)
         } else {
-            client.newCall(GET(url = baseUrl + advancedSearchPagePath,
-                cache = CacheControl.FORCE_NETWORK)).enqueue(object : Callback {
-                override fun onFailure(call: Call, e: IOException) {
-                    throw e
-                }
+            client.newCall(
+                GET(
+                    url = baseUrl + advancedSearchPagePath,
+                    cache = CacheControl.FORCE_NETWORK
+                )
+            ).enqueue(
+                object : Callback {
+                    override fun onFailure(call: Call, e: IOException) {
+                        throw e
+                    }
 
                     override fun onResponse(call: Call, response: Response) {
                         genreFilterContentFrom = response.receivedResponseAtMillis()
                         genreFiltersContent = responseToGenreFilterContentPair(response)
+                    }
                 }
-            })
+            )
             null
         }
     }
@@ -515,11 +553,15 @@ class MangaDoom : HttpSource() {
         val vals: Array<Pair<String, String>>,
         defaultValue: Int = 0
     ) :
-        Filter.Select<String>(displayName,
-            vals.map { it.second }.toTypedArray(), defaultValue), FormBodyFilter {
-        override fun addToFormParameters(formParameters: MutableMap<String, String>) {
-            formParameters[payloadParam] = vals[state].first
-        }
+        Filter.Select<String>(
+            displayName,
+            vals.map { it.second }.toTypedArray(),
+            defaultValue
+        ),
+        FormBodyFilter {
+        override fun addToFormParameters(formParameters: MutableMap<String, String>) {
+            formParameters[payloadParam] = vals[state].first
+        }
     }
 
     /**
@@ -547,5 +589,5 @@ class MangaDoom : HttpSource() {
             this.setUrlWithoutDomain(mangaTitleElement.attr("href"))
             this.thumbnail_url = mangaTitleElement.select("img").first()
                 .attr("src")
         }
     }
@@ -9,17 +9,17 @@ import eu.kanade.tachiyomi.source.model.SChapter
 import eu.kanade.tachiyomi.source.model.SManga
 import eu.kanade.tachiyomi.source.online.ParsedHttpSource
 import eu.kanade.tachiyomi.util.asJsoup
+import okhttp3.Request
+import okhttp3.Response
+import org.jsoup.nodes.Document
+import org.jsoup.nodes.Element
+import rx.Observable
 import java.lang.UnsupportedOperationException
 import java.text.SimpleDateFormat
 import java.util.Calendar
 import java.util.Date
 import java.util.GregorianCalendar
 import java.util.Locale
-import okhttp3.Request
-import okhttp3.Response
-import org.jsoup.nodes.Document
-import org.jsoup.nodes.Element
-import rx.Observable
 
 class Schlockmercenary : ParsedHttpSource() {
 
@@ -41,8 +41,12 @@ class Schlockmercenary : ParsedHttpSource() {
 
     override fun popularMangaFromElement(element: Element): SManga {
         val book = element.select("h4 > a").first()
-        val thumb = (baseUrl + (element.select("img").first()?.attr("src")
-            ?: defaultThumbnailUrl)).substringBefore("?")
+        val thumb = (
+            baseUrl + (
+                element.select("img").first()?.attr("src")
+                    ?: defaultThumbnailUrl
+                )
+            ).substringBefore("?")
         return SManga.create().apply {
             url = book.attr("href")
             title = book.text()
@@ -35,6 +35,16 @@ import eu.kanade.tachiyomi.source.model.SChapter
 import eu.kanade.tachiyomi.source.model.SManga
 import eu.kanade.tachiyomi.source.online.ParsedHttpSource
 import eu.kanade.tachiyomi.util.asJsoup
+import okhttp3.FormBody
+import okhttp3.MediaType
+import okhttp3.OkHttpClient
+import okhttp3.Request
+import okhttp3.Response
+import okhttp3.ResponseBody
+import org.jsoup.nodes.Document
+import org.jsoup.nodes.Element
+import uy.kohesive.injekt.Injekt
+import uy.kohesive.injekt.api.get
 import java.io.ByteArrayInputStream
 import java.io.ByteArrayOutputStream
 import java.text.ParseException
@@ -52,17 +62,6 @@ import kotlin.collections.map
 import kotlin.collections.mapIndexed
 import kotlin.collections.mutableListOf
 import kotlin.collections.toTypedArray
-import okhttp3.FormBody
-import okhttp3.MediaType
-import okhttp3.OkHttpClient
-import okhttp3.Request
-import okhttp3.Response
-import okhttp3.ResponseBody
-import org.apache.commons.lang3.StringUtils
-import org.jsoup.nodes.Document
-import org.jsoup.nodes.Element
-import uy.kohesive.injekt.Injekt
-import uy.kohesive.injekt.api.get
 
 class Japscan : ConfigurableSource, ParsedHttpSource() {
 
@@ -37,7 +37,6 @@ class MangaKawaii : ParsedHttpSource() {
     override fun latestUpdatesNextPageSelector(): String? = null
     override fun searchMangaNextPageSelector() = "no selector"
 
-
     override fun popularMangaRequest(page: Int) = GET("$baseUrl/liste-manga/filterMangaList?page=$page&sortBy=views&asc=false", headersBuilder().add("X-Requested-With", "XMLHttpRequest").build())
 
     override fun latestUpdatesRequest(page: Int) = GET(baseUrl, headers)
@@ -112,5 +111,4 @@ class MangaKawaii : ParsedHttpSource() {
     }
     override fun pageListParse(document: Document): List<Page> = throw Exception("Not used")
     override fun imageUrlParse(document: Document): String = throw Exception("Not used")
-
 }