Fix repo build
AGP 4.1.0 automatically aligns the output APKs, so we just run a zipalign check rather than actually trying to align them. cf. https://issuetracker.google.com/issues/162117652
parent 1b3412993d
commit 8aa9b5ef1d
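
As a reading aid for the script change below: a minimal sketch of what the release-packaging loop does after this commit. The BASENAME assignment, the extglob shell option, and the surrounding variable setup (TOOLS, APKS, DEST, and the signing credentials) are assumed here rather than taken from the visible hunks; only the zipalign check, the copy, and the apksigner call mirror the diff.

#!/bin/bash
set -e

# Assumed setup (not part of this diff): TOOLS points at the newest build-tools
# directory, APKS lists the assembled release APKs, DEST is the output directory,
# and STORE_PATH/STORE_ALIAS plus the STORE_PASS/KEY_PASS env vars configure signing.
shopt -s extglob   # needed for the ${BASENAME%%+(-release*)} pattern below

for APK in "${APKS[@]}"; do
    BASENAME=$(basename "$APK")               # hypothetical helper step, not shown in the hunks
    APKNAME="${BASENAME%%+(-release*)}.apk"
    APKDEST="$DEST/$APKNAME"

    # AGP 4.1.0 already aligned the APK, so -c only verifies the alignment.
    "${TOOLS}/zipalign" -c -v -p 4 "$APK"
    cp "$APK" "$APKDEST"

    if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then
        "${TOOLS}/apksigner" sign --ks "$STORE_PATH" --ks-key-alias "$STORE_ALIAS" \
            --ks-pass env:STORE_PASS --key-pass env:KEY_PASS "$APKDEST"
    fi
done

Because zipalign -c only verifies the 4-byte alignment that AGP already applied, a misaligned APK now fails the build instead of being silently re-aligned; signing is still skipped on pull-request builds via the TRAVIS_PULL_REQUEST guard.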
@@ -1,5 +1,4 @@
#!/bin/bash

set -e

./gradlew --no-daemon clean assembleRelease
@@ -18,7 +17,8 @@ for APK in ${APKS[@]}; do
    APKNAME="${BASENAME%%+(-release*)}.apk"
    APKDEST="$DEST/$APKNAME"

-   ${TOOLS}/zipalign -v -p 4 $APK $APKDEST
+   ${TOOLS}/zipalign -c -v -p 4 $APK
+   cp $APK $APKDEST
    if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then
        ${TOOLS}/apksigner sign --ks $STORE_PATH --ks-key-alias $STORE_ALIAS --ks-pass env:STORE_PASS --key-pass env:KEY_PASS $APKDEST
    fi

@@ -1,5 +1,4 @@
#!/bin/bash

set -e

TOOLS="$(ls -d ${ANDROID_HOME}/build-tools/* | tail -1)"

@@ -6,7 +6,6 @@ import android.support.v7.preference.EditTextPreference
import android.support.v7.preference.PreferenceScreen
import android.widget.Toast
import com.github.salomonbrys.kotson.get
import com.google.gson.JsonParser
import eu.kanade.tachiyomi.annotations.Nsfw
import eu.kanade.tachiyomi.extension.BuildConfig
import eu.kanade.tachiyomi.network.GET
@@ -19,7 +18,6 @@ import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import okhttp3.Request
import okhttp3.Response
import org.json.JSONObject
import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import uy.kohesive.injekt.Injekt

File diff suppressed because one or more lines are too long

@@ -251,7 +251,6 @@ abstract class WPMangaStream(

        val imageList = JSONArray(imageListRegex.find(docString)!!.destructured.toList()[0])

-
        for (i in 0 until imageList.length()) {
            pages.add(Page(i, "", imageList.getString(i)))
        }

@@ -10,9 +10,6 @@ import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import eu.kanade.tachiyomi.source.online.HttpSource
import eu.kanade.tachiyomi.util.asJsoup
-import java.io.IOException
-import java.nio.charset.Charset
-import java.util.Calendar
import okhttp3.CacheControl
import okhttp3.Call
import okhttp3.Callback
@@ -25,6 +22,9 @@ import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import org.jsoup.nodes.Node
import org.jsoup.nodes.TextNode
+import java.io.IOException
+import java.nio.charset.Charset
+import java.util.Calendar

class MangaDoom : HttpSource() {

@@ -43,9 +43,12 @@ class MangaDoom : HttpSource() {
    override fun popularMangaParse(response: Response): MangasPage {
        val document = response.asJsoup()

-       return MangasPage(document.select(popularMangaSelector).map {
-           mangaFromMangaListElement(it)
-       }, paginationHasNext(document))
+       return MangasPage(
+           document.select(popularMangaSelector).map {
+               mangaFromMangaListElement(it)
+           },
+           paginationHasNext(document)
+       )
    }

    // latest
@@ -71,8 +74,10 @@ class MangaDoom : HttpSource() {

        val mangaUpdates = document.select("div.manga_updates > dl > div.manga-cover > a")

-       return MangasPage(mangaUpdates.map { mangaFromMangaTitleElement(it) },
-           paginationHasNext(document))
+       return MangasPage(
+           mangaUpdates.map { mangaFromMangaTitleElement(it) },
+           paginationHasNext(document)
+       )
    }

    /**
@@ -110,8 +115,10 @@ class MangaDoom : HttpSource() {
            this.artist = dlElement.select("dt:containsOwn(Artist:) + dd > a")
                .first()?.ownText().takeIf { it != "-" }

-           this.status = when (dlElement.select("dt:containsOwn(Status:) + dd")
-               .first().ownText()) {
+           this.status = when (
+               dlElement.select("dt:containsOwn(Status:) + dd")
+                   .first().ownText()
+           ) {
                "Ongoing" -> SManga.ONGOING
                "Completed" -> SManga.COMPLETED
                else -> SManga.UNKNOWN
@@ -348,8 +355,11 @@ class MangaDoom : HttpSource() {
            }
        }

-       return POST(baseUrl + underlyingSearchMangaPath,
-           searchHeaders, requestBodyBuilder.build())
+       return POST(
+           baseUrl + underlyingSearchMangaPath,
+           searchHeaders,
+           requestBodyBuilder.build()
+       )
    }

    private val searchResultSelector = "div.row"
@@ -357,9 +367,12 @@ class MangaDoom : HttpSource() {
    override fun searchMangaParse(response: Response): MangasPage {
        val document = response.asJsoup()

-       return MangasPage(document.select(searchResultSelector).map {
-           mangaFromMangaListElement(it)
-       }, false)
+       return MangasPage(
+           document.select(searchResultSelector).map {
+               mangaFromMangaListElement(it)
+           },
+           false
+       )
    }

    // filters
@@ -373,13 +386,17 @@ class MangaDoom : HttpSource() {
        genreManager.getGenreGroupFilterOrPlaceholder()
    )

-   private class TypeFilter : FormBodySelectFilter("Type", "type",
+   private class TypeFilter : FormBodySelectFilter(
+       "Type",
+       "type",
        arrayOf(
            Pair("japanese", "Japanese Manga"),
            Pair("korean", "Korean Manhwa"),
            Pair("chinese", "Chinese Manhua"),
            Pair("all", "All")
-       ), 3)
+       ),
+       3
+   )

    private class AuthorTextFilter : Filter.Text("Author"), FormBodyFilter {
        override fun addToFormParameters(formParameters: MutableMap<String, String>) {
@@ -393,12 +410,16 @@ class MangaDoom : HttpSource() {
        }
    }

-   private class StatusFilter : FormBodySelectFilter("Status", "status",
+   private class StatusFilter : FormBodySelectFilter(
+       "Status",
+       "status",
        arrayOf(
            Pair("ongoing", "Ongoing"),
            Pair("completed", "Completed"),
            Pair("both", "Both")
-       ), 2)
+       ),
+       2
+   )

    /**
     * GenreFilter aren't hard coded into this extension, instead it relies on asynchronous-fetching
@@ -439,8 +460,10 @@ class MangaDoom : HttpSource() {
     * timestamp with the current time
     */
    private fun contentUpToDate(compareTimestamp: Long?): Boolean =
-       (compareTimestamp != null &&
-           (System.currentTimeMillis() - compareTimestamp < 15 * 60 * 1000))
+       (
+           compareTimestamp != null &&
+               (System.currentTimeMillis() - compareTimestamp < 15 * 60 * 1000)
+           )

    /**
     * Used to generate a GenreGroupFilter from cached Pair objects or (if the cached pairs are
@@ -448,9 +471,11 @@ class MangaDoom : HttpSource() {
     */
    private fun callForGenreGroup(): GenreGroupFilter? {
        fun genreContentListToGenreGroup(genreFiltersContent: List<Pair<String, String>>) =
-           GenreGroupFilter(genreFiltersContent.map { singleGenreContent ->
-               GenreFilter(singleGenreContent.first, singleGenreContent.second)
-           })
+           GenreGroupFilter(
+               genreFiltersContent.map { singleGenreContent ->
+                   GenreFilter(singleGenreContent.first, singleGenreContent.second)
+               }
+           )

        val genreGroupFromVar = genreFiltersContent?.let { genreList ->
            genreContentListToGenreGroup(genreList)
@@ -477,30 +502,43 @@ class MangaDoom : HttpSource() {
                val document = genreResponse.asJsoup()

                return document.select("ul.manga-cat > li").map {
-                   Pair(it.select("span.fa").first().attr("data-id"),
-                       it.ownText())
+                   Pair(
+                       it.select("span.fa").first().attr("data-id"),
+                       it.ownText()
+                   )
                }
            }
        }

        val genreResponse = client
-           .newCall(GET(url = baseUrl + advancedSearchPagePath,
-               cache = CacheControl.FORCE_CACHE)).execute()
+           .newCall(
+               GET(
+                   url = baseUrl + advancedSearchPagePath,
+                   cache = CacheControl.FORCE_CACHE
+               )
+           ).execute()

        return if (genreResponse.code() == 200 &&
-           contentUpToDate(genreResponse.receivedResponseAtMillis())) {
-           responseToGenreFilterContentPair(genreResponse)
+           contentUpToDate(genreResponse.receivedResponseAtMillis())
+       ) {
+           responseToGenreFilterContentPair(genreResponse)
        } else {
-           client.newCall(GET(url = baseUrl + advancedSearchPagePath,
-               cache = CacheControl.FORCE_NETWORK)).enqueue(object : Callback {
-               override fun onFailure(call: Call, e: IOException) {
-                   throw e
-               }
+           client.newCall(
+               GET(
+                   url = baseUrl + advancedSearchPagePath,
+                   cache = CacheControl.FORCE_NETWORK
+               )
+           ).enqueue(
+               object : Callback {
+                   override fun onFailure(call: Call, e: IOException) {
+                       throw e
+                   }

-               override fun onResponse(call: Call, response: Response) {
-                   genreFilterContentFrom = response.receivedResponseAtMillis()
-                   genreFiltersContent = responseToGenreFilterContentPair(response)
+                   override fun onResponse(call: Call, response: Response) {
+                       genreFilterContentFrom = response.receivedResponseAtMillis()
+                       genreFiltersContent = responseToGenreFilterContentPair(response)
+                   }
                }
-           })
+           )
            null
        }
    }
@@ -515,11 +553,15 @@ class MangaDoom : HttpSource() {
        val vals: Array<Pair<String, String>>,
        defaultValue: Int = 0
    ) :
-       Filter.Select<String>(displayName,
-           vals.map { it.second }.toTypedArray(), defaultValue), FormBodyFilter {
-       override fun addToFormParameters(formParameters: MutableMap<String, String>) {
-           formParameters[payloadParam] = vals[state].first
-       }
+       Filter.Select<String>(
+           displayName,
+           vals.map { it.second }.toTypedArray(),
+           defaultValue
+       ),
+       FormBodyFilter {
+       override fun addToFormParameters(formParameters: MutableMap<String, String>) {
+           formParameters[payloadParam] = vals[state].first
+       }
    }

    /**
@@ -547,5 +589,5 @@ class MangaDoom : HttpSource() {
            this.setUrlWithoutDomain(mangaTitleElement.attr("href"))
            this.thumbnail_url = mangaTitleElement.select("img").first()
                .attr("src")
        }
    }
}

@@ -9,17 +9,17 @@ import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
import eu.kanade.tachiyomi.util.asJsoup
+import okhttp3.Request
+import okhttp3.Response
+import org.jsoup.nodes.Document
+import org.jsoup.nodes.Element
+import rx.Observable
import java.lang.UnsupportedOperationException
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.Date
import java.util.GregorianCalendar
import java.util.Locale
-import okhttp3.Request
-import okhttp3.Response
-import org.jsoup.nodes.Document
-import org.jsoup.nodes.Element
-import rx.Observable

class Schlockmercenary : ParsedHttpSource() {
@ -41,8 +41,12 @@ class Schlockmercenary : ParsedHttpSource() {
|
|||
|
||||
override fun popularMangaFromElement(element: Element): SManga {
|
||||
val book = element.select("h4 > a").first()
|
||||
val thumb = (baseUrl + (element.select("img").first()?.attr("src")
|
||||
?: defaultThumbnailUrl)).substringBefore("?")
|
||||
val thumb = (
|
||||
baseUrl + (
|
||||
element.select("img").first()?.attr("src")
|
||||
?: defaultThumbnailUrl
|
||||
)
|
||||
).substringBefore("?")
|
||||
return SManga.create().apply {
|
||||
url = book.attr("href")
|
||||
title = book.text()
|
||||
|
|
|
@@ -35,6 +35,16 @@ import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
import eu.kanade.tachiyomi.util.asJsoup
+import okhttp3.FormBody
+import okhttp3.MediaType
+import okhttp3.OkHttpClient
+import okhttp3.Request
+import okhttp3.Response
+import okhttp3.ResponseBody
+import org.jsoup.nodes.Document
+import org.jsoup.nodes.Element
+import uy.kohesive.injekt.Injekt
+import uy.kohesive.injekt.api.get
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.text.ParseException
@@ -52,17 +62,6 @@ import kotlin.collections.map
import kotlin.collections.mapIndexed
import kotlin.collections.mutableListOf
import kotlin.collections.toTypedArray
-import okhttp3.FormBody
-import okhttp3.MediaType
-import okhttp3.OkHttpClient
-import okhttp3.Request
-import okhttp3.Response
-import okhttp3.ResponseBody
-import org.apache.commons.lang3.StringUtils
-import org.jsoup.nodes.Document
-import org.jsoup.nodes.Element
-import uy.kohesive.injekt.Injekt
-import uy.kohesive.injekt.api.get

class Japscan : ConfigurableSource, ParsedHttpSource() {

@@ -37,7 +37,6 @@ class MangaKawaii : ParsedHttpSource() {
    override fun latestUpdatesNextPageSelector(): String? = null
    override fun searchMangaNextPageSelector() = "no selector"

-
    override fun popularMangaRequest(page: Int) = GET("$baseUrl/liste-manga/filterMangaList?page=$page&sortBy=views&asc=false", headersBuilder().add("X-Requested-With", "XMLHttpRequest").build())

    override fun latestUpdatesRequest(page: Int) = GET(baseUrl, headers)
@@ -112,5 +111,4 @@ class MangaKawaii : ParsedHttpSource() {
    }
    override fun pageListParse(document: Document): List<Page> = throw Exception("Not used")
    override fun imageUrlParse(document: Document): String = throw Exception("Not used")
-
}