Lint fixes
This commit is contained in:
parent
5d5c6016ea
commit
5e05175f8c
|
@ -1,6 +1,10 @@
|
|||
package eu.kanade.tachiyomi.extension.en.existentialcomics
|
||||
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
|
@ -79,5 +83,4 @@ class ExistentialComics : ParsedHttpSource() {
|
|||
override fun latestUpdatesRequest(page: Int): Request = throw Exception("Not used")
|
||||
|
||||
override fun latestUpdatesSelector(): String = throw Exception("Not used")
|
||||
|
||||
}
|
||||
|
|
|
@ -1,17 +1,21 @@
|
|||
package eu.kanade.tachiyomi.extension.en.explosm
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.HttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import rx.Observable
|
||||
import org.jsoup.nodes.Element
|
||||
import java.util.Locale
|
||||
import java.text.SimpleDateFormat
|
||||
import rx.Observable
|
||||
|
||||
class Explosm : HttpSource() {
|
||||
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
package eu.kanade.tachiyomi.extension.en.gunnerkriggcourt
|
||||
|
||||
import android.util.Log
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
|
@ -54,14 +57,14 @@ class GunnerkriggCourt : ParsedHttpSource() {
|
|||
chapter_number = element.attr("value").toFloat()
|
||||
setUrlWithoutDomain("/?p=" + element.attr("value"))
|
||||
name = element.parent().previousElementSibling().text() + " (" + chapter_number.toInt() + ")"
|
||||
//date_upload // Find by using hovertext above "Tom" on actual comic page
|
||||
// date_upload // Find by using hovertext above "Tom" on actual comic page
|
||||
}
|
||||
}
|
||||
|
||||
override fun pageListParse(document: Document): List<Page> =
|
||||
document.select(".comic_image").mapIndexed { i, element -> Page(i, "", baseUrl + element.attr("src")) }
|
||||
|
||||
//<editor-fold desc="Not Used">
|
||||
// <editor-fold desc="Not Used">
|
||||
override fun imageUrlParse(document: Document): String = throw Exception("Not Used")
|
||||
|
||||
override fun popularMangaSelector(): String = throw Exception("Not used")
|
||||
|
@ -89,5 +92,5 @@ class GunnerkriggCourt : ParsedHttpSource() {
|
|||
override fun latestUpdatesRequest(page: Int): Request = throw Exception("Not used")
|
||||
|
||||
override fun latestUpdatesSelector(): String = throw Exception("Not used")
|
||||
//</editor-fold>
|
||||
// </editor-fold>
|
||||
}
|
||||
|
|
|
@ -1,26 +1,34 @@
|
|||
package eu.kanade.tachiyomi.extension.en.guya
|
||||
|
||||
import android.app.Application
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.online.HttpSource
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import eu.kanade.tachiyomi.source.ConfigurableSource
|
||||
import android.content.SharedPreferences
|
||||
import android.os.Build
|
||||
import android.support.v7.preference.ListPreference
|
||||
import android.support.v7.preference.PreferenceScreen
|
||||
import eu.kanade.tachiyomi.extension.BuildConfig
|
||||
import okhttp3.*
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import eu.kanade.tachiyomi.source.ConfigurableSource
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.HttpSource
|
||||
import java.io.IOException
|
||||
import java.util.HashMap
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.Call
|
||||
import okhttp3.Callback
|
||||
import okhttp3.Headers
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import rx.Observable
|
||||
import uy.kohesive.injekt.Injekt
|
||||
import uy.kohesive.injekt.api.get
|
||||
import java.io.IOException
|
||||
import java.util.*
|
||||
import java.util.concurrent.TimeUnit
|
||||
import kotlin.collections.ArrayList
|
||||
|
||||
open class Guya() : ConfigurableSource, HttpSource() {
|
||||
|
||||
|
@ -32,7 +40,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
private val scanlatorCacheUrl = "https://raw.githubusercontent.com/appu1232/guyamoe/master/api/data_cache/all_groups.json"
|
||||
|
||||
override fun headersBuilder() = Headers.Builder().apply {
|
||||
add("User-Agent","(Android ${Build.VERSION.RELEASE}; " +
|
||||
add("User-Agent", "(Android ${Build.VERSION.RELEASE}; " +
|
||||
"${Build.MANUFACTURER} ${Build.MODEL}) " +
|
||||
"Tachiyomi/${BuildConfig.VERSION_NAME} " +
|
||||
Build.ID)
|
||||
|
@ -61,7 +69,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
override fun fetchMangaDetails(manga: SManga): Observable<SManga> {
|
||||
return clientBuilder().newCall(GET("$baseUrl/api/get_all_series/", headers))
|
||||
.asObservableSuccess()
|
||||
.map {response ->
|
||||
.map { response ->
|
||||
mangaDetailsParse(response, manga)
|
||||
}
|
||||
}
|
||||
|
@ -131,7 +139,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
}
|
||||
|
||||
override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> {
|
||||
return if ( query.startsWith(SLUG_PREFIX)) {
|
||||
return if (query.startsWith(SLUG_PREFIX)) {
|
||||
val slug = query.removePrefix(SLUG_PREFIX)
|
||||
client.newCall(searchMangaRequest(page, query, filters))
|
||||
.asObservableSuccess()
|
||||
|
@ -155,7 +163,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
throw Exception("Unused.")
|
||||
}
|
||||
|
||||
private fun searchMangaParseWithSlug(response: Response, slug: String) : MangasPage {
|
||||
private fun searchMangaParseWithSlug(response: Response, slug: String): MangasPage {
|
||||
val results = JSONObject(response.body()!!.string())
|
||||
val mangaIter = results.keys()
|
||||
val truncatedJSON = JSONObject()
|
||||
|
@ -167,7 +175,6 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
if (mangaDetails.get("slug") == slug) {
|
||||
truncatedJSON.put(mangaTitle, mangaDetails)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return parseManga(truncatedJSON)
|
||||
|
@ -207,7 +214,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
|
||||
this.setDefaultValue("1")
|
||||
|
||||
setOnPreferenceChangeListener{_, newValue ->
|
||||
setOnPreferenceChangeListener { _, newValue ->
|
||||
val selected = newValue.toString()
|
||||
preferences.edit().putString(SCANLATOR_PREFERENCE, selected).commit()
|
||||
}
|
||||
|
@ -233,7 +240,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
|
||||
this.setDefaultValue("1")
|
||||
|
||||
setOnPreferenceChangeListener{_, newValue ->
|
||||
setOnPreferenceChangeListener { _, newValue ->
|
||||
val selected = newValue.toString()
|
||||
preferences.edit().putString(SCANLATOR_PREFERENCE, selected).commit()
|
||||
}
|
||||
|
@ -267,7 +274,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
}
|
||||
|
||||
// Helper function to get all the listings
|
||||
private fun parseManga(payload: JSONObject) : MangasPage {
|
||||
private fun parseManga(payload: JSONObject): MangasPage {
|
||||
val mangas = ArrayList<SManga>()
|
||||
|
||||
val iter = payload.keys()
|
||||
|
@ -383,7 +390,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
|
|||
if (scanlatorMap.isEmpty() && !polling) {
|
||||
polling = true
|
||||
clientBuilder().newCall(GET(scanlatorCacheUrl, headers)).enqueue(
|
||||
object: Callback {
|
||||
object : Callback {
|
||||
override fun onResponse(call: Call, response: Response) {
|
||||
try {
|
||||
val json = JSONObject(response.body()!!.string())
|
||||
|
|
|
@ -40,5 +40,4 @@ class GuyaUrlActivity : Activity() {
|
|||
finish()
|
||||
exitProcess(0)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -2,17 +2,22 @@ package eu.kanade.tachiyomi.extension.en.hentai2read
|
|||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.network.POST
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.util.Calendar
|
||||
import java.util.regex.Pattern
|
||||
import okhttp3.FormBody
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.util.Calendar
|
||||
import java.util.regex.Pattern
|
||||
|
||||
class Hentai2Read : ParsedHttpSource() {
|
||||
|
||||
|
@ -44,11 +49,11 @@ class Hentai2Read : ParsedHttpSource() {
|
|||
|
||||
override fun latestUpdatesSelector() = popularMangaSelector()
|
||||
|
||||
override fun popularMangaRequest(page: Int)
|
||||
= GET("$baseUrl/hentai-list/all/any/all/most-popular/$page/", headers)
|
||||
override fun popularMangaRequest(page: Int) =
|
||||
GET("$baseUrl/hentai-list/all/any/all/most-popular/$page/", headers)
|
||||
|
||||
override fun latestUpdatesRequest(page: Int)
|
||||
= GET("$baseUrl/hentai-list/all/any/all/last-updated/$page/", headers)
|
||||
override fun latestUpdatesRequest(page: Int) =
|
||||
GET("$baseUrl/hentai-list/all/any/all/last-updated/$page/", headers)
|
||||
|
||||
override fun popularMangaFromElement(element: Element): SManga {
|
||||
return SManga.create().apply {
|
||||
|
@ -99,7 +104,7 @@ class Hentai2Read : ParsedHttpSource() {
|
|||
}
|
||||
POST(searchUrl, headers, form.build())
|
||||
} else {
|
||||
GET("$searchUrl/${base64String}", headers)
|
||||
GET("$searchUrl/$base64String", headers)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -134,7 +139,7 @@ class Hentai2Read : ParsedHttpSource() {
|
|||
manga.artist = infoElement.select("li:contains(Artist) > a")?.text()
|
||||
manga.genre = infoElement.select("li:contains(Category) > a, li:contains(Content) > a").joinToString(", ") { it.text() }
|
||||
manga.description = infoElement.select("li:contains(Storyline) > p")?.text()
|
||||
manga.status = infoElement.select("li:contains(Status) > a")?.text().orEmpty().let {parseStatus(it)}
|
||||
manga.status = infoElement.select("li:contains(Status) > a")?.text().orEmpty().let { parseStatus(it) }
|
||||
manga.thumbnail_url = document.select("a#js-linkNext > img")?.attr("src")
|
||||
return manga
|
||||
}
|
||||
|
@ -281,7 +286,7 @@ class Hentai2Read : ParsedHttpSource() {
|
|||
// Tags : 355
|
||||
// $("div#tab-tag > div:has(a.block)").map((i, el) => `Tag("${$(el).select("a").first().text().trim()}", ${$(el).find("input").first().attr("value")})`).get().sort().join(",\n")
|
||||
// on https://hentai2read.com/hentai-search/"
|
||||
// 360 Tags
|
||||
// 360 Tags
|
||||
private fun getTagList() = listOf(
|
||||
Tag("Abortion", 529),
|
||||
Tag("Absent Parents", 1423),
|
||||
|
@ -648,7 +653,7 @@ class Hentai2Read : ParsedHttpSource() {
|
|||
// Doujins : 868
|
||||
// $("div#tab-doujin > div:has(a.block)").map((i, el) => `Tag("${$(el).select("a").first().text().trim()}", ${$(el).find("input").first().attr("value")})`).get().sort().join(",\n")
|
||||
// on https://hentai2read.com/hentai-search/"
|
||||
// 1035 Doujin tags
|
||||
// 1035 Doujin tags
|
||||
private fun getDoujinList() = listOf(
|
||||
Tag("3-gatsu no Lion", 2350),
|
||||
Tag("3x3 Eyes", 1118),
|
||||
|
@ -869,7 +874,7 @@ class Hentai2Read : ParsedHttpSource() {
|
|||
Tag("Dynasty Warriors", 1610),
|
||||
Tag("Dystopia", 810),
|
||||
Tag("Eiken", 2424),
|
||||
Tag("Elf-san wa Yaserarenai",2521),
|
||||
Tag("Elf-san wa Yaserarenai", 2521),
|
||||
Tag("Elsword", 2200),
|
||||
Tag("Emma", 1012),
|
||||
Tag("Endless Frontier", 2238),
|
||||
|
|
|
@ -1,7 +1,12 @@
|
|||
package eu.kanade.tachiyomi.extension.en.hentaifox
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.OkHttpClient
|
||||
|
@ -107,7 +112,7 @@ class HentaiFox : ParsedHttpSource() {
|
|||
// page path with a marker at the end
|
||||
url = "${response.request().url().toString().replace("/gallery/", "/g/")}#"
|
||||
// number of pages
|
||||
url+= response.asJsoup().select("[id=load_pages]").attr("value")
|
||||
url += response.asJsoup().select("[id=load_pages]").attr("value")
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -120,7 +125,7 @@ class HentaiFox : ParsedHttpSource() {
|
|||
override fun fetchPageList(chapter: SChapter): Observable<List<Page>> {
|
||||
// split the "url" to get the page path and number of pages
|
||||
return chapter.url.split("#").let { list ->
|
||||
Observable.just(listOf(1 .. list[1].toInt()).flatten().map { Page(it, list[0] + "$it/") })
|
||||
Observable.just(listOf(1..list[1].toInt()).flatten().map { Page(it, list[0] + "$it/") })
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -197,5 +202,4 @@ class HentaiFox : ParsedHttpSource() {
|
|||
Filter.Select<String>(displayName, vals.map { it.first }.toTypedArray()) {
|
||||
fun toUriPart() = vals[state].second
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,14 +1,17 @@
|
|||
package eu.kanade.tachiyomi.extension.en.hentainexus
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.net.URLEncoder
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.lang.StringBuilder
|
||||
import java.net.URLEncoder
|
||||
|
||||
class HentaiNexus : ParsedHttpSource() {
|
||||
|
||||
|
@ -55,7 +58,7 @@ class HentaiNexus : ParsedHttpSource() {
|
|||
}
|
||||
|
||||
filters.findInstance<ArtistFilter>()?.let { f ->
|
||||
if(f.state.isNotBlank()) {
|
||||
if (f.state.isNotBlank()) {
|
||||
requireNoUrl()
|
||||
url = "/"
|
||||
queryString = "q=artist:%22${URLEncoder.encode(f.state, "UTF-8")}%22"
|
||||
|
@ -63,14 +66,14 @@ class HentaiNexus : ParsedHttpSource() {
|
|||
}
|
||||
|
||||
filters.findInstance<TagFilter>()?.let { f ->
|
||||
if(f.state.isNotBlank()) {
|
||||
if (f.state.isNotBlank()) {
|
||||
requireNoUrl()
|
||||
url = "/"
|
||||
queryString = "q=tag:%22${URLEncoder.encode(f.state, "UTF-8")}%22"
|
||||
}
|
||||
}
|
||||
|
||||
if(query.isNotBlank()) {
|
||||
if (query.isNotBlank()) {
|
||||
requireNoUrl()
|
||||
url = "/"
|
||||
queryString = "q=" + URLEncoder.encode(query, "UTF-8")
|
||||
|
@ -83,8 +86,8 @@ class HentaiNexus : ParsedHttpSource() {
|
|||
|
||||
private fun pagedRequest(url: String, page: Int, queryString: String? = null): Request {
|
||||
// The site redirects page 1 -> url-without-page so we do this redirect early for optimization
|
||||
val builtUrl = if(page == 1) url else "${url}page/$page"
|
||||
return GET(if(queryString != null) "$builtUrl?$queryString" else builtUrl)
|
||||
val builtUrl = if (page == 1) url else "${url}page/$page"
|
||||
return GET(if (queryString != null) "$builtUrl?$queryString" else builtUrl)
|
||||
}
|
||||
|
||||
override fun searchMangaSelector() = latestUpdatesSelector()
|
||||
|
|
|
@ -11,6 +11,10 @@ import eu.kanade.tachiyomi.source.model.SChapter
|
|||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Date
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.Call
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
|
@ -18,21 +22,17 @@ import okhttp3.Response
|
|||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import rx.Observable
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Date
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
class Hiveworks : ParsedHttpSource() {
|
||||
|
||||
//Info
|
||||
// Info
|
||||
|
||||
override val name = "Hiveworks Comics"
|
||||
override val baseUrl = "https://hiveworkscomics.com"
|
||||
override val lang = "en"
|
||||
override val supportsLatest = true
|
||||
|
||||
//Client
|
||||
// Client
|
||||
|
||||
override val client: OkHttpClient = network.cloudflareClient.newBuilder()
|
||||
.connectTimeout(1, TimeUnit.MINUTES)
|
||||
|
@ -52,7 +52,7 @@ class Hiveworks : ParsedHttpSource() {
|
|||
|
||||
val mangas = document.select(popularMangaSelector()).filterNot {
|
||||
val url = it.select("a.comiclink").first().attr("abs:href")
|
||||
url.contains("sparklermonthly.com") || url.contains("explosm.net") //Filter Unsupported Comics
|
||||
url.contains("sparklermonthly.com") || url.contains("explosm.net") // Filter Unsupported Comics
|
||||
}.map { element ->
|
||||
popularMangaFromElement(element)
|
||||
}
|
||||
|
@ -76,7 +76,6 @@ class Hiveworks : ParsedHttpSource() {
|
|||
override fun latestUpdatesFromElement(element: Element) = mangaFromElement(element)
|
||||
override fun latestUpdatesParse(response: Response): MangasPage = popularMangaParse(response)
|
||||
|
||||
|
||||
// Search
|
||||
// Source's website doesn't appear to have a search function; so searching locally
|
||||
|
||||
|
@ -85,7 +84,7 @@ class Hiveworks : ParsedHttpSource() {
|
|||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
val uri = Uri.parse(baseUrl).buildUpon()
|
||||
if (filters.isNotEmpty()) uri.appendPath("home")
|
||||
//Append uri filters
|
||||
// Append uri filters
|
||||
filters.forEach { filter ->
|
||||
when (filter) {
|
||||
is UriFilter -> filter.addToUri(uri)
|
||||
|
@ -168,10 +167,9 @@ class Hiveworks : ParsedHttpSource() {
|
|||
?.let { mangaFromElement(it) } ?: SManga.create()
|
||||
}
|
||||
|
||||
|
||||
// Chapters
|
||||
|
||||
//Included to call custom error codes
|
||||
// Included to call custom error codes
|
||||
override fun fetchChapterList(manga: SManga): Observable<List<SChapter>> {
|
||||
return if (manga.status != SManga.LICENSED) {
|
||||
client.newCall(chapterListRequest(manga))
|
||||
|
@ -221,7 +219,7 @@ class Hiveworks : ParsedHttpSource() {
|
|||
|
||||
override fun chapterFromElement(element: Element) = throw Exception("Not Used")
|
||||
|
||||
//Pages
|
||||
// Pages
|
||||
|
||||
override fun pageListRequest(chapter: SChapter) = GET(chapter.url, headers)
|
||||
override fun pageListParse(response: Response): List<Page> {
|
||||
|
@ -233,7 +231,7 @@ class Hiveworks : ParsedHttpSource() {
|
|||
pages.add(Page(pages.size, "", it.attr("src")))
|
||||
}
|
||||
|
||||
//Site specific pages can be added here
|
||||
// Site specific pages can be added here
|
||||
when {
|
||||
"smbc-comics" in url -> {
|
||||
pages.add(Page(pages.size, "", document.select("div#aftercomic img").attr("src")))
|
||||
|
@ -248,7 +246,7 @@ class Hiveworks : ParsedHttpSource() {
|
|||
override fun imageUrlRequest(page: Page) = throw Exception("Not used")
|
||||
override fun imageUrlParse(document: Document) = throw Exception("Not used")
|
||||
|
||||
//Filters
|
||||
// Filters
|
||||
|
||||
override fun getFilterList() = FilterList(
|
||||
Filter.Header("Only one filter can be used at a time"),
|
||||
|
@ -269,10 +267,13 @@ class Hiveworks : ParsedHttpSource() {
|
|||
private class KidsFilter : Filter.CheckBox("Kids Comics")
|
||||
private class CompletedFilter : Filter.CheckBox("Completed Comics")
|
||||
|
||||
|
||||
private open class UriSelectFilter(displayName: String, val uriParam: String, val vals: Array<Pair<String, String>>,
|
||||
val firstIsUnspecified: Boolean = true,
|
||||
defaultValue: Int = 0) :
|
||||
private open class UriSelectFilter(
|
||||
displayName: String,
|
||||
val uriParam: String,
|
||||
val vals: Array<Pair<String, String>>,
|
||||
val firstIsUnspecified: Boolean = true,
|
||||
defaultValue: Int = 0
|
||||
) :
|
||||
Filter.Select<String>(displayName, vals.map { it.second }.toTypedArray(), defaultValue), UriFilter {
|
||||
override fun addToUri(uri: Uri.Builder) {
|
||||
if (state != 0 || !firstIsUnspecified)
|
||||
|
@ -364,9 +365,9 @@ class Hiveworks : ParsedHttpSource() {
|
|||
Pair("z-a", "Z-A")
|
||||
))
|
||||
|
||||
//Other Code
|
||||
// Other Code
|
||||
|
||||
//Builds Image from mouse tooltip text
|
||||
// Builds Image from mouse tooltip text
|
||||
private fun smbcTextHandler(document: Document): String {
|
||||
val title = document.select("title").text().trim()
|
||||
val altText = document.select("div#cc-comicbody img").attr("title")
|
||||
|
@ -400,7 +401,7 @@ class Hiveworks : ParsedHttpSource() {
|
|||
return "https://fakeimg.pl/1500x2126/ffffff/000000/?text=$builder&font_size=42&font=museo"
|
||||
}
|
||||
|
||||
//Used to throw custom error codes for http codes
|
||||
// Used to throw custom error codes for http codes
|
||||
private fun Call.asObservableSuccess(): Observable<Response> {
|
||||
return asObservable().doOnNext { response ->
|
||||
if (!response.isSuccessful) {
|
||||
|
@ -412,7 +413,4 @@ class Hiveworks : ParsedHttpSource() {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -1,18 +1,22 @@
|
|||
package eu.kanade.tachiyomi.extension.en.holymanga
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
abstract class HManga (
|
||||
abstract class HManga(
|
||||
override val name: String,
|
||||
override val baseUrl: String
|
||||
) : ParsedHttpSource() {
|
||||
|
@ -80,8 +84,8 @@ abstract class HManga (
|
|||
}
|
||||
}
|
||||
is GenreFilter -> {
|
||||
if(filter.toUriPart().isNotBlank() && filter.state != 0) {
|
||||
filter.toUriPart().let { genre = if(it == "completed") "completed" else "genre/$it" }
|
||||
if (filter.toUriPart().isNotBlank() && filter.state != 0) {
|
||||
filter.toUriPart().let { genre = if (it == "completed") "completed" else "genre/$it" }
|
||||
ret = "$baseUrl/$genre/page-$page"
|
||||
}
|
||||
}
|
||||
|
@ -132,9 +136,9 @@ abstract class HManga (
|
|||
|
||||
// Chapter list is paginated
|
||||
while (continueParsing) {
|
||||
document.select(chapterListSelector()).map{ chapters.add(chapterFromElement(it)) }
|
||||
document.select(chapterListSelector()).map { chapters.add(chapterFromElement(it)) }
|
||||
// Next page of chapters
|
||||
document.select("${latestUpdatesNextPageSelector()}:not([id])").let{
|
||||
document.select("${latestUpdatesNextPageSelector()}:not([id])").let {
|
||||
if (it.isNotEmpty()) {
|
||||
document = client.newCall(GET(it.attr("abs:href"), headers)).execute().asJsoup()
|
||||
} else {
|
||||
|
@ -190,12 +194,12 @@ abstract class HManga (
|
|||
Filter.Header("Author name must be exact."),
|
||||
Filter.Separator("-----------------"),
|
||||
TextField("Author", "author"),
|
||||
GenreFilter ()
|
||||
GenreFilter()
|
||||
)
|
||||
|
||||
// [...document.querySelectorAll('.sub-menu li a')].map(a => `Pair("${a.textContent}", "${a.getAttribute('href')}")`).join(',\n')
|
||||
// from $baseUrl
|
||||
private class GenreFilter: UriPartFilter("Genres",
|
||||
private class GenreFilter : UriPartFilter("Genres",
|
||||
arrayOf(
|
||||
Pair("Choose a genre", ""),
|
||||
Pair("Action", "action"),
|
||||
|
|
|
@ -12,4 +12,3 @@ class HMangaFactory : SourceFactory {
|
|||
|
||||
class HolyManga : HManga("HolyManga", "http://w12.holymanga.net")
|
||||
class HeavenManga : HManga("HeavenManga", "http://ww8.heavenmanga.org")
|
||||
|
||||
|
|
|
@ -13,18 +13,18 @@ import eu.kanade.tachiyomi.source.model.SChapter
|
|||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
class Honkaiimpact : ParsedHttpSource() {
|
||||
|
||||
//Info - Based of BH3
|
||||
//This is the english version of the site
|
||||
// Info - Based of BH3
|
||||
// This is the english version of the site
|
||||
override val name = "Honkai Impact 3rd"
|
||||
override val baseUrl = "https://manga.honkaiimpact3.com"
|
||||
override val lang = "en"
|
||||
|
@ -36,23 +36,21 @@ class Honkaiimpact : ParsedHttpSource() {
|
|||
.followRedirects(true)
|
||||
.build()!!
|
||||
|
||||
//Popular
|
||||
// Popular
|
||||
override fun popularMangaSelector() = "a[href*=book]"
|
||||
|
||||
override fun popularMangaNextPageSelector(): String? = null
|
||||
override fun popularMangaRequest(page: Int) = GET("$baseUrl/book", headers)
|
||||
override fun popularMangaFromElement(element: Element) = mangaFromElement(element)
|
||||
|
||||
|
||||
//Latest
|
||||
// Latest
|
||||
override fun latestUpdatesSelector() = throw Exception("Not Used")
|
||||
|
||||
override fun latestUpdatesNextPageSelector(): String? = null
|
||||
override fun latestUpdatesRequest(page: Int) = throw Exception("Not Used")
|
||||
override fun latestUpdatesFromElement(element: Element) = mangaFromElement(element)
|
||||
|
||||
|
||||
//Search
|
||||
// Search
|
||||
override fun searchMangaSelector() = throw Exception("Not Used")
|
||||
|
||||
override fun searchMangaNextPageSelector(): String? = null
|
||||
|
@ -67,7 +65,7 @@ class Honkaiimpact : ParsedHttpSource() {
|
|||
return manga
|
||||
}
|
||||
|
||||
//Manga Details
|
||||
// Manga Details
|
||||
override fun mangaDetailsParse(document: Document): SManga {
|
||||
val manga = SManga.create()
|
||||
manga.thumbnail_url = document.select("img.cover").attr("abs:src")
|
||||
|
@ -76,7 +74,7 @@ class Honkaiimpact : ParsedHttpSource() {
|
|||
return manga
|
||||
}
|
||||
|
||||
//Chapters
|
||||
// Chapters
|
||||
override fun chapterListSelector() = throw Exception("Not Used")
|
||||
|
||||
override fun chapterFromElement(element: Element) = throw Exception("Not Used")
|
||||
|
@ -102,7 +100,7 @@ class Honkaiimpact : ParsedHttpSource() {
|
|||
return SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US).parse(date)?.time ?: 0
|
||||
}
|
||||
|
||||
//Manga Pages
|
||||
// Manga Pages
|
||||
override fun pageListParse(response: Response): List<Page> = mutableListOf<Page>().apply {
|
||||
val body = response.asJsoup()
|
||||
body.select("img.lazy.comic_img")?.forEach {
|
||||
|
@ -112,6 +110,4 @@ class Honkaiimpact : ParsedHttpSource() {
|
|||
|
||||
override fun pageListParse(document: Document) = throw Exception("Not Used")
|
||||
override fun imageUrlParse(document: Document) = throw Exception("Not Used")
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -10,6 +10,8 @@ import eu.kanade.tachiyomi.source.model.Page
|
|||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.regex.Pattern
|
||||
import okhttp3.FormBody
|
||||
import okhttp3.Headers
|
||||
import okhttp3.OkHttpClient
|
||||
|
@ -18,8 +20,6 @@ import okhttp3.Response
|
|||
import org.jsoup.Jsoup
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.regex.Pattern
|
||||
|
||||
class Kissmanga : ParsedHttpSource() {
|
||||
|
||||
|
@ -60,15 +60,15 @@ class Kissmanga : ParsedHttpSource() {
|
|||
element.select("td a:eq(0)").first().let {
|
||||
manga.setUrlWithoutDomain(it.attr("href"))
|
||||
val title = it.text()
|
||||
//check if cloudfire email obfuscation is affecting title name
|
||||
// check if cloudfire email obfuscation is affecting title name
|
||||
if (title.contains("[email protected]", true)) {
|
||||
try {
|
||||
var str: String = it.html()
|
||||
//get the number
|
||||
// get the number
|
||||
str = str.substringAfter("data-cfemail=\"")
|
||||
str = str.substringBefore("\">[email")
|
||||
val sb = StringBuilder()
|
||||
//convert number to char
|
||||
// convert number to char
|
||||
val r = Integer.valueOf(str.substring(0, 2), 16)!!
|
||||
var i = 2
|
||||
while (i < str.length) {
|
||||
|
@ -76,10 +76,10 @@ class Kissmanga : ParsedHttpSource() {
|
|||
sb.append(c)
|
||||
i += 2
|
||||
}
|
||||
//replace the new word into the title
|
||||
// replace the new word into the title
|
||||
manga.title = title.replace("[email protected]", sb.toString(), true)
|
||||
} catch (e: Exception) {
|
||||
//on error just default to obfuscated title
|
||||
// on error just default to obfuscated title
|
||||
manga.title = title
|
||||
}
|
||||
} else {
|
||||
|
@ -207,8 +207,8 @@ class Kissmanga : ParsedHttpSource() {
|
|||
private class Author : Filter.Text("Author")
|
||||
private class Genre(name: String) : Filter.TriState(name)
|
||||
private class GenreList(genres: List<Genre>) : Filter.Group<Genre>("Genres", genres)
|
||||
private class SortTrending: Filter.CheckBox("View Trending Manga")
|
||||
private class NewManga: Filter.CheckBox("View New Manga")
|
||||
private class SortTrending : Filter.CheckBox("View Trending Manga")
|
||||
private class NewManga : Filter.CheckBox("View New Manga")
|
||||
|
||||
override fun getFilterList() = FilterList(
|
||||
Author(),
|
||||
|
|
|
@ -2,7 +2,11 @@ package eu.kanade.tachiyomi.extension.en.latisbooks
|
|||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.HttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.OkHttpClient
|
||||
|
|
|
@ -1,18 +1,21 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangacruzers
|
||||
|
||||
import android.net.Uri
|
||||
import android.util.Log
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
|
||||
class mangacruzers : ParsedHttpSource() {
|
||||
|
||||
|
@ -27,8 +30,8 @@ class mangacruzers : ParsedHttpSource() {
|
|||
.followRedirects(true)
|
||||
.build()!!
|
||||
|
||||
override fun popularMangaSelector() = "tr"//"td > a:not(a[href*=Cruzers])"
|
||||
override fun latestUpdatesSelector() = throw Exception ("Not Used")
|
||||
override fun popularMangaSelector() = "tr" // "td > a:not(a[href*=Cruzers])"
|
||||
override fun latestUpdatesSelector() = throw Exception("Not Used")
|
||||
override fun searchMangaSelector() = popularMangaSelector()
|
||||
override fun chapterListSelector() = "div.flex.items-center > div.flex.flex-col:not(.items-center), tbody.no-border-x > tr"
|
||||
|
||||
|
@ -37,19 +40,17 @@ class mangacruzers : ParsedHttpSource() {
|
|||
override fun searchMangaNextPageSelector() = popularMangaNextPageSelector()
|
||||
|
||||
override fun popularMangaRequest(page: Int) = GET("$baseUrl/read-manga/", headers)
|
||||
override fun latestUpdatesRequest(page: Int) = throw Exception ("Not Used")
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw Exception ("Not Used")
|
||||
|
||||
override fun latestUpdatesRequest(page: Int) = throw Exception("Not Used")
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw Exception("Not Used")
|
||||
|
||||
override fun popularMangaParse(response: Response): MangasPage {
|
||||
val document = response.asJsoup()
|
||||
val mangas = mutableListOf<SManga>()
|
||||
val element = document.select(popularMangaSelector())
|
||||
for (i in 1 until element.size-2) {
|
||||
for (i in 1 until element.size - 2) {
|
||||
mangas.add(mangaFromElement(element[i]))
|
||||
}
|
||||
return MangasPage(mangas, false)
|
||||
|
||||
}
|
||||
|
||||
override fun mangaDetailsRequest(manga: SManga) = chapterListRequest(manga)
|
||||
|
@ -67,18 +68,18 @@ class mangacruzers : ParsedHttpSource() {
|
|||
|
||||
override fun popularMangaFromElement(element: Element) = mangaFromElement(element)
|
||||
override fun latestUpdatesFromElement(element: Element) = mangaFromElement(element)
|
||||
override fun searchMangaFromElement(element: Element)= mangaFromElement(element)
|
||||
override fun searchMangaFromElement(element: Element) = mangaFromElement(element)
|
||||
|
||||
private fun mangaFromElement(element: Element): SManga {
|
||||
val manga = SManga.create()
|
||||
manga.url = element.select("a").attr("abs:href")
|
||||
manga.title = element.select("td").first().text().trim()
|
||||
|
||||
|
||||
return manga
|
||||
}
|
||||
|
||||
private fun parseDate(date: String): Long {
|
||||
return SimpleDateFormat("MMM dd, yyyy", Locale.US ).parse(date).time
|
||||
return SimpleDateFormat("MMM dd, yyyy", Locale.US).parse(date).time
|
||||
}
|
||||
|
||||
override fun chapterFromElement(element: Element) = SChapter.create().apply {
|
||||
|
@ -89,7 +90,7 @@ class mangacruzers : ParsedHttpSource() {
|
|||
date_upload = parseDate(td[1].text())
|
||||
} else {
|
||||
val substring = element.select("div.text-xs").text()
|
||||
name = element.select("a").text() + if (!substring.isNullOrBlank()) { " - $substring" } else {""}
|
||||
name = element.select("a").text() + if (!substring.isNullOrBlank()) { " - $substring" } else { "" }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -110,9 +111,7 @@ class mangacruzers : ParsedHttpSource() {
|
|||
|
||||
return pages
|
||||
}
|
||||
|
||||
|
||||
override fun imageUrlRequest(page: Page) = throw Exception("Not used")
|
||||
override fun imageUrlParse(document: Document) = throw Exception("Not used")
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -6,14 +6,18 @@ import com.github.salomonbrys.kotson.string
|
|||
import com.google.gson.JsonElement
|
||||
import com.google.gson.JsonParser
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.HttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
class Mangadog : HttpSource() {
|
||||
|
||||
|
@ -33,8 +37,8 @@ class Mangadog : HttpSource() {
|
|||
}
|
||||
|
||||
override fun popularMangaParse(response: Response): MangasPage {
|
||||
//val page = response.request().url().queryParameterValues("page").toString().toInt()
|
||||
val jsonData= response.body()!!.string()
|
||||
// val page = response.request().url().queryParameterValues("page").toString().toInt()
|
||||
val jsonData = response.body()!!.string()
|
||||
val results = JsonParser().parse(jsonData)
|
||||
val data = results["data"]["data"]
|
||||
val mangas = mutableListOf<SManga>()
|
||||
|
@ -42,22 +46,22 @@ class Mangadog : HttpSource() {
|
|||
mangas.add(popularMangaFromjson(data[i]))
|
||||
}
|
||||
|
||||
val hasNextPage = true //page < results["data"]["pageNum"].int
|
||||
val hasNextPage = true // page < results["data"]["pageNum"].int
|
||||
return MangasPage(mangas, hasNextPage)
|
||||
}
|
||||
|
||||
private fun popularMangaFromjson(json: JsonElement): SManga {
|
||||
val manga = SManga.create()
|
||||
manga.title = json["name"].string.trim()
|
||||
manga.thumbnail_url = cdn + json["image"].string.replace("\\/","/")
|
||||
manga.thumbnail_url = cdn + json["image"].string.replace("\\/", "/")
|
||||
val searchname = json["search_name"].string
|
||||
val id = json["id"].string
|
||||
manga.url = "/detail/$searchname/$id.html"
|
||||
return manga
|
||||
}
|
||||
|
||||
override fun latestUpdatesParse(response: Response): MangasPage {
|
||||
val jsonData= response.body()!!.string()
|
||||
|
||||
override fun latestUpdatesParse(response: Response): MangasPage {
|
||||
val jsonData = response.body()!!.string()
|
||||
val results = JsonParser().parse(jsonData)
|
||||
val data = results["data"]
|
||||
val mangas = mutableListOf<SManga>()
|
||||
|
@ -65,12 +69,12 @@ class Mangadog : HttpSource() {
|
|||
mangas.add(popularMangaFromjson(data[i]))
|
||||
}
|
||||
|
||||
val hasNextPage = true //data.asJsonArray.size()>18
|
||||
val hasNextPage = true // data.asJsonArray.size()>18
|
||||
return MangasPage(mangas, hasNextPage)
|
||||
}
|
||||
|
||||
override fun searchMangaParse(response: Response): MangasPage {
|
||||
val jsonData= response.body()!!.string()
|
||||
val jsonData = response.body()!!.string()
|
||||
val results = JsonParser().parse(jsonData)
|
||||
val data = results["suggestions"]
|
||||
val mangas = mutableListOf<SManga>()
|
||||
|
@ -85,19 +89,18 @@ class Mangadog : HttpSource() {
|
|||
private fun searchMangaFromjson(json: JsonElement): SManga {
|
||||
val manga = SManga.create()
|
||||
manga.title = json["value"].string.trim()
|
||||
val data = json["data"].string.replace("\\/","/")
|
||||
val data = json["data"].string.replace("\\/", "/")
|
||||
manga.url = "/detail/$data.html"
|
||||
return manga
|
||||
}
|
||||
|
||||
|
||||
override fun chapterListRequest(manga: SManga): Request {
|
||||
val id = manga.url.substringAfterLast("/").substringBefore(".html")
|
||||
return GET("$baseUrl/index/detail/getChapterList?comic_id=$id&page=1", headers)
|
||||
}
|
||||
|
||||
override fun chapterListParse(response: Response): List<SChapter> {
|
||||
val jsonData= response.body()!!.string()
|
||||
val jsonData = response.body()!!.string()
|
||||
val results = JsonParser().parse(jsonData)
|
||||
val data = results["data"]["data"]
|
||||
val chapters = mutableListOf<SChapter>()
|
||||
|
@ -111,7 +114,7 @@ class Mangadog : HttpSource() {
|
|||
val chapter = SChapter.create()
|
||||
val searchname = json["search_name"].string
|
||||
val id = json["comic_id"].string
|
||||
chapter.url = "/read/read/$searchname/$id.html" //The url should include the manga name but it doesn't seem to matter
|
||||
chapter.url = "/read/read/$searchname/$id.html" // The url should include the manga name but it doesn't seem to matter
|
||||
chapter.name = json["name"].string.trim()
|
||||
chapter.chapter_number = json["obj_id"].asFloat
|
||||
chapter.date_upload = parseDate(json["create_date"].string)
|
||||
|
|
|
@ -1,15 +1,20 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangaeden
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.text.ParseException
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
import okhttp3.HttpUrl
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.ParseException
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
class Mangaeden : ParsedHttpSource() {
|
||||
|
||||
|
|
|
@ -1,17 +1,21 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangafreak
|
||||
|
||||
import android.net.Uri
|
||||
import android.util.Log
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import okhttp3.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
|
||||
class Mangafreak : ParsedHttpSource() {
|
||||
override val name: String = "Mangafreak"
|
||||
|
@ -26,7 +30,7 @@ class Mangafreak : ParsedHttpSource() {
|
|||
.followRedirects(true)
|
||||
.build()!!
|
||||
|
||||
//Popular
|
||||
// Popular
|
||||
|
||||
override fun popularMangaRequest(page: Int): Request {
|
||||
return GET("$baseUrl/Genre/All/$page", headers)
|
||||
|
@ -41,22 +45,22 @@ class Mangafreak : ParsedHttpSource() {
|
|||
}
|
||||
}
|
||||
|
||||
//Latest
|
||||
// Latest
|
||||
|
||||
override fun latestUpdatesRequest(page: Int): Request {
|
||||
return GET("$baseUrl/Latest_Releases/$page",headers)
|
||||
return GET("$baseUrl/Latest_Releases/$page", headers)
|
||||
}
|
||||
override fun latestUpdatesNextPageSelector(): String? = popularMangaNextPageSelector()
|
||||
override fun latestUpdatesSelector(): String = "div.latest_releases_item"
|
||||
override fun latestUpdatesFromElement(element: Element): SManga = SManga.create().apply {
|
||||
thumbnail_url = element.select("img").attr("abs:src").replace("mini","manga").substringBeforeLast("/")+".jpg"
|
||||
thumbnail_url = element.select("img").attr("abs:src").replace("mini", "manga").substringBeforeLast("/") + ".jpg"
|
||||
element.select("a").apply {
|
||||
title = text()
|
||||
url = attr("href")
|
||||
}
|
||||
}
|
||||
|
||||
//Search
|
||||
// Search
|
||||
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
val uri = Uri.parse(baseUrl).buildUpon()
|
||||
|
@ -68,7 +72,7 @@ class Mangafreak : ParsedHttpSource() {
|
|||
uri.appendPath("Genre")
|
||||
when (filter) {
|
||||
is GenreList -> {
|
||||
uri.appendPath( filter.state.map {
|
||||
uri.appendPath(filter.state.map {
|
||||
when (it.state) {
|
||||
Filter.TriState.STATE_IGNORE -> "0"
|
||||
Filter.TriState.STATE_INCLUDE -> "1"
|
||||
|
@ -80,13 +84,13 @@ class Mangafreak : ParsedHttpSource() {
|
|||
}
|
||||
uri.appendEncodedPath("Status/0/Type/0")
|
||||
}
|
||||
return GET(uri.toString(),headers)
|
||||
return GET(uri.toString(), headers)
|
||||
}
|
||||
override fun searchMangaNextPageSelector(): String? = null
|
||||
override fun searchMangaSelector(): String = "div.manga_search_item , div.mangaka_search_item"
|
||||
override fun searchMangaFromElement(element: Element): SManga = popularMangaFromElement(element)
|
||||
|
||||
//Details
|
||||
// Details
|
||||
|
||||
override fun mangaDetailsParse(document: Document): SManga = SManga.create().apply {
|
||||
thumbnail_url = document.select("div.manga_series_image img").attr("abs:src")
|
||||
|
@ -101,10 +105,9 @@ class Mangafreak : ParsedHttpSource() {
|
|||
val glist = document.select("div.series_sub_genre_list a").map { it.text() }
|
||||
genre = glist.joinToString(", ")
|
||||
description = document.select("div.manga_series_description p").text()
|
||||
|
||||
}
|
||||
|
||||
//Chapter
|
||||
// Chapter
|
||||
|
||||
override fun chapterListSelector(): String = "div.manga_series_list tbody tr"
|
||||
override fun chapterFromElement(element: Element): SChapter = SChapter.create().apply {
|
||||
|
@ -114,17 +117,17 @@ class Mangafreak : ParsedHttpSource() {
|
|||
date_upload = parseDate(element.select(" td:eq(1)").text())
|
||||
}
|
||||
private fun parseDate(date: String): Long {
|
||||
return SimpleDateFormat("yyyy/MM/dd", Locale.US ).parse(date).time
|
||||
return SimpleDateFormat("yyyy/MM/dd", Locale.US).parse(date).time
|
||||
}
|
||||
override fun chapterListParse(response: Response): List<SChapter> {
|
||||
return super.chapterListParse(response).reversed()
|
||||
}
|
||||
|
||||
//Pages
|
||||
// Pages
|
||||
|
||||
override fun pageListParse(document: Document): List<Page> = mutableListOf<Page>().apply {
|
||||
document.select("img#gohere").forEachIndexed { index, element ->
|
||||
add(Page(index,"",element.attr("src")))
|
||||
add(Page(index, "", element.attr("src")))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -132,7 +135,7 @@ class Mangafreak : ParsedHttpSource() {
|
|||
return throw Exception("Not Used")
|
||||
}
|
||||
|
||||
//Filter
|
||||
// Filter
|
||||
|
||||
private class Genre(name: String) : Filter.TriState(name)
|
||||
private class GenreList(genres: List<Genre>) : Filter.Group<Genre>("Genres", genres)
|
||||
|
@ -180,8 +183,4 @@ class Mangafreak : ParsedHttpSource() {
|
|||
Genre("Yaoi"),
|
||||
Genre("Yuri")
|
||||
)
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -12,13 +12,13 @@ import eu.kanade.tachiyomi.source.model.Page
|
|||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import okhttp3.Headers
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
|
||||
class Mangahasu : ParsedHttpSource() {
|
||||
|
||||
|
@ -126,8 +126,8 @@ class Mangahasu : ParsedHttpSource() {
|
|||
|
||||
override fun pageListParse(document: Document): List<Page> {
|
||||
|
||||
//Grab All Pages from site
|
||||
//Some images are place holders on new chapters.
|
||||
// Grab All Pages from site
|
||||
// Some images are place holders on new chapters.
|
||||
|
||||
val pages = mutableListOf<Page>().apply {
|
||||
document.select("div.img img").forEach {
|
||||
|
@ -136,13 +136,13 @@ class Mangahasu : ParsedHttpSource() {
|
|||
}
|
||||
}
|
||||
|
||||
//Some images are not yet loaded onto Mangahasu's image server.
|
||||
//Decode temporary URLs and replace placeholder images.
|
||||
// Some images are not yet loaded onto Mangahasu's image server.
|
||||
// Decode temporary URLs and replace placeholder images.
|
||||
|
||||
val lstDUrls =
|
||||
document.select("script:containsData(lstDUrls)").html().substringAfter("lstDUrls")
|
||||
.substringAfter("\"").substringBefore("\"")
|
||||
if (lstDUrls != "W10=") { //Base64 = [] or empty file
|
||||
if (lstDUrls != "W10=") { // Base64 = [] or empty file
|
||||
val decoded = String(Base64.decode(lstDUrls, Base64.DEFAULT))
|
||||
val json = JsonParser().parse(decoded).array
|
||||
json.forEach {
|
||||
|
|
|
@ -2,18 +2,24 @@ package eu.kanade.tachiyomi.extension.en.mangahere
|
|||
|
||||
import com.squareup.duktape.Duktape
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.*
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.lang.NumberFormatException
|
||||
import java.lang.UnsupportedOperationException
|
||||
import java.text.ParseException
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
import kotlin.collections.ArrayList
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
import okhttp3.Cookie
|
||||
import okhttp3.CookieJar
|
||||
import okhttp3.HttpUrl
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
|
||||
class Mangahere : ParsedHttpSource() {
|
||||
|
||||
|
@ -28,7 +34,7 @@ class Mangahere : ParsedHttpSource() {
|
|||
override val supportsLatest = true
|
||||
|
||||
override val client: OkHttpClient = super.client.newBuilder()
|
||||
.cookieJar(object : CookieJar{
|
||||
.cookieJar(object : CookieJar {
|
||||
override fun saveFromResponse(url: HttpUrl, cookies: MutableList<Cookie>) {}
|
||||
override fun loadForRequest(url: HttpUrl): MutableList<Cookie> {
|
||||
return ArrayList<Cookie>().apply {
|
||||
|
@ -39,7 +45,6 @@ class Mangahere : ParsedHttpSource() {
|
|||
.value("1")
|
||||
.build()) }
|
||||
}
|
||||
|
||||
})
|
||||
.build()
|
||||
|
||||
|
@ -79,7 +84,7 @@ class Mangahere : ParsedHttpSource() {
|
|||
val url = HttpUrl.parse("$baseUrl/search")!!.newBuilder()
|
||||
|
||||
filters.forEach {
|
||||
when(it) {
|
||||
when (it) {
|
||||
|
||||
is TypeList -> {
|
||||
url.addEncodedQueryParameter("type", types[it.values[it.state]].toString())
|
||||
|
@ -100,7 +105,6 @@ class Mangahere : ParsedHttpSource() {
|
|||
url.addEncodedQueryParameter("genres", includeGenres.joinToString(","))
|
||||
.addEncodedQueryParameter("nogenres", excludeGenres.joinToString(","))
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -109,13 +113,13 @@ class Mangahere : ParsedHttpSource() {
|
|||
.addEncodedQueryParameter("sort", null)
|
||||
.addEncodedQueryParameter("stype", 1.toString())
|
||||
.addEncodedQueryParameter("name", null)
|
||||
.addEncodedQueryParameter("author_method","cw")
|
||||
.addEncodedQueryParameter("author_method", "cw")
|
||||
.addEncodedQueryParameter("author", null)
|
||||
.addEncodedQueryParameter("artist_method", "cw")
|
||||
.addEncodedQueryParameter("artist", null)
|
||||
.addEncodedQueryParameter("rating_method","eq")
|
||||
.addEncodedQueryParameter("rating",null)
|
||||
.addEncodedQueryParameter("released_method","eq")
|
||||
.addEncodedQueryParameter("rating_method", "eq")
|
||||
.addEncodedQueryParameter("rating", null)
|
||||
.addEncodedQueryParameter("released_method", "eq")
|
||||
.addEncodedQueryParameter("released", null)
|
||||
|
||||
return GET(url.toString(), headers)
|
||||
|
@ -169,7 +173,7 @@ class Mangahere : ParsedHttpSource() {
|
|||
}
|
||||
|
||||
private fun parseChapterDate(date: String): Long {
|
||||
return if ("Today" in date || " ago" in date){
|
||||
return if ("Today" in date || " ago" in date) {
|
||||
Calendar.getInstance().apply {
|
||||
set(Calendar.HOUR_OF_DAY, 0)
|
||||
set(Calendar.MINUTE, 0)
|
||||
|
@ -230,7 +234,7 @@ class Mangahere : ParsedHttpSource() {
|
|||
|
||||
var secretKey = extractSecretKey(html, duktape)
|
||||
|
||||
val chapterIdStartLoc = html.indexOf("chapterid")
|
||||
val chapterIdStartLoc = html.indexOf("chapterid")
|
||||
val chapterId = html.substring(
|
||||
chapterIdStartLoc + 11,
|
||||
html.indexOf(";", chapterIdStartLoc)).trim()
|
||||
|
@ -243,21 +247,21 @@ class Mangahere : ParsedHttpSource() {
|
|||
|
||||
IntRange(1, pagesNumber).map { i ->
|
||||
|
||||
val pageLink = "${pageBase}/chapterfun.ashx?cid=$chapterId&page=$i&key=$secretKey"
|
||||
val pageLink = "$pageBase/chapterfun.ashx?cid=$chapterId&page=$i&key=$secretKey"
|
||||
|
||||
var responseText = ""
|
||||
|
||||
for (tr in 1..3){
|
||||
for (tr in 1..3) {
|
||||
|
||||
val request = Request.Builder()
|
||||
.url(pageLink)
|
||||
.addHeader("Referer",link)
|
||||
.addHeader("Accept","*/*")
|
||||
.addHeader("Accept-Language","en-US,en;q=0.9")
|
||||
.addHeader("Connection","keep-alive")
|
||||
.addHeader("Host","www.mangahere.cc")
|
||||
.addHeader("Referer", link)
|
||||
.addHeader("Accept", "*/*")
|
||||
.addHeader("Accept-Language", "en-US,en;q=0.9")
|
||||
.addHeader("Connection", "keep-alive")
|
||||
.addHeader("Host", "www.mangahere.cc")
|
||||
.addHeader("User-Agent", System.getProperty("http.agent") ?: "")
|
||||
.addHeader("X-Requested-With","XMLHttpRequest")
|
||||
.addHeader("X-Requested-With", "XMLHttpRequest")
|
||||
.build()
|
||||
|
||||
val response = client.newCall(request).execute()
|
||||
|
@ -267,7 +271,6 @@ class Mangahere : ParsedHttpSource() {
|
|||
break
|
||||
else
|
||||
secretKey = ""
|
||||
|
||||
}
|
||||
|
||||
val deobfuscatedScript = duktape.evaluate(responseText.removePrefix("eval")).toString()
|
||||
|
@ -281,7 +284,6 @@ class Mangahere : ParsedHttpSource() {
|
|||
val imageLink = deobfuscatedScript.substring(imageLinkStartPos, imageLinkEndPos)
|
||||
|
||||
Page(i - 1, "", "https:$baseLink$imageLink")
|
||||
|
||||
}
|
||||
}
|
||||
.dropLastIfBroken()
|
||||
|
@ -303,15 +305,14 @@ class Mangahere : ParsedHttpSource() {
|
|||
secretKeyStartLoc, secretKeyEndLoc)
|
||||
|
||||
return duktape.evaluate(secretKeyResultScript).toString()
|
||||
|
||||
}
|
||||
|
||||
override fun imageUrlParse(document: Document): String = throw UnsupportedOperationException("Not used")
|
||||
|
||||
private class Genre(title: String, val id: Int) : Filter.TriState(title)
|
||||
|
||||
private class TypeList(types: Array<String>) : Filter.Select<String>("Type", types,0)
|
||||
private class CompletionList(completions: Array<String>) : Filter.Select<String>("Completed series", completions,0)
|
||||
private class TypeList(types: Array<String>) : Filter.Select<String>("Type", types, 0)
|
||||
private class CompletionList(completions: Array<String>) : Filter.Select<String>("Completed series", completions, 0)
|
||||
private class GenreList(genres: List<Genre>) : Filter.Group<Genre>("Genres", genres)
|
||||
|
||||
override fun getFilterList() = FilterList(
|
||||
|
@ -327,7 +328,7 @@ class Mangahere : ParsedHttpSource() {
|
|||
"Any" to 0
|
||||
)
|
||||
|
||||
private val completions = arrayOf("Either","No","Yes")
|
||||
private val completions = arrayOf("Either", "No", "Yes")
|
||||
|
||||
private val genres = arrayListOf(
|
||||
Genre("Action", 1),
|
||||
|
@ -367,5 +368,4 @@ class Mangahere : ParsedHttpSource() {
|
|||
Genre("Shotacon", 35),
|
||||
Genre("Lolicon", 36)
|
||||
)
|
||||
|
||||
}
|
||||
|
|
|
@ -2,25 +2,29 @@ package eu.kanade.tachiyomi.extension.en.mangahub
|
|||
|
||||
import com.github.salomonbrys.kotson.fromJson
|
||||
import com.github.salomonbrys.kotson.get
|
||||
import com.github.salomonbrys.kotson.string
|
||||
import com.github.salomonbrys.kotson.keys
|
||||
import com.github.salomonbrys.kotson.string
|
||||
import com.google.gson.Gson
|
||||
import com.google.gson.JsonObject
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.network.POST
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import okhttp3.Request
|
||||
import okhttp3.HttpUrl
|
||||
import okhttp3.RequestBody
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.net.URL
|
||||
import java.text.ParseException
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
import okhttp3.HttpUrl
|
||||
import okhttp3.Request
|
||||
import okhttp3.RequestBody
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
|
||||
class Mangahub : ParsedHttpSource() {
|
||||
|
||||
|
@ -164,7 +168,7 @@ class Mangahub : ParsedHttpSource() {
|
|||
|
||||
override fun imageUrlParse(document: Document): String = throw UnsupportedOperationException("Not used")
|
||||
|
||||
//https://mangahub.io/search/page/1?q=a&order=POPULAR&genre=all
|
||||
// https://mangahub.io/search/page/1?q=a&order=POPULAR&genre=all
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
val url = HttpUrl.parse("$baseUrl/search/page/$page")?.newBuilder()!!.addQueryParameter("q", query)
|
||||
(if (filters.isEmpty()) getFilterList() else filters).forEach { filter ->
|
||||
|
@ -200,7 +204,7 @@ class Mangahub : ParsedHttpSource() {
|
|||
}
|
||||
}
|
||||
|
||||
private class OrderBy(orders: Array<Order>) : Filter.Select<Order>("Order", orders,0)
|
||||
private class OrderBy(orders: Array<Order>) : Filter.Select<Order>("Order", orders, 0)
|
||||
private class GenreList(genres: Array<Genre>) : Filter.Select<Genre>("Genres", genres, 0)
|
||||
|
||||
override fun getFilterList() = FilterList(
|
||||
|
|
|
@ -2,23 +2,19 @@ package eu.kanade.tachiyomi.extension.en.mangajar
|
|||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import org.jsoup.select.Elements
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
|
||||
class MangaJar: ParsedHttpSource() {
|
||||
class MangaJar : ParsedHttpSource() {
|
||||
|
||||
override val name = "MangaJar"
|
||||
|
||||
|
@ -89,14 +85,13 @@ class MangaJar: ParsedHttpSource() {
|
|||
date_upload = parseChapterDate(element.select("span.chapter-date").text().trim()) ?: 0
|
||||
}
|
||||
|
||||
//The following date related code is taken directly from Genkan.kt
|
||||
// The following date related code is taken directly from Genkan.kt
|
||||
companion object {
|
||||
val dateFormat by lazy {
|
||||
SimpleDateFormat("dd MMM yyyy", Locale.ENGLISH)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private fun parseChapterDate(string: String): Long? {
|
||||
return if ("ago" in string) {
|
||||
parseRelativeDate(string) ?: 0
|
||||
|
|
|
@ -1,22 +1,26 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangakisa
|
||||
|
||||
import android.net.Uri
|
||||
import android.app.Application
|
||||
import android.content.SharedPreferences
|
||||
import android.net.Uri
|
||||
import android.support.v7.preference.ListPreference
|
||||
import android.support.v7.preference.PreferenceScreen
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.ConfigurableSource
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import okhttp3.*
|
||||
import java.util.Calendar
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import uy.kohesive.injekt.Injekt
|
||||
import uy.kohesive.injekt.api.get
|
||||
import java.util.*
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
|
||||
class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
||||
|
||||
|
@ -45,23 +49,22 @@ class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
|||
override fun searchMangaNextPageSelector() = popularMangaNextPageSelector()
|
||||
|
||||
override fun popularMangaRequest(page: Int): Request {
|
||||
val page0 = page-1
|
||||
val page0 = page - 1
|
||||
val popselect = getpoppref()
|
||||
return GET("$baseUrl/$popselect/$page0", headers)
|
||||
|
||||
}
|
||||
override fun latestUpdatesRequest(page: Int): Request {
|
||||
val page0 = page-1
|
||||
val page0 = page - 1
|
||||
val latestselect = getlastestpref()
|
||||
return GET("$baseUrl/$latestselect/$page0", headers)
|
||||
}
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
val page0 = page-1
|
||||
val page0 = page - 1
|
||||
val uri = if (query.isNotBlank()) {
|
||||
Uri.parse("$baseUrl/search?q=$query").buildUpon()
|
||||
} else {
|
||||
val uri = Uri.parse("$baseUrl/").buildUpon()
|
||||
//Append uri filters
|
||||
// Append uri filters
|
||||
filters.forEach {
|
||||
if (it is UriFilter)
|
||||
it.addToUri(uri)
|
||||
|
@ -78,7 +81,7 @@ class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
|||
|
||||
override fun popularMangaFromElement(element: Element) = mangaFromElement(element)
|
||||
override fun latestUpdatesFromElement(element: Element) = mangaFromElement(element)
|
||||
override fun searchMangaFromElement(element: Element)= mangaFromElement(element)
|
||||
override fun searchMangaFromElement(element: Element) = mangaFromElement(element)
|
||||
private fun mangaFromElement(element: Element): SManga {
|
||||
val manga = SManga.create()
|
||||
manga.setUrlWithoutDomain(element.select(".an").first().attr("href"))
|
||||
|
@ -92,7 +95,7 @@ class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
|||
chapter.setUrlWithoutDomain("/" + element.select("a").attr("href"))
|
||||
chapter.chapter_number = element.select("[class*=infoept2] > div").text().toFloat()
|
||||
chapter.name = "Chapter " + element.select("[class*=infoept2] > div").text().trim()
|
||||
chapter.date_upload = parseRelativeDate(element.select("[class*=infoept3] > div").text()) ?:0
|
||||
chapter.date_upload = parseRelativeDate(element.select("[class*=infoept3] > div").text()) ?: 0
|
||||
return chapter
|
||||
}
|
||||
|
||||
|
@ -128,7 +131,7 @@ class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
|||
"Completed" -> SManga.COMPLETED
|
||||
else -> SManga.UNKNOWN
|
||||
}
|
||||
manga.thumbnail_url = baseUrl + "/" + document.select(".infopicbox > img").attr("src")
|
||||
manga.thumbnail_url = baseUrl + "/" + document.select(".infopicbox > img").attr("src")
|
||||
return manga
|
||||
}
|
||||
|
||||
|
@ -145,16 +148,20 @@ class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
|||
override fun imageUrlRequest(page: Page) = throw Exception("Not used")
|
||||
override fun imageUrlParse(document: Document) = throw Exception("Not used")
|
||||
|
||||
//Filter List Code
|
||||
// Filter List Code
|
||||
override fun getFilterList() = FilterList(
|
||||
Filter.Header("NOTE: Ignored if using text search!"),
|
||||
Filter.Separator(),
|
||||
GenreFilter()
|
||||
)
|
||||
|
||||
private open class UriSelectFilter(displayName: String, val uriParam: String, val vals: Array<Pair<String, String>>,
|
||||
val firstIsUnspecified: Boolean = true,
|
||||
defaultValue: Int = 0) :
|
||||
private open class UriSelectFilter(
|
||||
displayName: String,
|
||||
val uriParam: String,
|
||||
val vals: Array<Pair<String, String>>,
|
||||
val firstIsUnspecified: Boolean = true,
|
||||
defaultValue: Int = 0
|
||||
) :
|
||||
Filter.Select<String>(displayName, vals.map { it.second }.toTypedArray(), defaultValue), UriFilter {
|
||||
override fun addToUri(uri: Uri.Builder) {
|
||||
if (state != 0 || !firstIsUnspecified)
|
||||
|
@ -167,7 +174,7 @@ class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
|||
fun addToUri(uri: Uri.Builder)
|
||||
}
|
||||
|
||||
private class GenreFilter : UriSelectFilter("Genre","genres", arrayOf(
|
||||
private class GenreFilter : UriSelectFilter("Genre", "genres", arrayOf(
|
||||
Pair("all", "ALL"),
|
||||
Pair("action", "Action "),
|
||||
Pair("adult", "Adult "),
|
||||
|
@ -281,14 +288,10 @@ class MangaKisa : ConfigurableSource, ParsedHttpSource() {
|
|||
private fun getpoppref() = preferences.getString(BROWSE_PREF, "popular")
|
||||
private fun getlastestpref() = preferences.getString(LATEST_PREF, "latest")
|
||||
|
||||
|
||||
companion object {
|
||||
private const val LATEST_PREF_Title = "Latest Manga Selector"
|
||||
private const val LATEST_PREF = "latestmangaurl"
|
||||
private const val BROWSE_PREF_Title = "Popular Manga Selector"
|
||||
private const val BROWSE_PREF = "popularmangaurl"
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -8,23 +8,27 @@ import com.google.gson.JsonArray
|
|||
import com.google.gson.JsonElement
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.HttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.Headers
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import rx.Observable
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
/**
|
||||
* Source responds to requests with their full database as a JsonArray, then sorts/filters it client-side
|
||||
* We'll take the database on first requests, then do what we want with it
|
||||
*/
|
||||
|
||||
class MangaLife : HttpSource() {
|
||||
|
||||
override val name = "MangaLife"
|
||||
|
@ -42,7 +46,7 @@ class MangaLife : HttpSource() {
|
|||
.build()
|
||||
|
||||
override fun headersBuilder(): Headers.Builder = Headers.Builder()
|
||||
.add("User-Agent","Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:71.0) Gecko/20100101 Firefox/71.0")
|
||||
.add("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:71.0) Gecko/20100101 Firefox/71.0")
|
||||
|
||||
private val gson = GsonBuilder().setLenient().create()
|
||||
|
||||
|
@ -83,7 +87,7 @@ class MangaLife : HttpSource() {
|
|||
val mangas = mutableListOf<SManga>()
|
||||
val endRange = ((page * 24) - 1).let { if (it <= directory.lastIndex) it else directory.lastIndex }
|
||||
|
||||
for (i in (((page - 1) * 24) .. endRange)){
|
||||
for (i in (((page - 1) * 24)..endRange)) {
|
||||
mangas.add(SManga.create().apply {
|
||||
title = directory[i]["s"].string
|
||||
url = "/manga/${directory[i]["i"].string}"
|
||||
|
@ -201,20 +205,20 @@ class MangaLife : HttpSource() {
|
|||
|
||||
private val dateFormat = SimpleDateFormat("yyyy-MM-dd", Locale.getDefault())
|
||||
|
||||
private fun chapterURLEncode(e: String ):String {
|
||||
private fun chapterURLEncode(e: String): String {
|
||||
var index = ""
|
||||
val t = e.substring(0,1).toInt()
|
||||
val t = e.substring(0, 1).toInt()
|
||||
if (1 != t) { index = "-index-$t" }
|
||||
val n = e.substring(1,e.length-1)
|
||||
val n = e.substring(1, e.length - 1)
|
||||
var suffix = ""
|
||||
val path = e.substring(e.length-1).toInt()
|
||||
if (0 != path) {suffix = ".$path"}
|
||||
val path = e.substring(e.length - 1).toInt()
|
||||
if (0 != path) { suffix = ".$path" }
|
||||
return "-chapter-$n$index$suffix.html"
|
||||
}
|
||||
|
||||
private fun chapterImage(e: String): String {
|
||||
val a = e.substring(1,e.length-1)
|
||||
val b = e.substring(e.length-1).toInt()
|
||||
val a = e.substring(1, e.length - 1)
|
||||
val b = e.substring(e.length - 1).toInt()
|
||||
return if (b == 0) {
|
||||
a
|
||||
} else {
|
||||
|
@ -226,7 +230,7 @@ class MangaLife : HttpSource() {
|
|||
val vmChapters = response.asJsoup().select("script:containsData(MainFunction)").first().data()
|
||||
.substringAfter("vm.Chapters = ").substringBefore(";")
|
||||
|
||||
return gson.fromJson<JsonArray>(vmChapters).map{ json ->
|
||||
return gson.fromJson<JsonArray>(vmChapters).map { json ->
|
||||
val indexChapter = json["Chapter"].string
|
||||
SChapter.create().apply {
|
||||
name = json["ChapterName"].string.let { if (it.isNotEmpty()) it else "${json["Type"].string} ${chapterImage(indexChapter)}" }
|
||||
|
@ -258,7 +262,7 @@ class MangaLife : HttpSource() {
|
|||
var chNum = chapterImage(curChapter["Chapter"].string)
|
||||
|
||||
return IntRange(1, pageTotal).mapIndexed { i, _ ->
|
||||
var imageNum = (i + 1).toString().let { "000$it" }.let { it.substring(it.length-3) }
|
||||
var imageNum = (i + 1).toString().let { "000$it" }.let { it.substring(it.length - 3) }
|
||||
Page(i, "", path + "$chNum-$imageNum.png")
|
||||
}
|
||||
}
|
||||
|
@ -324,5 +328,4 @@ class MangaLife : HttpSource() {
|
|||
Genre("Yaoi"),
|
||||
Genre("Yuri")
|
||||
)
|
||||
|
||||
}
|
||||
|
|
|
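The MangaLife source above fetches the site's whole directory as JSON once and then pages and filters it client-side, as its class comment notes. A minimal sketch of that paging idea, assuming the directory has already been parsed into a Gson JsonArray of objects with an "s" (title) field; pageFromDirectory and PAGE_SIZE are illustrative names, not part of the extension:

import com.google.gson.JsonArray

// Hypothetical page size matching the 24-per-page slicing used in popularMangaParse above.
private const val PAGE_SIZE = 24

// Returns the titles for one 1-based page of an already-fetched directory array,
// clamping the end index to the array bounds the same way the extension does.
fun pageFromDirectory(directory: JsonArray, page: Int): List<String> {
    val start = (page - 1) * PAGE_SIZE
    val end = minOf(page * PAGE_SIZE - 1, directory.size() - 1)
    if (start > end) return emptyList()
    return (start..end).map { i -> directory.get(i).asJsonObject.get("s").asString }
}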
@ -6,16 +6,18 @@ import com.github.salomonbrys.kotson.string
|
|||
import com.google.gson.Gson
|
||||
import com.google.gson.JsonObject
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.lang.Exception
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
|
||||
class MangaLinkz : ParsedHttpSource() {
|
||||
|
||||
|
@ -141,11 +143,10 @@ class MangaLinkz : ParsedHttpSource() {
|
|||
.substringAfter("atob(\"").substringBefore("\"")
|
||||
val decoded = Base64.decode(encoded, Base64.DEFAULT).toString(Charsets.UTF_8).removeSurrounding("[", "]")
|
||||
|
||||
return gson.fromJson<JsonObject>(decoded)["pages"].asJsonArray.mapIndexed{ i, jsonElement -> Page(i, "", jsonElement.string) }
|
||||
return gson.fromJson<JsonObject>(decoded)["pages"].asJsonArray.mapIndexed { i, jsonElement -> Page(i, "", jsonElement.string) }
|
||||
}
|
||||
|
||||
override fun imageUrlParse(document: Document): String = throw UnsupportedOperationException("Not used")
|
||||
|
||||
override fun getFilterList() = FilterList()
|
||||
|
||||
}
|
||||
|
|
|
@ -1,52 +1,55 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangamainac
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.util.Calendar
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import rx.Observable
|
||||
import java.util.*
|
||||
|
||||
//MangaManiac is a network of sites built by Animemaniac.co.
|
||||
// MangaManiac is a network of sites built by Animemaniac.co.
|
||||
|
||||
class MangaMainac : ParsedHttpSource() {
|
||||
|
||||
companion object {
|
||||
val sourceList = listOf<Pair<String,String>>(
|
||||
Pair("Boku No Hero Academia","https://w15.readheroacademia.com"),
|
||||
Pair("One Punch Man","https://w12.readonepunchman.net"),
|
||||
Pair("One Punch Man (webcomic)","https://onewebcomic.net"),
|
||||
Pair("Solo Leveling","https://sololeveling.net"),
|
||||
Pair("Jojolion","https://readjojolion.com"),
|
||||
Pair("Hajime no Ippo","https://readhajimenoippo.com"),
|
||||
Pair("Berserk","http://berserkmanga.net"),
|
||||
Pair("The Quintessential Quintuplets","https://5-toubunnohanayome.net"),
|
||||
Pair("Kaguya Wants to be Confessed To","https://kaguyasama.net"),
|
||||
Pair("Domestic Girlfriend","https://domesticgirlfriend.net"),
|
||||
Pair("Black Clover","https://w5.blackclovermanga.com"),
|
||||
Pair("One Piece","https://1piecemanga.net"),
|
||||
Pair("The Promised Neverland","https://neverlandmanga.net"),
|
||||
Pair("Shingeki no Kyojin","https://readshingekinokyojin.com"),
|
||||
Pair("Nanatsu no Taizai","https://w1.readnanatsutaizai.net")
|
||||
val sourceList = listOf<Pair<String, String>>(
|
||||
Pair("Boku No Hero Academia", "https://w15.readheroacademia.com"),
|
||||
Pair("One Punch Man", "https://w12.readonepunchman.net"),
|
||||
Pair("One Punch Man (webcomic)", "https://onewebcomic.net"),
|
||||
Pair("Solo Leveling", "https://sololeveling.net"),
|
||||
Pair("Jojolion", "https://readjojolion.com"),
|
||||
Pair("Hajime no Ippo", "https://readhajimenoippo.com"),
|
||||
Pair("Berserk", "http://berserkmanga.net"),
|
||||
Pair("The Quintessential Quintuplets", "https://5-toubunnohanayome.net"),
|
||||
Pair("Kaguya Wants to be Confessed To", "https://kaguyasama.net"),
|
||||
Pair("Domestic Girlfriend", "https://domesticgirlfriend.net"),
|
||||
Pair("Black Clover", "https://w5.blackclovermanga.com"),
|
||||
Pair("One Piece", "https://1piecemanga.net"),
|
||||
Pair("The Promised Neverland", "https://neverlandmanga.net"),
|
||||
Pair("Shingeki no Kyojin", "https://readshingekinokyojin.com"),
|
||||
Pair("Nanatsu no Taizai", "https://w1.readnanatsutaizai.net")
|
||||
).sortedBy { it.first }.distinctBy { it.second }
|
||||
|
||||
}
|
||||
|
||||
//Info
|
||||
// Info
|
||||
|
||||
override val name: String = "MangaMainac (Multiple Sites)"
|
||||
override val baseUrl: String = "about:blank"
|
||||
override val lang: String = "en"
|
||||
override val supportsLatest: Boolean = false
|
||||
|
||||
//Popular
|
||||
// Popular
|
||||
|
||||
override fun fetchPopularManga(page: Int): Observable<MangasPage> {
|
||||
return Observable.just(MangasPage(sourceList.map { popularMangaFromPair(it.first,it.second ) }, false))
|
||||
return Observable.just(MangasPage(sourceList.map { popularMangaFromPair(it.first, it.second) }, false))
|
||||
}
|
||||
private fun popularMangaFromPair(name: String, sourceurl: String ): SManga = SManga.create().apply {
|
||||
private fun popularMangaFromPair(name: String, sourceurl: String): SManga = SManga.create().apply {
|
||||
title = name
|
||||
url = sourceurl
|
||||
}
|
||||
|
@ -55,41 +58,40 @@ class MangaMainac : ParsedHttpSource() {
|
|||
override fun popularMangaSelector(): String = throw Exception("Not used")
|
||||
override fun popularMangaFromElement(element: Element) = throw Exception("Not used")
|
||||
|
||||
|
||||
//Latest
|
||||
// Latest
|
||||
override fun latestUpdatesRequest(page: Int): Request = throw Exception("Not used")
|
||||
override fun latestUpdatesNextPageSelector(): String? = throw Exception("Not used")
|
||||
override fun latestUpdatesSelector(): String = throw Exception("Not used")
|
||||
override fun latestUpdatesFromElement(element: Element): SManga = throw Exception("Not used")
|
||||
|
||||
//Search
|
||||
// Search
|
||||
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList)= throw Exception("No Search Function")
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw Exception("No Search Function")
|
||||
override fun searchMangaNextPageSelector() = throw Exception("Not used")
|
||||
override fun searchMangaSelector() = throw Exception("Not used")
|
||||
override fun searchMangaFromElement(element: Element) = throw Exception("Not used")
|
||||
|
||||
//Get Override
|
||||
// Get Override
|
||||
|
||||
override fun mangaDetailsRequest(manga: SManga): Request {
|
||||
return GET( manga.url, headers)
|
||||
return GET(manga.url, headers)
|
||||
}
|
||||
override fun chapterListRequest(manga: SManga): Request {
|
||||
return GET( manga.url, headers)
|
||||
return GET(manga.url, headers)
|
||||
}
|
||||
override fun pageListRequest(chapter: SChapter): Request {
|
||||
return GET( chapter.url, headers)
|
||||
return GET(chapter.url, headers)
|
||||
}
|
||||
|
||||
//Details
|
||||
// Details
|
||||
|
||||
override fun mangaDetailsParse(document: Document): SManga = SManga.create().apply {
|
||||
val info = document.select(".intro_content").text()
|
||||
title = document.select(".intro_content h2").text()
|
||||
author = if ("Author" in info) substringextract(info,"Author(s):","Released") else null
|
||||
author = if ("Author" in info) substringextract(info, "Author(s):", "Released") else null
|
||||
artist = author
|
||||
genre = if ("Genre" in info) substringextract(info, "Genre(s):","Status") else null
|
||||
status = when (substringextract(info, "Status:","(")) {
|
||||
genre = if ("Genre" in info) substringextract(info, "Genre(s):", "Status") else null
|
||||
status = when (substringextract(info, "Status:", "(")) {
|
||||
"Ongoing" -> SManga.ONGOING
|
||||
"Completed" -> SManga.COMPLETED
|
||||
else -> SManga.UNKNOWN
|
||||
|
@ -97,9 +99,9 @@ class MangaMainac : ParsedHttpSource() {
|
|||
description = if ("Description" in info) info.substringAfter("Description:").trim() else null
|
||||
thumbnail_url = document.select(".mangainfo_body img").attr("src")
|
||||
}
|
||||
private fun substringextract(text: String, start:String, end:String): String = text.substringAfter(start).substringBefore(end).trim()
|
||||
private fun substringextract(text: String, start: String, end: String): String = text.substringAfter(start).substringBefore(end).trim()
|
||||
|
||||
//Chapters
|
||||
// Chapters
|
||||
|
||||
override fun chapterListSelector(): String = ".chap_tab tr"
|
||||
override fun chapterFromElement(element: Element): SChapter = SChapter.create().apply {
|
||||
|
@ -113,7 +115,7 @@ class MangaMainac : ParsedHttpSource() {
|
|||
val calendar = Calendar.getInstance()
|
||||
|
||||
if (date.contains("yesterday")) {
|
||||
calendar.apply{add(Calendar.DAY_OF_MONTH, -1)}
|
||||
calendar.apply { add(Calendar.DAY_OF_MONTH, -1) }
|
||||
} else {
|
||||
val trimmedDate = date.replace("one", "1").removeSuffix("s").split(" ")
|
||||
|
||||
|
@ -128,16 +130,13 @@ class MangaMainac : ParsedHttpSource() {
|
|||
return calendar.timeInMillis
|
||||
}
|
||||
|
||||
//Pages
|
||||
// Pages
|
||||
|
||||
override fun pageListParse(document: Document): List<Page> = mutableListOf<Page>().apply {
|
||||
document.select(".img_container img").forEach { img ->
|
||||
add(Page(size,"",img.attr("src")))
|
||||
add(Page(size, "", img.attr("src")))
|
||||
}
|
||||
}
|
||||
|
||||
override fun imageUrlParse(document: Document): String = throw Exception("Not Used")
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -1,15 +1,18 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangaowl
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
class MangaOwl : ParsedHttpSource() {
|
||||
|
||||
|
@ -134,5 +137,4 @@ class MangaOwl : ParsedHttpSource() {
|
|||
override fun imageUrlParse(document: Document): String = throw UnsupportedOperationException("Not used")
|
||||
|
||||
override fun getFilterList() = FilterList()
|
||||
|
||||
}
|
||||
|
|
|
@ -3,9 +3,9 @@ package eu.kanade.tachiyomi.extension.en.mangapark
|
|||
import android.annotation.SuppressLint
|
||||
import android.app.Application
|
||||
import android.content.SharedPreferences
|
||||
import android.net.Uri
|
||||
import android.support.v7.preference.ListPreference
|
||||
import android.support.v7.preference.PreferenceScreen
|
||||
import android.net.Uri
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.ConfigurableSource
|
||||
import eu.kanade.tachiyomi.source.model.Filter
|
||||
|
@ -15,6 +15,10 @@ import eu.kanade.tachiyomi.source.model.SChapter
|
|||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
import kotlin.math.absoluteValue
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.json.JSONArray
|
||||
|
@ -22,11 +26,6 @@ import org.jsoup.nodes.Document
|
|||
import org.jsoup.nodes.Element
|
||||
import uy.kohesive.injekt.Injekt
|
||||
import uy.kohesive.injekt.api.get
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
import kotlin.math.absoluteValue
|
||||
|
||||
|
||||
class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
||||
|
||||
|
@ -51,7 +50,6 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
|
||||
override fun popularMangaNextPageSelector() = directoryNextPageSelector
|
||||
|
||||
|
||||
override fun latestUpdatesRequest(page: Int) = GET("$baseUrl/latest")
|
||||
|
||||
override fun latestUpdatesSelector() = directorySelector
|
||||
|
@ -60,7 +58,6 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
|
||||
override fun latestUpdatesNextPageSelector() = directoryNextPageSelector
|
||||
|
||||
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
val uri = Uri.parse("$baseUrl/search").buildUpon()
|
||||
uri.appendQueryParameter("q", query)
|
||||
|
@ -122,7 +119,7 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
return allChapters.filter { it.chapter_number !in chapterNums }.distinctBy { it.chapter_number }
|
||||
}
|
||||
|
||||
fun List<SChapter>.filterOrAll(source: String): List<SChapter>{
|
||||
fun List<SChapter>.filterOrAll(source: String): List<SChapter> {
|
||||
val chapters = this.filter { it.scanlator!!.contains(source) }
|
||||
return if (chapters.isNotEmpty()) {
|
||||
(chapters + chapters.getMissingChapters(this)).sortedByDescending { it.chapter_number }
|
||||
|
@ -171,22 +168,22 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
val lcDate = date.toLowerCase()
|
||||
if (lcDate.endsWith("ago")) return parseRelativeDate(lcDate)
|
||||
|
||||
//Handle 'yesterday' and 'today'
|
||||
// Handle 'yesterday' and 'today'
|
||||
var relativeDate: Calendar? = null
|
||||
if (lcDate.startsWith("yesterday")) {
|
||||
relativeDate = Calendar.getInstance()
|
||||
relativeDate.add(Calendar.DAY_OF_MONTH, -1) //yesterday
|
||||
relativeDate.add(Calendar.DAY_OF_MONTH, -1) // yesterday
|
||||
} else if (lcDate.startsWith("today")) {
|
||||
relativeDate = Calendar.getInstance()
|
||||
}
|
||||
|
||||
relativeDate?.let {
|
||||
//Since the date is not specified, it defaults to 1970!
|
||||
// Since the date is not specified, it defaults to 1970!
|
||||
val time = dateFormatTimeOnly.parse(lcDate.substringAfter(' '))
|
||||
val cal = Calendar.getInstance()
|
||||
cal.time = time
|
||||
|
||||
//Copy time to relative date
|
||||
// Copy time to relative date
|
||||
it.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY))
|
||||
it.set(Calendar.MINUTE, cal.get(Calendar.MINUTE))
|
||||
return it.timeInMillis
|
||||
|
@ -208,11 +205,11 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
"a" -> 1
|
||||
else -> trimmedDate[0].toIntOrNull() ?: return 0
|
||||
}
|
||||
val unit = trimmedDate[1].removeSuffix("s") //Remove 's' suffix
|
||||
val unit = trimmedDate[1].removeSuffix("s") // Remove 's' suffix
|
||||
|
||||
val now = Calendar.getInstance()
|
||||
|
||||
//Map English unit to Java unit
|
||||
// Map English unit to Java unit
|
||||
val javaUnit = when (unit) {
|
||||
"year" -> Calendar.YEAR
|
||||
"month" -> Calendar.MONTH
|
||||
|
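The chapter dates here follow the common "<n> <unit>s ago" pattern, mapped onto java.util.Calendar fields as in the hunk above. A small self-contained sketch of the same idea, assuming English unit names only (parseAgo is an illustrative name, not the extension's function):

import java.util.Calendar

// Parses strings like "3 days ago" or "an hour ago" into epoch millis; returns 0 if unrecognised.
fun parseAgo(date: String): Long {
    val parts = date.trim().toLowerCase().removeSuffix(" ago").split(" ")
    if (parts.size != 2) return 0
    val amount = if (parts[0] == "a" || parts[0] == "an") 1 else (parts[0].toIntOrNull() ?: return 0)
    val field = when (parts[1].removeSuffix("s")) { // drop plural 's'
        "year" -> Calendar.YEAR
        "month" -> Calendar.MONTH
        "week" -> Calendar.WEEK_OF_YEAR
        "day" -> Calendar.DAY_OF_MONTH
        "hour" -> Calendar.HOUR_OF_DAY
        "minute" -> Calendar.MINUTE
        "second" -> Calendar.SECOND
        else -> return 0
    }
    return Calendar.getInstance().apply { add(field, -amount) }.timeInMillis
}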
@ -245,7 +242,7 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
return pages
|
||||
}
|
||||
|
||||
//Unused, we can get image urls directly from the chapter page
|
||||
// Unused, we can get image urls directly from the chapter page
|
||||
override fun imageUrlParse(document: Document) = throw UnsupportedOperationException("Not used")
|
||||
|
||||
override fun getFilterList() = FilterList(
|
||||
|
@ -425,7 +422,7 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
|
||||
private class YearFilter : UriSelectFilter("Release year", "years",
|
||||
arrayOf(Pair("any", "Any"),
|
||||
//Get all years between today and 1946
|
||||
// Get all years between today and 1946
|
||||
*(Calendar.getInstance().get(Calendar.YEAR) downTo 1946).map {
|
||||
Pair(it.toString(), it.toString())
|
||||
}.toTypedArray()
|
||||
|
@ -445,10 +442,14 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
* If an entry is selected it is appended as a query parameter onto the end of the URI.
|
||||
* If `firstIsUnspecified` is set to true and the first entry is selected, nothing will be appended to the URI.
|
||||
*/
|
||||
//vals: <name, display>
|
||||
private open class UriSelectFilter(displayName: String, val uriParam: String, val vals: Array<Pair<String, String>>,
|
||||
val firstIsUnspecified: Boolean = true,
|
||||
defaultValue: Int = 0) :
|
||||
// vals: <name, display>
|
||||
private open class UriSelectFilter(
|
||||
displayName: String,
|
||||
val uriParam: String,
|
||||
val vals: Array<Pair<String, String>>,
|
||||
val firstIsUnspecified: Boolean = true,
|
||||
defaultValue: Int = 0
|
||||
) :
|
||||
Filter.Select<String>(displayName, vals.map { it.second }.toTypedArray(), defaultValue), UriFilter {
|
||||
override fun addToUri(uri: Uri.Builder) {
|
||||
if (state != 0 || !firstIsUnspecified)
|
||||
|
@ -510,15 +511,14 @@ class MangaPark : ConfigurableSource, ParsedHttpSource() {
|
|||
private const val SOURCE_PREF_TITLE = "Chapter List Source"
|
||||
private const val SOURCE_PREF = "Manga_Park_Source"
|
||||
private val sourceArray = arrayOf(
|
||||
Pair("All sources, all chapters","all"),
|
||||
Pair("Source with most chapters","most"),
|
||||
Pair("Smart list","smart"),
|
||||
Pair("Prioritize source: Rock","rock"),
|
||||
Pair("Prioritize source: Duck","duck"),
|
||||
Pair("Prioritize source: Mini","mini"),
|
||||
Pair("Prioritize source: Fox","fox"),
|
||||
Pair("Prioritize source: Panda","panda")
|
||||
Pair("All sources, all chapters", "all"),
|
||||
Pair("Source with most chapters", "most"),
|
||||
Pair("Smart list", "smart"),
|
||||
Pair("Prioritize source: Rock", "rock"),
|
||||
Pair("Prioritize source: Duck", "duck"),
|
||||
Pair("Prioritize source: Mini", "mini"),
|
||||
Pair("Prioritize source: Fox", "fox"),
|
||||
Pair("Prioritize source: Panda", "panda")
|
||||
)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangareader
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.Response
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
abstract class MRP(
|
||||
override val name: String,
|
||||
override val baseUrl: String
|
||||
override val name: String,
|
||||
override val baseUrl: String
|
||||
) : ParsedHttpSource() {
|
||||
|
||||
override val lang = "en"
|
||||
|
@ -45,7 +49,7 @@ abstract class MRP(
|
|||
.substringAfterLast("/").substringBefore("\"")
|
||||
|
||||
val manga = mutableListOf<SManga>()
|
||||
document.select(popularMangaSelector()).map{manga.add(popularMangaFromElement(it))}
|
||||
document.select(popularMangaSelector()).map { manga.add(popularMangaFromElement(it)) }
|
||||
|
||||
return MangasPage(manga, document.select(nextPageSelector).hasText())
|
||||
}
|
||||
|
@ -57,7 +61,7 @@ abstract class MRP(
|
|||
.substringAfterLast("/").substringBefore("\"")
|
||||
|
||||
val manga = mutableListOf<SManga>()
|
||||
document.select(latestUpdatesSelector()).map{manga.add(latestUpdatesFromElement(it))}
|
||||
document.select(latestUpdatesSelector()).map { manga.add(latestUpdatesFromElement(it)) }
|
||||
|
||||
return MangasPage(manga, document.select(nextPageSelector).hasText())
|
||||
}
|
||||
|
@ -96,7 +100,7 @@ abstract class MRP(
|
|||
override fun latestUpdatesNextPageSelector() = "Not using this"
|
||||
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
return if (page==1) {
|
||||
return if (page == 1) {
|
||||
GET("$baseUrl/search/?w=$query&p", headers)
|
||||
} else {
|
||||
GET("$baseUrl/search/?w=$query&p=$nextPageNumber", headers)
|
||||
|
@ -173,5 +177,4 @@ abstract class MRP(
|
|||
}
|
||||
|
||||
override fun getFilterList() = FilterList()
|
||||
|
||||
}
|
|
@ -1,18 +1,23 @@
|
|||
package eu.kanade.tachiyomi.extension.en.mangasail
|
||||
|
||||
import com.github.salomonbrys.kotson.fromJson
|
||||
import com.github.salomonbrys.kotson.get
|
||||
import com.google.gson.Gson
|
||||
import com.google.gson.JsonObject
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.Jsoup.parse
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
import com.github.salomonbrys.kotson.*
|
||||
import com.google.gson.Gson
|
||||
import com.google.gson.JsonObject
|
||||
|
||||
class Mangasail : ParsedHttpSource() {
|
||||
|
||||
|
@ -38,7 +43,6 @@ class Mangasail : ParsedHttpSource() {
|
|||
return GET("$baseUrl/directory/hot")
|
||||
} else {
|
||||
return GET("$baseUrl/directory/hot?page=" + (page - 1))
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -73,7 +77,7 @@ class Mangasail : ParsedHttpSource() {
|
|||
|
||||
override fun latestUpdatesNextPageSelector(): String = "There is no next page"
|
||||
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
|
||||
if (page == 1) {
|
||||
return GET("$baseUrl/search/node/$query")
|
||||
} else {
|
||||
|
@ -96,7 +100,7 @@ class Mangasail : ParsedHttpSource() {
|
|||
}
|
||||
|
||||
// Function to get data fragments from website
|
||||
private fun getNodeDetail(node: String, field:String): String {
|
||||
private fun getNodeDetail(node: String, field: String): String {
|
||||
val requestUrl = "$baseUrl/sites/all/modules/authcache/modules/authcache_p13n/frontcontroller/authcache.php?a[field][0]=$node:full:en&r=asm/field/node/$field&o[q]=node/$node"
|
||||
val call = client.newCall(GET(requestUrl, headers)).execute()
|
||||
val gson = Gson()
|
||||
|
@ -112,7 +116,7 @@ class Mangasail : ParsedHttpSource() {
|
|||
}
|
||||
|
||||
// Get a page's node number so we can get data fragments for that page
|
||||
private fun getNodeNumber (document: Document) : String {
|
||||
private fun getNodeNumber(document: Document): String {
|
||||
return document.select("[rel=shortlink]").attr("href").split("/").last().replace("\"", "")
|
||||
}
|
||||
|
||||
|
@ -174,8 +178,7 @@ class Mangasail : ParsedHttpSource() {
|
|||
return pages
|
||||
}
|
||||
|
||||
override fun imageUrlParse(document: Document): String = throw UnsupportedOperationException("Not used")
|
||||
override fun imageUrlParse(document: Document): String = throw UnsupportedOperationException("Not used")
|
||||
|
||||
override fun getFilterList() = FilterList()
|
||||
|
||||
}
|
||||
|
|
|
@ -7,14 +7,14 @@ import eu.kanade.tachiyomi.source.model.Page
|
|||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import java.util.regex.Pattern
|
||||
import okhttp3.FormBody
|
||||
import okhttp3.HttpUrl
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.regex.Pattern
|
||||
import java.util.Locale
|
||||
|
||||
class Mangasee : ParsedHttpSource() {
|
||||
|
||||
|
@ -250,5 +250,4 @@ class Mangasee : ParsedHttpSource() {
|
|||
Genre("Yaoi"),
|
||||
Genre("Yuri")
|
||||
)
|
||||
|
||||
}
|
||||
|
|
|
@ -2,18 +2,20 @@ package eu.kanade.tachiyomi.extension.en.mangatown
|
|||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.network.POST
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import okhttp3.Response
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
|
||||
class Mangatown : ParsedHttpSource() {
|
||||
|
||||
|
@ -104,8 +106,8 @@ class Mangatown : ParsedHttpSource() {
|
|||
|
||||
private fun parseDate(date: String): Long {
|
||||
return when {
|
||||
date.contains("Today") -> Calendar.getInstance().apply{}.timeInMillis
|
||||
date.contains("Yesterday") -> Calendar.getInstance().apply{add(Calendar.DAY_OF_MONTH, -1)}.timeInMillis
|
||||
date.contains("Today") -> Calendar.getInstance().apply {}.timeInMillis
|
||||
date.contains("Yesterday") -> Calendar.getInstance().apply { add(Calendar.DAY_OF_MONTH, -1) }.timeInMillis
|
||||
else -> {
|
||||
try {
|
||||
SimpleDateFormat("MMM dd,yyyy", Locale.US).parse(date).time
|
||||
|
@ -126,12 +128,11 @@ class Mangatown : ParsedHttpSource() {
|
|||
override fun imageUrlRequest(page: Page) = GET(baseUrl + page.url)
|
||||
|
||||
// Get the image from the requested page
|
||||
override fun imageUrlParse (response: Response): String {
|
||||
override fun imageUrlParse(response: Response): String {
|
||||
return response.asJsoup().select("div#viewer img").attr("abs:src")
|
||||
}
|
||||
|
||||
override fun imageUrlParse(document: Document): String = throw UnsupportedOperationException("Not used")
|
||||
|
||||
override fun getFilterList() = FilterList()
|
||||
|
||||
}
|
||||
|
|
|
@ -1,17 +1,20 @@
|
|||
package eu.kanade.tachiyomi.extension.en.manmanga
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import java.text.SimpleDateFormat
|
||||
import okhttp3.OkHttpClient
|
||||
import rx.Observable
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.ParseException
|
||||
import java.text.SimpleDateFormat
|
||||
import rx.Observable
|
||||
|
||||
class ManManga : ParsedHttpSource() {
|
||||
override val name = "Man Manga"
|
||||
|
@ -30,21 +33,20 @@ class ManManga : ParsedHttpSource() {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
override fun popularMangaSelector() = "#scrollBox > #scrollContent > li > a"
|
||||
|
||||
override fun latestUpdatesSelector() = popularMangaSelector()
|
||||
|
||||
override fun searchMangaSelector() = popularMangaSelector()
|
||||
|
||||
override fun popularMangaRequest(page: Int)
|
||||
= GET("$baseUrl/category?sort=hot&page=$page", headers)
|
||||
override fun popularMangaRequest(page: Int) =
|
||||
GET("$baseUrl/category?sort=hot&page=$page", headers)
|
||||
|
||||
override fun latestUpdatesRequest(page: Int)
|
||||
= GET("$baseUrl/category?sort=new&page=$page", headers)
|
||||
override fun latestUpdatesRequest(page: Int) =
|
||||
GET("$baseUrl/category?sort=new&page=$page", headers)
|
||||
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList)
|
||||
= GET("$baseUrl/search?keyword=$query&page=$page", headers)
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList) =
|
||||
GET("$baseUrl/search?keyword=$query&page=$page", headers)
|
||||
|
||||
override fun popularMangaFromElement(element: Element) = SManga.create().apply {
|
||||
setUrlWithoutDomain(element.attr("href"))
|
||||
|
@ -86,18 +88,18 @@ class ManManga : ParsedHttpSource() {
|
|||
}
|
||||
}
|
||||
|
||||
override fun mangaDetailsParse(document: Document) = SManga.create().apply{
|
||||
override fun mangaDetailsParse(document: Document) = SManga.create().apply {
|
||||
val getThumbnailUrl = document.select(".bg-box .bg").attr("style")
|
||||
|
||||
author = document.select(".author").text().replace("Author:","").trim()
|
||||
author = document.select(".author").text().replace("Author:", "").trim()
|
||||
genre = document.select(".tags span").map {
|
||||
it.text().trim()
|
||||
}.joinToString(", ")
|
||||
status = document.select(".type").text().replace("Status:","").trim().let {
|
||||
status = document.select(".type").text().replace("Status:", "").trim().let {
|
||||
parseStatus(it)
|
||||
}
|
||||
description = document.select(".inner-text").text().trim()
|
||||
thumbnail_url = getThumbnailUrl.substring( getThumbnailUrl.indexOf("https://"), getThumbnailUrl.indexOf("')") )
|
||||
thumbnail_url = getThumbnailUrl.substring(getThumbnailUrl.indexOf("https://"), getThumbnailUrl.indexOf("')"))
|
||||
}
|
||||
|
||||
private fun parseStatus(status: String) = when {
|
||||
|
@ -120,12 +122,12 @@ class ManManga : ParsedHttpSource() {
|
|||
override fun pageListParse(document: Document): List<Page> {
|
||||
val pages = mutableListOf<Page>()
|
||||
|
||||
if(document.select("ul.img-list > li.unloaded > img").toString().isNotEmpty()) {
|
||||
if (document.select("ul.img-list > li.unloaded > img").toString().isNotEmpty()) {
|
||||
document.select("ul.img-list > li.unloaded > img").forEach {
|
||||
val imgUrl = it.attr("data-src")
|
||||
pages.add(Page(pages.size, "", "$imgUrl"))
|
||||
}
|
||||
} else {
|
||||
} else {
|
||||
document.select("ul.img-list > li.loaded > img").forEach {
|
||||
val imgUrl = it.attr("data-src")
|
||||
pages.add(Page(pages.size, "", "$imgUrl"))
|
||||
|
|
|
@ -1,18 +1,22 @@
|
|||
package eu.kanade.tachiyomi.extension.en.merakiscans
|
||||
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import eu.kanade.tachiyomi.util.asJsoup
|
||||
import eu.kanade.tachiyomi.network.asObservableSuccess
|
||||
import okhttp3.OkHttpClient
|
||||
import rx.Observable
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import java.text.ParseException
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Response
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import rx.Observable
|
||||
|
||||
class MerakiScans : ParsedHttpSource() {
|
||||
override val name = "MerakiScans"
|
||||
|
@ -35,11 +39,11 @@ class MerakiScans : ParsedHttpSource() {
|
|||
|
||||
override fun latestUpdatesSelector() = "#mangalisthome > #mangalistitem > #mangaitem > #manganame > a"
|
||||
|
||||
override fun popularMangaRequest(page: Int)
|
||||
= GET("$baseUrl/manga", headers)
|
||||
override fun popularMangaRequest(page: Int) =
|
||||
GET("$baseUrl/manga", headers)
|
||||
|
||||
override fun latestUpdatesRequest(page: Int)
|
||||
= GET(baseUrl, headers)
|
||||
override fun latestUpdatesRequest(page: Int) =
|
||||
GET(baseUrl, headers)
|
||||
|
||||
override fun popularMangaFromElement(element: Element) = SManga.create().apply {
|
||||
setUrlWithoutDomain(element.attr("href"))
|
||||
|
@ -55,12 +59,12 @@ class MerakiScans : ParsedHttpSource() {
|
|||
|
||||
override fun latestUpdatesNextPageSelector(): String? = null
|
||||
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList)
|
||||
= GET("$baseUrl/manga", headers)
|
||||
override fun searchMangaRequest(page: Int, query: String, filters: FilterList) =
|
||||
GET("$baseUrl/manga", headers)
|
||||
|
||||
override fun searchMangaSelector() = popularMangaSelector()
|
||||
|
||||
//This makes it so that if somebody searches for "views" it lists everything, also includes #'s.
|
||||
// This makes it so that if somebody searches for "views" it lists everything, also includes #'s.
|
||||
private fun searchMangaSelector(query: String) = "#all > #listitem > a:contains($query)"
|
||||
|
||||
override fun searchMangaFromElement(element: Element) = popularMangaFromElement(element)
|
||||
|
@ -85,12 +89,12 @@ class MerakiScans : ParsedHttpSource() {
|
|||
}
|
||||
}
|
||||
|
||||
override fun mangaDetailsParse(document: Document) = SManga.create().apply{
|
||||
override fun mangaDetailsParse(document: Document) = SManga.create().apply {
|
||||
val infoElement = document.select("#content2")
|
||||
author = infoElement.select("#detail_list > li:nth-child(5)").text().replace("Author:","").trim()
|
||||
artist = infoElement.select("#detail_list > li:nth-child(7)").text().replace("Artist:","").trim()
|
||||
author = infoElement.select("#detail_list > li:nth-child(5)").text().replace("Author:", "").trim()
|
||||
artist = infoElement.select("#detail_list > li:nth-child(7)").text().replace("Artist:", "").trim()
|
||||
genre = infoElement.select("#detail_list > li:nth-child(11) > a").joinToString { it.text().trim() }
|
||||
status = infoElement.select("#detail_list > li:nth-child(9)").text().replace("Status:","").trim().let {
|
||||
status = infoElement.select("#detail_list > li:nth-child(9)").text().replace("Status:", "").trim().let {
|
||||
parseStatus(it)
|
||||
}
|
||||
description = infoElement.select("#detail_list > span").text().trim()
|
||||
|
@ -121,7 +125,7 @@ class MerakiScans : ParsedHttpSource() {
|
|||
|
||||
override fun pageListParse(document: Document): List<Page> {
|
||||
val doc = document.toString()
|
||||
val imgarray = doc.substringAfter("var images = [").substringBefore("];").split(",").map { it.replace("\"","") }
|
||||
val imgarray = doc.substringAfter("var images = [").substringBefore("];").split(",").map { it.replace("\"", "") }
|
||||
val mangaslug = doc.substringAfter("var manga_slug = \"").substringBefore("\";")
|
||||
val chapnum = doc.substringAfter("var viewschapter = \"").substringBefore("\";")
|
||||
|
||||
|
|
|
@ -6,9 +6,20 @@ import com.github.salomonbrys.kotson.obj
|
|||
import com.github.salomonbrys.kotson.string
|
||||
import com.google.gson.JsonParser
|
||||
import eu.kanade.tachiyomi.network.GET
|
||||
import eu.kanade.tachiyomi.source.model.*
|
||||
import eu.kanade.tachiyomi.source.model.FilterList
|
||||
import eu.kanade.tachiyomi.source.model.MangasPage
|
||||
import eu.kanade.tachiyomi.source.model.Page
|
||||
import eu.kanade.tachiyomi.source.model.SChapter
|
||||
import eu.kanade.tachiyomi.source.model.SManga
|
||||
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
|
||||
import okhttp3.*
|
||||
import okhttp3.CacheControl
|
||||
import okhttp3.Headers
|
||||
import okhttp3.Interceptor
|
||||
import okhttp3.MediaType
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import okhttp3.ResponseBody
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import rx.Observable
|
||||
|
@ -55,8 +66,8 @@ class MyAnimeList : ParsedHttpSource() {
|
|||
|
||||
override fun searchMangaParse(response: Response): MangasPage = popularMangaParse(response)
|
||||
|
||||
override fun mangaDetailsRequest(manga: SManga): Request
|
||||
= GET(baseUrl + manga.url, headers, CacheControl.FORCE_NETWORK)
|
||||
override fun mangaDetailsRequest(manga: SManga): Request =
|
||||
GET(baseUrl + manga.url, headers, CacheControl.FORCE_NETWORK)
|
||||
|
||||
override fun mangaDetailsParse(document: Document): SManga = SManga.create().apply {
|
||||
val infoElement = document.select("div#content div.membership-manager table[width='100%'] tr").first()
|
||||
|
@ -104,7 +115,7 @@ class MyAnimeList : ParsedHttpSource() {
|
|||
.mapIndexed { i, fileName -> Page(i, "", "$imageBaseUrl/${fileName.string}?$queryParamsPart") }
|
||||
}
|
||||
|
||||
private fun imageIntercept(chain: Interceptor.Chain) : Response {
|
||||
private fun imageIntercept(chain: Interceptor.Chain): Response {
|
||||
val request = chain.request()
|
||||
val response = chain.proceed(request)
|
||||
|
||||
|
@ -133,7 +144,7 @@ class MyAnimeList : ParsedHttpSource() {
|
|||
*/
|
||||
private fun decodeImage(image: ByteArray): ByteArray {
|
||||
val n = image[1].toPositiveInt()
|
||||
val i = image.slice(2 until 2 + n).map{ it.toPositiveInt() }
|
||||
val i = image.slice(2 until 2 + n).map { it.toPositiveInt() }
|
||||
val r = image.drop(2 + n).map { it.toPositiveInt() }.toMutableList()
|
||||
|
||||
for ((o, b) in r.iterator().withIndex()) {
|
||||
|
|