Correct a bunch of lints from "en" to "ja" (#3075)

TacoTheDank 2020-05-10 17:46:00 -04:00 committed by GitHub
parent 4148c1831d
commit 99ee039d09
37 changed files with 169 additions and 157 deletions
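
Most of the diffs below apply a handful of recurring Kotlin lint idioms. A minimal, self-contained sketch of the main ones, for orientation only (illustrative code, not taken from any of the extension files):

// Illustrative only: the recurring idioms this commit applies, condensed into one snippet.

fun describeGenres(genres: List<String>): String =
    genres.joinToString { it.trim() }            // joinToString(transform) instead of map { }.joinToString()

fun classify(date: String): String =
    when {                                       // when expression instead of an if / else-if chain
        "Today" in date -> "today"
        "Yesterday" in date -> "yesterday"
        else -> "older"
    }

fun collectPages(baseUrl: String, paths: List<String>): List<String> {
    val pages = mutableListOf<String>()          // val instead of var for locals that are never reassigned
    for (i in paths.indices) {                   // indices / until instead of 0..size - 1
        if (paths[i].isNotEmpty()) {             // isNotEmpty() instead of length != 0
            pages.add("$baseUrl${paths[i]}")     // string template instead of concatenation
        }
    }
    return pages
}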


@ -130,20 +130,20 @@ class ComicExtra : ParsedHttpSource() {
}
private fun dateParse(dateAsString: String): Long {
var date: Date
try {
date = SimpleDateFormat("MMM dd, yyyy", Locale.ENGLISH).parse(dateAsString.replace(Regex("(st|nd|rd|th)"), ""))
val date: Date
date = try {
SimpleDateFormat("MMM dd, yyyy", Locale.ENGLISH).parse(dateAsString.replace(Regex("(st|nd|rd|th)"), ""))
} catch (e: ParseException) {
val m = datePattern.matcher(dateAsString)
if (dateAsString != "Today" && m.matches()) {
val amount = m.group(1).toInt()
date = Calendar.getInstance().apply {
Calendar.getInstance().apply {
add(Calendar.DATE, -amount)
}.time
} else if (dateAsString == "Today") {
date = Calendar.getInstance().time
Calendar.getInstance().time
} else return 0
}
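
The ComicExtra change works because try/catch is an expression in Kotlin: the result can be assigned to a val directly instead of reassigning a var inside each branch. A standalone sketch of the same idiom (hypothetical helper with simplified date handling, not the extension's actual code):

import java.text.ParseException
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.Locale

// try/catch as an expression: the last line of whichever branch runs becomes the value of `date`.
fun parseDateOrNow(dateAsString: String): Long {
    val date = try {
        SimpleDateFormat("MMM dd, yyyy", Locale.ENGLISH).parse(dateAsString)
    } catch (e: ParseException) {
        Calendar.getInstance().time // fall back to the current time if the string does not parse
    }
    return date.time
}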


@ -30,13 +30,13 @@ abstract class DynastyScans : ParsedHttpSource() {
override val supportsLatest = false
var parent: List<Node> = ArrayList()
private var parent: List<Node> = ArrayList()
var list = InternalList(ArrayList(), "")
private var list = InternalList(ArrayList(), "")
var imgList = InternalList(ArrayList(), "")
private var imgList = InternalList(ArrayList(), "")
var _valid: Validate = Validate(false, -1)
private var _valid: Validate = Validate(false, -1)
override fun popularMangaRequest(page: Int): Request {
return GET(popularMangaInitialUrl(), headers)
@ -71,8 +71,8 @@ abstract class DynastyScans : ParsedHttpSource() {
private fun buildListfromResponse(): List<Node> {
return client.newCall(Request.Builder().headers(headers)
.url(popularMangaInitialUrl()).build()).execute().asJsoup()
.select("div#main").filter { it.hasText() }.first().childNodes()
.url(popularMangaInitialUrl()).build()).execute().asJsoup()
.select("div#main").first { it.hasText() }.childNodes()
}
protected fun parseHeader(document: Document, manga: SManga): Boolean {
@ -157,9 +157,9 @@ abstract class DynastyScans : ParsedHttpSource() {
val pages = mutableListOf<Page>()
try {
val imageUrl = document.select("script").last().html().substringAfter("var pages = [").substringBefore("];")
var imageUrls = JSONArray("[$imageUrl]")
val imageUrls = JSONArray("[$imageUrl]")
(0..imageUrls.length() - 1)
(0 until imageUrls.length())
.map { imageUrls.getJSONObject(it) }
.map { baseUrl + it.get("image") }
.forEach { pages.add(Page(pages.size, "", it)) }
@ -169,9 +169,9 @@ abstract class DynastyScans : ParsedHttpSource() {
return pages
}
class InternalList : ArrayList<String> {
class InternalList(nodes: List<Node>, type: String) : ArrayList<String>() {
constructor(nodes: List<Node>, type: String) {
init {
if (type == "text") {
for (node in nodes) {
if (node is TextNode) {


@ -30,7 +30,7 @@ import rx.Observable
import uy.kohesive.injekt.Injekt
import uy.kohesive.injekt.api.get
open class Guya() : ConfigurableSource, HttpSource() {
open class Guya : ConfigurableSource, HttpSource() {
override val name = "Guya"
override val baseUrl = "https://guya.moe"
@ -46,13 +46,13 @@ open class Guya() : ConfigurableSource, HttpSource() {
Build.ID)
}
private val Scanlators: ScanlatorStore = ScanlatorStore()
private val scanlators: ScanlatorStore = ScanlatorStore()
// Preferences confirguration
// Preferences configuration
private val preferences: SharedPreferences by lazy {
Injekt.get<Application>().getSharedPreferences("source_$id", 0x0000)
}
private val SCANLATOR_PREFERENCE = "SCANLATOR_PREFERENCE"
private val scanlatorPreference = "SCANLATOR_PREFERENCE"
// Request builder for the "browse" page of the manga
override fun popularMangaRequest(page: Int): Request {
@ -129,7 +129,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
val metadata = JSONObject()
metadata.put("chapter", chapterNum)
metadata.put("scanlator", Scanlators.getKeyFromValue(chapter.scanlator.toString()))
metadata.put("scanlator", scanlators.getKeyFromValue(chapter.scanlator.toString()))
metadata.put("slug", json.getString("slug"))
metadata.put("folder", json.getJSONObject("chapters")
.getJSONObject(chapterNum)
@ -203,8 +203,8 @@ open class Guya() : ConfigurableSource, HttpSource() {
title = "Preferred scanlator"
entries = arrayOf<String>()
entryValues = arrayOf<String>()
for (key in Scanlators.keys()) {
entries += Scanlators.getValueFromKey(key)
for (key in scanlators.keys()) {
entries += scanlators.getValueFromKey(key)
entryValues += key
}
summary = "Current: %s\n\n" +
@ -216,7 +216,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
setOnPreferenceChangeListener { _, newValue ->
val selected = newValue.toString()
preferences.edit().putString(SCANLATOR_PREFERENCE, selected).commit()
preferences.edit().putString(scanlatorPreference, selected).commit()
}
}
@ -229,8 +229,8 @@ open class Guya() : ConfigurableSource, HttpSource() {
title = "Preferred scanlator"
entries = arrayOf<String>()
entryValues = arrayOf<String>()
for (key in Scanlators.keys()) {
entries += Scanlators.getValueFromKey(key)
for (key in scanlators.keys()) {
entries += scanlators.getValueFromKey(key)
entryValues += key
}
summary = "Current: %s\n\n" +
@ -242,7 +242,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
setOnPreferenceChangeListener { _, newValue ->
val selected = newValue.toString()
preferences.edit().putString(SCANLATOR_PREFERENCE, selected).commit()
preferences.edit().putString(scanlatorPreference, selected).commit()
}
}
@ -252,7 +252,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
// ------------- Helpers and whatnot ---------------
private fun parseChapterList(payload: String): List<SChapter> {
val SORT_KEY = "preferred_sort"
val sortKey = "preferred_sort"
val response = JSONObject(payload)
val chapters = response.getJSONObject("chapters")
@ -263,9 +263,9 @@ open class Guya() : ConfigurableSource, HttpSource() {
while (iter.hasNext()) {
val chapter = iter.next()
val chapterObj = chapters.getJSONObject(chapter)
var preferredSort = response.getJSONArray(SORT_KEY)
if (chapterObj.has(SORT_KEY)) {
preferredSort = chapterObj.getJSONArray(SORT_KEY)
var preferredSort = response.getJSONArray(sortKey)
if (chapterObj.has(sortKey)) {
preferredSort = chapterObj.getJSONArray(sortKey)
}
chapterList.add(parseChapterFromJson(chapterObj, chapter, preferredSort, response.getString("slug")))
}
@ -306,7 +306,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
// Get the scanlator info based on group ranking; do it first since we need it later
val firstGroupId = getBestScanlator(json.getJSONObject("groups"), sort)
chapter.scanlator = Scanlators.getValueFromKey(firstGroupId)
chapter.scanlator = scanlators.getValueFromKey(firstGroupId)
chapter.date_upload = json.getJSONObject("release_date").getLong(firstGroupId) * 1000
chapter.name = num + " - " + json.getString("title")
chapter.chapter_number = num.toFloat()
@ -331,7 +331,7 @@ open class Guya() : ConfigurableSource, HttpSource() {
}
private fun getBestScanlator(json: JSONObject, sort: JSONArray): String {
val preferred = preferences.getString(SCANLATOR_PREFERENCE, null)
val preferred = preferences.getString(scanlatorPreference, null)
if (preferred != null && json.has(preferred)) {
return preferred
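
The renames in this file follow the Kotlin naming convention the lint checks enforce: UPPER_SNAKE_CASE is reserved for compile-time constants (const val, usually in a companion object), while ordinary non-const properties such as scanlatorPreference use lowerCamelCase. A hypothetical illustration of that split (not Guya's actual code):

// Hypothetical class illustrating the convention; names here are made up.
class ScanlatorConfig {
    companion object {
        const val DEFAULT_SCANLATOR_KEY = "1"                 // compile-time constant: UPPER_SNAKE_CASE
    }

    private val scanlatorPreference = "SCANLATOR_PREFERENCE"  // runtime property: lowerCamelCase name
    private val scanlators = mapOf(DEFAULT_SCANLATOR_KEY to "Main group")

    fun preferenceKey(): String = scanlatorPreference
    fun defaultScanlator(): String = scanlators.getValue(DEFAULT_SCANLATOR_KEY)
}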


@ -35,7 +35,7 @@ class Hentai2Read : ParsedHttpSource() {
override val client: OkHttpClient = network.cloudflareClient
companion object {
val imageBaseUrl = "https://static.hentaicdn.com/hentai"
const val imageBaseUrl = "https://static.hentaicdn.com/hentai"
val pagesUrlPattern by lazy {
Pattern.compile("""'images' : \[\"(.*?)[,]?\"\]""")
@ -1008,7 +1008,7 @@ class Hentai2Read : ParsedHttpSource() {
Tag("Hatsukoi Limited", 969),
Tag("Hayate no Gotoku", 1065),
Tag("He is My Master", 1280),
Tag("Heartcatch Precure!", 1791),
Tag("Heartcatch Precure!", 1791),
Tag("Heartful Maman", 2531),
Tag("Heavy Object", 2457),
Tag("Hellsing", 2248),


@ -107,12 +107,16 @@ class Hiveworks : ParsedHttpSource() {
val document = response.asJsoup()
val selectManga = document.select(searchMangaSelector())
val mangas = if (url.endsWith("localSearch")) {
selectManga.filter { it.text().contains(searchQuery, true) }.map { element -> searchMangaFromElement(element) }
} else if (url.contains("originals")) {
selectManga.map { element -> searchOriginalMangaFromElement(element) }
} else {
selectManga.map { element -> searchMangaFromElement(element) }
val mangas = when {
url.endsWith("localSearch") -> {
selectManga.filter { it.text().contains(searchQuery, true) }.map { element -> searchMangaFromElement(element) }
}
url.contains("originals") -> {
selectManga.map { element -> searchOriginalMangaFromElement(element) }
}
else -> {
selectManga.map { element -> searchMangaFromElement(element) }
}
}
val hasNextPage = searchMangaNextPageSelector()?.let { selector ->


@ -84,7 +84,7 @@ class Mangaeden : ParsedHttpSource() {
author = infos.select("a[href^=/en/en-directory/?author]").first()?.text()
artist = infos.select("a[href^=/en/en-directory/?artist]").first()?.text()
genre = infos.select("a[href^=/en/en-directory/?categoriesInc]").map { it.text() }.joinToString()
genre = infos.select("a[href^=/en/en-directory/?categoriesInc]").joinToString { it.text() }
description = document.select("h2#mangaDescription").text()
status = parseStatus(infos.select("h4:containsOwn(Status)").first()?.nextSibling().toString())
val img = infos.select("div.mangaImage2 > img").first()?.attr("src")
@ -108,14 +108,16 @@ class Mangaeden : ParsedHttpSource() {
}
private fun parseChapterDate(date: String): Long =
if ("Today" in date) {
when {
"Today" in date -> {
Calendar.getInstance().apply {
set(Calendar.HOUR_OF_DAY, 0)
set(Calendar.MINUTE, 0)
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else if ("Yesterday" in date) {
}
"Yesterday" in date -> {
Calendar.getInstance().apply {
add(Calendar.DATE, -1)
set(Calendar.HOUR_OF_DAY, 0)
@ -123,11 +125,13 @@ class Mangaeden : ParsedHttpSource() {
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else try {
}
else -> try {
SimpleDateFormat("MMM d, yyyy", Locale.ENGLISH).parse(date).time
} catch (e: ParseException) {
0L
}
}
override fun pageListParse(document: Document): List<Page> = mutableListOf<Page>().apply {
document.select("option[value^=/en/en-manga/]").forEach {
@ -141,7 +145,7 @@ class Mangaeden : ParsedHttpSource() {
private class Genre(name: String, val id: String) : Filter.TriState(name)
private class TextField(name: String, val key: String) : Filter.Text(name)
private class OrderBy : Filter.Sort("Order by", arrayOf("Manga title", "Views", "Chapters", "Latest chapter"),
Filter.Sort.Selection(1, false))
Selection(1, false))
private class StatusList(statuses: List<NamedId>) : Filter.Group<NamedId>("Stato", statuses)
private class Types(types: List<NamedId>) : Filter.Group<NamedId>("Tipo", types)


@ -72,14 +72,14 @@ class Mangafreak : ParsedHttpSource() {
uri.appendPath("Genre")
when (filter) {
is GenreList -> {
uri.appendPath(filter.state.map {
uri.appendPath(filter.state.joinToString("") {
when (it.state) {
Filter.TriState.STATE_IGNORE -> "0"
Filter.TriState.STATE_INCLUDE -> "1"
Filter.TriState.STATE_EXCLUDE -> "2"
else -> "0"
}
}.joinToString(""))
})
}
}
uri.appendEncodedPath("Status/0/Type/0")
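
The Mangafreak filter change above condenses the tri-state genre filters into one path segment with joinToString(separator, transform). A plain sketch of that pattern without the Android Uri builder (hypothetical state values):

// Hypothetical tri-state encoding: 0 = ignore, 1 = include, 2 = exclude.
fun genrePathSegment(states: List<Int>): String =
    states.joinToString("") { state ->
        when (state) {
            1 -> "1"
            2 -> "2"
            else -> "0"
        }
    }

// genrePathSegment(listOf(0, 1, 2, 0)) == "0120"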


@ -92,7 +92,7 @@ class Mangahere : ParsedHttpSource() {
is CompletionList -> url.addEncodedQueryParameter("st", it.state.toString())
is GenreList -> {
val genreFilter = filters.find { it is GenreList } as GenreList?
val genreFilter = filters.find { filter -> filter is GenreList } as GenreList?
val includeGenres = ArrayList<Int>()
val excludeGenres = ArrayList<Int>()
genreFilter?.state?.forEach { genre ->


@ -64,9 +64,7 @@ class MangaJar : ParsedHttpSource() {
description = document.select("div.manga-description.entry > div").text()
thumbnail_url = document.select("div.row > div > img").attr("src")
genre = document.select("div.post-info > span > a[href*=genre]").joinToString { it.text() }
status = document.select("span:has(b)").get(1).text().let {
parseStatus(it)
}
status = parseStatus(document.select("span:has(b)")[1].text())
}
private fun parseStatus(status: String) = when {


@ -2,18 +2,14 @@ package eu.kanade.tachiyomi.extension.en.mangakatana
import eu.kanade.tachiyomi.network.GET
import eu.kanade.tachiyomi.source.model.FilterList
import eu.kanade.tachiyomi.source.model.MangasPage
import eu.kanade.tachiyomi.source.model.Page
import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
import eu.kanade.tachiyomi.util.asJsoup
import java.text.SimpleDateFormat
import java.util.Locale
import okhttp3.OkHttpClient
import okhttp3.Response
import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import java.text.SimpleDateFormat
import java.util.Locale
class MangaKatana: ParsedHttpSource() {
override val name = "MangaKatana"
@ -57,9 +53,7 @@ class MangaKatana: ParsedHttpSource() {
override fun mangaDetailsParse(document: Document) = SManga.create().apply {
author = document.select(".author").text()
description = document.select(".summary > p").text()
status = document.select(".value.status").let {
parseStatus(it.text())
}
status = parseStatus(document.select(".value.status").text())
genre = document.select(".genres > a").joinToString { it.text() }
thumbnail_url = document.select(".cover > img").attr("abs:data-src")
}


@ -259,11 +259,11 @@ class MangaLife : HttpSource() {
.let { if (it.isEmpty()) "" else "$it/" }
val path = "$host/manga/$titleURI/$seasonURI"
var chNum = chapterImage(curChapter["Chapter"].string)
val chNum = chapterImage(curChapter["Chapter"].string)
return IntRange(1, pageTotal).mapIndexed { i, _ ->
var imageNum = (i + 1).toString().let { "000$it" }.let { it.substring(it.length - 3) }
Page(i, "", path + "$chNum-$imageNum.png")
val imageNum = (i + 1).toString().let { "000$it" }.let { it.substring(it.length - 3) }
Page(i, "", "$path$chNum-$imageNum.png")
}
}


@ -18,7 +18,7 @@ import rx.Observable
class MangaMainac : ParsedHttpSource() {
companion object {
val sourceList = listOf<Pair<String, String>>(
val sourceList = listOf(
Pair("Boku No Hero Academia", "https://w15.readheroacademia.com"),
Pair("One Punch Man", "https://w12.readonepunchman.net"),
Pair("One Punch Man (webcomic)", "https://onewebcomic.net"),


@ -31,7 +31,7 @@ class Mangasail : ParsedHttpSource() {
override val client: OkHttpClient = network.cloudflareClient
/* Site loads some mannga info (manga cover, author name, status, etc.) client side through JQuery
/* Site loads some manga info (manga cover, author name, status, etc.) client side through JQuery
need to add this header for when we request these data fragments
Also necessary for latest updates request */
override fun headersBuilder() = super.headersBuilder().add("X-Authcache", "1")!!
@ -39,10 +39,10 @@ class Mangasail : ParsedHttpSource() {
override fun popularMangaSelector() = "tbody tr"
override fun popularMangaRequest(page: Int): Request {
if (page == 1) {
return GET("$baseUrl/directory/hot")
return if (page == 1) {
GET("$baseUrl/directory/hot")
} else {
return GET("$baseUrl/directory/hot?page=" + (page - 1))
GET("$baseUrl/directory/hot?page=" + (page - 1))
}
}
@ -78,10 +78,10 @@ class Mangasail : ParsedHttpSource() {
override fun latestUpdatesNextPageSelector(): String = "There is no next page"
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
if (page == 1) {
return GET("$baseUrl/search/node/$query")
return if (page == 1) {
GET("$baseUrl/search/node/$query")
} else {
return GET("$baseUrl/search/node/$query?page=" + (page - 1))
GET("$baseUrl/search/node/$query?page=" + (page - 1))
}
}


@ -73,7 +73,7 @@ class ManManga : ParsedHttpSource() {
searchMangaFromElement(element)
}
val hasNextPage = searchMangaNextPageSelector()?.let { selector ->
val hasNextPage = searchMangaNextPageSelector().let { selector ->
document.select(selector).first()
} != null
@ -92,9 +92,9 @@ class ManManga : ParsedHttpSource() {
val getThumbnailUrl = document.select(".bg-box .bg").attr("style")
author = document.select(".author").text().replace("Author", "").trim()
genre = document.select(".tags span").map {
genre = document.select(".tags span").joinToString(", ") {
it.text().trim()
}.joinToString(", ")
}
status = document.select(".type").text().replace("Status", "").trim().let {
parseStatus(it)
}
@ -125,12 +125,12 @@ class ManManga : ParsedHttpSource() {
if (document.select("ul.img-list > li.unloaded > img").toString().isNotEmpty()) {
document.select("ul.img-list > li.unloaded > img").forEach {
val imgUrl = it.attr("data-src")
pages.add(Page(pages.size, "", "$imgUrl"))
pages.add(Page(pages.size, "", imgUrl))
}
} else {
document.select("ul.img-list > li.loaded > img").forEach {
val imgUrl = it.attr("data-src")
pages.add(Page(pages.size, "", "$imgUrl"))
pages.add(Page(pages.size, "", imgUrl))
}
}
return pages


@ -85,7 +85,7 @@ class Perveden : ParsedHttpSource() {
author = infos.select("a[href^=/en/en-directory/?author]").first()?.text()
artist = infos.select("a[href^=/en/en-directory/?artist]").first()?.text()
genre = infos.select("a[href^=/en/en-directory/?categoriesInc]").map { it.text() }.joinToString()
genre = infos.select("a[href^=/en/en-directory/?categoriesInc]").joinToString { it.text() }
description = document.select("h2#mangaDescription").text()
status = parseStatus(infos.select("h4:containsOwn(Status)").first()?.nextSibling().toString())
val img = infos.select("div.mangaImage2 > img").first()?.attr("src")
@ -109,14 +109,16 @@ class Perveden : ParsedHttpSource() {
}
private fun parseChapterDate(date: String): Long =
if ("Today" in date) {
when {
"Today" in date -> {
Calendar.getInstance().apply {
set(Calendar.HOUR_OF_DAY, 0)
set(Calendar.MINUTE, 0)
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else if ("Yesterday" in date) {
}
"Yesterday" in date -> {
Calendar.getInstance().apply {
add(Calendar.DATE, -1)
set(Calendar.HOUR_OF_DAY, 0)
@ -124,11 +126,13 @@ class Perveden : ParsedHttpSource() {
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else try {
}
else -> try {
SimpleDateFormat("MMM d, yyyy", Locale.ENGLISH).parse(date).time
} catch (e: ParseException) {
0L
}
}
override fun pageListParse(document: Document): List<Page> = mutableListOf<Page>().apply {
document.select("option[value^=/en/en-manga/]").forEach {
@ -142,7 +146,7 @@ class Perveden : ParsedHttpSource() {
private class TextField(name: String, val key: String) : Filter.Text(name)
private class GenreField(name: String, val key: String) : Filter.Text(name)
private class OrderBy : Filter.Sort("Order by", arrayOf("Manga title", "Views", "Chapters", "Latest chapter"),
Filter.Sort.Selection(1, false))
Selection(1, false))
private class StatusList(statuses: List<NamedId>) : Filter.Group<NamedId>("Stato", statuses)
private class Types(types: List<NamedId>) : Filter.Group<NamedId>("Tipo", types)


@ -144,18 +144,25 @@ class Readmangatoday : ParsedHttpSource() {
val timeAgo = Integer.parseInt(dateWords[0])
val calendar = Calendar.getInstance()
if (dateWords[1].contains("Minute")) {
calendar.add(Calendar.MINUTE, -timeAgo)
} else if (dateWords[1].contains("Hour")) {
calendar.add(Calendar.HOUR_OF_DAY, -timeAgo)
} else if (dateWords[1].contains("Day")) {
calendar.add(Calendar.DAY_OF_YEAR, -timeAgo)
} else if (dateWords[1].contains("Week")) {
calendar.add(Calendar.WEEK_OF_YEAR, -timeAgo)
} else if (dateWords[1].contains("Month")) {
calendar.add(Calendar.MONTH, -timeAgo)
} else if (dateWords[1].contains("Year")) {
calendar.add(Calendar.YEAR, -timeAgo)
when {
dateWords[1].contains("Minute") -> {
calendar.add(Calendar.MINUTE, -timeAgo)
}
dateWords[1].contains("Hour") -> {
calendar.add(Calendar.HOUR_OF_DAY, -timeAgo)
}
dateWords[1].contains("Day") -> {
calendar.add(Calendar.DAY_OF_YEAR, -timeAgo)
}
dateWords[1].contains("Week") -> {
calendar.add(Calendar.WEEK_OF_YEAR, -timeAgo)
}
dateWords[1].contains("Month") -> {
calendar.add(Calendar.MONTH, -timeAgo)
}
dateWords[1].contains("Year") -> {
calendar.add(Calendar.YEAR, -timeAgo)
}
}
return calendar.timeInMillis


@ -139,7 +139,7 @@ class Renascans : ParsedHttpSource() {
val pages = mutableListOf<Page>()
document.select("div.col-xs-12 img")?.forEach {
var page = it.attr("data-src")
val page = it.attr("data-src")
if (page.isNotEmpty()) {
pages.add(Page(pages.size, "", page))
}


@ -122,8 +122,8 @@ class Webcomics : ParsedHttpSource() {
/* Source only allows 20 chapters to be readable on their website, trying to read past
that results in a page list empty error; so might as well not grab them. */
if (document.select("${chapterListSelector()}:nth-child(21)").isEmpty()) {
return document.select(chapterListSelector()).asReversed().map { chapterFromElement(it) }
return if (document.select("${chapterListSelector()}:nth-child(21)").isEmpty()) {
document.select(chapterListSelector()).asReversed().map { chapterFromElement(it) }
} else {
val chapters = mutableListOf<SChapter>()
for (i in 1..20)
@ -133,7 +133,7 @@ class Webcomics : ParsedHttpSource() {
lockedNotification.name = "[Attention] Additional chapters are restricted by the source to their own app"
lockedNotification.url = "wiki.html"
chapters.add(lockedNotification)
return chapters.reversed()
chapters.reversed()
}
}


@ -1,7 +1,6 @@
package eu.kanade.tachiyomi.extension.es.heavenmanga
import eu.kanade.tachiyomi.network.GET
import eu.kanade.tachiyomi.network.asObservableSuccess
import eu.kanade.tachiyomi.source.model.Filter
import eu.kanade.tachiyomi.source.model.FilterList
import eu.kanade.tachiyomi.source.model.MangasPage
@ -10,16 +9,12 @@ import eu.kanade.tachiyomi.source.model.SChapter
import eu.kanade.tachiyomi.source.model.SManga
import eu.kanade.tachiyomi.source.online.ParsedHttpSource
import eu.kanade.tachiyomi.util.asJsoup
import java.text.SimpleDateFormat
import java.util.Locale
import okhttp3.Headers
import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.Response
import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import org.jsoup.select.Elements
import rx.Observable
class HeavenManga : ParsedHttpSource() {
@ -29,7 +24,7 @@ class HeavenManga : ParsedHttpSource() {
override val lang = "es"
// latest is broken on the site, it's the same as populaar so turning it off
// latest is broken on the site, it's the same as popular so turning it off
override val supportsLatest = false
override val client: OkHttpClient = network.cloudflareClient


@ -56,7 +56,7 @@ class Ikuhentai : ParsedHttpSource() {
url.addQueryParameter("post_type", "wp-manga")
val pattern = "\\s+".toRegex()
val q = query.replace(pattern, "+")
if (query.length > 0) {
if (query.isNotEmpty()) {
url.addQueryParameter("s", q)
} else {
url.addQueryParameter("s", "")
@ -166,7 +166,7 @@ class Ikuhentai : ParsedHttpSource() {
document.select("div.reading-content * img").forEach { element ->
val url = element.attr("data-src")
i++
if (url.length != 0) {
if (url.isNotEmpty()) {
pages.add(Page(i, "", url))
}
}


@ -366,7 +366,7 @@ class VMP : VCPVMP("VMP", "https://vermangasporno.com") {
))
}
open class UriPartFilter(displayName: String, val vals: Array<Pair<String, String>>) :
open class UriPartFilter(displayName: String, private val vals: Array<Pair<String, String>>) :
Filter.Select<String>(displayName, vals.map { it.first }.toTypedArray()) {
fun toUriPart() = vals[state].second
}


@ -285,7 +285,7 @@ class Japscan : ParsedHttpSource() {
val ks = "0123456789abcdefghijklmnopqrstuvwxyz".toCharArray()
for (i in 0 until realPageUrls.count())
for (j in 0 until realPageUrls[i].length) {
for (j in realPageUrls[i].indices) {
if (realPageUrls[i][j] != pageUrls[i][j]) {
ks[az.indexOf(pageUrls[i][j])] = realPageUrls[i][j]
}


@ -139,7 +139,7 @@ override fun mangaDetailsParse(document: Document): SManga {
document.select("div#chimg img").forEach { element ->
val url = element.attr("src")
i++
if (url.length != 0) {
if (url.isNotEmpty()) {
pages.add(Page(i, "", url))
}
}


@ -143,7 +143,7 @@ class Kiryuu : ParsedHttpSource() {
document.select("div#readerarea img").forEach { element ->
val url = element.attr("src")
i++
if (url.length != 0) {
if (url.isNotEmpty()) {
pages.add(Page(i, "", url))
}
}


@ -194,7 +194,7 @@ class Komikcast : ParsedHttpSource() {
document.select("div#readerarea img").forEach { element ->
val url = element.attr("src")
i++
if (url.length > 0) {
if (url.isNotEmpty()) {
pages.add(Page(i, "", url))
}
}
@ -204,7 +204,7 @@ class Komikcast : ParsedHttpSource() {
override fun imageUrlParse(document: Document) = ""
override fun imageRequest(page: Page): Request {
var headers = Headers.Builder()
val headers = Headers.Builder()
headers.apply {
add("Referer", baseUrl)
add("User-Agent", "Mozilla/5.0 (Linux; U; Android 4.4.2; en-us; LGMS323 Build/KOT49I.MS32310c) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/76.0.3809.100 Mobile Safari/537.36")


@ -26,7 +26,7 @@ class KomikIndo : ParsedHttpSource() {
override val client: OkHttpClient = network.cloudflareClient
override fun popularMangaRequest(page: Int): Request {
val url = if (page == 1) "$baseUrl" else "$baseUrl/page/$page"
val url = if (page == 1) baseUrl else "$baseUrl/page/$page"
return GET(url, headers)
}
@ -37,7 +37,7 @@ class KomikIndo : ParsedHttpSource() {
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
var builtUrl = if (page == 1) "$baseUrl/manga/" else "$baseUrl/manga/page/$page/"
if (!query.equals("")) {
if (query != "") {
builtUrl = if (page == 1) "$baseUrl/search/$query/" else "$baseUrl/search/$query/page/$page/"
} else if (filters.size > 0) {
filters.forEach { filter ->
@ -151,7 +151,7 @@ class KomikIndo : ParsedHttpSource() {
document.select("div#readerarea img").forEach { element ->
val url = element.attr("src")
i++
if (url.length != 0) {
if (url.isNotEmpty()) {
pages.add(Page(i, "", url))
}
}


@ -53,9 +53,7 @@ class Komiku : ParsedHttpSource() {
override fun mangaDetailsParse(document: Document) = SManga.create().apply {
description = document.select("#Sinopsis > p").text().trim()
genre = document.select("li[itemprop=genre] > a").joinToString { it.text() }
status = document.select("table.inftable > tr > td").get(11).text().let {
parseStatus(it)
}
status = parseStatus(document.select("table.inftable > tr > td")[11].text())
thumbnail_url = document.select("div.ims > img").attr("src")
}


@ -58,11 +58,9 @@ class MangaKita : ParsedHttpSource() {
override fun searchMangaNextPageSelector() = latestUpdatesNextPageSelector()
override fun mangaDetailsParse(document: Document) = SManga.create().apply {
author = document.select("div.row > div").get(5).ownText().trim()
author = document.select("div.row > div")[5].ownText().trim()
genre = document.select("[rel=tag]").joinToString { it.text() }
status = document.select("div.row > div").get(10).ownText().let {
parseStatus(it)
}
status = parseStatus(document.select("div.row > div")[10].ownText())
thumbnail_url = document.select("div#wrap img").attr("src")
}


@ -20,7 +20,7 @@ class Mangaku : ParsedHttpSource() {
override val baseUrl = "https://mangaku.in/"
override val lang = "id"
override val supportsLatest = true
var searchQuery = ""
private var searchQuery = ""
override fun popularMangaRequest(page: Int): Request {
return GET(baseUrl + "daftar-komik-bahasa-indonesia/", headers)
@ -142,7 +142,7 @@ class Mangaku : ParsedHttpSource() {
mainUrl = "https://mangaku.co"
}
if (imageUrl.startsWith("//")) {
imageUrl = "https:" + imageUrl
imageUrl = "https:$imageUrl"
} else if (imageUrl.startsWith("/")) {
imageUrl = mainUrl + imageUrl
}


@ -141,7 +141,7 @@ class Mangashiro : ParsedHttpSource() {
document.select("div#readerarea img").forEach { element ->
val url = element.attr("src")
i++
if (url.length != 0) {
if (url.isNotEmpty()) {
pages.add(Page(i, "", url))
}
}


@ -53,7 +53,7 @@ class ManhuaID : ParsedHttpSource() {
override fun mangaDetailsRequest(manga: SManga) = GET(baseUrl + manga.url, headers)
override fun mangaDetailsParse(document: Document) = SManga.create().apply {
author = document.select("table").first().select("td").get(3).text()
author = document.select("table").first().select("td")[3].text()
title = document.select("title").text()
description = document.select(".text-justify").text()
genre = document.select("span.badge.badge-success.mr-1.mb-1").joinToString { it.text() }


@ -127,7 +127,7 @@ class Neumanga : ParsedHttpSource() {
val manga = SManga.create()
manga.author = mangaInformationWrapper.select("span a[href*=author_search_mode]").first().text()
manga.artist = mangaInformationWrapper.select("span a[href*=artist_search_mode]").first().text()
manga.genre = mangaInformationWrapper.select("a[href*=genre]").map { it.text() }.joinToString()
manga.genre = mangaInformationWrapper.select("a[href*=genre]").joinToString { it.text() }
manga.thumbnail_url = mangaInformationWrapper.select("img.imagemg").first().attr("src")
manga.description = document.select(".summary").first().textNodes()[1].toString()
manga.status = parseStatus(mangaInformationWrapper.select("span a[href*=manga_status]").first().text())


@ -52,12 +52,10 @@ class Ngomik : ParsedHttpSource() {
override fun mangaDetailsParse(document: Document) = SManga.create().apply {
title = document.select("h1[itemprop=headline]").text()
author = document.select("div.listinfo li").get(2).text().removePrefix("Author: ")
author = document.select("div.listinfo li")[2].text().removePrefix("Author: ")
description = document.select(".desc").text()
genre = document.select("div.gnr > a").joinToString { it.text() }
status = document.select("div.listinfo li").get(3).text().let {
parseStatus(it)
}
status = parseStatus(document.select("div.listinfo li")[3].text())
thumbnail_url = document.select("div[itemprop=image] > img").attr("src")
}


@ -68,8 +68,8 @@ class HentaiFantasy : ParsedHttpSource() {
override fun searchMangaSelector() = popularMangaSelector()
override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request {
var tags = mutableListOf<String>()
var paths = mutableListOf<String>()
val tags = mutableListOf<String>()
val paths = mutableListOf<String>()
for (filter in if (filters.isEmpty()) getFilterList() else filters) {
when (filter) {
is TagList -> filter.state
@ -82,7 +82,7 @@ class HentaiFantasy : ParsedHttpSource() {
}
}
var searchTags = tags.size > 0
val searchTags = tags.size > 0
if (!searchTags && query.length < 3) {
throw Exception("Inserisci almeno tre caratteri")
}
@ -97,7 +97,7 @@ class HentaiFantasy : ParsedHttpSource() {
}
}
var searchPath = if (!searchTags) {
val searchPath = if (!searchTags) {
"search"
} else if (paths.size == 1) {
"tag/${paths[0]}/$page"
@ -116,7 +116,7 @@ class HentaiFantasy : ParsedHttpSource() {
override fun mangaDetailsParse(document: Document): SManga {
val manga = SManga.create()
var genres = mutableListOf<String>()
val genres = mutableListOf<String>()
document.select("div#tablelist > div.row").forEach { row ->
when (row.select("div.cell > b").first().text().trim()) {
"Autore" -> manga.author = row.select("div.cell > a").text().trim()
@ -149,17 +149,21 @@ class HentaiFantasy : ParsedHttpSource() {
}
private fun parseChapterDate(date: String): Long {
return if (date == "Oggi") {
Calendar.getInstance().timeInMillis
} else if (date == "Ieri") {
Calendar.getInstance().apply {
add(Calendar.DAY_OF_YEAR, -1)
}.timeInMillis
} else {
try {
dateFormat.parse(date).time
} catch (e: ParseException) {
0L
return when (date) {
"Oggi" -> {
Calendar.getInstance().timeInMillis
}
"Ieri" -> {
Calendar.getInstance().apply {
add(Calendar.DAY_OF_YEAR, -1)
}.timeInMillis
}
else -> {
try {
dateFormat.parse(date).time
} catch (e: ParseException) {
0L
}
}
}
}


@ -84,7 +84,7 @@ class Mangaeden : ParsedHttpSource() {
author = infos.select("a[href^=/it/it-directory/?author]").first()?.text()
artist = infos.select("a[href^=/it/it-directory/?artist]").first()?.text()
genre = infos.select("a[href^=/it/it-directory/?categoriesInc]").map { it.text() }.joinToString()
genre = infos.select("a[href^=/it/it-directory/?categoriesInc]").joinToString { it.text() }
description = document.select("h2#mangaDescription").text()
status = parseStatus(infos.select("h4:containsOwn(Stato)").first()?.nextSibling().toString())
val img = infos.select("div.mangaImage2 > img").first()?.attr("src")
@ -108,14 +108,16 @@ class Mangaeden : ParsedHttpSource() {
}
private fun parseChapterDate(date: String): Long =
if ("Oggi" in date) {
when {
"Oggi" in date -> {
Calendar.getInstance().apply {
set(Calendar.HOUR_OF_DAY, 0)
set(Calendar.MINUTE, 0)
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else if ("Ieri" in date) {
}
"Ieri" in date -> {
Calendar.getInstance().apply {
add(Calendar.DATE, -1)
set(Calendar.HOUR_OF_DAY, 0)
@ -123,11 +125,13 @@ class Mangaeden : ParsedHttpSource() {
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else try {
}
else -> try {
SimpleDateFormat("d MMM yyyy", Locale.ITALIAN).parse(date).time
} catch (e: ParseException) {
0L
}
}
override fun pageListParse(document: Document): List<Page> = mutableListOf<Page>().apply {
document.select("option[value^=/it/it-manga/]").forEach {
@ -141,7 +145,7 @@ class Mangaeden : ParsedHttpSource() {
private class Genre(name: String, val id: String) : Filter.TriState(name)
private class TextField(name: String, val key: String) : Filter.Text(name)
private class OrderBy : Filter.Sort("Ordina per", arrayOf("Titolo manga", "Visite", "Capitoli", "Ultimo capitolo"),
Filter.Sort.Selection(1, false))
Selection(1, false))
private class StatusList(statuses: List<NamedId>) : Filter.Group<NamedId>("Stato", statuses)
private class Types(types: List<NamedId>) : Filter.Group<NamedId>("Tipo", types)


@ -85,7 +85,7 @@ class Perveden : ParsedHttpSource() {
author = infos.select("a[href^=/it/it-directory/?author]").first()?.text()
artist = infos.select("a[href^=/it/it-directory/?artist]").first()?.text()
genre = infos.select("a[href^=/it/it-directory/?categoriesInc]").map { it.text() }.joinToString()
genre = infos.select("a[href^=/it/it-directory/?categoriesInc]").joinToString { it.text() }
description = document.select("h2#mangaDescription").text()
status = parseStatus(infos.select("h4:containsOwn(Stato)").first()?.nextSibling().toString())
val img = infos.select("div.mangaImage2 > img").first()?.attr("src")
@ -109,14 +109,16 @@ class Perveden : ParsedHttpSource() {
}
private fun parseChapterDate(date: String): Long =
if ("Oggi" in date) {
when {
"Oggi" in date -> {
Calendar.getInstance().apply {
set(Calendar.HOUR_OF_DAY, 0)
set(Calendar.MINUTE, 0)
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else if ("Ieri" in date) {
}
"Ieri" in date -> {
Calendar.getInstance().apply {
add(Calendar.DATE, -1)
set(Calendar.HOUR_OF_DAY, 0)
@ -124,11 +126,13 @@ class Perveden : ParsedHttpSource() {
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
} else try {
}
else -> try {
SimpleDateFormat("d MMM yyyy", Locale.ITALIAN).parse(date).time
} catch (e: ParseException) {
0L
}
}
override fun pageListParse(document: Document): List<Page> = mutableListOf<Page>().apply {
document.select("option[value^=/it/it-manga/]").forEach {
@ -142,7 +146,7 @@ class Perveden : ParsedHttpSource() {
private class TextField(name: String, val key: String) : Filter.Text(name)
private class GenreField(name: String, val key: String) : Filter.Text(name)
private class OrderBy : Filter.Sort("Ordina per", arrayOf("Titolo manga", "Visite", "Capitoli", "Ultimo capitolo"),
Filter.Sort.Selection(1, false))
Selection(1, false))
private class StatusList(statuses: List<NamedId>) : Filter.Group<NamedId>("Stato", statuses)
private class Types(types: List<NamedId>) : Filter.Group<NamedId>("Tipo", types)


@ -218,7 +218,7 @@ class Rawdevart : ParsedHttpSource() {
}
override fun pageListParse(document: Document): List<Page> {
var pages = mutableListOf<Page>()
val pages = mutableListOf<Page>()
document.select("img.not-lazy[data-src]").forEachIndexed { i, img ->
pages.add(Page(i, "", img.attr("data-src")))
}