-
Notifications
You must be signed in to change notification settings - Fork 24
/
Readmanga.lua
141 lines (113 loc) Β· 3.77 KB
/
Readmanga.lua
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
--------------------------
-- @name Readmanga
-- @url https://readmanga.live/
-- @author ts-vadim (https://github.com/ts-vadim)
-- @license MIT
--------------------------
---@alias manga { name: string, url: string, author: string|nil, genres: string|nil, summary: string|nil }
---@alias chapter { name: string, url: string, volume: string|nil, manga_summary: string|nil, manga_author: string|nil, manga_genres: string|nil }
---@alias page { url: string, index: number }
----- IMPORTS -----
html = require("html")
http = require("http")
time = require("time")
HttpUtil = require("http_util")
inspect = require("inspect")
strings = require("strings")
json = require("json")
--- END IMPORTS ---
----- VARIABLES -----
DEBUG = true
URL_BASE = "https://readmanga.live/"
client = http.client()
--- END VARIABLES ---
----- HELPERS -----
--- Reverses the array part of a table in place.
-- @param t table Sequence to reverse (mutated; nothing is returned)
function reverse(t)
    local n = #t
    -- Swap mirrored pairs; the middle element (odd n) stays put.
    for i = 1, math.floor(n / 2) do
        t[i], t[n - i + 1] = t[n - i + 1], t[i]
    end
end
--- END HELPERS ---
----- MAIN -----
--- Searches for manga with given query.
-- @param query string Query to search for
-- @return manga[] Table of mangas
--- Searches for manga with given query.
-- @param query string Query to search for
-- @return manga[] Table of mangas
function SearchManga(query)
    -- Fetches one page of search results and appends parsed entries to `mangas`.
    -- NOTE(review): `page_offset` is currently unused — the request always hits
    -- the first result page. TODO: wire it into the request if pagination is needed.
    local function parse_page_n(page_offset, mangas)
        local request = http.request("POST", URL_BASE .. "search/?q=" .. HttpUtil.query_escape(query))
        local result = client:do_request(request)
        local doc = html.parse(result.body)
        doc:find(".leftContent .tiles .tile .desc"):each(function(i, s)
            local title = strings.trim_space(s:find("h3"):text())
            local url = s:find("h3 a"):attr("href")
            -- Skip unwanted entries:
            -- 1. Mangas from unrelated mirrors like mintmanga.live (absolute URLs)
            -- 2. Broken entries that link to an author page instead of a manga
            if strings.contains(url, "https://") or strings.contains(url, "/list/person") then
                return
            end
            -- Append instead of indexing with `start + i + 1`: skipped entries
            -- would otherwise leave nil holes in the array, making `#mangas`
            -- and ipairs unreliable for callers.
            mangas[#mangas + 1] = {
                name = title,
                url = URL_BASE .. strings.trim(url, "/"),
            }
        end)
    end
    local mangas = {}
    -- Seems like the step is always 50
    parse_page_n(50, mangas)
    -- parse_page_n(100, mangas)
    return mangas
end
--- Gets the list of all manga chapters.
-- @param mangaURL string URL of the manga
-- @return chapter[] Table of chapters
--- Gets the list of all manga chapters.
-- @param mangaURL string URL of the manga
-- @return chapter[] Table of chapters, reversed from page order
function MangaChapters(mangaURL)
    local request = http.request("GET", mangaURL)
    local result = client:do_request(request)
    local doc = html.parse(result.body)
    -- Declared local: the original leaked `chapters` as a global, which two
    -- concurrent/interleaved calls would clobber.
    local chapters = {}
    doc:find(".chapters-link a.chapter-link"):each(function(i, s)
        -- `each` appears to pass a 0-based index (matches usage elsewhere in
        -- this file); shift to Lua's 1-based arrays — TODO confirm against the
        -- html library's docs.
        chapters[i + 1] = {
            name = strings.trim_space(s:text()),
            url = URL_BASE .. strings.trim(s:attr("href"), "/"),
        }
    end)
    -- Presumably the site lists chapters newest-first; reverse to get
    -- reading order — NOTE(review): verify against the live page.
    reverse(chapters)
    return chapters
end
--- Gets the list of all pages of a chapter.
-- @param chapterURL string URL of the chapter
-- @return page[]
--- Gets the list of all pages of a chapter.
-- @param chapterURL string URL of the chapter
-- @return page[]
function ChapterPages(chapterURL)
    local request = http.request("GET", chapterURL)
    local result = client:do_request(request)
    local body = result.body
    -- Image URLs are passed to the page's rm_h.readerInit() JS call inline in
    -- the HTML, together with other arguments. Extract just the bracketed
    -- array literal from that call.
    local init_pos = body:find("rm_h.readerInit%(")
    if not init_pos then
        -- Fail loudly: without this check a nil here was silently masked
        -- (find with a nil init defaults to 1) and produced garbage downstream.
        error("ChapterPages: rm_h.readerInit() not found in chapter HTML")
    end
    local array_start = body:find("%[", init_pos)
    local s = body:sub(array_start)
    -- Trim to the closing ')' of the call, then back up to the last ']'.
    s = s:sub(1, s:find("%)"))
    s = s:sub(1, #s - s:reverse():find("%]") + 1)
    -- The JS literal uses single quotes; JSON requires double quotes.
    s = "[" .. s:gsub("'", "\"") .. "]"
    local decoded, err = json.decode(s)
    if err then
        error(err)
    end
    local pages = {}
    for i, v in ipairs(decoded[1]) do
        local url = v[1] .. v[3]
        -- Strip the query string if present. Plain-text find ('?' is a pattern
        -- magic char), and guard against URLs with no '?': the original did
        -- `url:find("?") - 1` unconditionally and crashed on nil.
        local qpos = url:find("?", 1, true)
        if qpos then
            url = url:sub(1, qpos - 1)
        end
        pages[i] = {
            url = url,
            index = i,
        }
    end
    return pages
end
--- END MAIN ---
-- ex: ts=4 sw=4 et filetype=lua