Mirror of https://github.com/hkalexling/Mango.git (synced 2026-01-24 00:03:14 -05:00)

Compare commits: v0.20.0...feature/lo (196 commits)
Commits in this range (196 total):

80344c3bf0 8a732804ae 9df372f784 cf7431b8b6 974b6cfe9b 4fbe5b471c 33e7e31fbc 72fae7f5ed
f50a7e3b3e 66c4037f2b 2c022a07e7 91362dfc7d 97168b65d8 6e04e249e7 16397050dd 3f73591dd4
ec25109fa5 96f1ef3dde b56e16e1e1 9769e760a0 70ab198a33 44a6f822cd 2c241a96bb 219d4446d1
d330db131e de193906a2 d13cfc045f a3b2cdd372 f4d7128b59 663c0c0b38 57b2f7c625 9489d6abfd
670cf54957 2e09efbd62 523195d649 be47f309b0 03e044a1aa 4eaf271fa4 4b464ed361 a9520d6f26
a151ec486d 8f1383a818 f5933a48d9 7734dae138 8c90b46114 cd48b45f11 bdbdf9c94b 7e36c91ea7
9309f51df6 a8f729f5c1 4e8b561f70 e6214ddc5d 80e13abc4a fb43abb950 eb3e37b950 0a90e3b333
4409ed8f45 291a340cdd 0667f01471 d5847bb105 3d295e961e e408398523 566cebfcdd a190ae3ed6
17d7cefa12 eaef0556fa 53226eab61 ccf558eaa7 0305433e46 d2cad6c496 371796cce9 d9adb49c27
f67e4e6cb9 60a126024c da8a485087 d809c21ee1 ca1e221b10 44d9c51ff9 15a54f4f23 51806f18db
79ef7bcd1c 5cb85ea857 9807db6ac0 565a535d22 c5b6a8b5b9 c75c71709f 11976b15f9 847f516a65
de410f42b8 0fd7caef4b 5e919d3e19 9e90aa17b9 0a8fd993e5 365f71cd1d 601346b209 e988a8c121
bf81a4e48b 4a09aee177 00c9cc1fcd 51a47b5ddd 244f97a68e 8d84a3c502 a26b4b3965 f2dd20cdec
64d6cd293c 08dc0601e8 9c983df7e9 efc547f5b2 995ca3b40f 864435d3f9 64c145cf80 6549253ed1
d9565718a4 400c3024fd a703175b3a 83b122ab75 1e7d6ba5b1 4d1ad8fb38 d544252e3e b02b28d3e3
d7efe1e553 1973564272 29923f6dc7 4a261d5ff8 31d425d462 a21681a6d7 208019a0b9 54e2a54ecb
2426ef05ec 25b90a8724 cd8944ed2d 7f0c256fe6 46e6e41bfe c9f55e7a8e 741c3a4e20 f6da20321d
2764e955b2 00c15014a1 c6fdbfd9fd e03bf32358 bbf1520c73 8950c3a1ed 17837d8a29 b4a69425c8
a612500b0f 9bb7144479 ee52c52f46 daec2bdac6 e9a490676b 757f7c8214 eed1a9717e 8829d2e237
eec6ec60bf 3a82effa40 0b3e78bcb7 cb4e4437a6 6a275286ea 2743868438 d3f26ecbc9 f62344806a
b7b7e6f718 05b4e77fa9 8aab113aab 371c8056e7 a9a2c9faa8 011768ed1f c36d2608e8 1b25a1fa47
df7e2270a4 3c3549a489 8160b0a18e a7eff772be bf3900f9a2 6fa575cf4f 604c5d49a6 7449d19075
c5c9305a0b fdceab9060 c18591c5cf bb5cb9b94c fb499a5caf 154d85e197 933617503e 31c6893bbb
171125e8ac d81334026b 2b3b2eb8ba ffd5f4454b cb25d7ba00 c61eb7554e edd9a2e093 1f50785e8f
70d418d1a1 45e20c94f9 4da263c594 d67a24809b
@@ -95,6 +95,24 @@
"contributions": [
"code"
]
},
{
"login": "davidkna",
"name": "David Knaack",
"avatar_url": "https://avatars.githubusercontent.com/u/835177?v=4",
"profile": "https://github.com/davidkna",
"contributions": [
"infra"
]
},
{
"login": "lincolnthedev",
"name": "i use arch btw",
"avatar_url": "https://avatars.githubusercontent.com/u/41193328?v=4",
"profile": "https://lncn.dev",
"contributions": [
"infra"
]
}
],
"contributorsPerLine": 7,
@@ -1,2 +1,9 @@
node_modules
lib
Dockerfile
Dockerfile.arm32v7
Dockerfile.arm64v8
README.md
.all-contributorsrc
env.example
.github/
.github/workflows/build.yml (vendored, 4 changes)
@@ -12,12 +12,12 @@ jobs:
runs-on: ubuntu-latest
container:
image: crystallang/crystal:0.35.1-alpine
image: crystallang/crystal:1.0.0-alpine
steps:
- uses: actions/checkout@v2
- name: Install dependencies
run: apk add --no-cache yarn yaml sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
run: apk add --no-cache yarn yaml-static sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
- name: Build
run: make static || make static
- name: Linter
.gitignore (vendored, 1 change)
@@ -13,3 +13,4 @@ public/css/uikit.css
public/img/*.svg
public/js/*.min.js
public/css/*.css
public/webfonts
@@ -1,15 +1,15 @@
FROM crystallang/crystal:0.35.1-alpine AS builder
FROM crystallang/crystal:1.0.0-alpine AS builder

WORKDIR /Mango

COPY . .
RUN apk add --no-cache yarn yaml sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
RUN apk add --no-cache yarn yaml-static sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
RUN make static || make static

FROM library/alpine

WORKDIR /

COPY --from=builder /Mango/mango .
COPY --from=builder /Mango/mango /usr/local/bin/mango

CMD ["./mango"]
CMD ["/usr/local/bin/mango"]
@@ -2,13 +2,14 @@ FROM arm32v7/ubuntu:18.04

RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev

RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.35.1 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.2.0 && make && cd ..
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 1.0.0 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.8 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v1.0.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.5.0 && make && cd ..

COPY mango-arm32v7.o .

RUN cc 'mango-arm32v7.o' -o 'mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/arm-linux-gnueabihf/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib
RUN cc 'mango-arm32v7.o' -o '/usr/local/bin/mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/arm-linux-gnueabihf/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib

CMD ["/usr/local/bin/mango"]
CMD ["./mango"]
@@ -2,13 +2,13 @@ FROM arm64v8/ubuntu:18.04

RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev

RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.35.1 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.2.0 && make && cd ..
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 1.0.0 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.8 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v1.0.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.5.0 && make && cd ..

COPY mango-arm64v8.o .

RUN cc 'mango-arm64v8.o' -o 'mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/aarch64-linux-gnu/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib
RUN cc 'mango-arm64v8.o' -o '/usr/local/bin/mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/aarch64-linux-gnu/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib

CMD ["./mango"]
CMD ["/usr/local/bin/mango"]
README.md (17 changes)
@@ -2,7 +2,7 @@

# Mango

[](https://www.patreon.com/hkalexling)  [](https://gitter.im/mango-cr/mango?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
[](https://www.patreon.com/hkalexling)  [](https://gitter.im/mango-cr/mango?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [](http://discord.com/invite/ezKtacCp9Q)

Mango is a self-hosted manga server and reader. Its features include

@@ -13,7 +13,6 @@ Mango is a self-hosted manga server and reader. Its features include
- Supports nested folders in library
- Automatically stores reading progress
- Thumbnail generation
- Built-in [MangaDex](https://mangadex.org/) downloader
- Supports [plugins](https://github.com/hkalexling/mango-plugins) to download from thrid-party sites
- The web reader is responsive and works well on mobile, so there is no need for a mobile app
- All the static files are embedded in the binary, so the deployment process is easy and painless

@@ -52,7 +51,7 @@ The official docker images are available on [Dockerhub](https://hub.docker.com/r
### CLI

```
Mango - Manga Server and Web Reader. Version 0.20.0
Mango - Manga Server and Web Reader. Version 0.24.0

Usage:

@@ -75,6 +74,7 @@ The default config file location is `~/.config/mango/config.yml`. It might be di
```yaml
---
host: 0.0.0.0
port: 9000
base_url: /
session_secret: mango-session-secret
@@ -86,12 +86,16 @@ log_level: info
upload_path: ~/mango/uploads
plugin_path: ~/mango/plugins
download_timeout_seconds: 30
page_margin: 30
library_cache_path: ~/mango/library.yml.gz
cache_enabled: false
cache_size_mbs: 50
cache_log_enabled: true
disable_login: false
default_username: ""
auth_proxy_header_name: ""
mangadex:
base_url: https://mangadex.org
api_url: https://mangadex.org/api
api_url: https://api.mangadex.org/v2
download_wait_seconds: 5
download_retries: 4
download_queue_db_path: ~/mango/queue.db
@@ -102,6 +106,7 @@ mangadex:
- `scan_interval_minutes`, `thumbnail_generation_interval_hours` and `db_optimization_interval_hours` can be any non-negative integer. Setting them to `0` disables the periodic tasks
- `log_level` can be `debug`, `info`, `warn`, `error`, `fatal` or `off`. Setting it to `off` disables the logging
- You can disable authentication by setting `disable_login` to true. Note that `default_username` must be set to an existing username for this to work.
- By setting `cache_enabled` to `true`, you can enable an experimental feature where Mango caches library metadata to improve page load time. You can further fine-tune the feature with `cache_size_mbs` and `cache_log_enabled`.

### Library Structure

@@ -172,6 +177,8 @@ Please check the [development guideline](https://github.com/hkalexling/Mango/wik
<tr>
<td align="center"><a href="https://github.com/Leeingnyo"><img src="https://avatars0.githubusercontent.com/u/6760150?v=4?s=100" width="100px;" alt=""/><br /><sub><b>이인용</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=Leeingnyo" title="Code">💻</a></td>
<td align="center"><a href="http://h45h74x.eu.org"><img src="https://avatars1.githubusercontent.com/u/27204033?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Simon</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=h45h74x" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/davidkna"><img src="https://avatars.githubusercontent.com/u/835177?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Knaack</b></sub></a><br /><a href="#infra-davidkna" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
<td align="center"><a href="https://lncn.dev"><img src="https://avatars.githubusercontent.com/u/41193328?v=4?s=100" width="100px;" alt=""/><br /><sub><b>i use arch btw</b></sub></a><br /><a href="#infra-lincolnthedev" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
</tr>
</table>
gulpfile.js (36 changes)
@@ -4,26 +4,25 @@ const minify = require('gulp-babel-minify');
const minifyCss = require('gulp-minify-css');
const less = require('gulp-less');

// Copy libraries from node_moduels to public/js
gulp.task('copy-js', () => {
return gulp.src([
'node_modules/@fortawesome/fontawesome-free/js/fontawesome.min.js',
'node_modules/@fortawesome/fontawesome-free/js/solid.min.js',
'node_modules/uikit/dist/js/uikit.min.js',
'node_modules/uikit/dist/js/uikit-icons.min.js'
])
.pipe(gulp.dest('public/js'));
});

// Copy UIKit SVG icons to public/img
gulp.task('copy-uikit-icons', () => {
gulp.task('copy-img', () => {
return gulp.src('node_modules/uikit/src/images/backgrounds/*.svg')
.pipe(gulp.dest('public/img'));
});

gulp.task('copy-font', () => {
return gulp.src('node_modules/@fortawesome/fontawesome-free/webfonts/fa-solid-900.woff**')
.pipe(gulp.dest('public/webfonts'));
});

// Copy files from node_modules
gulp.task('node-modules-copy', gulp.parallel('copy-img', 'copy-font'));

// Compile less
gulp.task('less', () => {
return gulp.src('public/css/*.less')
return gulp.src([
'public/css/mango.less',
'public/css/tags.less'
])
.pipe(less())
.pipe(gulp.dest('public/css'));
});
@@ -54,14 +53,19 @@ gulp.task('minify-css', () => {

// Copy static files (includeing images) to dist
gulp.task('copy-files', () => {
return gulp.src(['public/img/*', 'public/*.*', 'public/js/*.min.js'], {
return gulp.src([
'public/*.*',
'public/img/*',
'public/webfonts/*',
'public/js/*.min.js'
], {
base: 'public'
})
.pipe(gulp.dest('dist'));
});

// Set up the public folder for development
gulp.task('dev', gulp.parallel('copy-js', 'copy-uikit-icons', 'less'));
gulp.task('dev', gulp.parallel('node-modules-copy', 'less'));

// Set up the dist folder for deployment
gulp.task('deploy', gulp.parallel('babel', 'minify-css', 'copy-files'));
migration/md_account.11.cr (new file, 20 lines)
@@ -0,0 +1,20 @@
class CreateMangaDexAccount < MG::Base
def up : String
<<-SQL
CREATE TABLE md_account (
username TEXT NOT NULL PRIMARY KEY,
token TEXT NOT NULL,
expire INTEGER NOT NULL,
FOREIGN KEY (username) REFERENCES users (username)
ON UPDATE CASCADE
ON DELETE CASCADE
);
SQL
end

def down : String
<<-SQL
DROP TABLE md_account;
SQL
end
end
@@ -1,3 +1,16 @@
// UIKit
@import "./uikit.less";

// FontAwesome
@import "../../node_modules/@fortawesome/fontawesome-free/less/fontawesome.less";
@import "../../node_modules/@fortawesome/fontawesome-free/less/solid.less";

@font-face {
src: url('@{fa-font-path}/fa-solid-900.woff2');
src: url('@{fa-font-path}/fa-solid-900.woff2') format('woff2'),
url('@{fa-font-path}/fa-solid-900.woff') format('woff');
}

// Item cards
.item .uk-card {
cursor: pointer;
@@ -21,9 +34,11 @@
.uk-card-body {
padding: 20px;
.uk-card-title {
max-height: 3em;
font-size: 1rem;
}
.uk-card-title:not(.free-height) {
max-height: 3em;
}
}
}
@@ -43,3 +43,22 @@
@internal-list-bullet-image: "../img/list-bullet.svg";
@internal-accordion-open-image: "../img/accordion-open.svg";
@internal-accordion-close-image: "../img/accordion-close.svg";

.hook-card-default() {
.uk-light & {
background: @card-secondary-background;
color: @card-secondary-color;
}
}

.hook-card-default-title() {
.uk-light & {
color: @card-secondary-title-color;
}
}

.hook-card-default-hover() {
.uk-light & {
background-color: @card-secondary-hover-background;
}
}
@@ -117,14 +117,10 @@ const setTheme = (theme) => {
if (theme === 'dark') {
$('html').css('background', 'rgb(20, 20, 20)');
$('body').addClass('uk-light');
$('.uk-card').addClass('uk-card-secondary');
$('.uk-card').removeClass('uk-card-default');
$('.ui-widget-content').addClass('dark');
} else {
$('html').css('background', '');
$('body').removeClass('uk-light');
$('.uk-card').removeClass('uk-card-secondary');
$('.uk-card').addClass('uk-card-default');
$('.ui-widget-content').removeClass('dark');
}
};
@@ -1,305 +0,0 @@
$(() => {
$('#search-input').keypress(event => {
if (event.which === 13) {
search();
}
});
$('.filter-field').each((i, ele) => {
$(ele).change(() => {
buildTable();
});
});
});
const selectAll = () => {
$('tbody > tr').each((i, e) => {
$(e).addClass('ui-selected');
});
};
const unselect = () => {
$('tbody > tr').each((i, e) => {
$(e).removeClass('ui-selected');
});
};
const download = () => {
const selected = $('tbody > tr.ui-selected');
if (selected.length === 0) return;
UIkit.modal.confirm(`Download ${selected.length} selected chapters?`).then(() => {
$('#download-btn').attr('hidden', '');
$('#download-spinner').removeAttr('hidden');
const ids = selected.map((i, e) => {
return $(e).find('td').first().text();
}).get();
const chapters = globalChapters.filter(c => ids.indexOf(c.id) >= 0);
console.log(ids);
$.ajax({
type: 'POST',
url: base_url + 'api/admin/mangadex/download',
data: JSON.stringify({
chapters: chapters
}),
contentType: "application/json",
dataType: 'json'
})
.done(data => {
console.log(data);
if (data.error) {
alert('danger', `Failed to add chapters to the download queue. Error: ${data.error}`);
return;
}
const successCount = parseInt(data.success);
const failCount = parseInt(data.fail);
UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. Proceed to the download manager?`).then(() => {
window.location.href = base_url + 'admin/downloads';
});
})
.fail((jqXHR, status) => {
alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
.always(() => {
$('#download-spinner').attr('hidden', '');
$('#download-btn').removeAttr('hidden');
});
});
};
const toggleSpinner = () => {
var attr = $('#spinner').attr('hidden');
if (attr) {
$('#spinner').removeAttr('hidden');
$('#search-btn').attr('hidden', '');
} else {
$('#search-btn').removeAttr('hidden');
$('#spinner').attr('hidden', '');
}
searching = !searching;
};
var searching = false;
var globalChapters;
const search = () => {
if (searching) {
return;
}
$('#manga-details').attr('hidden', '');
$('#filter-form').attr('hidden', '');
$('table').attr('hidden', '');
$('#selection-controls').attr('hidden', '');
$('#filter-notification').attr('hidden', '');
toggleSpinner();
const input = $('input').val();

if (input === "") {
toggleSpinner();
return;
}

var int_id = -1;

try {
const path = new URL(input).pathname;
const match = /\/(?:title|manga)\/([0-9]+)/.exec(path);
int_id = parseInt(match[1]);
} catch (e) {
int_id = parseInt(input);
}

if (int_id <= 0 || isNaN(int_id)) {
alert('danger', 'Please make sure you are using a valid manga ID or manga URL from Mangadex.');
toggleSpinner();
return;
}

$.getJSON(`${base_url}api/admin/mangadex/manga/${int_id}`)
.done((data) => {
if (data.error) {
alert('danger', 'Failed to get manga info. Error: ' + data.error);
return;
}

const cover = baseURL + data.cover_url;
$('#cover').attr("src", cover);
$('#title').text("Title: " + data.title);
$('#artist').text("Artist: " + data.artist);
$('#author').text("Author: " + data.author);

$('#manga-details').removeAttr('hidden');

console.log(data.chapters);
globalChapters = data.chapters;

let langs = new Set();
let group_names = new Set();
data.chapters.forEach(chp => {
Object.entries(chp.groups).forEach(([k, v]) => {
group_names.add(k);
});
langs.add(chp.language);
});

const comp = (a, b) => {
var ai;
var bi;
try {
ai = parseFloat(a);
} catch (e) {}
try {
bi = parseFloat(b);
} catch (e) {}
if (typeof ai === 'undefined') return -1;
if (typeof bi === 'undefined') return 1;
if (ai < bi) return 1;
if (ai > bi) return -1;
return 0;
};

langs = [...langs].sort();
group_names = [...group_names].sort();

langs.unshift('All');
group_names.unshift('All');

$('select#lang-select').html(langs.map(e => `<option>${e}</option>`).join(''));
$('select#group-select').html(group_names.map(e => `<option>${e}</option>`).join(''));

$('#filter-form').removeAttr('hidden');

buildTable();
})
.fail((jqXHR, status) => {
alert('danger', `Failed to get manga info. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
.always(() => {
toggleSpinner();
});
};
const parseRange = str => {
const regex = /^[\t ]*(?:(?:(<|<=|>|>=)[\t ]*([0-9]+))|(?:([0-9]+))|(?:([0-9]+)[\t ]*-[\t ]*([0-9]+))|(?:[\t ]*))[\t ]*$/m;
const matches = str.match(regex);
var num;

if (!matches) {
alert('danger', `Failed to parse filter input ${str}`);
return [null, null];
} else if (typeof matches[1] !== 'undefined' && typeof matches[2] !== 'undefined') {
// e.g., <= 30
num = parseInt(matches[2]);
if (isNaN(num)) {
alert('danger', `Failed to parse filter input ${str}`);
return [null, null];
}
switch (matches[1]) {
case '<':
return [null, num - 1];
case '<=':
return [null, num];
case '>':
return [num + 1, null];
case '>=':
return [num, null];
}
} else if (typeof matches[3] !== 'undefined') {
// a single number
num = parseInt(matches[3]);
if (isNaN(num)) {
alert('danger', `Failed to parse filter input ${str}`);
return [null, null];
}
return [num, num];
} else if (typeof matches[4] !== 'undefined' && typeof matches[5] !== 'undefined') {
// e.g., 10 - 23
num = parseInt(matches[4]);
const n2 = parseInt(matches[5]);
if (isNaN(num) || isNaN(n2) || num > n2) {
alert('danger', `Failed to parse filter input ${str}`);
return [null, null];
}
return [num, n2];
} else {
// empty or space only
return [null, null];
}
};
const getFilters = () => {
const filters = {};
$('.uk-select').each((i, ele) => {
const id = $(ele).attr('id');
const by = id.split('-')[0];
const choice = $(ele).val();
filters[by] = choice;
});
filters.volume = parseRange($('#volume-range').val());
filters.chapter = parseRange($('#chapter-range').val());
return filters;
};
const buildTable = () => {
$('table').attr('hidden', '');
$('#selection-controls').attr('hidden', '');
$('#filter-notification').attr('hidden', '');
console.log('rebuilding table');
const filters = getFilters();
console.log('filters:', filters);
var chapters = globalChapters.slice();
Object.entries(filters).forEach(([k, v]) => {
if (v === 'All') return;
if (k === 'group') {
chapters = chapters.filter(c => {
const unescaped_groups = Object.entries(c.groups).map(([g, id]) => unescapeHTML(g));
return unescaped_groups.indexOf(v) >= 0;
});
return;
}
if (k === 'lang') {
chapters = chapters.filter(c => c.language === v);
return;
}
const lb = parseFloat(v[0]);
const ub = parseFloat(v[1]);
if (isNaN(lb) && isNaN(ub)) return;
chapters = chapters.filter(c => {
const val = parseFloat(c[k]);
if (isNaN(val)) return false;
if (isNaN(lb))
return val <= ub;
else if (isNaN(ub))
return val >= lb;
else
return val >= lb && val <= ub;
});
});
console.log('filtered chapters:', chapters);
$('#count-text').text(`${chapters.length} chapters found`);

const chaptersLimit = 1000;
if (chapters.length > chaptersLimit) {
$('#filter-notification').text(`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters in this manga. Please use the filter options above to narrow down your search.`);
$('#filter-notification').removeAttr('hidden');
return;
}

const inner = chapters.map(chp => {
const group_str = Object.entries(chp.groups).map(([k, v]) => {
return `<a href="${baseURL }/group/${v}">${k}</a>`;
}).join(' | ');
return `<tr class="ui-widget-content">
<td><a href="${baseURL}/chapter/${chp.id}">${chp.id}</a></td>
<td>${chp.title}</td>
<td>${chp.language}</td>
<td>${group_str}</td>
<td>${chp.volume}</td>
<td>${chp.chapter}</td>
<td>${moment.unix(chp.time).fromNow()}</td>
</tr>`;
}).join('');
const tbody = `<tbody id="selectable">${inner}</tbody>`;
$('tbody').remove();
$('table').append(tbody);
$('table').removeAttr('hidden');
$("#selectable").selectable({
filter: 'tr'
});
$('#selection-controls').removeAttr('hidden');
};

const unescapeHTML = (str) => {
var elt = document.createElement("span");
elt.innerHTML = str;
return elt.innerText;
};
@@ -126,9 +126,7 @@ const download = () => {
}
const successCount = parseInt(data.success);
const failCount = parseInt(data.fail);
UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. Proceed to the download manager?`).then(() => {
window.location.href = base_url + 'admin/downloads';
});
alert('success', `${successCount} of ${successCount + failCount} chapters added to the download queue. You can view and manage your download queue on the <a href="${base_url}admin/downloads">download manager page</a>.`);
})
.fail((jqXHR, status) => {
alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
@@ -6,9 +6,13 @@ const readerComponent = () => {
alertClass: 'uk-alert-primary',
items: [],
curItem: {},
enableFlipAnimation: true,
flipAnimation: null,
longPages: false,
lastSavedPage: page,
selectedIndex: 0, // 0: not selected; 1: the first page
margin: 30,
preloadLookahead: 3,

/**
* Initialize the component by fetching the page dimensions
@@ -26,7 +30,6 @@ const readerComponent = () => {
url: `${base_url}api/page/${tid}/${eid}/${i+1}`,
width: d.width,
height: d.height,
style: `margin-top: ${data.margin}px; margin-bottom: ${data.margin}px;`
};
});

@@ -46,6 +49,21 @@ const readerComponent = () => {
const mode = this.mode;
this.updateMode(this.mode, page, nextTick);
$('#mode-select').val(mode);

const savedMargin = localStorage.getItem('margin');
if (savedMargin) {
this.margin = savedMargin;
}

// Preload Images
this.preloadLookahead = +(localStorage.getItem('preloadLookahead') ?? 3);
const limit = Math.min(page + this.preloadLookahead, this.items.length + 1);
for (let idx = page + 1; idx <= limit; idx++) {
this.preloadImage(this.items[idx - 1].url);
}

const savedFlipAnimation = localStorage.getItem('enableFlipAnimation');
this.enableFlipAnimation = savedFlipAnimation === null || savedFlipAnimation === 'true';
})
.catch(e => {
const errMsg = `Failed to get the page dimensions. ${e}`;
@@ -54,6 +72,12 @@ const readerComponent = () => {
this.msg = errMsg;
})
},
/**
* Preload an image, which is expected to be cached
*/
preloadImage(url) {
(new Image()).src = url;
},
/**
* Handles the `change` event for the page selector
*/
@@ -105,12 +129,18 @@ const readerComponent = () => {

if (newIdx <= 0 || newIdx > this.items.length) return;

if (newIdx + this.preloadLookahead < this.items.length + 1) {
this.preloadImage(this.items[newIdx + this.preloadLookahead - 1].url);
}

this.toPage(newIdx);

if (isNext)
this.flipAnimation = 'right';
else
this.flipAnimation = 'left';
if (this.enableFlipAnimation) {
if (isNext)
this.flipAnimation = 'right';
else
this.flipAnimation = 'left';
}

setTimeout(() => {
this.flipAnimation = null;
@@ -221,10 +251,7 @@ const readerComponent = () => {
*/
showControl(event) {
const idx = event.currentTarget.id;
const pageCount = this.items.length;
const progressText = `Progress: ${idx}/${pageCount} (${(idx/pageCount * 100).toFixed(1)}%)`;
$('#progress-label').text(progressText);
$('#page-select').val(idx);
this.selectedIndex = idx;
UIkit.modal($('#modal-sections')).show();
},
/**
@@ -263,19 +290,35 @@ const readerComponent = () => {
});
},
/**
* Exits the reader, and optionally sets the reading progress tp 100%
* Exits the reader, and sets the reading progress tp 100%
*
* @param {string} exitUrl - The Exit URL
* @param {boolean} [markCompleted] - Whether we should mark the
* reading progress to 100%
*/
exitReader(exitUrl, markCompleted = false) {
if (!markCompleted) {
return this.redirect(exitUrl);
}
exitReader(exitUrl) {
this.saveProgress(this.items.length, () => {
this.redirect(exitUrl);
});
}
},

/**
* Handles the `change` event for the entry selector
*/
entryChanged() {
const id = $('#entry-select').val();
this.redirect(`${base_url}reader/${tid}/${id}`);
},

marginChanged() {
localStorage.setItem('margin', this.margin);
this.toPage(this.selectedIndex);
},

preloadLookaheadChanged() {
localStorage.setItem('preloadLookahead', this.preloadLookahead);
},

enableFlipAnimationChanged() {
localStorage.setItem('enableFlipAnimation', this.enableFlipAnimation);
},
};
}
public/js/subscription.js (new file, 82 lines)
@@ -0,0 +1,82 @@
const component = () => {
return {
available: undefined,
subscriptions: [],

init() {
$.getJSON(`${base_url}api/admin/mangadex/expires`)
.done((data) => {
if (data.error) {
alert('danger', 'Failed to check MangaDex integration status. Error: ' + data.error);
return;
}
this.available = Boolean(data.expires && data.expires > Math.floor(Date.now() / 1000));

if (this.available) this.getSubscriptions();
})
.fail((jqXHR, status) => {
alert('danger', `Failed to check MangaDex integration status. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
},

getSubscriptions() {
$.getJSON(`${base_url}api/admin/mangadex/subscriptions`)
.done(data => {
if (data.error) {
alert('danger', 'Failed to get subscriptions. Error: ' + data.error);
return;
}
this.subscriptions = data.subscriptions;
})
.fail((jqXHR, status) => {
alert('danger', `Failed to get subscriptions. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
},

rm(event) {
const id = event.currentTarget.parentNode.getAttribute('data-id');
$.ajax({
type: 'DELETE',
url: `${base_url}api/admin/mangadex/subscriptions/${id}`,
contentType: 'application/json'
})
.done(data => {
if (data.error) {
alert('danger', `Failed to delete subscription. Error: ${data.error}`);
}
this.getSubscriptions();
})
.fail((jqXHR, status) => {
alert('danger', `Failed to delete subscription. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
});
},

check(event) {
const id = event.currentTarget.parentNode.getAttribute('data-id');
$.ajax({
type: 'POST',
url: `${base_url}api/admin/mangadex/subscriptions/check/${id}`,
contentType: 'application/json'
})
.done(data => {
if (data.error) {
alert('danger', `Failed to check subscription. Error: ${data.error}`);
return;
}
alert('success', 'Mango is now checking the subscription for updates. This might take a while, but you can safely leave the page.');
})
.fail((jqXHR, status) => {
alert('danger', `Failed to check subscription. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
});
},

formatRange(min, max) {
if (!isNaN(min) && isNaN(max)) return `≥ ${min}`;
if (isNaN(min) && !isNaN(max)) return `≤ ${max}`;
if (isNaN(min) && isNaN(max)) return 'All';

if (min === max) return `= ${min}`;
return `${min} - ${max}`;
}
};
};
shard.lock (40 changes)
@@ -2,77 +2,77 @@ version: 2.0
shards:
ameba:
git: https://github.com/crystal-ameba/ameba.git
version: 0.12.1
version: 0.14.3

archive:
git: https://github.com/hkalexling/archive.cr.git
version: 0.4.0
version: 0.5.0

baked_file_system:
git: https://github.com/schovi/baked_file_system.git
version: 0.9.8+git.commit.fb3091b546797fbec3c25dc0e1e2cff60bb9033b
version: 0.10.0

clim:
git: https://github.com/at-grandpa/clim.git
version: 0.12.0
version: 0.17.1

db:
git: https://github.com/crystal-lang/crystal-db.git
version: 0.9.0
version: 0.10.1

duktape:
git: https://github.com/jessedoyle/duktape.cr.git
version: 0.20.0
version: 1.0.0

exception_page:
git: https://github.com/crystal-loot/exception_page.git
version: 0.1.4
version: 0.1.5

http_proxy:
git: https://github.com/mamantoha/http_proxy.git
version: 0.7.1
version: 0.8.0

image_size:
git: https://github.com/hkalexling/image_size.cr.git
version: 0.4.0
version: 0.5.0

kemal:
git: https://github.com/kemalcr/kemal.git
version: 0.27.0
version: 1.0.0

kemal-session:
git: https://github.com/kemalcr/kemal-session.git
version: 0.12.1
version: 1.0.0

kilt:
git: https://github.com/jeromegn/kilt.git
version: 0.4.0
version: 0.4.1

koa:
git: https://github.com/hkalexling/koa.git
version: 0.5.0
version: 0.8.0

mg:
git: https://github.com/hkalexling/mg.git
version: 0.2.0+git.commit.171c46489d991a8353818e00fc6a3c4e0809ded9
version: 0.5.0+git.commit.697e46e27cde8c3969346e228e372db2455a6264

myhtml:
git: https://github.com/kostya/myhtml.git
version: 1.5.1
version: 1.5.8

open_api:
git: https://github.com/jreinert/open_api.cr.git
version: 1.2.1+git.commit.95e4df2ca10b1fe88b8b35c62a18b06a10267b6c
git: https://github.com/hkalexling/open_api.cr.git
version: 1.2.1+git.commit.1d3c55dd5534c6b0af18964d031858a08515553a

radix:
git: https://github.com/luislavena/radix.git
version: 0.3.9
version: 0.4.1

sqlite3:
git: https://github.com/crystal-lang/crystal-sqlite3.git
version: 0.16.0
version: 0.18.0

tallboy:
git: https://github.com/epoch/tallboy.git
version: 0.9.3
version: 0.9.3+git.commit.9be1510bb0391c95e92f1b288f3afb429a73caa6
@@ -1,5 +1,5 @@
name: mango
version: 0.20.0
version: 0.24.0

authors:
- Alex Ling <hkalexling@gmail.com>
@@ -8,7 +8,7 @@ targets:
mango:
main: src/mango.cr

crystal: 0.35.1
crystal: 1.0.0

license: MIT

@@ -21,7 +21,6 @@ dependencies:
github: crystal-lang/crystal-sqlite3
baked_file_system:
github: schovi/baked_file_system
version: 0.9.8+git.commit.fb3091b546797fbec3c25dc0e1e2cff60bb9033b
archive:
github: hkalexling/archive.cr
ameba:
@@ -30,7 +29,6 @@ dependencies:
github: at-grandpa/clim
duktape:
github: jessedoyle/duktape.cr
version: ~> 0.20.0
myhtml:
github: kostya/myhtml
http_proxy:
@@ -41,5 +39,6 @@ dependencies:
github: hkalexling/koa
tallboy:
github: epoch/tallboy
branch: master
mg:
github: hkalexling/mg
@@ -8,9 +8,7 @@ describe Storage do
end

it "deletes user" do
with_storage do |storage|
storage.delete_user "admin"
end
with_storage &.delete_user "admin"
end

it "creates new user" do

@@ -21,7 +21,7 @@ describe "compare_numerically" do
it "sorts like the stack exchange post" do
ary = ["2", "12", "200000", "1000000", "a", "a12", "b2", "text2",
"text2a", "text2a2", "text2a12", "text2ab", "text12", "text12a"]
ary.reverse.sort { |a, b|
ary.reverse.sort! { |a, b|
compare_numerically a, b
}.should eq ary
end
@@ -29,7 +29,7 @@ describe "compare_numerically" do
# https://github.com/hkalexling/Mango/issues/22
it "handles numbers larger than Int32" do
ary = ["14410155591588.jpg", "21410155591588.png", "104410155591588.jpg"]
ary.reverse.sort { |a, b|
ary.reverse.sort! { |a, b|
compare_numerically a, b
}.should eq ary
end
@@ -56,8 +56,18 @@ describe "chapter_sort" do
it "sorts correctly" do
ary = ["Vol.1 Ch.01", "Vol.1 Ch.02", "Vol.2 Ch. 2.5", "Ch. 3", "Ch.04"]
sorter = ChapterSorter.new ary
ary.reverse.sort do |a, b|
ary.reverse.sort! do |a, b|
sorter.compare a, b
end.should eq ary
end
end

describe "sanitize_filename" do
it "returns a random string for empty sanitized string" do
sanitize_filename("..").should_not eq sanitize_filename("..")
end
it "sanitizes correctly" do
sanitize_filename(".. \n\v.\rマンゴー/|*()<[1/2] 3.14 hello world ")
.should eq "マンゴー_()[1_2] 3.14 hello world"
end
end
@@ -1,41 +0,0 @@
Arabic,sa
Bengali,bd
Bulgarian,bg
Burmese,mm
Catalan,ct
Chinese (Simp),cn
Chinese (Trad),hk
Czech,cz
Danish,dk
Dutch,nl
English,gb
Filipino,ph
Finnish,fi
French,fr
German,de
Greek,gr
Hebrew,il
Hindi,in
Hungarian,hu
Indonesian,id
Italian,it
Japanese,jp
Korean,kr
Lithuanian,lt
Malay,my
Mongolian,mn
Other,
Persian,ir
Polish,pl
Portuguese (Br),br
Portuguese (Pt),pt
Romanian,ro
Russian,ru
Serbo-Croatian,rs
Spanish (Es),es
Spanish (LATAM),mx
Swedish,se
Thai,th
Turkish,tr
Ukrainian,ua
Vietnames,vn
@@ -5,11 +5,14 @@ class Config

@[YAML::Field(ignore: true)]
property path : String = ""
property host : String = "0.0.0.0"
property port : Int32 = 9000
property base_url : String = "/"
property session_secret : String = "mango-session-secret"
property library_path : String = File.expand_path "~/mango/library",
home: true
property library_cache_path = File.expand_path "~/mango/library.yml.gz",
home: true
property db_path : String = File.expand_path "~/mango/mango.db", home: true
property scan_interval_minutes : Int32 = 5
property thumbnail_generation_interval_hours : Int32 = 24
@@ -19,15 +22,18 @@ class Config
property plugin_path : String = File.expand_path "~/mango/plugins",
home: true
property download_timeout_seconds : Int32 = 30
property page_margin : Int32 = 30
property cache_enabled = false
property cache_size_mbs = 50
property cache_log_enabled = true
property disable_login = false
property default_username = ""
property auth_proxy_header_name = ""
property mangadex = Hash(String, String | Int32).new

@[YAML::Field(ignore: true)]
@mangadex_defaults = {
"base_url" => "https://mangadex.org",
"api_url" => "https://mangadex.org/api",
"api_url" => "https://api.mangadex.org/v2",
"download_wait_seconds" => 5,
"download_retries" => 4,
"download_queue_db_path" => File.expand_path("~/mango/queue.db",
@@ -51,9 +57,9 @@ class Config
cfg_path = File.expand_path path, home: true
if File.exists? cfg_path
config = self.from_yaml File.read cfg_path
config.preprocess
config.path = path
config.fill_defaults
config.preprocess
return config
end
puts "The config file #{cfg_path} does not exist. " \
@@ -91,5 +97,24 @@ class Config
raise "Login is disabled, but default username is not set. " \
"Please set a default username"
end

# `Logger.default` is not available yet
Log.setup :debug
unless mangadex["api_url"] =~ /\/v2/
Log.warn { "It looks like you are using the deprecated MangaDex API " \
"v1 in your config file. Please update it to " \
"https://api.mangadex.org/v2 to suppress this warning." }
mangadex["api_url"] = "https://api.mangadex.org/v2"
end
if mangadex["api_url"] =~ /\/api\/v2/
Log.warn { "It looks like you are using the outdated MangaDex API " \
"url (mangadex.org/api/v2) in your config file. Please " \
"update it to https://api.mangadex.org/v2 to suppress this " \
"warning." }
mangadex["api_url"] = "https://api.mangadex.org/v2"
end

mangadex["api_url"] = mangadex["api_url"].to_s.rstrip "/"
mangadex["base_url"] = mangadex["base_url"].to_s.rstrip "/"
end
end
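To make the config preprocessing above concrete: a deprecated MangaDex API URL is rewritten to the v2 endpoint and trailing slashes are stripped. Below is a minimal, illustrative Crystal sketch of that transformation only; the standalone `normalize_api_url` wrapper is hypothetical, while the two regex checks, the replacement URL, and the `rstrip` call are taken from the hunk above.

```crystal
# Illustrative only: mirrors the two checks and the rstrip call in Config#preprocess.
def normalize_api_url(url : String) : String
  url = "https://api.mangadex.org/v2" unless url =~ /\/v2/     # v1 URL => v2
  url = "https://api.mangadex.org/v2" if url =~ /\/api\/v2/    # outdated mangadex.org/api/v2 => v2
  url.rstrip "/"                                               # drop trailing slash
end

normalize_api_url "https://mangadex.org/api"     # => "https://api.mangadex.org/v2"
normalize_api_url "https://api.mangadex.org/v2/" # => "https://api.mangadex.org/v2"
```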
@@ -15,7 +15,11 @@ class AuthHandler < Kemal::Handler
env.response.status_code = 401
env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED
env.response.print AUTH_MESSAGE
call_next env
end

def require_auth(env)
env.session.string "callback", env.request.path
redirect env, "/login"
end

def validate_token(env)
@@ -49,55 +53,51 @@ class AuthHandler < Kemal::Handler
Storage.default.verify_user username, password
end

def handle_opds_auth(env)
if validate_token(env) || validate_auth_header(env)
call_next env
else
env.response.status_code = 401
env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED
env.response.print AUTH_MESSAGE
end
end

def handle_auth(env)
if request_path_startswith(env, ["/login", "/logout"]) ||
def call(env)
# Skip all authentication if requesting /login, /logout, /api/login,
# or a static file
if request_path_startswith(env, ["/login", "/logout", "/api/login"]) ||
requesting_static_file env
return call_next(env)
end

unless validate_token(env) || Config.current.disable_login
env.session.string "callback", env.request.path
return redirect env, "/login"
# Check user is logged in
if validate_token env
# Skip if the request has a valid token
elsif Config.current.disable_login
# Check default username if login is disabled
unless Storage.default.username_exists Config.current.default_username
Logger.warn "Default username #{Config.current.default_username} " \
"does not exist"
return require_auth env
end
elsif !Config.current.auth_proxy_header_name.empty?
# Check auth proxy if present
username = env.request.headers[Config.current.auth_proxy_header_name]?
unless username && Storage.default.username_exists username
Logger.warn "Header #{Config.current.auth_proxy_header_name} unset " \
"or is not a valid username"
return require_auth env
end
elsif request_path_startswith env, ["/opds"]
# Check auth header if requesting an opds page
unless validate_auth_header env
return require_basic_auth env
end
else
return require_auth env
end

if request_path_startswith env, ["/admin", "/api/admin", "/download"]
# The token (if exists) takes precedence over the default user option.
# this is why we check the default username first before checking the
# token.
should_reject = true
if Config.current.disable_login &&
Storage.default.username_is_admin Config.current.default_username
should_reject = false
end
if env.session.string? "token"
should_reject = !validate_token_admin(env)
end
if should_reject
# Check admin access when requesting an admin page
if request_path_startswith env, %w(/admin /api/admin /download)
unless is_admin? env
env.response.status_code = 403
send_error_page "HTTP 403: You are not authorized to visit " \
"#{env.request.path}"
return
return send_error_page "HTTP 403: You are not authorized to visit " \
"#{env.request.path}"
end
end

# Let the request go through if it passes the above checks
call_next env
end

def call(env)
if request_path_startswith env, ["/opds"]
handle_opds_auth env
else
handle_auth env
end
end
end
188
src/library/cache.cr
Normal file
188
src/library/cache.cr
Normal file
@@ -0,0 +1,188 @@
|
||||
require "digest"
|
||||
|
||||
require "./entry"
|
||||
require "./types"
|
||||
|
||||
# Base class for an entry in the LRU cache.
|
||||
# There are two ways to use it:
|
||||
# 1. Use it as it is by instantiating with the appropriate `SaveT` and
|
||||
# `ReturnT`. Note that in this case, `SaveT` and `ReturnT` must be the
|
||||
# same type. That is, the input value will be stored as it is without
|
||||
# any transformation.
|
||||
# 2. You can also subclass it and provide custom implementations for
|
||||
# `to_save_t` and `to_return_t`. This allows you to transform and store
|
||||
# the input value to a different type. See `SortedEntriesCacheEntry` as
|
||||
# an example.
|
||||
private class CacheEntry(SaveT, ReturnT)
|
||||
getter key : String, atime : Time
|
||||
|
||||
@value : SaveT
|
||||
|
||||
def initialize(@key : String, value : ReturnT)
|
||||
@atime = @ctime = Time.utc
|
||||
@value = self.class.to_save_t value
|
||||
end
|
||||
|
||||
def value
|
||||
@atime = Time.utc
|
||||
self.class.to_return_t @value
|
||||
end
|
||||
|
||||
def self.to_save_t(value : ReturnT)
|
||||
value
|
||||
end
|
||||
|
||||
def self.to_return_t(value : SaveT)
|
||||
value
|
||||
end
|
||||
|
||||
def instance_size
|
||||
instance_sizeof(CacheEntry(SaveT, ReturnT)) + # sizeof itself
|
||||
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
|
||||
@value.instance_size
|
||||
end
|
||||
end
|
||||
|
||||
class SortedEntriesCacheEntry < CacheEntry(Array(String), Array(Entry))
|
||||
def self.to_save_t(value : Array(Entry))
|
||||
value.map &.id
|
||||
end
|
||||
|
||||
def self.to_return_t(value : Array(String))
|
||||
ids_to_entries value
|
||||
end
|
||||
|
||||
private def self.ids_to_entries(ids : Array(String))
|
||||
e_map = Library.default.deep_entries.to_h { |entry| {entry.id, entry} }
|
||||
entries = [] of Entry
|
||||
begin
|
||||
ids.each do |id|
|
||||
entries << e_map[id]
|
||||
end
|
||||
return entries if ids.size == entries.size
|
||||
rescue
|
||||
end
|
||||
end
|
||||
|
||||
def instance_size
|
||||
instance_sizeof(SortedEntriesCacheEntry) + # sizeof itself
|
||||
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
|
||||
@value.size * (instance_sizeof(String) + sizeof(String)) +
|
||||
@value.sum(&.bytesize) # elements in Array(String)
|
||||
end
|
||||
|
||||
def self.gen_key(book_id : String, username : String,
|
||||
entries : Array(Entry), opt : SortOptions?)
|
||||
entries_sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
|
||||
user_context = opt && opt.method == SortMethod::Progress ? username : ""
|
||||
sig = Digest::SHA1.hexdigest (book_id + entries_sig + user_context +
|
||||
(opt ? opt.to_tuple.to_s : "nil"))
|
||||
"#{sig}:sorted_entries"
|
||||
end
|
||||
end
|
||||
|
||||
class String
|
||||
def instance_size
|
||||
instance_sizeof(String) + bytesize
|
||||
end
|
||||
end
|
||||
|
||||
struct Tuple(*T)
|
||||
def instance_size
|
||||
sizeof(T) + # total size of non-reference types
|
||||
self.sum do |e|
|
||||
next 0 unless e.is_a? Reference
|
||||
if e.responds_to? :instance_size
|
||||
e.instance_size
|
||||
else
|
||||
instance_sizeof(typeof(e))
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
alias CacheableType = Array(Entry) | String | Tuple(String, Int32)
|
||||
alias CacheEntryType = SortedEntriesCacheEntry |
|
||||
CacheEntry(String, String) |
|
||||
CacheEntry(Tuple(String, Int32), Tuple(String, Int32))
|
||||
|
||||
def generate_cache_entry(key : String, value : CacheableType)
|
||||
if value.is_a? Array(Entry)
|
||||
SortedEntriesCacheEntry.new key, value
|
||||
else
|
||||
CacheEntry(typeof(value), typeof(value)).new key, value
|
||||
end
|
||||
end
|
||||
|
||||
# LRU Cache
class LRUCache
  @@limit : Int128 = Int128.new 0
  @@should_log = true
  # key => entry
  @@cache = {} of String => CacheEntryType

  def self.enabled
    Config.current.cache_enabled
  end

  def self.init
    cache_size = Config.current.cache_size_mbs
    @@limit = Int128.new cache_size * 1024 * 1024 if enabled
    @@should_log = Config.current.cache_log_enabled
  end

  def self.get(key : String)
    return unless enabled
    entry = @@cache[key]?
    if @@should_log
      Logger.debug "LRUCache #{entry.nil? ? "miss" : "hit"} #{key}"
    end
    return entry.value unless entry.nil?
  end

  def self.set(cache_entry : CacheEntryType)
    return unless enabled
    key = cache_entry.key
    @@cache[key] = cache_entry
    Logger.debug "LRUCache cached #{key}" if @@should_log
    remove_least_recent_access
  end

  def self.invalidate(key : String)
    return unless enabled
    @@cache.delete key
  end

  def self.print
    return unless @@should_log
    sum = @@cache.sum { |_, entry| entry.instance_size }
    Logger.debug "---- LRU Cache ----"
    Logger.debug "Size: #{sum} Bytes"
    Logger.debug "List:"
    @@cache.each do |k, v|
      Logger.debug "#{k} | #{v.atime} | #{v.instance_size}"
    end
    Logger.debug "-------------------"
  end

  private def self.is_cache_full
    sum = @@cache.sum { |_, entry| entry.instance_size }
    sum > @@limit
  end

  private def self.remove_least_recent_access
    if @@should_log && is_cache_full
      Logger.debug "Removing entries from LRUCache"
    end
    while is_cache_full && @@cache.size > 0
      min_tuple = @@cache.min_by { |_, entry| entry.atime }
      min_key = min_tuple[0]
      min_entry = min_tuple[1]

      Logger.debug " \
        Target: #{min_key}, \
        Last Access Time: #{min_entry.atime}" if @@should_log
      invalidate min_key
    end
  end
end

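# End-to-end hedged sketch of the cache defined above. `LRUCache.init` reads
# `cache_enabled`, `cache_size_mbs`, and `cache_log_enabled` from the config,
# so the hit below assumes caching is enabled; the key and value are
# illustrative:
#
# ```
# LRUCache.init
# LRUCache.set generate_cache_entry("example:key", "hello")
# LRUCache.get "example:key"   # => "hello" (and the entry's atime is refreshed)
# LRUCache.invalidate "example:key"
# LRUCache.get "example:key"   # => nil
# ```
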
@@ -1,6 +1,9 @@
|
||||
require "image_size"
|
||||
require "yaml"
|
||||
|
||||
class Entry
|
||||
include YAML::Serializable
|
||||
|
||||
getter zip_path : String, book : Title, title : String,
|
||||
size : String, pages : Int32, id : String, encoded_path : String,
|
||||
encoded_title : String, mtime : Time, err_msg : String?
|
||||
@@ -46,16 +49,20 @@ class Entry
|
||||
file.close
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
{% for str in ["zip_path", "title", "size", "id"] %}
|
||||
def build_json(*, slim = false)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for str in ["zip_path", "title", "size", "id"] %}
|
||||
json.field {{str}}, @{{str.id}}
|
||||
{% end %}
|
||||
json.field "title_id", @book.id
|
||||
json.field "display_name", @book.display_name @title
|
||||
json.field "cover_url", cover_url
|
||||
json.field "pages" { json.number @pages }
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
json.field "title_id", @book.id
|
||||
json.field "pages" { json.number @pages }
|
||||
unless slim
|
||||
json.field "display_name", @book.display_name @title
|
||||
json.field "cover_url", cover_url
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -69,9 +76,17 @@ class Entry
|
||||
|
||||
def cover_url
|
||||
return "#{Config.current.base_url}img/icon.png" if @err_msg
|
||||
|
||||
unless @book.entry_cover_url_cache
|
||||
TitleInfo.new @book.dir do |info|
|
||||
@book.entry_cover_url_cache = info.entry_cover_url
|
||||
end
|
||||
end
|
||||
entry_cover_url = @book.entry_cover_url_cache
|
||||
|
||||
url = "#{Config.current.base_url}api/cover/#{@book.id}/#{@id}"
|
||||
TitleInfo.new @book.dir do |info|
|
||||
info_url = info.entry_cover_url[@title]?
|
||||
if entry_cover_url
|
||||
info_url = entry_cover_url[@title]?
|
||||
unless info_url.nil? || info_url.empty?
|
||||
url = File.join Config.current.base_url, info_url
|
||||
end
|
||||
@@ -86,7 +101,7 @@ class Entry
|
||||
SUPPORTED_IMG_TYPES.includes? \
|
||||
MIME.from_filename? e.filename
|
||||
}
|
||||
.sort { |a, b|
|
||||
.sort! { |a, b|
|
||||
compare_numerically a.filename, b.filename
|
||||
}
|
||||
yield file, entries
|
||||
@@ -134,10 +149,11 @@ class Entry
|
||||
entries[idx + 1]
|
||||
end
|
||||
|
||||
def previous_entry
|
||||
idx = @book.entries.index self
|
||||
def previous_entry(username)
|
||||
entries = @book.sorted_entries username
|
||||
idx = entries.index self
|
||||
return nil if idx.nil? || idx == 0
|
||||
@book.entries[idx - 1]
|
||||
entries[idx - 1]
|
||||
end
|
||||
|
||||
def date_added
|
||||
@@ -157,6 +173,16 @@ class Entry
|
||||
# For backward compatibility with v0.1.0, we save entry titles
|
||||
# instead of IDs in info.json
|
||||
def save_progress(username, page)
|
||||
LRUCache.invalidate "#{@book.id}:#{username}:progress_sum"
|
||||
@book.parents.each do |parent|
|
||||
LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
|
||||
end
|
||||
[false, true].each do |ascend|
|
||||
sorted_entries_cache_key = SortedEntriesCacheEntry.gen_key @book.id,
|
||||
username, @book.entries, SortOptions.new(SortMethod::Progress, ascend)
|
||||
LRUCache.invalidate sorted_entries_cache_key
|
||||
end
|
||||
|
||||
TitleInfo.new @book.dir do |info|
|
||||
if info.progress[username]?.nil?
|
||||
info.progress[username] = {@title => page}
|
||||
|
||||
@@ -1,12 +1,38 @@
|
||||
class Library
|
||||
include YAML::Serializable
|
||||
|
||||
getter dir : String, title_ids : Array(String),
|
||||
title_hash : Hash(String, Title)
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
register_mime_types
|
||||
def save_instance
|
||||
path = Config.current.library_cache_path
|
||||
Logger.debug "Caching library to #{path}"
|
||||
|
||||
writer = Compress::Gzip::Writer.new path,
|
||||
Compress::Gzip::BEST_COMPRESSION
|
||||
writer.write self.to_yaml.to_slice
|
||||
writer.close
|
||||
end
|
||||
|
||||
def self.load_instance
|
||||
path = Config.current.library_cache_path
|
||||
return unless File.exists? path
|
||||
|
||||
Logger.debug "Loading cached library from #{path}"
|
||||
|
||||
begin
|
||||
Compress::Gzip::Reader.open path do |content|
|
||||
@@default = Library.from_yaml content
|
||||
end
|
||||
Library.default.register_jobs
|
||||
rescue e
|
||||
Logger.error e
|
||||
end
|
||||
end
|
||||
|
||||
def initialize
|
||||
@dir = Config.current.library_path
|
||||
# explicitly initialize @titles to bypass the compiler check. it will
|
||||
# be filled with actual Titles in the `scan` call below
|
||||
@@ -16,6 +42,12 @@ class Library
|
||||
@entries_count = 0
|
||||
@thumbnails_count = 0
|
||||
|
||||
register_jobs
|
||||
end
|
||||
|
||||
protected def register_jobs
|
||||
register_mime_types
|
||||
|
||||
scan_interval = Config.current.scan_interval_minutes
|
||||
if scan_interval < 1
|
||||
scan
|
||||
@@ -25,7 +57,7 @@ class Library
|
||||
start = Time.local
|
||||
scan
|
||||
ms = (Time.local - start).total_milliseconds
|
||||
Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
|
||||
Logger.debug "Library initialized in #{ms}ms"
|
||||
sleep scan_interval.minutes
|
||||
end
|
||||
end
|
||||
@@ -51,11 +83,6 @@ class Library
|
||||
def sorted_titles(username, opt : SortOptions? = nil)
|
||||
if opt.nil?
|
||||
opt = SortOptions.from_info_json @dir, username
|
||||
else
|
||||
TitleInfo.new @dir do |info|
|
||||
info.sort_by[username] = opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
# Helper function from src/util/util.cr
|
||||
@@ -63,14 +90,24 @@ class Library
|
||||
end
|
||||
|
||||
def deep_titles
|
||||
titles + titles.map { |t| t.deep_titles }.flatten
|
||||
titles + titles.flat_map &.deep_titles
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
json.field "dir", @dir
|
||||
json.field "titles" do
|
||||
json.raw self.titles.to_json
|
||||
def deep_entries
|
||||
titles.flat_map &.deep_entries
|
||||
end
|
||||
|
||||
def build_json(*, slim = false, depth = -1)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
json.field "dir", @dir
|
||||
json.field "titles" do
|
||||
json.array do
|
||||
self.titles.each do |title|
|
||||
json.raw title.build_json(slim: slim, depth: depth)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -84,6 +121,7 @@ class Library
|
||||
end
|
||||
|
||||
def scan
|
||||
start = Time.local
|
||||
unless Dir.exists? @dir
|
||||
Logger.info "The library directory #{@dir} does not exist. " \
|
||||
"Attempting to create it"
|
||||
@@ -92,14 +130,36 @@ class Library
|
||||
|
||||
storage = Storage.new auto_close: false
|
||||
|
||||
examine_context : ExamineContext = {
|
||||
cached_contents_signature: {} of String => String,
|
||||
deleted_title_ids: [] of String,
|
||||
deleted_entry_ids: [] of String,
|
||||
}
|
||||
|
||||
@title_ids.select! do |title_id|
|
||||
title = @title_hash[title_id]
|
||||
existence = title.examine examine_context
|
||||
unless existence
|
||||
examine_context["deleted_title_ids"].concat [title_id] +
|
||||
title.deep_titles.map &.id
|
||||
examine_context["deleted_entry_ids"].concat title.deep_entries.map &.id
|
||||
end
|
||||
existence
|
||||
end
|
||||
remained_title_dirs = @title_ids.map { |id| title_hash[id].dir }
|
||||
examine_context["deleted_title_ids"].each do |title_id|
|
||||
@title_hash.delete title_id
|
||||
end
|
||||
|
||||
cache = examine_context["cached_contents_signature"]
|
||||
(Dir.entries @dir)
|
||||
.select { |fn| !fn.starts_with? "." }
|
||||
.map { |fn| File.join @dir, fn }
|
||||
.select { |path| !(remained_title_dirs.includes? path) }
|
||||
.select { |path| File.directory? path }
|
||||
.map { |path| Title.new path, "" }
|
||||
.map { |path| Title.new path, "", cache }
|
||||
.select { |title| !(title.entries.empty? && title.titles.empty?) }
|
||||
.sort { |a, b| a.title <=> b.title }
|
||||
.tap { |_| @title_ids.clear }
|
||||
.sort! { |a, b| a.title <=> b.title }
|
||||
.each do |title|
|
||||
@title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
@@ -108,20 +168,27 @@ class Library
|
||||
storage.bulk_insert_ids
|
||||
storage.close
|
||||
|
||||
Logger.debug "Scan completed"
|
||||
Storage.default.mark_unavailable
|
||||
ms = (Time.local - start).total_milliseconds
|
||||
Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
|
||||
|
||||
Storage.default.mark_unavailable examine_context["deleted_entry_ids"],
|
||||
examine_context["deleted_title_ids"]
|
||||
|
||||
spawn do
|
||||
save_instance
|
||||
end
|
||||
end
|
||||
|
||||
def get_continue_reading_entries(username)
|
||||
cr_entries = deep_titles
|
||||
.map { |t| t.get_last_read_entry username }
|
||||
.map(&.get_last_read_entry username)
|
||||
# Select elements with type `Entry` from the array and ignore all `Nil`s
|
||||
.select(Entry)[0...ENTRIES_IN_HOME_SECTIONS]
|
||||
.map { |e|
|
||||
# Get the last read time of the entry. If it hasn't been started, get
|
||||
# the last read time of the previous entry
|
||||
last_read = e.load_last_read username
|
||||
pe = e.previous_entry
|
||||
pe = e.previous_entry username
|
||||
if last_read.nil? && pe
|
||||
last_read = pe.load_last_read username
|
||||
end
|
||||
@@ -150,14 +217,14 @@ class Library
|
||||
recently_added = [] of RA
|
||||
last_date_added = nil
|
||||
|
||||
titles.map { |t| t.deep_entries_with_date_added }.flatten
|
||||
.select { |e| e[:date_added] > 1.month.ago }
|
||||
.sort { |a, b| b[:date_added] <=> a[:date_added] }
|
||||
titles.flat_map(&.deep_entries_with_date_added)
|
||||
.select(&.[:date_added].> 1.month.ago)
|
||||
.sort! { |a, b| b[:date_added] <=> a[:date_added] }
|
||||
.each do |e|
|
||||
break if recently_added.size > 12
|
||||
last = recently_added.last?
|
||||
if last && e[:entry].book.id == last[:entry].book.id &&
|
||||
(e[:date_added] - last_date_added.not_nil!).duration < 1.day
|
||||
(e[:date_added] - last_date_added.not_nil!).abs < 1.day
|
||||
# A NamedTuple is immutable, so we have to cast it to a Hash first
|
||||
last_hash = last.to_h
|
||||
count = last_hash[:grouped_count].as(Int32)
|
||||
@@ -188,9 +255,9 @@ class Library
|
||||
# If we use `deep_titles`, the start reading section might include `Vol. 2`
|
||||
# when the user hasn't started `Vol. 1` yet
|
||||
titles
|
||||
.select { |t| t.load_percentage(username) == 0 }
|
||||
.select(&.load_percentage(username).== 0)
|
||||
.sample(ENTRIES_IN_HOME_SECTIONS)
|
||||
.shuffle
|
||||
.shuffle!
|
||||
end
|
||||
|
||||
def thumbnail_generation_progress
|
||||
@@ -205,7 +272,7 @@ class Library
|
||||
end
|
||||
|
||||
Logger.info "Starting thumbnail generation"
|
||||
entries = deep_titles.map(&.deep_entries).flatten.reject &.err_msg
|
||||
entries = deep_titles.flat_map(&.deep_entries).reject &.err_msg
|
||||
@entries_count = entries.size
|
||||
@thumbnails_count = 0
|
||||
|
||||
|
||||
@@ -1,13 +1,25 @@
|
||||
require "digest"
|
||||
require "../archive"
|
||||
|
||||
class Title
|
||||
include YAML::Serializable
|
||||
|
||||
getter dir : String, parent_id : String, title_ids : Array(String),
|
||||
entries : Array(Entry), title : String, id : String,
|
||||
encoded_title : String, mtime : Time, signature : UInt64
|
||||
encoded_title : String, mtime : Time, signature : UInt64,
|
||||
entry_cover_url_cache : Hash(String, String)?
|
||||
setter entry_cover_url_cache : Hash(String, String)?
|
||||
|
||||
@[YAML::Field(ignore: true)]
|
||||
@entry_display_name_cache : Hash(String, String)?
|
||||
@[YAML::Field(ignore: true)]
|
||||
@entry_cover_url_cache : Hash(String, String)?
|
||||
@[YAML::Field(ignore: true)]
|
||||
@cached_display_name : String?
|
||||
@[YAML::Field(ignore: true)]
|
||||
@cached_cover_url : String?
|
||||
|
||||
def initialize(@dir : String, @parent_id)
|
||||
def initialize(@dir : String, @parent_id, cache = {} of String => String)
|
||||
storage = Storage.default
|
||||
@signature = Dir.signature dir
|
||||
id = storage.get_title_id dir, signature
|
||||
@@ -20,6 +32,7 @@ class Title
|
||||
})
|
||||
end
|
||||
@id = id
|
||||
@contents_signature = Dir.contents_signature dir, cache
|
||||
@title = File.basename dir
|
||||
@encoded_title = URI.encode @title
|
||||
@title_ids = [] of String
|
||||
@@ -30,7 +43,7 @@ class Title
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dir, fn
|
||||
if File.directory? path
|
||||
title = Title.new path, @id
|
||||
title = Title.new path, @id, cache
|
||||
next if title.entries.size == 0 && title.titles.size == 0
|
||||
Library.default.title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
@@ -44,40 +57,164 @@ class Title
|
||||
|
||||
mtimes = [@mtime]
|
||||
mtimes += @title_ids.map { |e| Library.default.title_hash[e].mtime }
|
||||
mtimes += @entries.map { |e| e.mtime }
|
||||
mtimes += @entries.map &.mtime
|
||||
@mtime = mtimes.max
|
||||
|
||||
@title_ids.sort! do |a, b|
|
||||
compare_numerically Library.default.title_hash[a].title,
|
||||
Library.default.title_hash[b].title
|
||||
end
|
||||
sorter = ChapterSorter.new @entries.map { |e| e.title }
|
||||
sorter = ChapterSorter.new @entries.map &.title
|
||||
@entries.sort! do |a, b|
|
||||
sorter.compare a.title, b.title
|
||||
end
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
{% for str in ["dir", "title", "id"] %}
|
||||
# Utility method used in library rescanning.
|
||||
# - When the title does not exist on the file system anymore, return false
|
||||
# and let it be deleted from the library instance
|
||||
# - When the title exists, but its contents signature is now different from
|
||||
# the cache, it means some of its content (nested titles or entries)
|
||||
# has been added, deleted, or renamed. In this case we update its
|
||||
# contents signature and instance variables
|
||||
# - When the title exists and its contents signature is still the same, we
|
||||
# return true so it can be reused without rescanning
|
||||
def examine(context : ExamineContext) : Bool
|
||||
return false unless Dir.exists? @dir
|
||||
contents_signature = Dir.contents_signature @dir,
|
||||
context["cached_contents_signature"]
|
||||
return true if @contents_signature == contents_signature
|
||||
|
||||
@contents_signature = contents_signature
|
||||
@signature = Dir.signature @dir
|
||||
storage = Storage.default
|
||||
id = storage.get_title_id dir, signature
|
||||
if id.nil?
|
||||
id = random_str
|
||||
storage.insert_title_id({
|
||||
path: dir,
|
||||
id: id,
|
||||
signature: signature.to_s,
|
||||
})
|
||||
end
|
||||
@id = id
|
||||
@mtime = File.info(@dir).modification_time
|
||||
|
||||
previous_titles_size = @title_ids.size
|
||||
@title_ids.select! do |title_id|
|
||||
title = Library.default.get_title! title_id
|
||||
existence = title.examine context
|
||||
unless existence
|
||||
context["deleted_title_ids"].concat [title_id] +
|
||||
title.deep_titles.map &.id
|
||||
context["deleted_entry_ids"].concat title.deep_entries.map &.id
|
||||
end
|
||||
existence
|
||||
end
|
||||
remained_title_dirs = @title_ids.map do |title_id|
|
||||
title = Library.default.get_title! title_id
|
||||
title.dir
|
||||
end
|
||||
|
||||
previous_entries_size = @entries.size
|
||||
@entries.select! do |entry|
|
||||
existence = File.exists? entry.zip_path
|
||||
Fiber.yield
|
||||
context["deleted_entry_ids"] << entry.id unless existence
|
||||
existence
|
||||
end
|
||||
remained_entry_zip_paths = @entries.map &.zip_path
|
||||
|
||||
is_titles_added = false
|
||||
is_entries_added = false
|
||||
Dir.entries(dir).each do |fn|
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dir, fn
|
||||
if File.directory? path
|
||||
next if remained_title_dirs.includes? path
|
||||
title = Title.new path, @id, context["cached_contents_signature"]
|
||||
next if title.entries.size == 0 && title.titles.size == 0
|
||||
Library.default.title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
is_titles_added = true
|
||||
next
|
||||
end
|
||||
if is_supported_file path
|
||||
next if remained_entry_zip_paths.includes? path
|
||||
entry = Entry.new path, self
|
||||
if entry.pages > 0 || entry.err_msg
|
||||
@entries << entry
|
||||
is_entries_added = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
mtimes = [@mtime]
|
||||
mtimes += @title_ids.map { |e| Library.default.title_hash[e].mtime }
|
||||
mtimes += @entries.map &.mtime
|
||||
@mtime = mtimes.max
|
||||
|
||||
if is_titles_added || previous_titles_size != @title_ids.size
|
||||
@title_ids.sort! do |a, b|
|
||||
compare_numerically Library.default.title_hash[a].title,
|
||||
Library.default.title_hash[b].title
|
||||
end
|
||||
end
|
||||
if is_entries_added || previous_entries_size != @entries.size
|
||||
sorter = ChapterSorter.new @entries.map &.title
|
||||
@entries.sort! do |a, b|
|
||||
sorter.compare a.title, b.title
|
||||
end
|
||||
end
|
||||
|
||||
true
|
||||
end
|
||||
|
||||
alias SortContext = NamedTuple(username: String, opt: SortOptions)
|
||||
|
||||
def build_json(*, slim = false, depth = -1,
|
||||
sort_context : SortContext? = nil)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for str in ["dir", "title", "id"] %}
|
||||
json.field {{str}}, @{{str.id}}
|
||||
{% end %}
|
||||
json.field "signature" { json.number @signature }
|
||||
json.field "display_name", display_name
|
||||
json.field "cover_url", cover_url
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
json.field "titles" do
|
||||
json.raw self.titles.to_json
|
||||
end
|
||||
json.field "entries" do
|
||||
json.raw @entries.to_json
|
||||
end
|
||||
json.field "parents" do
|
||||
json.array do
|
||||
self.parents.each do |title|
|
||||
json.object do
|
||||
json.field "title", title.title
|
||||
json.field "id", title.id
|
||||
json.field "signature" { json.number @signature }
|
||||
unless slim
|
||||
json.field "display_name", display_name
|
||||
json.field "cover_url", cover_url
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
end
|
||||
unless depth == 0
|
||||
json.field "titles" do
|
||||
json.array do
|
||||
self.titles.each do |title|
|
||||
json.raw title.build_json(slim: slim,
|
||||
depth: depth > 0 ? depth - 1 : depth)
|
||||
end
|
||||
end
|
||||
end
|
||||
json.field "entries" do
|
||||
json.array do
|
||||
_entries = if sort_context
|
||||
sorted_entries sort_context[:username],
|
||||
sort_context[:opt]
|
||||
else
|
||||
@entries
|
||||
end
|
||||
_entries.each do |entry|
|
||||
json.raw entry.build_json(slim: slim)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
json.field "parents" do
|
||||
json.array do
|
||||
self.parents.each do |title|
|
||||
json.object do
|
||||
json.field "title", title.title
|
||||
json.field "id", title.id
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -92,12 +229,12 @@ class Title
|
||||
# Get all entries, including entries in nested titles
|
||||
def deep_entries
|
||||
return @entries if title_ids.empty?
|
||||
@entries + titles.map { |t| t.deep_entries }.flatten
|
||||
@entries + titles.flat_map &.deep_entries
|
||||
end
|
||||
|
||||
def deep_titles
|
||||
return [] of Title if titles.empty?
|
||||
titles + titles.map { |t| t.deep_titles }.flatten
|
||||
titles + titles.flat_map &.deep_titles
|
||||
end
|
||||
|
||||
def parents
|
||||
@@ -138,15 +275,19 @@ class Title
|
||||
end
|
||||
|
||||
def get_entry(eid)
|
||||
@entries.find { |e| e.id == eid }
|
||||
@entries.find &.id.== eid
|
||||
end
|
||||
|
||||
def display_name
|
||||
cached_display_name = @cached_display_name
|
||||
return cached_display_name unless cached_display_name.nil?
|
||||
|
||||
dn = @title
|
||||
TitleInfo.new @dir do |info|
|
||||
info_dn = info.display_name
|
||||
dn = info_dn unless info_dn.empty?
|
||||
end
|
||||
@cached_display_name = dn
|
||||
dn
|
||||
end
|
||||
|
||||
@@ -170,6 +311,7 @@ class Title
|
||||
end
|
||||
|
||||
def set_display_name(dn)
|
||||
@cached_display_name = dn
|
||||
TitleInfo.new @dir do |info|
|
||||
info.display_name = dn
|
||||
info.save
|
||||
@@ -179,11 +321,15 @@ class Title
|
||||
def set_display_name(entry_name : String, dn)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.entry_display_name[entry_name] = dn
|
||||
@entry_display_name_cache = info.entry_display_name
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
def cover_url
|
||||
cached_cover_url = @cached_cover_url
|
||||
return cached_cover_url unless cached_cover_url.nil?
|
||||
|
||||
url = "#{Config.current.base_url}img/icon.png"
|
||||
readable_entries = @entries.select &.err_msg.nil?
|
||||
if readable_entries.size > 0
|
||||
@@ -195,10 +341,12 @@ class Title
|
||||
url = File.join Config.current.base_url, info_url
|
||||
end
|
||||
end
|
||||
@cached_cover_url = url
|
||||
url
|
||||
end
|
||||
|
||||
def set_cover_url(url : String)
|
||||
@cached_cover_url = url
|
||||
TitleInfo.new @dir do |info|
|
||||
info.cover_url = url
|
||||
info.save
|
||||
@@ -208,6 +356,7 @@ class Title
|
||||
def set_cover_url(entry_name : String, url : String)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.entry_cover_url[entry_name] = url
|
||||
@entry_cover_url_cache = info.entry_cover_url
|
||||
info.save
|
||||
end
|
||||
end
|
||||
@@ -217,29 +366,30 @@ class Title
|
||||
@entries.each do |e|
|
||||
e.save_progress username, e.pages
|
||||
end
|
||||
titles.each do |t|
|
||||
t.read_all username
|
||||
end
|
||||
titles.each &.read_all username
|
||||
end
|
||||
|
||||
# Set the reading progress of all entries and nested libraries to 0%
|
||||
def unread_all(username)
|
||||
@entries.each do |e|
|
||||
e.save_progress username, 0
|
||||
end
|
||||
titles.each do |t|
|
||||
t.unread_all username
|
||||
end
|
||||
@entries.each &.save_progress(username, 0)
|
||||
titles.each &.unread_all username
|
||||
end
|
||||
|
||||
def deep_read_page_count(username) : Int32
|
||||
load_progress_for_all_entries(username).sum +
|
||||
titles.map { |t| t.deep_read_page_count username }.flatten.sum
|
||||
key = "#{@id}:#{username}:progress_sum"
|
||||
sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
|
||||
cached_sum = LRUCache.get key
|
||||
return cached_sum[1] if cached_sum.is_a? Tuple(String, Int32) &&
|
||||
cached_sum[0] == sig
|
||||
sum = load_progress_for_all_entries(username, nil, true).sum +
|
||||
titles.flat_map(&.deep_read_page_count username).sum
|
||||
LRUCache.set generate_cache_entry key, {sig, sum}
|
||||
sum
|
||||
end
|
||||
|
||||
def deep_total_page_count : Int32
|
||||
entries.map { |e| e.pages }.sum +
|
||||
titles.map { |t| t.deep_total_page_count }.flatten.sum
|
||||
entries.sum(&.pages) +
|
||||
titles.flat_map(&.deep_total_page_count).sum
|
||||
end
|
||||
|
||||
def load_percentage(username)
|
||||
@@ -288,13 +438,12 @@ class Title
|
||||
# use the default (auto, ascending)
|
||||
# When `opt` is not nil, it saves the options to info.json
|
||||
def sorted_entries(username, opt : SortOptions? = nil)
|
||||
cache_key = SortedEntriesCacheEntry.gen_key @id, username, @entries, opt
|
||||
cached_entries = LRUCache.get cache_key
|
||||
return cached_entries if cached_entries.is_a? Array(Entry)
|
||||
|
||||
if opt.nil?
|
||||
opt = SortOptions.from_info_json @dir, username
|
||||
else
|
||||
TitleInfo.new @dir do |info|
|
||||
info.sort_by[username] = opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
case opt.not_nil!.method
|
||||
@@ -311,13 +460,13 @@ class Title
|
||||
ary = @entries.zip(percentage_ary)
|
||||
.sort { |a_tp, b_tp| (a_tp[1] <=> b_tp[1]).or \
|
||||
compare_numerically a_tp[0].title, b_tp[0].title }
|
||||
.map { |tp| tp[0] }
|
||||
.map &.[0]
|
||||
else
|
||||
unless opt.method.auto?
|
||||
Logger.warn "Unknown sorting method #{opt.not_nil!.method}. Using " \
|
||||
"Auto instead"
|
||||
end
|
||||
sorter = ChapterSorter.new @entries.map { |e| e.title }
|
||||
sorter = ChapterSorter.new @entries.map &.title
|
||||
ary = @entries.sort do |a, b|
|
||||
sorter.compare(a.title, b.title).or \
|
||||
compare_numerically a.title, b.title
|
||||
@@ -326,6 +475,7 @@ class Title
|
||||
|
||||
ary.reverse! unless opt.not_nil!.ascend
|
||||
|
||||
LRUCache.set generate_cache_entry cache_key, ary
|
||||
ary
|
||||
end
|
||||
|
||||
@@ -383,13 +533,24 @@ class Title
|
||||
{entry: e, date_added: da_ary[i]}
|
||||
end
|
||||
return zip if title_ids.empty?
|
||||
zip + titles.map { |t| t.deep_entries_with_date_added }.flatten
|
||||
zip + titles.flat_map &.deep_entries_with_date_added
|
||||
end
|
||||
|
||||
def bulk_progress(action, ids : Array(String), username)
|
||||
LRUCache.invalidate "#{@id}:#{username}:progress_sum"
|
||||
parents.each do |parent|
|
||||
LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
|
||||
end
|
||||
[false, true].each do |ascend|
|
||||
sorted_entries_cache_key =
|
||||
SortedEntriesCacheEntry.gen_key @id, username, @entries,
|
||||
SortOptions.new(SortMethod::Progress, ascend)
|
||||
LRUCache.invalidate sorted_entries_cache_key
|
||||
end
|
||||
|
||||
selected_entries = ids
|
||||
.map { |id|
|
||||
@entries.find { |e| e.id == id }
|
||||
@entries.find &.id.==(id)
|
||||
}
|
||||
.select(Entry)
|
||||
|
||||
|
||||
@@ -1,4 +1,12 @@
|
||||
SUPPORTED_IMG_TYPES = ["image/jpeg", "image/png", "image/webp"]
|
||||
SUPPORTED_IMG_TYPES = %w(
|
||||
image/jpeg
|
||||
image/png
|
||||
image/webp
|
||||
image/apng
|
||||
image/avif
|
||||
image/gif
|
||||
image/svg+xml
|
||||
)
|
||||
|
||||
enum SortMethod
|
||||
Auto
|
||||
@@ -88,6 +96,18 @@ class TitleInfo
|
||||
@@mutex_hash = {} of String => Mutex
|
||||
|
||||
def self.new(dir, &)
|
||||
key = "#{dir}:info.json"
|
||||
info = LRUCache.get key
|
||||
if info.is_a? String
|
||||
begin
|
||||
instance = TitleInfo.from_json info
|
||||
instance.dir = dir
|
||||
yield instance
|
||||
return
|
||||
rescue
|
||||
end
|
||||
end
|
||||
|
||||
if @@mutex_hash[dir]?
|
||||
mutex = @@mutex_hash[dir]
|
||||
else
|
||||
@@ -101,6 +121,7 @@ class TitleInfo
|
||||
instance = TitleInfo.from_json File.read json_path
|
||||
end
|
||||
instance.dir = dir
|
||||
LRUCache.set generate_cache_entry key, instance.to_json
|
||||
yield instance
|
||||
end
|
||||
end
|
||||
@@ -108,5 +129,12 @@ class TitleInfo
|
||||
def save
|
||||
json_path = File.join @dir, "info.json"
|
||||
File.write json_path, self.to_pretty_json
|
||||
key = "#{@dir}:info.json"
|
||||
LRUCache.set generate_cache_entry key, self.to_json
|
||||
end
|
||||
end
|
||||
|
||||
alias ExamineContext = NamedTuple(
|
||||
cached_contents_signature: Hash(String, String),
|
||||
deleted_title_ids: Array(String),
|
||||
deleted_entry_ids: Array(String))
|
||||
|
||||
@@ -34,7 +34,11 @@ class Logger
|
||||
end
|
||||
|
||||
@backend.formatter = Log::Formatter.new &format_proc
|
||||
Log.setup @@severity, @backend
|
||||
|
||||
Log.setup do |c|
|
||||
c.bind "*", @@severity, @backend
|
||||
c.bind "db.*", :error, @backend
|
||||
end
|
||||
end
|
||||
|
||||
def self.get_severity(level = "") : Log::Severity
|
||||
|
||||
@@ -1,217 +0,0 @@
|
||||
require "json"
|
||||
require "csv"
|
||||
require "../rename"
|
||||
|
||||
macro string_properties(names)
|
||||
{% for name in names %}
|
||||
property {{name.id}} = ""
|
||||
{% end %}
|
||||
end
|
||||
|
||||
macro parse_strings_from_json(names)
|
||||
{% for name in names %}
|
||||
@{{name.id}} = obj[{{name}}].as_s
|
||||
{% end %}
|
||||
end
|
||||
|
||||
macro properties_to_hash(names)
|
||||
{
|
||||
{% for name in names %}
|
||||
"{{name.id}}" => @{{name.id}}.to_s,
|
||||
{% end %}
|
||||
}
|
||||
end
|
||||
|
||||
module MangaDex
|
||||
class Chapter
|
||||
string_properties ["lang_code", "title", "volume", "chapter"]
|
||||
property manga : Manga
|
||||
property time = Time.local
|
||||
property id : String
|
||||
property full_title = ""
|
||||
property language = ""
|
||||
property pages = [] of {String, String} # filename, url
|
||||
property groups = [] of {Int32, String} # group_id, group_name
|
||||
|
||||
def initialize(@id, json_obj : JSON::Any, @manga,
|
||||
lang : Hash(String, String))
|
||||
self.parse_json json_obj, lang
|
||||
end
|
||||
|
||||
def to_info_json
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for name in ["id", "title", "volume", "chapter",
|
||||
"language", "full_title"] %}
|
||||
json.field {{name}}, @{{name.id}}
|
||||
{% end %}
|
||||
json.field "time", @time.to_unix.to_s
|
||||
json.field "manga_title", @manga.title
|
||||
json.field "manga_id", @manga.id
|
||||
json.field "groups" do
|
||||
json.object do
|
||||
@groups.each do |gid, gname|
|
||||
json.field gname, gid
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_json(obj, lang)
|
||||
parse_strings_from_json ["lang_code", "title", "volume",
|
||||
"chapter"]
|
||||
language = lang[@lang_code]?
|
||||
@language = language if language
|
||||
@time = Time.unix obj["timestamp"].as_i
|
||||
suffixes = ["", "_2", "_3"]
|
||||
suffixes.each do |s|
|
||||
gid = obj["group_id#{s}"].as_i
|
||||
next if gid == 0
|
||||
gname = obj["group_name#{s}"].as_s
|
||||
@groups << {gid, gname}
|
||||
end
|
||||
|
||||
rename_rule = Rename::Rule.new \
|
||||
Config.current.mangadex["chapter_rename_rule"].to_s
|
||||
@full_title = rename rename_rule
|
||||
rescue e
|
||||
raise "failed to parse json: #{e}"
|
||||
end
|
||||
|
||||
def rename(rule : Rename::Rule)
|
||||
hash = properties_to_hash ["id", "title", "volume", "chapter",
|
||||
"lang_code", "language", "pages"]
|
||||
hash["groups"] = @groups.map { |g| g[1] }.join ","
|
||||
rule.render hash
|
||||
end
|
||||
end
|
||||
|
||||
class Manga
|
||||
string_properties ["cover_url", "description", "title", "author", "artist"]
|
||||
property chapters = [] of Chapter
|
||||
property id : String
|
||||
|
||||
def initialize(@id, json_obj : JSON::Any)
|
||||
self.parse_json json_obj
|
||||
end
|
||||
|
||||
def to_info_json(with_chapters = true)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for name in ["id", "title", "description", "author", "artist",
|
||||
"cover_url"] %}
|
||||
json.field {{name}}, @{{name.id}}
|
||||
{% end %}
|
||||
if with_chapters
|
||||
json.field "chapters" do
|
||||
json.array do
|
||||
@chapters.each do |c|
|
||||
json.raw c.to_info_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_json(obj)
|
||||
parse_strings_from_json ["cover_url", "description", "title", "author",
|
||||
"artist"]
|
||||
rescue e
|
||||
raise "failed to parse json: #{e}"
|
||||
end
|
||||
|
||||
def rename(rule : Rename::Rule)
|
||||
rule.render properties_to_hash ["id", "title", "author", "artist"]
|
||||
end
|
||||
end
|
||||
|
||||
class API
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
@base_url = Config.current.mangadex["api_url"].to_s ||
|
||||
"https://mangadex.org/api/"
|
||||
@lang = {} of String => String
|
||||
CSV.each_row {{read_file "src/assets/lang_codes.csv"}} do |row|
|
||||
@lang[row[1]] = row[0]
|
||||
end
|
||||
end
|
||||
|
||||
def get(url)
|
||||
headers = HTTP::Headers{
|
||||
"User-agent" => "Mangadex.cr",
|
||||
}
|
||||
res = HTTP::Client.get url, headers
|
||||
raise "Failed to get #{url}. [#{res.status_code}] " \
|
||||
"#{res.status_message}" if !res.success?
|
||||
JSON.parse res.body
|
||||
end
|
||||
|
||||
def get_manga(id)
|
||||
obj = self.get File.join @base_url, "manga/#{id}"
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
begin
|
||||
manga = Manga.new id, obj["manga"]
|
||||
obj["chapter"].as_h.map do |k, v|
|
||||
chapter = Chapter.new k, v, manga, @lang
|
||||
manga.chapters << chapter
|
||||
end
|
||||
manga
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
end
|
||||
|
||||
def get_chapter(chapter : Chapter)
|
||||
obj = self.get File.join @base_url, "chapter/#{chapter.id}"
|
||||
if obj["status"]? == "external"
|
||||
raise "This chapter is hosted on an external site " \
|
||||
"#{obj["external"]?}, and Mango does not support " \
|
||||
"external chapters."
|
||||
end
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
begin
|
||||
server = obj["server"].as_s
|
||||
hash = obj["hash"].as_s
|
||||
chapter.pages = obj["page_array"].as_a.map do |fn|
|
||||
{
|
||||
fn.as_s,
|
||||
"#{server}#{hash}/#{fn.as_s}",
|
||||
}
|
||||
end
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
end
|
||||
|
||||
def get_chapter(id : String)
|
||||
obj = self.get File.join @base_url, "chapter/#{id}"
|
||||
if obj["status"]? == "external"
|
||||
raise "This chapter is hosted on an external site " \
|
||||
"#{obj["external"]?}, and Mango does not support " \
|
||||
"external chapters."
|
||||
end
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
manga_id = ""
|
||||
begin
|
||||
manga_id = obj["manga_id"].as_i.to_s
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
manga = self.get_manga manga_id
|
||||
chapter = manga.chapters.find { |c| c.id == id }.not_nil!
|
||||
self.get_chapter chapter
|
||||
chapter
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,167 +0,0 @@
|
||||
require "./api"
|
||||
require "compress/zip"
|
||||
|
||||
module MangaDex
|
||||
class PageJob
|
||||
property success = false
|
||||
property url : String
|
||||
property filename : String
|
||||
property writer : Compress::Zip::Writer
|
||||
property tries_remaning : Int32
|
||||
|
||||
def initialize(@url, @filename, @writer, @tries_remaning)
|
||||
end
|
||||
end
|
||||
|
||||
class Downloader < Queue::Downloader
|
||||
@wait_seconds : Int32 = Config.current.mangadex["download_wait_seconds"]
|
||||
.to_i32
|
||||
@retries : Int32 = Config.current.mangadex["download_retries"].to_i32
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
@api = API.default
|
||||
super
|
||||
end
|
||||
|
||||
def pop : Queue::Job?
|
||||
job = nil
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@queue.path}" do |db|
|
||||
begin
|
||||
db.query_one "select * from queue where id not like '%-%' " \
|
||||
"and (status = 0 or status = 1) " \
|
||||
"order by time limit 1" do |res|
|
||||
job = Queue::Job.from_query_result res
|
||||
end
|
||||
rescue
|
||||
end
|
||||
end
|
||||
end
|
||||
job
|
||||
end
|
||||
|
||||
private def download(job : Queue::Job)
|
||||
@downloading = true
|
||||
@queue.set_status Queue::JobStatus::Downloading, job
|
||||
begin
|
||||
chapter = @api.get_chapter(job.id)
|
||||
rescue e
|
||||
Logger.error e
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
unless e.message.nil?
|
||||
@queue.add_message e.message.not_nil!, job
|
||||
end
|
||||
@downloading = false
|
||||
return
|
||||
end
|
||||
@queue.set_pages chapter.pages.size, job
|
||||
lib_dir = @library_path
|
||||
rename_rule = Rename::Rule.new \
|
||||
Config.current.mangadex["manga_rename_rule"].to_s
|
||||
manga_dir = File.join lib_dir, chapter.manga.rename rename_rule
|
||||
unless File.exists? manga_dir
|
||||
Dir.mkdir_p manga_dir
|
||||
end
|
||||
zip_path = File.join manga_dir, "#{job.title}.cbz.part"
|
||||
|
||||
# Find the number of digits needed to store the number of pages
|
||||
len = Math.log10(chapter.pages.size).to_i + 1
|
||||
|
||||
writer = Compress::Zip::Writer.new zip_path
|
||||
# Create a buffered channel. It works as an FIFO queue
|
||||
channel = Channel(PageJob).new chapter.pages.size
|
||||
spawn do
|
||||
chapter.pages.each_with_index do |tuple, i|
|
||||
fn, url = tuple
|
||||
ext = File.extname fn
|
||||
fn = "#{i.to_s.rjust len, '0'}#{ext}"
|
||||
page_job = PageJob.new url, fn, writer, @retries
|
||||
Logger.debug "Downloading #{url}"
|
||||
loop do
|
||||
sleep @wait_seconds.seconds
|
||||
download_page page_job
|
||||
break if page_job.success ||
|
||||
page_job.tries_remaning <= 0
|
||||
page_job.tries_remaning -= 1
|
||||
Logger.warn "Failed to download page #{url}. " \
|
||||
"Retrying... Remaining retries: " \
|
||||
"#{page_job.tries_remaning}"
|
||||
end
|
||||
|
||||
channel.send page_job
|
||||
break unless @queue.exists? job
|
||||
end
|
||||
end
|
||||
|
||||
spawn do
|
||||
page_jobs = [] of PageJob
|
||||
chapter.pages.size.times do
|
||||
page_job = channel.receive
|
||||
|
||||
break unless @queue.exists? job
|
||||
|
||||
Logger.debug "[#{page_job.success ? "success" : "failed"}] " \
|
||||
"#{page_job.url}"
|
||||
page_jobs << page_job
|
||||
if page_job.success
|
||||
@queue.add_success job
|
||||
else
|
||||
@queue.add_fail job
|
||||
msg = "Failed to download page #{page_job.url}"
|
||||
@queue.add_message msg, job
|
||||
Logger.error msg
|
||||
end
|
||||
end
|
||||
|
||||
unless @queue.exists? job
|
||||
Logger.debug "Download cancelled"
|
||||
@downloading = false
|
||||
next
|
||||
end
|
||||
|
||||
fail_count = page_jobs.count { |j| !j.success }
|
||||
Logger.debug "Download completed. " \
|
||||
"#{fail_count}/#{page_jobs.size} failed"
|
||||
writer.close
|
||||
filename = File.join File.dirname(zip_path), File.basename(zip_path,
|
||||
".part")
|
||||
File.rename zip_path, filename
|
||||
Logger.debug "cbz File created at #{filename}"
|
||||
|
||||
zip_exception = validate_archive filename
|
||||
if !zip_exception.nil?
|
||||
@queue.add_message "The downloaded archive is corrupted. " \
|
||||
"Error: #{zip_exception}", job
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
elsif fail_count > 0
|
||||
@queue.set_status Queue::JobStatus::MissingPages, job
|
||||
else
|
||||
@queue.set_status Queue::JobStatus::Completed, job
|
||||
end
|
||||
@downloading = false
|
||||
end
|
||||
end
|
||||
|
||||
private def download_page(job : PageJob)
|
||||
Logger.debug "downloading #{job.url}"
|
||||
headers = HTTP::Headers{
|
||||
"User-agent" => "Mangadex.cr",
|
||||
}
|
||||
begin
|
||||
HTTP::Client.get job.url, headers do |res|
|
||||
unless res.success?
|
||||
raise "Failed to download page #{job.url}. " \
|
||||
"[#{res.status_code}] #{res.status_message}"
|
||||
end
|
||||
job.writer.add job.filename, res.body_io
|
||||
end
|
||||
job.success = true
|
||||
rescue e
|
||||
Logger.error e
|
||||
job.success = false
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -2,13 +2,12 @@ require "./config"
|
||||
require "./queue"
|
||||
require "./server"
|
||||
require "./main_fiber"
|
||||
require "./mangadex/*"
|
||||
require "./plugin/*"
|
||||
require "option_parser"
|
||||
require "clim"
|
||||
require "tallboy"
|
||||
|
||||
MANGO_VERSION = "0.20.0"
|
||||
MANGO_VERSION = "0.24.0"
|
||||
|
||||
# From http://www.network-science.de/ascii/
|
||||
BANNER = %{
|
||||
@@ -56,10 +55,11 @@ class CLI < Clim
|
||||
Config.load(opts.config).set_current
|
||||
|
||||
# Initialize main components
|
||||
LRUCache.init
|
||||
Storage.default
|
||||
Queue.default
|
||||
Library.load_instance
|
||||
Library.default
|
||||
MangaDex::Downloader.default
|
||||
Plugin::Downloader.default
|
||||
|
||||
spawn do
|
||||
|
||||
@@ -23,11 +23,6 @@ class Plugin
|
||||
job
|
||||
end
|
||||
|
||||
private def process_filename(str)
|
||||
return "_" if str == ".."
|
||||
str.gsub "/", "_"
|
||||
end
|
||||
|
||||
private def download(job : Queue::Job)
|
||||
@downloading = true
|
||||
@queue.set_status Queue::JobStatus::Downloading, job
|
||||
@@ -42,8 +37,8 @@ class Plugin
|
||||
|
||||
pages = info["pages"].as_i
|
||||
|
||||
manga_title = process_filename job.manga_title
|
||||
chapter_title = process_filename info["title"].as_s
|
||||
manga_title = sanitize_filename job.manga_title
|
||||
chapter_title = sanitize_filename info["title"].as_s
|
||||
|
||||
@queue.set_pages pages, job
|
||||
lib_dir = @library_path
|
||||
@@ -68,7 +63,7 @@ class Plugin
|
||||
while page = plugin.next_page
|
||||
break unless @queue.exists? job
|
||||
|
||||
fn = process_filename page["filename"].as_s
|
||||
fn = sanitize_filename page["filename"].as_s
|
||||
url = page["url"].as_s
|
||||
headers = HTTP::Headers.new
|
||||
|
||||
|
||||
@@ -117,7 +117,7 @@ class Plugin
|
||||
def initialize(id : String)
|
||||
Plugin.build_info_ary
|
||||
|
||||
@info = @@info_ary.find { |i| i.id == id }
|
||||
@info = @@info_ary.find &.id.== id
|
||||
if @info.nil?
|
||||
raise Error.new "Plugin with ID #{id} not found"
|
||||
end
|
||||
|
||||
@@ -303,12 +303,12 @@ class Queue
|
||||
end
|
||||
|
||||
def pause
|
||||
@downloaders.each { |d| d.stopped = true }
|
||||
@downloaders.each &.stopped=(true)
|
||||
@paused = true
|
||||
end
|
||||
|
||||
def resume
|
||||
@downloaders.each { |d| d.stopped = false }
|
||||
@downloaders.each &.stopped=(false)
|
||||
@paused = false
|
||||
end
|
||||
|
||||
|
||||
@@ -35,15 +35,15 @@ module Rename
|
||||
|
||||
class Group < Base(Pattern | String)
|
||||
def render(hash : VHash)
|
||||
return "" if @ary.select(&.is_a? Pattern)
|
||||
return "" if @ary.select(Pattern)
|
||||
.any? &.as(Pattern).render(hash).empty?
|
||||
@ary.map do |e|
|
||||
@ary.join do |e|
|
||||
if e.is_a? Pattern
|
||||
e.render hash
|
||||
else
|
||||
e
|
||||
end
|
||||
end.join
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -129,13 +129,13 @@ module Rename
|
||||
end
|
||||
|
||||
def render(hash : VHash)
|
||||
str = @ary.map do |e|
|
||||
str = @ary.join do |e|
|
||||
if e.is_a? String
|
||||
e
|
||||
else
|
||||
e.render hash
|
||||
end
|
||||
end.join.strip
|
||||
end.strip
|
||||
post_process str
|
||||
end
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
require "../mangadex/*"
|
||||
require "../upload"
|
||||
require "koa"
|
||||
require "digest"
|
||||
|
||||
struct APIRouter
|
||||
@@api_json : String?
|
||||
@@ -10,7 +10,7 @@ struct APIRouter
|
||||
macro s(fields)
|
||||
{
|
||||
{% for field in fields %}
|
||||
{{field}} => "string",
|
||||
{{field}} => String,
|
||||
{% end %}
|
||||
}
|
||||
end
|
||||
@@ -23,7 +23,7 @@ struct APIRouter
|
||||
|
||||
# Authentication
|
||||
|
||||
All endpoints require authentication. After logging in, your session ID would be stored as a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate the API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
|
||||
All endpoints except `/api/login` require authentication. After logging in, your session ID would be stored as a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate the API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
|
||||
|
||||
# Terminologies
|
||||
|
||||
@@ -33,165 +33,66 @@ struct APIRouter
|
||||
MD
|
||||
|
||||
Koa.cookie_auth "cookie", "mango-sessid-#{Config.current.port}"
|
||||
Koa.global_tag "admin", desc: <<-MD
|
||||
Koa.define_tag "admin", desc: <<-MD
|
||||
These are the admin endpoints only accessible for users with admin access. A non-admin user will get HTTP 403 when calling the endpoints.
|
||||
MD
|
||||
|
||||
Koa.binary "binary", desc: "A binary file"
|
||||
Koa.array "entryAry", "$entry", desc: "An array of entries"
|
||||
Koa.array "titleAry", "$title", desc: "An array of titles"
|
||||
Koa.array "strAry", "string", desc: "An array of strings"
|
||||
Koa.schema "entry", {
|
||||
"pages" => Int32,
|
||||
"mtime" => Int64,
|
||||
}.merge(s %w(zip_path title size id title_id display_name cover_url)),
|
||||
desc: "An entry in a book"
|
||||
|
||||
entry_schema = {
|
||||
"pages" => "integer",
|
||||
"mtime" => "integer",
|
||||
}.merge s %w(zip_path title size id title_id display_name cover_url)
|
||||
Koa.object "entry", entry_schema, desc: "An entry in a book"
|
||||
|
||||
title_schema = {
|
||||
"mtime" => "integer",
|
||||
"entries" => "$entryAry",
|
||||
"titles" => "$titleAry",
|
||||
"parents" => "$strAry",
|
||||
}.merge s %w(dir title id display_name cover_url)
|
||||
Koa.object "title", title_schema,
|
||||
Koa.schema "title", {
|
||||
"mtime" => Int64,
|
||||
"entries" => ["entry"],
|
||||
"titles" => ["title"],
|
||||
"parents" => [String],
|
||||
}.merge(s %w(dir title id display_name cover_url)),
|
||||
desc: "A manga title (a collection of entries and sub-titles)"
|
||||
|
||||
Koa.object "library", {
|
||||
"dir" => "string",
|
||||
"titles" => "$titleAry",
|
||||
}, desc: "A library containing a list of top-level titles"
|
||||
|
||||
Koa.object "scanResult", {
|
||||
"milliseconds" => "integer",
|
||||
"titles" => "integer",
|
||||
Koa.schema "result", {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
}
|
||||
|
||||
Koa.object "progressResult", {
|
||||
"progress" => "number",
|
||||
Koa.describe "Authenticates a user", <<-MD
|
||||
After successful login, the cookie `mango-sessid-#{Config.current.port}` will contain a valid session ID that can be used for subsequent requests
|
||||
MD
|
||||
Koa.body schema: {
|
||||
"username" => String,
|
||||
"password" => String,
|
||||
}
|
||||
Koa.tag "users"
|
||||
post "/api/login" do |env|
|
||||
begin
|
||||
username = env.params.json["username"].as String
|
||||
password = env.params.json["password"].as String
|
||||
token = Storage.default.verify_user(username, password).not_nil!
|
||||
|
||||
Koa.object "result", {
|
||||
"success" => "boolean",
|
||||
"error" => "string?",
|
||||
}
|
||||
|
||||
mc_schema = {
|
||||
"groups" => "object",
|
||||
}.merge s %w(id title volume chapter language full_title time manga_title manga_id)
|
||||
Koa.object "mangadexChapter", mc_schema, desc: "A MangaDex chapter"
|
||||
|
||||
Koa.array "chapterAry", "$mangadexChapter"
|
||||
|
||||
mm_schema = {
|
||||
"chapers" => "$chapterAry",
|
||||
}.merge s %w(id title description author artist cover_url)
|
||||
Koa.object "mangadexManga", mm_schema, desc: "A MangaDex manga"
|
||||
|
||||
Koa.object "chaptersObj", {
|
||||
"chapters" => "$chapterAry",
|
||||
}
|
||||
|
||||
Koa.object "successFailCount", {
|
||||
"success" => "integer",
|
||||
"fail" => "integer",
|
||||
}
|
||||
|
||||
job_schema = {
|
||||
"pages" => "integer",
|
||||
"success_count" => "integer",
|
||||
"fail_count" => "integer",
|
||||
"time" => "integer",
|
||||
}.merge s %w(id manga_id title manga_title status_message status)
|
||||
Koa.object "job", job_schema, desc: "A download job in the queue"
|
||||
|
||||
Koa.array "jobAry", "$job"
|
||||
|
||||
Koa.object "jobs", {
|
||||
"success" => "boolean",
|
||||
"paused" => "boolean",
|
||||
"jobs" => "$jobAry",
|
||||
}
|
||||
|
||||
Koa.object "binaryUpload", {
|
||||
"file" => "$binary",
|
||||
}
|
||||
|
||||
Koa.object "pluginListBody", {
|
||||
"plugin" => "string",
|
||||
"query" => "string",
|
||||
}
|
||||
|
||||
Koa.object "pluginChapter", {
|
||||
"id" => "string",
|
||||
"title" => "string",
|
||||
}
|
||||
|
||||
Koa.array "pluginChapterAry", "$pluginChapter"
|
||||
|
||||
Koa.object "pluginList", {
|
||||
"success" => "boolean",
|
||||
"chapters" => "$pluginChapterAry?",
|
||||
"title" => "string?",
|
||||
"error" => "string?",
|
||||
}
|
||||
|
||||
Koa.object "pluginDownload", {
|
||||
"plugin" => "string",
|
||||
"title" => "string",
|
||||
"chapters" => "$pluginChapterAry",
|
||||
}
|
||||
|
||||
Koa.object "dimension", {
|
||||
"width" => "integer",
|
||||
"height" => "integer",
|
||||
}
|
||||
|
||||
Koa.array "dimensionAry", "$dimension"
|
||||
|
||||
Koa.object "dimensionResult", {
|
||||
"success" => "boolean",
|
||||
"dimensions" => "$dimensionAry?",
|
||||
"margin" => "number",
|
||||
"error" => "string?",
|
||||
}
|
||||
|
||||
Koa.object "ids", {
|
||||
"ids" => "$strAry",
|
||||
}
|
||||
|
||||
Koa.object "tagsResult", {
|
||||
"success" => "boolean",
|
||||
"tags" => "$strAry?",
|
||||
"error" => "string?",
|
||||
}
|
||||
|
||||
Koa.object "missing", {
|
||||
"path" => "string",
|
||||
"id" => "string",
|
||||
"signature" => "string",
|
||||
}
|
||||
|
||||
Koa.array "missingAry", "$missing"
|
||||
|
||||
Koa.object "missingResult", {
|
||||
"success" => "boolean",
|
||||
"error" => "string?",
|
||||
"entries" => "$missingAry?",
|
||||
"titles" => "$missingAry?",
|
||||
}
|
||||
env.session.string "token", token
|
||||
"Authenticated"
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 403
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns a page in a manga entry"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.path "eid", desc: "Entry ID"
|
||||
Koa.path "page", type: "integer", desc: "The page number to return (starts from 1)"
|
||||
Koa.response 200, ref: "$binary", media_type: "image/*"
|
||||
Koa.path "page", schema: Int32, desc: "The page number to return (starts from 1)"
|
||||
Koa.response 200, schema: Bytes, media_type: "image/*"
|
||||
Koa.response 500, "Page not found or not readable"
|
||||
Koa.response 304, "Page not modified (only available when `If-None-Match` is set)"
|
||||
Koa.tag "reader"
|
||||
get "/api/page/:tid/:eid/:page" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
page = env.params.url["page"].to_i
|
||||
prev_e_tag = env.request.headers["If-None-Match"]?
|
||||
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
@@ -201,7 +102,15 @@ struct APIRouter
|
||||
raise "Failed to load page #{page} of " \
|
||||
"`#{title.title}/#{entry.title}`" if img.nil?
|
||||
|
||||
send_img env, img
|
||||
e_tag = Digest::SHA1.hexdigest img.data
|
||||
if prev_e_tag == e_tag
|
||||
env.response.status_code = 304
|
||||
""
|
||||
else
|
||||
env.response.headers["ETag"] = e_tag
|
||||
env.response.headers["Cache-Control"] = "public, max-age=86400"
|
||||
send_img env, img
|
||||
end
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
@@ -212,12 +121,15 @@ struct APIRouter
|
||||
Koa.describe "Returns the cover image of a manga entry"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.path "eid", desc: "Entry ID"
|
||||
Koa.response 200, ref: "$binary", media_type: "image/*"
|
||||
Koa.response 200, schema: Bytes, media_type: "image/*"
|
||||
Koa.response 304, "Page not modified (only available when `If-None-Match` is set)"
|
||||
Koa.response 500, "Page not found or not readable"
|
||||
Koa.tag "library"
|
||||
get "/api/cover/:tid/:eid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
prev_e_tag = env.request.headers["If-None-Match"]?
|
||||
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
@@ -228,7 +140,14 @@ struct APIRouter
|
||||
raise "Failed to get cover of `#{title.title}/#{entry.title}`" \
|
||||
if img.nil?
|
||||
|
||||
send_img env, img
|
||||
e_tag = Digest::SHA1.hexdigest img.data
|
||||
if prev_e_tag == e_tag
|
||||
env.response.status_code = 304
|
||||
""
|
||||
else
|
||||
env.response.headers["ETag"] = e_tag
|
||||
send_img env, img
|
||||
end
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
@@ -236,17 +155,39 @@ struct APIRouter
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the book with title `tid`"
|
||||
Koa.describe "Returns the book with title `tid`", <<-MD
|
||||
- Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
|
||||
- Supply the `depth` query parameter to control the depth of nested titles to return.
|
||||
- When `depth` is 1, returns the top-level titles and sub-titles/entries one level in them
|
||||
- When `depth` is 0, returns the top-level titles without their sub-titles/entries
|
||||
- When `depth` is N, returns the top-level titles and sub-titles/entries N levels in them
|
||||
- When `depth` is negative, returns the entire library
|
||||
MD
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.response 200, ref: "$title"
|
||||
Koa.query "slim"
|
||||
Koa.query "depth"
|
||||
Koa.query "sort", desc: "Sorting option for entries. Can be one of 'auto', 'title', 'progress', 'time_added' and 'time_modified'"
|
||||
Koa.query "ascend", desc: "Sorting direction for entries. Set to 0 for the descending order. Doesn't work without specifying 'sort'"
|
||||
Koa.response 200, schema: "title"
|
||||
Koa.response 404, "Title not found"
|
||||
Koa.tag "library"
|
||||
get "/api/book/:tid" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.new
|
||||
get_sort_opt
|
||||
|
||||
tid = env.params.url["tid"]
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
|
||||
send_json env, title.to_json
|
||||
slim = !env.params.query["slim"]?.nil?
|
||||
depth = env.params.query["depth"]?.try(&.to_i?) || -1
|
||||
|
||||
send_json env, title.build_json(slim: slim, depth: depth,
|
||||
sort_context: {username: username,
|
||||
opt: sort_opt})
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 404
|
||||
@@ -254,15 +195,34 @@ struct APIRouter
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the entire library with all titles and entries"
|
||||
Koa.response 200, ref: "$library"
|
||||
Koa.describe "Returns the entire library with all titles and entries", <<-MD
|
||||
- Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
|
||||
- Supply the `depth` query parameter to control the depth of nested titles to return.
|
||||
- When `depth` is 1, returns the requested title and sub-titles/entries one level in it
|
||||
- When `depth` is 0, returns the requested title without its sub-titles/entries
|
||||
- When `depth` is N, returns the requested title and sub-titles/entries N levels in it
|
||||
- When `depth` is negative, returns the requested title and all sub-titles/entries in it
|
||||
MD
|
||||
Koa.query "slim"
|
||||
Koa.query "depth"
|
||||
Koa.response 200, schema: {
|
||||
"dir" => String,
|
||||
"titles" => ["title"],
|
||||
}
|
||||
Koa.tag "library"
|
||||
get "/api/library" do |env|
|
||||
send_json env, Library.default.to_json
|
||||
slim = !env.params.query["slim"]?.nil?
|
||||
depth = env.params.query["depth"]?.try(&.to_i?) || -1
|
||||
|
||||
send_json env, Library.default.build_json(slim: slim, depth: depth)
|
||||
end
|
||||
|
||||
Koa.describe "Triggers a library scan"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$scanResult"
|
||||
Koa.tags ["admin", "library"]
|
||||
Koa.response 200, schema: {
|
||||
"milliseconds" => Float64,
|
||||
"titles" => Int32,
|
||||
}
|
||||
post "/api/admin/scan" do |env|
|
||||
start = Time.utc
|
||||
Library.default.scan
|
||||
@@ -274,8 +234,10 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Returns the thumbnail generation progress between 0 and 1"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$progressResult"
|
||||
Koa.tags ["admin", "library"]
|
||||
Koa.response 200, schema: {
|
||||
"progress" => Float64,
|
||||
}
|
||||
get "/api/admin/thumbnail_progress" do |env|
|
||||
send_json env, {
|
||||
"progress" => Library.default.thumbnail_generation_progress,
|
||||
@@ -283,7 +245,7 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Triggers a thumbnail generation"
|
||||
Koa.tag "admin"
|
||||
Koa.tags ["admin", "library"]
|
||||
post "/api/admin/generate_thumbnails" do |env|
|
||||
spawn do
|
||||
Library.default.generate_thumbnails
|
||||
@@ -291,8 +253,8 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Deletes a user with `username`"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.tags ["admin", "users"]
|
||||
Koa.response 200, schema: "result"
|
||||
delete "/api/admin/user/delete/:username" do |env|
|
||||
begin
|
||||
username = env.params.url["username"]
|
||||
@@ -319,7 +281,8 @@ struct APIRouter
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.query "eid", desc: "Entry ID", required: false
|
||||
Koa.path "page", desc: "The new page number indicating the progress"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tag "progress"
|
||||
put "/api/progress/:tid/:page" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
@@ -350,8 +313,11 @@ struct APIRouter
|
||||
Koa.describe "Updates the reading progress of multiple entries in a title"
|
||||
Koa.path "action", desc: "The action to perform. Can be either `read` or `unread`"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.body ref: "$ids", desc: "An array of entry IDs"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.body schema: {
|
||||
"ids" => [String],
|
||||
}, desc: "An array of entry IDs"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tag "progress"
|
||||
put "/api/bulk_progress/:action/:tid" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
@@ -377,11 +343,11 @@ struct APIRouter
|
||||
Koa.describe "Sets the display name of a title or an entry", <<-MD
|
||||
When `eid` is provided, apply the display name to the entry. Otherwise, apply the display name to the title identified by `tid`.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.tags ["admin", "library"]
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.query "eid", desc: "Entry ID", required: false
|
||||
Koa.path "name", desc: "The new display name"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.response 200, schema: "result"
|
||||
put "/api/admin/display_name/:tid/:name" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"])
|
||||
@@ -405,60 +371,12 @@ struct APIRouter
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns a MangaDex manga identified by `id`", <<-MD
|
||||
On error, returns a JSON that contains the error message in the `error` field.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.path "id", desc: "A MangaDex manga ID"
|
||||
Koa.response 200, ref: "$mangadexManga"
|
||||
get "/api/admin/mangadex/manga/:id" do |env|
|
||||
begin
|
||||
id = env.params.url["id"]
|
||||
api = MangaDex::API.default
|
||||
manga = api.get_manga id
|
||||
send_json env, manga.to_info_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {"error" => e.message}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Adds a list of MangaDex chapters to the download queue", <<-MD
|
||||
On error, returns a JSON that contains the error message in the `error` field.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.body ref: "$chaptersObj"
|
||||
Koa.response 200, ref: "$successFailCount"
|
||||
post "/api/admin/mangadex/download" do |env|
|
||||
begin
|
||||
chapters = env.params.json["chapters"].as(Array).map { |c| c.as_h }
|
||||
jobs = chapters.map { |chapter|
|
||||
Queue::Job.new(
|
||||
chapter["id"].as_s,
|
||||
chapter["manga_id"].as_s,
|
||||
chapter["full_title"].as_s,
|
||||
chapter["manga_title"].as_s,
|
||||
Queue::JobStatus::Pending,
|
||||
Time.unix chapter["time"].as_s.to_i
|
||||
)
|
||||
}
|
||||
inserted_count = Queue.default.push jobs
|
||||
send_json env, {
|
||||
"success": inserted_count,
|
||||
"fail": jobs.size - inserted_count,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {"error" => e.message}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
ws "/api/admin/mangadex/queue" do |socket, env|
|
||||
interval_raw = env.params.query["interval"]?
|
||||
interval = (interval_raw.to_i? if interval_raw) || 5
|
||||
loop do
|
||||
socket.send({
|
||||
"jobs" => Queue.default.get_all,
|
||||
"jobs" => Queue.default.get_all.reverse,
|
||||
"paused" => Queue.default.paused?,
|
||||
}.to_json)
|
||||
sleep interval.seconds
|
||||
@@ -468,17 +386,27 @@ struct APIRouter
|
||||
Koa.describe "Returns the current download queue", <<-MD
|
||||
On error, returns a JSON that contains the error message in the `error` field.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$jobs"
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"paused" => Bool?,
|
||||
"jobs?" => [{
|
||||
"pages" => Int32,
|
||||
"success_count" => Int32,
|
||||
"fail_count" => Int32,
|
||||
"time" => Int64,
|
||||
}.merge(s %w(id manga_id title manga_title status_message status))],
|
||||
}
|
||||
get "/api/admin/mangadex/queue" do |env|
|
||||
begin
|
||||
jobs = Queue.default.get_all
|
||||
send_json env, {
|
||||
"jobs" => jobs,
|
||||
"jobs" => Queue.default.get_all.reverse,
|
||||
"paused" => Queue.default.paused?,
|
||||
"success" => true,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -495,10 +423,10 @@ struct APIRouter
|
||||
|
||||
When `action` is set to `retry`, the behavior depends on `id`. If `id` is provided, restarts the job identified by the ID. Otherwise, retries all jobs in the `Error` or `MissingPages` status in the queue.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.path "action", desc: "The action to perform. It should be one of the followins: `delete`, `retry`, `pause` and `resume`."
|
||||
Koa.query "id", required: false, desc: "A job ID"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.response 200, schema: "result"
|
||||
post "/api/admin/mangadex/queue/:action" do |env|
|
||||
begin
|
||||
action = env.params.url["action"]
|
||||
@@ -526,6 +454,7 @@ struct APIRouter
|
||||
|
||||
send_json env, {"success" => true}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -547,8 +476,10 @@ struct APIRouter
|
||||
When `eid` is omitted, the new cover image will be applied to the title. Otherwise, applies the image to the specified entry.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.body type: "multipart/form-data", ref: "$binaryUpload"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.body media_type: "multipart/form-data", schema: {
|
||||
"file" => Bytes,
|
||||
}
|
||||
Koa.response 200, schema: "result"
|
||||
post "/api/admin/upload/:target" do |env|
|
||||
begin
|
||||
target = env.params.url["target"]
|
||||
@@ -596,6 +527,7 @@ struct APIRouter
|
||||
|
||||
raise "No part with name `file` found"
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -604,9 +536,18 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Lists the chapters in a title from a plugin"
|
||||
Koa.tag "admin"
|
||||
Koa.body ref: "$pluginListBody"
|
||||
Koa.response 200, ref: "$pluginList"
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.query "plugin", schema: String
|
||||
Koa.query "query", schema: String
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"chapters?" => [{
|
||||
"id" => String,
|
||||
"title" => String,
|
||||
}],
|
||||
"title" => String?,
|
||||
}
|
||||
get "/api/admin/plugin/list" do |env|
|
||||
begin
|
||||
query = env.params.query["query"].as String
|
||||
@@ -622,6 +563,7 @@ struct APIRouter
|
||||
"title" => title,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -630,9 +572,19 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Adds a list of chapters from a plugin to the download queue"
|
||||
Koa.tag "admin"
|
||||
Koa.body ref: "$pluginDownload"
|
||||
Koa.response 200, ref: "$successFailCount"
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.body schema: {
|
||||
"plugin" => String,
|
||||
"title" => String,
|
||||
"chapters" => [{
|
||||
"id" => String,
|
||||
"title" => String,
|
||||
}],
|
||||
}
|
||||
Koa.response 200, schema: {
|
||||
"success" => Int32,
|
||||
"fail" => Int32,
|
||||
}
|
||||
post "/api/admin/plugin/download" do |env|
|
||||
begin
|
||||
plugin = Plugin.new env.params.json["plugin"].as String
|
||||
@@ -655,6 +607,7 @@ struct APIRouter
|
||||
"fail": jobs.size - inserted_count,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -665,24 +618,43 @@ struct APIRouter
|
||||
Koa.describe "Returns the image dimensions of all pages in an entry"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.path "eid", desc: "An entry ID"
|
||||
Koa.response 200, ref: "$dimensionResult"
|
||||
Koa.tag "reader"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"dimensions?" => [{
|
||||
"width" => Int32,
|
||||
"height" => Int32,
|
||||
}],
|
||||
}
|
||||
Koa.response 304, "Not modified (only available when `If-None-Match` is set)"
|
||||
get "/api/dimensions/:tid/:eid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
prev_e_tag = env.request.headers["If-None-Match"]?
|
||||
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||
|
||||
sizes = entry.page_dimensions
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"dimensions" => sizes,
|
||||
"margin" => Config.current.page_margin,
|
||||
}.to_json
|
||||
file_hash = Digest::SHA1.hexdigest (entry.zip_path + entry.mtime.to_s)
|
||||
e_tag = "W/#{file_hash}"
|
||||
if e_tag == prev_e_tag
|
||||
env.response.status_code = 304
|
||||
""
|
||||
else
|
||||
sizes = entry.page_dimensions
|
||||
env.response.headers["ETag"] = e_tag
|
||||
env.response.headers["Cache-Control"] = "public, max-age=86400"
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"dimensions" => sizes,
|
||||
}.to_json
|
||||
end
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -693,8 +665,9 @@ struct APIRouter
|
||||
Koa.describe "Downloads an entry"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.path "eid", desc: "An entry ID"
|
||||
Koa.response 200, ref: "$binary"
|
||||
Koa.response 200, schema: Bytes
|
||||
Koa.response 404, "Entry not found"
|
||||
Koa.tags ["library", "reader"]
|
||||
get "/api/download/:tid/:eid" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
@@ -709,7 +682,12 @@ struct APIRouter
|
||||
|
||||
Koa.describe "Gets the tags of a title"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.response 200, ref: "$tagsResult"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"tags" => [String?],
|
||||
}
|
||||
Koa.tags ["library", "tags"]
|
||||
get "/api/tags/:tid" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
@@ -729,7 +707,12 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Returns all tags"
|
||||
Koa.response 200, ref: "$tagsResult"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"tags" => [String?],
|
||||
}
|
||||
Koa.tags ["library", "tags"]
|
||||
get "/api/tags" do |env|
|
||||
begin
|
||||
tags = Storage.default.list_tags
|
||||
@@ -748,8 +731,8 @@ struct APIRouter
|
||||
|
||||
Koa.describe "Adds a new tag to a title"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library", "tags"]
|
||||
put "/api/admin/tags/:tid/:tag" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
@@ -771,8 +754,8 @@ struct APIRouter
|
||||
|
||||
Koa.describe "Deletes a tag from a title"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library", "tags"]
|
||||
delete "/api/admin/tags/:tid/:tag" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
@@ -793,8 +776,16 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Lists all missing titles"
|
||||
Koa.response 200, ref: "$missingResult"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"titles?" => [{
|
||||
"path" => String,
|
||||
"id" => String,
|
||||
"signature" => String,
|
||||
}],
|
||||
}
|
||||
Koa.tags ["admin", "library"]
|
||||
get "/api/admin/titles/missing" do |env|
|
||||
begin
|
||||
send_json env, {
|
||||
@@ -803,6 +794,7 @@ struct APIRouter
|
||||
"titles" => Storage.default.missing_titles,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -811,8 +803,16 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Lists all missing entries"
|
||||
Koa.response 200, ref: "$missingResult"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"entries?" => [{
|
||||
"path" => String,
|
||||
"id" => String,
|
||||
"signature" => String,
|
||||
}],
|
||||
}
|
||||
Koa.tags ["admin", "library"]
|
||||
get "/api/admin/entries/missing" do |env|
|
||||
begin
|
||||
send_json env, {
|
||||
@@ -821,6 +821,7 @@ struct APIRouter
|
||||
"entries" => Storage.default.missing_entries,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -829,8 +830,8 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Deletes all missing titles"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/titles/missing" do |env|
|
||||
begin
|
||||
Storage.default.delete_missing_title
|
||||
@@ -839,6 +840,7 @@ struct APIRouter
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -847,8 +849,8 @@ struct APIRouter
|
||||
end
|
||||
|
||||
Koa.describe "Deletes all missing entries"
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/entries/missing" do |env|
|
||||
begin
|
||||
Storage.default.delete_missing_entry
|
||||
@@ -857,6 +859,7 @@ struct APIRouter
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -867,8 +870,8 @@ struct APIRouter
|
||||
Koa.describe "Deletes a missing title identified by `tid`", <<-MD
|
||||
Does nothing if the given `tid` is not found or if the title is not missing.
|
||||
MD
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/titles/missing/:tid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
@@ -878,6 +881,7 @@ struct APIRouter
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -888,8 +892,8 @@ struct APIRouter
|
||||
Koa.describe "Deletes a missing entry identified by `eid`", <<-MD
|
||||
Does nothing if the given `eid` is not found or if the entry is not missing.
|
||||
MD
|
||||
Koa.response 200, ref: "$result"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/entries/missing/:eid" do |env|
|
||||
begin
|
||||
eid = env.params.url["eid"]
|
||||
@@ -899,6 +903,7 @@ struct APIRouter
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
|
||||
@@ -30,7 +30,8 @@ struct MainRouter
|
||||
else
|
||||
redirect env, "/"
|
||||
end
|
||||
rescue
|
||||
rescue e
|
||||
Logger.error e
|
||||
redirect env, "/login"
|
||||
end
|
||||
end
|
||||
@@ -40,7 +41,7 @@ struct MainRouter
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.from_info_json Library.default.dir, username
|
||||
get_sort_opt
|
||||
get_and_save_sort_opt Library.default.dir
|
||||
|
||||
titles = Library.default.sorted_titles username, sort_opt
|
||||
percentage = titles.map &.load_percentage username
|
||||
@@ -58,12 +59,12 @@ struct MainRouter
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.from_info_json title.dir, username
|
||||
get_sort_opt
|
||||
get_and_save_sort_opt title.dir
|
||||
|
||||
entries = title.sorted_entries username, sort_opt
|
||||
|
||||
percentage = title.load_percentage_for_all_entries username, sort_opt
|
||||
title_percentage = title.titles.map &.load_percentage username
|
||||
|
||||
layout "title"
|
||||
rescue e
|
||||
Logger.error e
|
||||
@@ -71,11 +72,6 @@ struct MainRouter
|
||||
end
|
||||
end
|
||||
|
||||
get "/download" do |env|
|
||||
mangadex_base_url = Config.current.mangadex["base_url"]
|
||||
layout "download"
|
||||
end
|
||||
|
||||
get "/download/plugins" do |env|
|
||||
begin
|
||||
id = env.params.query["plugin"]?
|
||||
@@ -103,7 +99,7 @@ struct MainRouter
|
||||
recently_added = Library.default.get_recently_added_entries username
|
||||
start_reading = Library.default.get_start_reading_titles username
|
||||
titles = Library.default.titles
|
||||
new_user = !titles.any? { |t| t.load_percentage(username) > 0 }
|
||||
new_user = !titles.any? &.load_percentage(username).> 0
|
||||
empty_library = titles.size == 0
|
||||
layout "home"
|
||||
rescue e
|
||||
|
||||
@@ -30,6 +30,11 @@ struct ReaderRouter
|
||||
|
||||
title = (Library.default.get_title env.params.url["title"]).not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
|
||||
sort_opt = SortOptions.from_info_json title.dir, username
|
||||
get_sort_opt
|
||||
entries = title.sorted_entries username, sort_opt
|
||||
|
||||
page_idx = env.params.url["page"].to_i
|
||||
if page_idx > entry.pages || page_idx <= 0
|
||||
raise "Page #{page_idx} not found."
|
||||
@@ -37,10 +42,12 @@ struct ReaderRouter
|
||||
|
||||
exit_url = "#{base_url}book/#{title.id}"
|
||||
|
||||
next_entry_url = nil
|
||||
next_entry = entry.next_entry username
|
||||
unless next_entry.nil?
|
||||
next_entry_url = "#{base_url}reader/#{title.id}/#{next_entry.id}"
|
||||
next_entry_url = entry.next_entry(username).try do |e|
|
||||
"#{base_url}reader/#{title.id}/#{e.id}"
|
||||
end
|
||||
|
||||
previous_entry_url = entry.previous_entry(username).try do |e|
|
||||
"#{base_url}reader/#{title.id}/#{e.id}"
|
||||
end
|
||||
|
||||
render "src/views/reader.html.ecr"
|
||||
|
||||
@@ -49,6 +49,7 @@ class Server
|
||||
{% if flag?(:release) %}
|
||||
Kemal.config.env = "production"
|
||||
{% end %}
|
||||
Kemal.config.host_binding = Config.current.host
|
||||
Kemal.config.port = Config.current.port
|
||||
Kemal.run
|
||||
end
|
||||
|
||||
@@ -34,7 +34,7 @@ class Storage
|
||||
dir = File.dirname @path
|
||||
unless Dir.exists? dir
|
||||
Logger.info "The DB directory #{dir} does not exist. " \
|
||||
"Attepmting to create it"
|
||||
"Attempting to create it"
|
||||
Dir.mkdir_p dir
|
||||
end
|
||||
MainFiber.run do
|
||||
@@ -48,14 +48,6 @@ class Storage
|
||||
|
||||
user_count = db.query_one "select count(*) from users", as: Int32
|
||||
init_admin if init_user && user_count == 0
|
||||
|
||||
# Verifies that the default username in config is valid
|
||||
if Config.current.disable_login
|
||||
username = Config.current.default_username
|
||||
unless username_exists username
|
||||
raise "Default username #{username} does not exist"
|
||||
end
|
||||
end
|
||||
end
|
||||
unless @auto_close
|
||||
@db = DB.open "sqlite3://#{@path}"
|
||||
@@ -436,12 +428,21 @@ class Storage
|
||||
end
|
||||
end
|
||||
|
||||
def mark_unavailable
|
||||
# Mark titles and entries that no longer exist on the file system as
|
||||
# unavailable. By supplying `ids_candidates` and `titles_candidates`, it
|
||||
# only checks the existence of the candidate titles/entries to speed up
|
||||
# the process.
|
||||
def mark_unavailable(ids_candidates : Array(String)?,
|
||||
titles_candidates : Array(String)?)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
# Detect dangling entry IDs
|
||||
trash_ids = [] of String
|
||||
db.query "select path, id from ids where unavailable = 0" do |rs|
|
||||
query = "select path, id from ids where unavailable = 0"
|
||||
unless ids_candidates.nil?
|
||||
query += " and id in (#{ids_candidates.join "," { |i| "'#{i}'" }})"
|
||||
end
|
||||
db.query query do |rs|
|
||||
rs.each do
|
||||
path = rs.read String
|
||||
fullpath = Path.new(path).expand(Config.current.library_path).to_s
|
||||
@@ -453,11 +454,15 @@ class Storage
|
||||
Logger.debug "Marking #{trash_ids.size} entries as unavailable"
|
||||
end
|
||||
db.exec "update ids set unavailable = 1 where id in " \
|
||||
"(#{trash_ids.map { |i| "'#{i}'" }.join ","})"
|
||||
"(#{trash_ids.join "," { |i| "'#{i}'" }})"
|
||||
|
||||
# Detect dangling title IDs
|
||||
trash_titles = [] of String
|
||||
db.query "select path, id from titles where unavailable = 0" do |rs|
|
||||
query = "select path, id from titles where unavailable = 0"
|
||||
unless titles_candidates.nil?
|
||||
query += " and id in (#{titles_candidates.join "," { |i| "'#{i}'" }})"
|
||||
end
|
||||
db.query query do |rs|
|
||||
rs.each do
|
||||
path = rs.read String
|
||||
fullpath = Path.new(path).expand(Config.current.library_path).to_s
|
||||
@@ -469,7 +474,7 @@ class Storage
|
||||
Logger.debug "Marking #{trash_titles.size} titles as unavailable"
|
||||
end
|
||||
db.exec "update titles set unavailable = 1 where id in " \
|
||||
"(#{trash_titles.map { |i| "'#{i}'" }.join ","})"
|
||||
"(#{trash_titles.join "," { |i| "'#{i}'" }})"
|
||||
end
|
||||
end
|
||||
end
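A minimal sketch of the quoting pattern used in the queries above: String#join with a block turns the candidate IDs into a quoted, comma-separated SQL IN list (acceptable here because the IDs come from Mango's own database rather than from user input).

trash_ids = ["abc123", "def456"]
clause = trash_ids.join(",") { |i| "'#{i}'" }
puts "update ids set unavailable = 1 where id in (#{clause})"
# => update ids set unavailable = 1 where id in ('abc123','def456')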
|
||||
@@ -522,6 +527,37 @@ class Storage
|
||||
delete_missing "titles", id
|
||||
end
|
||||
|
||||
def save_md_token(username : String, token : String, expire : Time)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
count = db.query_one "select count(*) from md_account where " \
|
||||
"username = (?)", username, as: Int64
|
||||
if count == 0
|
||||
db.exec "insert into md_account values (?, ?, ?)", username, token,
|
||||
expire.to_unix
|
||||
else
|
||||
db.exec "update md_account set token = (?), expire = (?) " \
|
||||
"where username = (?)", token, expire.to_unix, username
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_md_token(username) : Tuple(String?, Time?)
|
||||
token = nil
|
||||
expires = nil
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query_one? "select token, expire from md_account where " \
|
||||
"username = (?)", username do |res|
|
||||
token = res.read String
|
||||
expires = Time.unix res.read Int64
|
||||
end
|
||||
end
|
||||
end
|
||||
{token, expires}
|
||||
end
|
||||
|
||||
def close
|
||||
MainFiber.run do
|
||||
unless @db.nil?
|
||||
|
||||
src/subscription.cr (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
require "db"
|
||||
require "json"
|
||||
|
||||
struct Subscription
|
||||
include DB::Serializable
|
||||
include JSON::Serializable
|
||||
|
||||
getter id : Int64 = 0
|
||||
getter username : String
|
||||
getter manga_id : Int64
|
||||
property language : String?
|
||||
property group_id : Int64?
|
||||
property min_volume : Int64?
|
||||
property max_volume : Int64?
|
||||
property min_chapter : Int64?
|
||||
property max_chapter : Int64?
|
||||
@[DB::Field(key: "last_checked")]
|
||||
@[JSON::Field(key: "last_checked")]
|
||||
@raw_last_checked : Int64
|
||||
@[DB::Field(key: "created_at")]
|
||||
@[JSON::Field(key: "created_at")]
|
||||
@raw_created_at : Int64
|
||||
|
||||
def last_checked : Time
|
||||
Time.unix @raw_last_checked
|
||||
end
|
||||
|
||||
def created_at : Time
|
||||
Time.unix @raw_created_at
|
||||
end
|
||||
|
||||
def initialize(@manga_id, @username)
|
||||
@raw_created_at = Time.utc.to_unix
|
||||
@raw_last_checked = Time.utc.to_unix
|
||||
end
|
||||
|
||||
private def in_range?(value : String, lowerbound : Int64?,
|
||||
upperbound : Int64?) : Bool
|
||||
lb = lowerbound.try &.to_f64
|
||||
ub = upperbound.try &.to_f64
|
||||
|
||||
return true if lb.nil? && ub.nil?
|
||||
|
||||
v = value.to_f64?
|
||||
return false unless v
|
||||
|
||||
if lb.nil?
|
||||
v <= ub.not_nil!
|
||||
elsif ub.nil?
|
||||
v >= lb.not_nil!
|
||||
else
|
||||
v >= lb.not_nil! && v <= ub.not_nil!
|
||||
end
|
||||
end
|
||||
|
||||
def match?(chapter : MangaDex::Chapter) : Bool
|
||||
if chapter.manga_id != manga_id ||
|
||||
(language && chapter.language != language) ||
|
||||
(group_id && !chapter.groups.map(&.id).includes? group_id)
|
||||
return false
|
||||
end
|
||||
|
||||
in_range?(chapter.volume, min_volume, max_volume) &&
|
||||
in_range?(chapter.chapter, min_chapter, max_chapter)
|
||||
end
|
||||
|
||||
def check_for_updates : Int32
|
||||
Logger.debug "Checking updates for subscription with ID #{id}"
|
||||
jobs = [] of Queue::Job
|
||||
get_client(username).user.updates_after last_checked do |chapter|
|
||||
next unless match? chapter
|
||||
jobs << chapter.to_job
|
||||
end
|
||||
Storage.default.update_subscription_last_checked id
|
||||
count = Queue.default.push jobs
|
||||
Logger.debug "#{count}/#{jobs.size} of updates added to queue"
|
||||
count
|
||||
rescue e
|
||||
Logger.error "Error occurred when checking updates for " \
|
||||
"subscription with ID #{id}. #{e}"
|
||||
0
|
||||
end
|
||||
end
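The range check above is compact; repeating it verbatim as a standalone method with a few worked calls makes the matching semantics concrete. A nil bound means "unbounded" on that side, and a non-numeric value (e.g. an unnumbered oneshot) never matches a bounded range.

def in_range?(value : String, lowerbound : Int64?, upperbound : Int64?) : Bool
  lb = lowerbound.try &.to_f64
  ub = upperbound.try &.to_f64

  return true if lb.nil? && ub.nil?

  v = value.to_f64?
  return false unless v

  if lb.nil?
    v <= ub.not_nil!
  elsif ub.nil?
    v >= lb.not_nil!
  else
    v >= lb.not_nil! && v <= ub.not_nil!
  end
end

puts in_range?("12.5", 10_i64, nil)     # => true  (chapter 12.5, minimum 10, no maximum)
puts in_range?("Oneshot", 1_i64, 3_i64) # => false (not numeric, so a bounded range never matches)
puts in_range?("4", nil, nil)           # => true  (no bounds configured)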
|
||||
@@ -73,7 +73,7 @@ class ChapterSorter
|
||||
.select do |key|
|
||||
keys[key].count >= str_ary.size / 2
|
||||
end
|
||||
.sort do |a_key, b_key|
|
||||
.sort! do |a_key, b_key|
|
||||
a = keys[a_key]
|
||||
b = keys[b_key]
|
||||
# Sort keys by the number of times they appear
|
||||
|
||||
@@ -11,7 +11,7 @@ end
|
||||
def split_by_alphanumeric(str)
|
||||
arr = [] of String
|
||||
str.scan(/([^\d\n\r]*)(\d*)([^\d\n\r]*)/) do |match|
|
||||
arr += match.captures.select { |s| s != "" }
|
||||
arr += match.captures.select &.!= ""
|
||||
end
|
||||
arr
|
||||
end
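For clarity, the helper above repeated verbatim with a worked example: the scan splits a name into alternating runs of non-digit and digit characters, which the chapter sorter then compares piecewise.

def split_by_alphanumeric(str)
  arr = [] of String
  str.scan(/([^\d\n\r]*)(\d*)([^\d\n\r]*)/) do |match|
    arr += match.captures.select &.!= ""
  end
  arr
end

puts split_by_alphanumeric("Vol.3 Ch.27.5")
# => ["Vol.", "3", " Ch.", "27", ".", "5"]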
|
||||
|
||||
@@ -48,4 +48,32 @@ class Dir
|
||||
end
|
||||
Digest::CRC32.checksum(signatures.sort.join).to_u64
|
||||
end
|
||||
|
||||
# Returns the contents signature of the directory at dirname, used to decide
# whether it needs to be rescanned.
# Rescan conditions:
# - a file is added, moved, removed, or renamed (including inside nested
#   directories)
|
||||
def self.contents_signature(dirname, cache = {} of String => String) : String
|
||||
return cache[dirname] if cache[dirname]?
|
||||
Fiber.yield
|
||||
signatures = [] of String
|
||||
self.open dirname do |dir|
|
||||
dir.entries.sort.each do |fn|
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dirname, fn
|
||||
if File.directory? path
|
||||
signatures << Dir.contents_signature path, cache
|
||||
else
|
||||
# Only add its signature value to `signatures` when it is a
|
||||
# supported file
|
||||
signatures << fn if is_supported_file fn
|
||||
end
|
||||
Fiber.yield
|
||||
end
|
||||
end
|
||||
hash = Digest::SHA1.hexdigest(signatures.join)
|
||||
cache[dirname] = hash
|
||||
hash
|
||||
end
|
||||
end
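A hedged usage sketch for the new method ("/path/to/library" is a placeholder): passing a shared cache lets repeated calls reuse already-computed directory signatures, so only the first call walks the file system.

cache = {} of String => String
first = Dir.contents_signature "/path/to/library", cache
second = Dir.contents_signature "/path/to/library", cache
puts first == second # => true (the second call is answered from the cache)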
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
IMGS_PER_PAGE = 5
|
||||
ENTRIES_IN_HOME_SECTIONS = 8
|
||||
UPLOAD_URL_PREFIX = "/uploads"
|
||||
STATIC_DIRS = ["/css", "/js", "/img", "/favicon.ico"]
|
||||
STATIC_DIRS = %w(/css /js /img /webfonts /favicon.ico /robots.txt)
|
||||
SUPPORTED_FILE_EXTNAMES = [".zip", ".cbz", ".rar", ".cbr"]
|
||||
|
||||
def random_str
|
||||
@@ -23,10 +23,23 @@ end
|
||||
|
||||
def register_mime_types
|
||||
{
|
||||
# Comic Archives
|
||||
".zip" => "application/zip",
|
||||
".rar" => "application/x-rar-compressed",
|
||||
".cbz" => "application/vnd.comicbook+zip",
|
||||
".cbr" => "application/vnd.comicbook-rar",
|
||||
|
||||
# Favicon
|
||||
".ico" => "image/x-icon",
|
||||
|
||||
# FontAwesome fonts
|
||||
".woff" => "font/woff",
|
||||
".woff2" => "font/woff2",
|
||||
|
||||
# Supported image formats. JPG, PNG, GIF, WebP, and SVG are already
|
||||
# defined by Crystal in `MIME.DEFAULT_TYPES`
|
||||
".apng" => "image/apng",
|
||||
".avif" => "image/avif",
|
||||
}.each do |k, v|
|
||||
MIME.register k, v
|
||||
end
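A self-contained sketch of what the registrations above achieve: once an extension is registered, MIME.from_extension resolves it to the new type.

require "mime"

MIME.register ".cbz", "application/vnd.comicbook+zip"
MIME.register ".avif", "image/avif"

puts MIME.from_extension ".cbz"  # => "application/vnd.comicbook+zip"
puts MIME.from_extension ".avif" # => "image/avif"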
|
||||
@@ -106,9 +119,28 @@ class String
|
||||
def components_similarity(other : String) : Float64
|
||||
s, l = [self, other]
|
||||
.map { |str| Path.new(str).parts }
|
||||
.sort_by &.size
|
||||
.sort_by! &.size
|
||||
|
||||
match = s.reverse.zip(l.reverse).count { |a, b| a == b }
|
||||
match / s.size
|
||||
end
|
||||
end
|
||||
|
||||
# Does the following:
|
||||
# - turns space-like characters into the normal whitespaces ( )
|
||||
# - strips and collapses spaces
|
||||
# - removes ASCII control characters
|
||||
# - replaces slashes (/) with underscores (_)
|
||||
# - removes leading dots (.)
|
||||
# - removes the following special characters: \:*?"<>|
|
||||
#
|
||||
# If the sanitized string is empty, returns a random string instead.
|
||||
def sanitize_filename(str : String) : String
|
||||
sanitized = str
|
||||
.gsub(/\s+/, " ")
|
||||
.strip
|
||||
.gsub(/\//, "_")
|
||||
.gsub(/^[\.\s]+/, "")
|
||||
.gsub(/[\177\000-\031\\:\*\?\"<>\|]/, "")
|
||||
sanitized.size > 0 ? sanitized : random_str
|
||||
end
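Worked examples of the rules above (they assume the helpers in this file are loaded; the outputs are traced by hand):

puts sanitize_filename "  My/Manga: Vol. 1?  " # => "My_Manga Vol. 1"
puts sanitize_filename "..."                   # => a random string, since nothing survives sanitization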
|
||||
|
||||
@@ -1,23 +1,23 @@
|
||||
# Web related helper functions/macros
|
||||
|
||||
# This macro defines `is_admin` when used
|
||||
macro check_admin_access
|
||||
def is_admin?(env) : Bool
|
||||
is_admin = false
|
||||
# The token (if exists) takes precedence over the default user option.
|
||||
# this is why we check the default username first before checking the
|
||||
# token.
|
||||
if Config.current.disable_login
|
||||
is_admin = Storage.default.
|
||||
username_is_admin Config.current.default_username
|
||||
if !Config.current.auth_proxy_header_name.empty? ||
|
||||
Config.current.disable_login
|
||||
is_admin = Storage.default.username_is_admin get_username env
|
||||
end
|
||||
|
||||
# The token (if exists) takes precedence over other authentication methods.
|
||||
if token = env.session.string? "token"
|
||||
is_admin = Storage.default.verify_admin token
|
||||
end
|
||||
|
||||
is_admin
|
||||
end
|
||||
|
||||
macro layout(name)
|
||||
base_url = Config.current.base_url
|
||||
check_admin_access
|
||||
is_admin = is_admin? env
|
||||
begin
|
||||
page = {{name}}
|
||||
render "src/views/#{{{name}}}.html.ecr", "src/views/layout.html.ecr"
|
||||
@@ -32,7 +32,7 @@ end
|
||||
macro send_error_page(msg)
|
||||
message = {{msg}}
|
||||
base_url = Config.current.base_url
|
||||
check_admin_access
|
||||
is_admin = is_admin? env
|
||||
page = "Error"
|
||||
html = render "src/views/message.html.ecr", "src/views/layout.html.ecr"
|
||||
send_file env, html.to_slice, "text/html"
|
||||
@@ -49,6 +49,8 @@ macro get_username(env)
|
||||
rescue e
|
||||
if Config.current.disable_login
|
||||
Config.current.default_username
|
||||
elsif (header = Config.current.auth_proxy_header_name) && !header.empty?
|
||||
env.request.headers[header]
|
||||
else
|
||||
raise e
|
||||
end
|
||||
@@ -70,7 +72,7 @@ def redirect(env, path)
|
||||
end
|
||||
|
||||
def hash_to_query(hash)
|
||||
hash.map { |k, v| "#{k}=#{v}" }.join("&")
|
||||
hash.join "&" { |k, v| "#{k}=#{v}" }
|
||||
end
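A quick usage check for the rewritten helper above (note that neither keys nor values are URL-escaped):

puts hash_to_query({"sort" => "title", "ascend" => "0"})
# => sort=title&ascend=0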
|
||||
|
||||
def request_path_startswith(env, ary)
|
||||
@@ -105,6 +107,26 @@ macro get_sort_opt
|
||||
end
|
||||
end
|
||||
|
||||
macro get_and_save_sort_opt(dir)
|
||||
sort_method = env.params.query["sort"]?
|
||||
|
||||
if sort_method
|
||||
is_ascending = true
|
||||
|
||||
ascend = env.params.query["ascend"]?
|
||||
if ascend && ascend.to_i? == 0
|
||||
is_ascending = false
|
||||
end
|
||||
|
||||
sort_opt = SortOptions.new sort_method, is_ascending
|
||||
|
||||
TitleInfo.new {{dir}} do |info|
|
||||
info.sort_by[username] = sort_opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
module HTTP
|
||||
class Client
|
||||
private def self.exec(uri : URI, tls : TLSContext = nil)
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jQuery.dotdotdot/4.0.11/dotdotdot.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/protonet-jquery.inview/1.1.2/jquery.inview.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jQuery.dotdotdot/4.0.11/dotdotdot.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/protonet-jquery.inview/1.1.2/jquery.inview.min.js"></script>
|
||||
<script src="<%= base_url %>js/dots.js"></script>
|
||||
@@ -4,13 +4,10 @@
|
||||
<title>Mango - <%= page.split("-").map(&.capitalize).join(" ") %></title>
|
||||
<meta name="description" content="Mango - Manga Server and Web Reader">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="stylesheet" href="<%= base_url %>css/uikit.css" />
|
||||
<link rel="stylesheet" href="<%= base_url %>css/mango.css" />
|
||||
<link rel="icon" href="<%= base_url %>favicon.ico">
|
||||
|
||||
<script src="https://polyfill.io/v3/polyfill.min.js?features=matchMedia%2Cdefault&flags=gated"></script>
|
||||
<script defer src="<%= base_url %>js/fontawesome.min.js"></script>
|
||||
<script defer src="<%= base_url %>js/solid.min.js"></script>
|
||||
<script src="https://polyfill.io/v3/polyfill.min.js?features=MutationObserver%2Cdefault%2CmatchMedia&flats=gated"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
|
||||
<script type="module" src="https://cdn.jsdelivr.net/gh/alpinejs/alpine@v2.8.0/dist/alpine.min.js"></script>
|
||||
<script nomodule src="https://cdn.jsdelivr.net/gh/alpinejs/alpine@v2.8.0/dist/alpine-ie11.min.js" defer></script>
|
||||
|
||||
src/views/components/jquery-ui.html.ecr (new file, 1 line)
@@ -0,0 +1 @@
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
|
||||
src/views/components/moment.html.ecr (new file, 1 line)
@@ -0,0 +1 @@
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
src/views/components/uikit.html.ecr (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
<script src="https://cdn.jsdelivr.net/npm/uikit@3.5.9/dist/js/uikit.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/uikit@3.5.9/dist/js/uikit-icons.min.js"></script>
|
||||
@@ -25,14 +25,14 @@
|
||||
<td x-text="job.title"></td>
|
||||
</template>
|
||||
<template x-if="!job.plugin_id">
|
||||
<td><a :href="`${'<%= mangadex_base_url %>'.replace(/\/$/, '')}/chapter/${job.id}`" x-text="job.title"></td>
|
||||
<td><a :href="`<%= mangadex_base_url %>/chapter/${job.id}`" x-text="job.title"></td>
|
||||
</template>
|
||||
|
||||
<template x-if="job.plugin_id">
|
||||
<td x-text="job.manga_title"></td>
|
||||
</template>
|
||||
<template x-if="!job.plugin_id">
|
||||
<td><a :href="`${'<%= mangadex_base_url %>'.replace(/\/$/, '')}/manga/${job.manga_id}`" x-text="job.manga_title"></td>
|
||||
<td><a :href="`<%= mangadex_base_url %>/manga/${job.manga_id}`" x-text="job.manga_title"></td>
|
||||
</template>
|
||||
|
||||
<td x-text="`${job.success_count}/${job.pages}`"></td>
|
||||
@@ -49,11 +49,10 @@
|
||||
</td>
|
||||
|
||||
<td x-text="`${job.plugin_id || ''}`"></td>
|
||||
|
||||
<td>
|
||||
<a @click="jobAction('delete', $event)" uk-icon="trash"></a>
|
||||
<a @click="jobAction('delete', $event)" uk-icon="trash" uk-tooltip="Delete"></a>
|
||||
<template x-if="job.status_message.length > 0">
|
||||
<a @click="jobAction('retry', $event)" uk-icon="refresh"></a>
|
||||
<a @click="jobAction('retry', $event)" uk-icon="refresh" uk-tooltip="Retry"></a>
|
||||
</template>
|
||||
</td>
|
||||
</tr>
|
||||
@@ -61,9 +60,10 @@
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
<%= render_component "moment" %>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/download-manager.js"></script>
|
||||
<% end %>
|
||||
|
||||
@@ -1,83 +1,162 @@
|
||||
<h2 class=uk-title>Download from MangaDex</h2>
|
||||
<div class="uk-grid-small" uk-grid>
|
||||
<div class="uk-width-3-4">
|
||||
<input id="search-input" class="uk-input" type="text" placeholder="MangaDex manga ID or URL">
|
||||
<div x-data="downloadComponent()" x-init="init()">
|
||||
<div class="uk-grid-small" uk-grid style="margin-bottom:40px;">
|
||||
<div class="uk-width-expand">
|
||||
<input class="uk-input" type="text" :placeholder="searchAvailable ? 'Search MangaDex or enter a manga ID/URL' : 'MangaDex manga ID or URL'" x-model="searchInput" @keydown.enter.debounce="search()">
|
||||
</div>
|
||||
<div class="uk-width-auto">
|
||||
<div uk-spinner class="uk-align-center" x-show="loading" x-cloak></div>
|
||||
<button class="uk-button uk-button-default" x-show="!loading" @click="search()">Search</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-width-1-4">
|
||||
<div id="spinner" uk-spinner class="uk-align-center" hidden></div>
|
||||
<button id="search-btn" class="uk-button uk-button-default" onclick="search()">Search</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class"uk-grid-small" uk-grid hidden id="manga-details">
|
||||
<div class="uk-width-1-4@s">
|
||||
<img id="cover">
|
||||
</div>
|
||||
<div class="uk-width-1-4@s">
|
||||
<p id="title"></p>
|
||||
<p id="artist"></p>
|
||||
<p id="author"></p>
|
||||
</div>
|
||||
<div id="filter-form" class="uk-form-stacked uk-width-1-2@s" hidden>
|
||||
<p class="uk-text-lead uk-margin-remove-bottom">Filter Chapters</p>
|
||||
<p class="uk-text-meta uk-margin-remove-top" id="count-text"></p>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="lang-select">Language</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" id="lang-select">
|
||||
</select>
|
||||
|
||||
<template x-if="mangaAry">
|
||||
<div>
|
||||
<p x-show="mangaAry.length === 0">No matching manga found.</p>
|
||||
|
||||
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
|
||||
<template x-for="manga in mangaAry" :key="manga.id">
|
||||
<div class="item" :data-id="manga.id" @click="chooseManga(manga)">
|
||||
<div class="uk-card uk-card-default">
|
||||
<div class="uk-card-media-top uk-inline">
|
||||
<img uk-img :data-src="manga.mainCover">
|
||||
</div>
|
||||
<div class="uk-card-body">
|
||||
<h3 class="uk-card-title break-word uk-margin-remove-bottom free-height" x-text="manga.title"></h3>
|
||||
<p class="uk-text-meta" x-text="`ID: ${manga.id}`"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="group-select">Group</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" id="group-select">
|
||||
</select>
|
||||
</template>
|
||||
|
||||
<div x-show="data && data.chapters" x-cloak>
|
||||
<div class"uk-grid-small" uk-grid>
|
||||
<div class="uk-width-1-4@s">
|
||||
<img :src="data.mainCover">
|
||||
</div>
|
||||
<div class="uk-width-1-4@s">
|
||||
<p>Title: <a :href="`<%= mangadex_base_url %>/manga/${data.id}`" x-text="data.title"></a></p>
|
||||
<p x-text="`Artist: ${data.artist}`"></p>
|
||||
<p x-text="`Author: ${data.author}`"></p>
|
||||
</div>
|
||||
<div class="uk-form-stacked uk-width-1-2@s" id="filters">
|
||||
<p class="uk-text-lead uk-margin-remove-bottom">Filter Chapters</p>
|
||||
<p class="uk-text-meta uk-margin-remove-top" x-text="`${chapters.length} chapters found`"></p>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Language</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" x-model="langChoice" @change="filtersUpdated()">
|
||||
<template x-for="lang in languages" :key="lang">
|
||||
<option x-text="lang"></option>
|
||||
</template>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Group</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" x-model="groupChoice" @change="filtersUpdated()">
|
||||
<template x-for="group in groups" :key="group">
|
||||
<option x-text="group"></option>
|
||||
</template>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Volume</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="volumeRange" @keydown.enter="filtersUpdated()">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Chapter</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="chapterRange" @keydown.enter="filtersUpdated()">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="volume-range">Volume</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" id="volume-range" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty.">
|
||||
<div class="uk-margin">
|
||||
<button class="uk-button uk-button-default" @click="selectAll()">Select All</button>
|
||||
<button class="uk-button uk-button-default" @click="clearSelection()">Clear Selections</button>
|
||||
<button class="uk-button uk-button-primary" @click="download()" x-show="!addingToDownload">Download Selected</button>
|
||||
<div uk-spinner class="uk-margin-left" x-show="addingToDownload"></div>
|
||||
</div>
|
||||
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="chapter-range">Chapter</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" id="chapter-range" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty.">
|
||||
<p x-text="`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters. Please use the filter options above to narrow down your search.`" x-show="chapters.length > chaptersLimit"></p>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto" x-show="chapters.length <= chaptersLimit">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>Title</th>
|
||||
<th>Language</th>
|
||||
<th>Group</th>
|
||||
<th>Volume</th>
|
||||
<th>Chapter</th>
|
||||
<th>Timestamp</th>
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
<template x-if="chapters.length <= chaptersLimit">
|
||||
<tbody id="selectable">
|
||||
<template x-for="chp in chapters" :key="chp">
|
||||
<tr class="ui-widget-content">
|
||||
<td><a :href="`<%= mangadex_base_url %>/chapter/${chp.id}`" x-text="chp.id"></a></td>
|
||||
<td x-text="chp.title"></td>
|
||||
<td x-text="chp.language"></td>
|
||||
<td>
|
||||
<template x-for="grp in Object.entries(chp.groups)">
|
||||
<div>
|
||||
<a :href="`<%= mangadex_base_url %>/group/${grp[1]}`" x-text="grp[0]"></a>
|
||||
</div>
|
||||
</template>
|
||||
</td>
|
||||
<td x-text="chp.volume"></td>
|
||||
<td x-text="chp.chapter"></td>
|
||||
<td x-text="`${moment.unix(chp.timestamp).fromNow()}`"></td>
|
||||
</tr>
|
||||
</template>
|
||||
</tbody>
|
||||
</template>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div id="modal" class="uk-flex-top" uk-modal="container: false">
|
||||
<div class="uk-modal-dialog uk-margin-auto-vertical">
|
||||
<button class="uk-modal-close-default" type="button" uk-close></button>
|
||||
<div class="uk-modal-header">
|
||||
<h3 class="uk-modal-title break-word" x-text="candidateManga.title"></h3>
|
||||
</div>
|
||||
<div class="uk-modal-body">
|
||||
<div class="uk-grid">
|
||||
<div class="uk-width-1-3@s">
|
||||
<img uk-img data-width data-height :src="candidateManga.mainCover" style="width:100%;margin-bottom:10px;">
|
||||
<a :href="`<%= mangadex_base_url %>/manga/${candidateManga.id}`" x-text="`ID: ${candidateManga.id}`" class="uk-link-muted"></a>
|
||||
</div>
|
||||
<div class="uk-width-2-3@s" uk-overflow-auto>
|
||||
<p x-text="candidateManga.description"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-modal-footer">
|
||||
<button class="uk-button uk-button-primary" type="button" @click="confirmManga(candidateManga.id)">Choose</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="selection-controls" class="uk-margin" hidden>
|
||||
<div class="uk-margin">
|
||||
<button class="uk-button uk-button-default" onclick="selectAll()">Select All</button>
|
||||
<button class="uk-button uk-button-default" onclick="unselect()">Clear Selections</button>
|
||||
<button class="uk-button uk-button-primary" id="download-btn" onclick="download()">Download Selected</button>
|
||||
<div id="download-spinner" uk-spinner class="uk-margin-left" hidden></div>
|
||||
</div>
|
||||
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
|
||||
</div>
|
||||
<p id="filter-notification" hidden></p>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto" hidden>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>Title</th>
|
||||
<th>Language</th>
|
||||
<th>Group</th>
|
||||
<th>Volume</th>
|
||||
<th>Chapter</th>
|
||||
<th>Timestamp</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script>
|
||||
var baseURL = "<%= mangadex_base_url %>".replace(/\/$/, "");
|
||||
</script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
|
||||
<%= render_component "moment" %>
|
||||
<%= render_component "jquery-ui" %>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/download.js"></script>
|
||||
<% end %>
|
||||
|
||||
@@ -77,7 +77,7 @@
|
||||
<%- end -%>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<%= render_component "dots-scripts" %>
|
||||
<%= render_component "dots" %>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/title.js"></script>
|
||||
<% end %>
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
<li class="uk-parent">
|
||||
<a href="#">Download</a>
|
||||
<ul class="uk-nav-sub">
|
||||
<li><a href="<%= base_url %>download">MangaDex</a></li>
|
||||
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
|
||||
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
|
||||
</ul>
|
||||
@@ -49,7 +48,6 @@
|
||||
<div class="uk-navbar-dropdown">
|
||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
||||
<li class="uk-nav-header">Source</li>
|
||||
<li><a href="<%= base_url %>download">MangaDex</a></li>
|
||||
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
|
||||
<li class="uk-nav-divider"></li>
|
||||
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
|
||||
@@ -82,9 +80,7 @@
|
||||
setTheme();
|
||||
const base_url = "<%= base_url %>";
|
||||
</script>
|
||||
<script src="<%= base_url %>js/uikit.min.js"></script>
|
||||
<script src="<%= base_url %>js/uikit-icons.min.js"></script>
|
||||
|
||||
<%= render_component "uikit" %>
|
||||
<%= yield_content "script" %>
|
||||
</body>
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
</div>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<%= render_component "dots-scripts" %>
|
||||
<%= render_component "dots" %>
|
||||
<script src="<%= base_url %>js/search.js"></script>
|
||||
<script src="<%= base_url %>js/sort-items.js"></script>
|
||||
<% end %>
|
||||
|
||||
@@ -30,8 +30,7 @@
|
||||
<script>
|
||||
setTheme();
|
||||
</script>
|
||||
<script src="<%= base_url %>js/uikit.min.js"></script>
|
||||
<script src="<%= base_url %>js/uikit-icons.min.js"></script>
|
||||
<%= render_component "uikit" %>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
src/views/mangadex.html.ecr (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
<div x-data="component()" x-init="init()">
|
||||
<h2 class="uk-title">Connect to MangaDex</h2>
|
||||
<div class"uk-grid-small" uk-grid x-show="!loading" x-cloak>
|
||||
<div class="uk-width-1-2@s" x-show="!expires">
|
||||
<p>This step is optional but highly recommended if you are using the MangaDex downloader. Connecting to MangaDex allows you to:</p>
|
||||
<ul>
|
||||
<li>Search MangaDex by search terms in addition to manga IDs</li>
|
||||
<li>Automatically download new chapters when they are available (coming soon)</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="uk-width-1-2@s" x-show="expires">
|
||||
<p>
|
||||
<span x-show="!expired">You have logged in to MangaDex!</span>
|
||||
<span x-show="expired">You have logged in to MangaDex but the token has expired.</span>
|
||||
The expiration date of your token is <code x-text="moment.unix(expires).format('MMMM Do YYYY, HH:mm:ss')"></code>.
|
||||
<span x-show="!expired">If the integration is not working, you</span>
|
||||
<span x-show="expired">You</span>
|
||||
can log in again and the token will be updated.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="uk-width-1-2@s">
|
||||
<div class="uk-margin">
|
||||
<div class="uk-inline uk-width-1-1"><span class="uk-form-icon" uk-icon="icon:user"></span><input class="uk-input uk-form-large" type="text" x-model="username" @keydown.enter.debounce="login()"></div>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<div class="uk-inline uk-width-1-1"><span class="uk-form-icon" uk-icon="icon:lock"></span><input class="uk-input uk-form-large" type="password" x-model="password" @keydown.enter.debounce="login()"></div>
|
||||
</div>
|
||||
<div class="uk-margin"><button class="uk-button uk-button-primary uk-button-large uk-width-1-1" @click="login()" :disabled="loggingIn">Login to MangaDex</button></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<%= render_component "moment" %>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/mangadex.js"></script>
|
||||
<% end %>
|
||||
@@ -56,8 +56,10 @@
|
||||
<div id="download-spinner" uk-spinner class="uk-margin-left" hidden></div>
|
||||
</div>
|
||||
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto tablesorter">
|
||||
</table>
|
||||
<div class="uk-overflow-auto">
|
||||
<table class="uk-table uk-table-striped tablesorter">
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<% end %>
|
||||
|
||||
@@ -68,7 +70,7 @@
|
||||
var pid = "<%= plugin.not_nil!.info.id %>";
|
||||
</script>
|
||||
<% end %>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
|
||||
<%= render_component "jquery-ui" %>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.tablesorter/2.31.3/js/jquery.tablesorter.combined.min.js"></script>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/plugin-download.js"></script>
|
||||
|
||||
@@ -21,22 +21,22 @@
|
||||
<div
|
||||
:class="{'uk-container': true, 'uk-container-small': mode === 'continuous', 'uk-container-expand': mode !== 'continuous'}">
|
||||
<div x-show="!loading && mode === 'continuous'" x-cloak>
|
||||
<template x-for="item in items">
|
||||
<template x-if="!loading && mode === 'continuous'" x-for="item in items">
|
||||
<img
|
||||
uk-img
|
||||
class="uk-align-center"
|
||||
:style="item.style"
|
||||
:class="{'uk-align-center': true, 'spine': item.width < 50}"
|
||||
:data-src="item.url"
|
||||
:width="item.width"
|
||||
:height="item.height"
|
||||
:id="item.id"
|
||||
:style="`margin-top:${margin}px; margin-bottom:${margin}px`"
|
||||
@click="showControl($event)"
|
||||
/>
|
||||
</template>
|
||||
<%- if next_entry_url -%>
|
||||
<button id="next-btn" class="uk-align-center uk-button uk-button-primary" @click="nextEntry('<%= next_entry_url %>')">Next Entry</button>
|
||||
<%- else -%>
|
||||
<button id="next-btn" class="uk-align-center uk-button uk-button-primary" @click="exitReader('<%= exit_url %>', true)">Exit Reader</button>
|
||||
<button id="next-btn" class="uk-align-center uk-button uk-button-primary" @click="exitReader('<%= exit_url %>')">Exit Reader</button>
|
||||
<%- end -%>
|
||||
</div>
|
||||
|
||||
@@ -50,6 +50,9 @@
|
||||
width:${mode === 'width' ? '100vw' : 'auto'};
|
||||
height:${mode === 'height' ? '100vh' : 'auto'};
|
||||
margin-bottom:0;
|
||||
max-width:100%;
|
||||
max-height:100%;
|
||||
object-fit: contain;
|
||||
`" />
|
||||
|
||||
<div style="position:absolute;z-index:1; top:0;left:0; width:30%;height:100%;" @click="flipPage(false)"></div>
|
||||
@@ -68,18 +71,19 @@
|
||||
</div>
|
||||
<div class="uk-modal-body">
|
||||
<div class="uk-margin">
|
||||
<p id="progress-label"></p>
|
||||
<p x-text="`Progress: ${selectedIndex}/${items.length} (${(selectedIndex/items.length * 100).toFixed(1)}%)`"></p>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="page-select">Jump to page</label>
|
||||
<label class="uk-form-label" for="page-select">Jump to Page</label>
|
||||
<div class="uk-form-controls">
|
||||
<select id="page-select" class="uk-select" @change="pageChanged()">
|
||||
<select id="page-select" class="uk-select" @change="pageChanged()" x-model="selectedIndex">
|
||||
<%- (1..entry.pages).each do |p| -%>
|
||||
<option value="<%= p %>"><%= p %></option>
|
||||
<%- end -%>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="mode-select">Mode</label>
|
||||
<div class="uk-form-controls">
|
||||
@@ -89,9 +93,53 @@
</select>
</div>
</div>

<div class="uk-margin" x-show="mode === 'continuous'">
<label class="uk-form-label" for="margin-range" x-text="`Page Margin: ${margin}px`"></label>
<div class="uk-form-controls">
<input id="margin-range" class="uk-range" type="range" min="0" max="50" step="5" x-model="margin" @change="marginChanged()">
</div>
</div>

<div class="uk-margin uk-form-horizontal" x-show="mode !== 'continuous'">
<label class="uk-form-label" for="enable-flip-animation">Enable Flip Animation</label>
<div class="uk-form-controls">
<input id="enable-flip-animation" class="uk-checkbox" type="checkbox" x-model="enableFlipAnimation" @change="enableFlipAnimationChanged()">
</div>
</div>
<div class="uk-margin uk-form-horizontal" x-show="mode !== 'continuous'">
<label class="uk-form-label" for="preload-lookahead" x-text="`Preload Image: ${preloadLookahead} page(s)`"></label>
<div class="uk-form-controls">
<input id="preload-lookahead" class="uk-range" type="range" min="0" max="5" step="1" x-model.number="preloadLookahead" @change="preloadLookaheadChanged()">
</div>
</div>

<hr class="uk-divider-icon">

<div class="uk-margin">
<label class="uk-form-label" for="entry-select">Jump to Entry</label>
<div class="uk-form-controls">
<select id="entry-select" class="uk-select" @change="entryChanged()">
<% entries.each do |e| %>
<option value="<%= e.id %>"
<% if e.id == entry.id %>
selected
<% end %>>
<%= e.title %>
</option>
<% end %>
</select>
</div>
</div>
</div>
<div class="uk-modal-footer uk-text-right">
<button class="uk-button uk-button-danger" type="button" @click="exitReader('<%= exit_url %>')">Exit Reader</button>
<% if previous_entry_url %>
<a class="uk-button uk-button-default uk-margin-small-bottom uk-margin-small-right" href="<%= previous_entry_url %>">Previous Entry</a>
<% end %>
<% if next_entry_url %>
<a class="uk-button uk-button-default uk-margin-small-bottom uk-margin-small-right" href="<%= next_entry_url %>">Next Entry</a>
<% end %>
<a class="uk-button uk-button-danger uk-margin-small-bottom uk-margin-small-right" href="<%= exit_url %>">Exit Reader</a>
</div>
</div>
</div>
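Note: the three new controls above (page margin, flip animation, preload lookahead) call marginChanged(), enableFlipAnimationChanged() and preloadLookaheadChanged() on the reader component. Those handlers are defined in js/reader.js, outside this diff; a hedged sketch of one plausible implementation that persists the values and re-applies the margin is:

// Hypothetical sketch only; the actual handlers live in js/reader.js and may differ.
marginChanged() {
  localStorage.setItem('margin', this.margin);
  // reapply the spacing to images already rendered in continuous mode
  this.items.forEach(item => {
    const img = document.getElementById(item.id);
    if (img) {
      img.style.marginTop = `${this.margin}px`;
      img.style.marginBottom = `${this.margin}px`;
    }
  });
},
enableFlipAnimationChanged() {
  localStorage.setItem('enableFlipAnimation', this.enableFlipAnimation);
},
preloadLookaheadChanged() {
  // x-model.number already coerces the range value to a number
  localStorage.setItem('preloadLookahead', this.preloadLookahead);
},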
@@ -103,15 +151,14 @@
const eid = "<%= entry.id %>";
</script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/protonet-jquery.inview/1.1.2/jquery.inview.min.js"></script>
<%= render_component "uikit" %>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/uikit.min.js"></script>
<script src="<%= base_url %>js/uikit-icons.min.js"></script>
<script src="<%= base_url %>js/reader.js"></script>
</body>

<style>
img[data-src][src*='data:image'] { background: white; }
img { width: 100%; }
img:not(.spine) { width: 100%; }
.reader-bg { background: black; }
</style>

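Note the interplay between the template and the new CSS: each item now carries its own width, height and style, and narrow images (item.width < 50) get a spine class, so the rule img:not(.spine) { width: 100%; } only stretches regular pages. A hypothetical sketch of how the items array consumed by the template might be assembled follows; the field names mirror the bindings in the markup, but the id scheme, dimension source and page URL are assumptions, not taken from this diff:

// Hypothetical sketch; the real item construction happens in js/reader.js.
// `dims` stands in for per-page dimension data, `baseURL`/`tid`/`eid` for values
// the page renders into globals; all of these are assumptions.
const buildItems = (dims, baseURL, tid, eid, margin) =>
  dims.map((d, i) => ({
    id: `page-${i + 1}`,                                 // assumed id scheme
    url: `${baseURL}api/page/${tid}/${eid}/${i + 1}`,    // assumed page endpoint
    width: d.width,
    height: d.height,
    style: `margin-top:${margin}px; margin-bottom:${margin}px`,
  }));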
54
src/views/subscription.html.ecr
Normal file
@@ -0,0 +1,54 @@
<h2 class="uk-title">MangaDex Subscription Manager</h2>

<div x-data="component()" x-init="init()">
<p x-show="available === false">The subscription manager uses a MangaDex API that requires authentication. Please <a href="<%= base_url %>admin/mangadex">connect to MangaDex</a> before using this feature.</p>

<p x-show="available && subscriptions.length === 0">No subscription found. Go to the <a href="<%= base_url %>download">MangaDex download page</a> and start subscribing.</p>

<template x-if="subscriptions.length > 0">
<div class="uk-overflow-auto">
<table class="uk-table uk-table-striped">
<thead>
<tr>
<th>Manga ID</th>
<th>Language</th>
<th>Group ID</th>
<th>Volume Range</th>
<th>Chapter Range</th>
<th>Creator</th>
<th>Last Checked</th>
<th>Created At</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
<template x-for="sub in subscriptions" :key="sub">
<tr>
<td><a :href="`<%= mangadex_base_url %>/manga/${sub.manga_id}`" x-text="sub.manga_id"></a></td>
<td x-text="sub.language || 'All'"></td>
<td>
<a x-show="sub.group_id" :href="`<%= mangadex_base_url %>/group/${sub.group_id}`" x-text="sub.group_id"></a>
<span x-show="!sub.group_id">All</span>
</td>
<td x-text="formatRange(sub.min_volume, sub.max_volume)"></td>
<td x-text="formatRange(sub.min_chapter, sub.max_chapter)"></td>
<td x-text="sub.username"></td>
<td x-text="`${moment.unix(sub.last_checked).fromNow()}`"></td>
<td x-text="`${moment.unix(sub.created_at).fromNow()}`"></td>
<td :data-id="sub.id">
<a @click="check($event)" x-show="sub.username === '<%= username %>'" uk-icon="refresh" uk-tooltip="Check for updates"></a>
<a @click="rm($event)" x-show="sub.username === '<%= username %>'" uk-icon="trash" uk-tooltip="Delete"></a>
</td>
</tr>
</template>
</tbody>
</table>
</div>
</template>
</div>

<% content_for "script" do %>
<%= render_component "moment" %>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/subscription.js"></script>
<% end %>
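Note: the subscription table is driven entirely by the Alpine component() defined in js/subscription.js, which this diff does not include. A hedged sketch of the shape such a component would need in order to satisfy the bindings used above (available, subscriptions, formatRange, check, rm); the endpoint paths, base_url constant and range convention are all assumptions:

// Hypothetical sketch matching the template bindings; the real component in
// js/subscription.js may differ.
const base_url = '/';  // assumption: normally rendered into the page by the server
function component() {
  return {
    available: undefined,  // set to false when MangaDex auth is unavailable
    subscriptions: [],
    init() {
      fetch(`${base_url}api/admin/mangadex/subscriptions`)  // assumed endpoint
        .then(res => res.json())
        .then(data => {
          this.available = !data.error;
          this.subscriptions = data.subscriptions || [];
        })
        .catch(() => { this.available = false; });
    },
    formatRange(min, max) {
      // assumed convention: a missing bound means "unbounded"
      if (min == null && max == null) return 'All';
      if (min == null) return `up to ${max}`;
      if (max == null) return `from ${min}`;
      return `${min} - ${max}`;
    },
    check(event) {
      // trigger an update check for the row identified by the parent <td>'s data-id
      const id = event.currentTarget.closest('td').dataset.id;
      fetch(`${base_url}api/admin/mangadex/subscriptions/check/${id}`, { method: 'POST' });  // assumed endpoint
    },
    rm(event) {
      const id = event.currentTarget.closest('td').dataset.id;
      fetch(`${base_url}api/admin/mangadex/subscriptions/${id}`, { method: 'DELETE' });  // assumed endpoint
    },
  };
}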
@@ -24,7 +24,7 @@
</div>

<% content_for "script" do %>
<%= render_component "dots-scripts" %>
<%= render_component "dots" %>
<script src="<%= base_url %>js/search.js"></script>
<script src="<%= base_url %>js/sort-items.js"></script>
<% end %>

@@ -123,7 +123,7 @@
</div>

<% content_for "script" do %>
<%= render_component "dots-scripts" %>
<%= render_component "dots" %>
<link href="https://cdn.jsdelivr.net/npm/select2@4.1.0-beta.1/dist/css/select2.min.css" rel="stylesheet" />
<link href="<%= base_url %>css/tags.css" rel="stylesheet" />
<script src="https://cdn.jsdelivr.net/npm/select2@4.1.0-beta.1/dist/js/select2.min.js"></script>