Mirror of https://github.com/hkalexling/Mango.git, synced 2026-01-24 00:03:14 -05:00.

Compare commits: v0.12.0...feature/lo
376 commits between v0.12.0 and feature/lo; the listing shows only abbreviated commit hashes (80344c3bf0 through f5fe3c6b1c), with no author, date, or message information.

.all-contributorsrc (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
{
|
||||
"projectName": "Mango",
|
||||
"projectOwner": "hkalexling",
|
||||
"repoType": "github",
|
||||
"repoHost": "https://github.com",
|
||||
"files": [
|
||||
"README.md"
|
||||
],
|
||||
"imageSize": 100,
|
||||
"commit": false,
|
||||
"commitConvention": "none",
|
||||
"contributors": [
|
||||
{
|
||||
"login": "hkalexling",
|
||||
"name": "Alex Ling",
|
||||
"avatar_url": "https://avatars1.githubusercontent.com/u/7845831?v=4",
|
||||
"profile": "https://github.com/hkalexling/",
|
||||
"contributions": [
|
||||
"code",
|
||||
"doc",
|
||||
"infra"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "jaredlt",
|
||||
"name": "jaredlt",
|
||||
"avatar_url": "https://avatars1.githubusercontent.com/u/8590311?v=4",
|
||||
"profile": "https://github.com/jaredlt",
|
||||
"contributions": [
|
||||
"code",
|
||||
"ideas",
|
||||
"design"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "shincurry",
|
||||
"name": "ココロ",
|
||||
"avatar_url": "https://avatars1.githubusercontent.com/u/4946624?v=4",
|
||||
"profile": "https://windisco.com/",
|
||||
"contributions": [
|
||||
"infra"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "noirscape",
|
||||
"name": "Valentijn",
|
||||
"avatar_url": "https://avatars0.githubusercontent.com/u/13433513?v=4",
|
||||
"profile": "https://catgirlsin.space/",
|
||||
"contributions": [
|
||||
"infra"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "flying-sausages",
|
||||
"name": "flying-sausages",
|
||||
"avatar_url": "https://avatars1.githubusercontent.com/u/23618693?v=4",
|
||||
"profile": "https://github.com/flying-sausages",
|
||||
"contributions": [
|
||||
"doc",
|
||||
"ideas"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "XavierSchiller",
|
||||
"name": "Xavier",
|
||||
"avatar_url": "https://avatars1.githubusercontent.com/u/22575255?v=4",
|
||||
"profile": "https://github.com/XavierSchiller",
|
||||
"contributions": [
|
||||
"infra"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "WROIATE",
|
||||
"name": "Jarao",
|
||||
"avatar_url": "https://avatars3.githubusercontent.com/u/44677306?v=4",
|
||||
"profile": "https://github.com/WROIATE",
|
||||
"contributions": [
|
||||
"infra"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "Leeingnyo",
|
||||
"name": "이인용",
|
||||
"avatar_url": "https://avatars0.githubusercontent.com/u/6760150?v=4",
|
||||
"profile": "https://github.com/Leeingnyo",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "h45h74x",
|
||||
"name": "Simon",
|
||||
"avatar_url": "https://avatars1.githubusercontent.com/u/27204033?v=4",
|
||||
"profile": "http://h45h74x.eu.org",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "davidkna",
|
||||
"name": "David Knaack",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/835177?v=4",
|
||||
"profile": "https://github.com/davidkna",
|
||||
"contributions": [
|
||||
"infra"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "lincolnthedev",
|
||||
"name": "i use arch btw",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/41193328?v=4",
|
||||
"profile": "https://lncn.dev",
|
||||
"contributions": [
|
||||
"infra"
|
||||
]
|
||||
}
|
||||
],
|
||||
"contributorsPerLine": 7,
|
||||
"skipCi": true
|
||||
}

@@ -7,3 +7,8 @@ Lint/UnusedArgument:
     - src/routes/*
 Metrics/CyclomaticComplexity:
   Enabled: false
+Layout/LineLength:
+  Enabled: true
+  MaxLength: 80
+  Excluded:
+    - src/routes/api.cr

@@ -1,2 +1,9 @@
 node_modules
 lib
+Dockerfile
+Dockerfile.arm32v7
+Dockerfile.arm64v8
+README.md
+.all-contributorsrc
+env.example
+.github/

.github/autoapproval.yml (new file, 6 lines)
@@ -0,0 +1,6 @@
+from_owner:
+  - hkalexling
+required_labels:
+  - autoapprove
+apply_labels:
+  - autoapproved

.github/workflows/build.yml (6 lines changed)
@@ -2,7 +2,7 @@ name: Build

 on:
   push:
-    branches: [ master, dev ]
+    branches: [ master, dev, hotfix/* ]
   pull_request:
     branches: [ master, dev ]

@@ -12,12 +12,12 @@ jobs:

     runs-on: ubuntu-latest
     container:
-      image: crystallang/crystal:0.34.0-alpine
+      image: crystallang/crystal:1.0.0-alpine

     steps:
       - uses: actions/checkout@v2
       - name: Install dependencies
-        run: apk add --no-cache yarn yaml sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
+        run: apk add --no-cache yarn yaml-static sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
       - name: Build
         run: make static || make static
       - name: Linter

.github/workflows/dockerhub.yml (2 lines changed)
@@ -9,7 +9,7 @@ jobs:
     - uses: actions/checkout@master
     - name: Get release version
       id: get_version
-      run: echo ::set-env name=RELEASE_VERSION::$(echo ${GITHUB_REF:10})
+      run: echo "RELEASE_VERSION=$(echo ${GITHUB_REF:10})" >> $GITHUB_ENV
     - name: Publish to Dockerhub
       uses: elgohr/Publish-Docker-Github-Action@master
       with:

.gitignore (2 lines changed)
@@ -12,3 +12,5 @@ mango
 public/css/uikit.css
 public/img/*.svg
 public/js/*.min.js
+public/css/*.css
+public/webfonts

@@ -1,15 +1,15 @@
-FROM crystallang/crystal:0.34.0-alpine AS builder
+FROM crystallang/crystal:1.0.0-alpine AS builder

 WORKDIR /Mango

 COPY . .
-RUN apk add --no-cache yarn yaml sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
+RUN apk add --no-cache yarn yaml-static sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
 RUN make static || make static

 FROM library/alpine

 WORKDIR /

-COPY --from=builder /Mango/mango .
+COPY --from=builder /Mango/mango /usr/local/bin/mango

-CMD ["./mango"]
+CMD ["/usr/local/bin/mango"]
@@ -2,13 +2,14 @@ FROM arm32v7/ubuntu:18.04
|
||||
|
||||
RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev
|
||||
|
||||
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.34.0 && make deps && cd ..
|
||||
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
|
||||
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
|
||||
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr/ext/libwebp && git checkout v0.1.1 && make && cd ../stbi && make
|
||||
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 1.0.0 && make deps && cd ..
|
||||
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.8 && make && cd ..
|
||||
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v1.0.0 && make && cd ..
|
||||
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.5.0 && make && cd ..
|
||||
|
||||
COPY mango-arm32v7.o .
|
||||
|
||||
RUN cc 'mango-arm32v7.o' -o 'mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/arm-linux-gnueabihf/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib
|
||||
RUN cc 'mango-arm32v7.o' -o '/usr/local/bin/mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/arm-linux-gnueabihf/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib
|
||||
|
||||
CMD ["/usr/local/bin/mango"]
|
||||
|
||||
CMD ["./mango"]
|
||||
|
||||
@@ -2,13 +2,13 @@ FROM arm64v8/ubuntu:18.04
|
||||
|
||||
RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev
|
||||
|
||||
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.34.0 && make deps && cd ..
|
||||
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
|
||||
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
|
||||
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr/ext/libwebp && git checkout v0.1.1 && make && cd ../stbi && make
|
||||
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 1.0.0 && make deps && cd ..
|
||||
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.8 && make && cd ..
|
||||
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v1.0.0 && make && cd ..
|
||||
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.5.0 && make && cd ..
|
||||
|
||||
COPY mango-arm64v8.o .
|
||||
|
||||
RUN cc 'mango-arm64v8.o' -o 'mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/arm-linux-gnueabihf/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib
|
||||
RUN cc 'mango-arm64v8.o' -o '/usr/local/bin/mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/aarch64-linux-gnu/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib
|
||||
|
||||
CMD ["./mango"]
|
||||
CMD ["/usr/local/bin/mango"]
|
||||
|
||||

Makefile (1 line changed)
@@ -29,7 +29,6 @@ test:
 check:
     crystal tool format --check
     ./bin/ameba
-    ./dev/linewidth.sh

 arm32v7:
     crystal build src/mango.cr --release --progress --error-trace --cross-compile --target='arm-linux-gnueabihf' -o mango-arm32v7

README.md (50 lines changed)
@@ -2,7 +2,7 @@
|
||||
|
||||
# Mango
|
||||
|
||||
[](https://www.patreon.com/hkalexling)  [](https://gitter.im/mango-cr/mango?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
|
||||
[](https://www.patreon.com/hkalexling)  [](https://gitter.im/mango-cr/mango?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [](http://discord.com/invite/ezKtacCp9Q)
|
||||
|
||||
Mango is a self-hosted manga server and reader. Its features include
|
||||
|
||||
@@ -12,7 +12,7 @@ Mango is a self-hosted manga server and reader. Its features include
|
||||
- Supported formats: `.cbz`, `.zip`, `.cbr` and `.rar`
|
||||
- Supports nested folders in library
|
||||
- Automatically stores reading progress
|
||||
- Built-in [MangaDex](https://mangadex.org/) downloader
|
||||
- Thumbnail generation
|
||||
- Supports [plugins](https://github.com/hkalexling/mango-plugins) to download from third-party sites
|
||||
- The web reader is responsive and works well on mobile, so there is no need for a mobile app
|
||||
- All the static files are embedded in the binary, so the deployment process is easy and painless
|
||||
@@ -51,7 +51,7 @@ The official docker images are available on [Dockerhub](https://hub.docker.com/r
|
||||
### CLI
|
||||
|
||||
```
|
||||
Mango - Manga Server and Web Reader. Version 0.12.0
|
||||
Mango - Manga Server and Web Reader. Version 0.24.0
|
||||
|
||||
Usage:
|
||||
|
||||
@@ -74,16 +74,28 @@ The default config file location is `~/.config/mango/config.yml`. It might be di
|
||||
|
||||
```yaml
|
||||
---
|
||||
host: 0.0.0.0
|
||||
port: 9000
|
||||
base_url: /
|
||||
session_secret: mango-session-secret
|
||||
library_path: ~/mango/library
|
||||
db_path: ~/mango/mango.db
|
||||
scan_interval_minutes: 5
|
||||
thumbnail_generation_interval_hours: 24
|
||||
log_level: info
|
||||
upload_path: ~/mango/uploads
|
||||
plugin_path: ~/mango/plugins
|
||||
download_timeout_seconds: 30
|
||||
library_cache_path: ~/mango/library.yml.gz
|
||||
cache_enabled: false
|
||||
cache_size_mbs: 50
|
||||
cache_log_enabled: true
|
||||
disable_login: false
|
||||
default_username: ""
|
||||
auth_proxy_header_name: ""
|
||||
mangadex:
|
||||
base_url: https://mangadex.org
|
||||
api_url: https://mangadex.org/api
|
||||
api_url: https://api.mangadex.org/v2
|
||||
download_wait_seconds: 5
|
||||
download_retries: 4
|
||||
download_queue_db_path: ~/mango/queue.db
|
||||
@@ -91,8 +103,10 @@ mangadex:
|
||||
manga_rename_rule: '{title}'
|
||||
```
|
||||
|
||||
- `scan_interval_minutes` can be any non-negative integer. Setting it to `0` disables the periodic scan
|
||||
- `scan_interval_minutes`, `thumbnail_generation_interval_hours` and `db_optimization_interval_hours` can be any non-negative integer. Setting them to `0` disables the periodic tasks
|
||||
- `log_level` can be `debug`, `info`, `warn`, `error`, `fatal` or `off`. Setting it to `off` disables the logging
|
||||
- You can disable authentication by setting `disable_login` to true. Note that `default_username` must be set to an existing username for this to work.
|
||||
- By setting `cache_enabled` to `true`, you can enable an experimental feature where Mango caches library metadata to improve page load time. You can further fine-tune the feature with `cache_size_mbs` and `cache_log_enabled`.
|
||||
|
||||
### Library Structure
|
||||
|
||||
@@ -142,9 +156,33 @@ Mobile UI:
|
||||
## Sponsors
|
||||
|
||||
<a href="https://casinoshunter.com/online-casinos/"><img src="https://i.imgur.com/EJb3wBo.png" width="150" height="auto"></a>
|
||||
<a href="https://www.browserstack.com/open-source"><img src="https://i.imgur.com/hGJUJXD.png" width="150" height="auto"></a>
|
||||
|
||||
## Contributors
|
||||
|
||||
Please check the [development guideline](https://github.com/hkalexling/Mango/wiki/Development) if you are interested in code contributions.
|
||||
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
|
||||
<!-- prettier-ignore-start -->
|
||||
<!-- markdownlint-disable -->
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center"><a href="https://github.com/hkalexling/"><img src="https://avatars1.githubusercontent.com/u/7845831?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Alex Ling</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=hkalexling" title="Code">💻</a> <a href="https://github.com/hkalexling/Mango/commits?author=hkalexling" title="Documentation">📖</a> <a href="#infra-hkalexling" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
<td align="center"><a href="https://github.com/jaredlt"><img src="https://avatars1.githubusercontent.com/u/8590311?v=4?s=100" width="100px;" alt=""/><br /><sub><b>jaredlt</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=jaredlt" title="Code">💻</a> <a href="#ideas-jaredlt" title="Ideas, Planning, & Feedback">🤔</a> <a href="#design-jaredlt" title="Design">🎨</a></td>
|
||||
<td align="center"><a href="https://windisco.com/"><img src="https://avatars1.githubusercontent.com/u/4946624?v=4?s=100" width="100px;" alt=""/><br /><sub><b>ココロ</b></sub></a><br /><a href="#infra-shincurry" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
<td align="center"><a href="https://catgirlsin.space/"><img src="https://avatars0.githubusercontent.com/u/13433513?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Valentijn</b></sub></a><br /><a href="#infra-noirscape" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
<td align="center"><a href="https://github.com/flying-sausages"><img src="https://avatars1.githubusercontent.com/u/23618693?v=4?s=100" width="100px;" alt=""/><br /><sub><b>flying-sausages</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=flying-sausages" title="Documentation">📖</a> <a href="#ideas-flying-sausages" title="Ideas, Planning, & Feedback">🤔</a></td>
|
||||
<td align="center"><a href="https://github.com/XavierSchiller"><img src="https://avatars1.githubusercontent.com/u/22575255?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Xavier</b></sub></a><br /><a href="#infra-XavierSchiller" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
<td align="center"><a href="https://github.com/WROIATE"><img src="https://avatars3.githubusercontent.com/u/44677306?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jarao</b></sub></a><br /><a href="#infra-WROIATE" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center"><a href="https://github.com/Leeingnyo"><img src="https://avatars0.githubusercontent.com/u/6760150?v=4?s=100" width="100px;" alt=""/><br /><sub><b>이인용</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=Leeingnyo" title="Code">💻</a></td>
|
||||
<td align="center"><a href="http://h45h74x.eu.org"><img src="https://avatars1.githubusercontent.com/u/27204033?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Simon</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=h45h74x" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/davidkna"><img src="https://avatars.githubusercontent.com/u/835177?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Knaack</b></sub></a><br /><a href="#infra-davidkna" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
<td align="center"><a href="https://lncn.dev"><img src="https://avatars.githubusercontent.com/u/41193328?v=4?s=100" width="100px;" alt=""/><br /><sub><b>i use arch btw</b></sub></a><br /><a href="#infra-lincolnthedev" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/0)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/1)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/2)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/3)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/4)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/5)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/6)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/7)
|
||||
<!-- markdownlint-restore -->
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
<!-- ALL-CONTRIBUTORS-LIST:END -->
|
||||
|
||||

@@ -1,5 +0,0 @@
-#!/bin/sh
-
-[ ! -z "$(grep '.\{80\}' --exclude-dir=lib --include="*.cr" -nr --color=always . | tee /dev/tty)" ] \
-  && echo "The above lines exceed the 80 characters limit" \
-  || exit 0

gulpfile.js (82 lines changed)
@@ -1,15 +1,42 @@
|
||||
const gulp = require('gulp');
|
||||
const minify = require("gulp-babel-minify");
|
||||
const babel = require('gulp-babel');
|
||||
const minify = require('gulp-babel-minify');
|
||||
const minifyCss = require('gulp-minify-css');
|
||||
const less = require('gulp-less');
|
||||
|
||||
gulp.task('copy-uikit-js', () => {
|
||||
return gulp.src('node_modules/uikit/dist/js/*.min.js')
|
||||
.pipe(gulp.dest('public/js'));
|
||||
gulp.task('copy-img', () => {
|
||||
return gulp.src('node_modules/uikit/src/images/backgrounds/*.svg')
|
||||
.pipe(gulp.dest('public/img'));
|
||||
});
|
||||
|
||||
gulp.task('minify-js', () => {
|
||||
return gulp.src('public/js/*.js')
|
||||
gulp.task('copy-font', () => {
|
||||
return gulp.src('node_modules/@fortawesome/fontawesome-free/webfonts/fa-solid-900.woff**')
|
||||
.pipe(gulp.dest('public/webfonts'));
|
||||
});
|
||||
|
||||
// Copy files from node_modules
|
||||
gulp.task('node-modules-copy', gulp.parallel('copy-img', 'copy-font'));
|
||||
|
||||
// Compile less
|
||||
gulp.task('less', () => {
|
||||
return gulp.src([
|
||||
'public/css/mango.less',
|
||||
'public/css/tags.less'
|
||||
])
|
||||
.pipe(less())
|
||||
.pipe(gulp.dest('public/css'));
|
||||
});
|
||||
|
||||
// Transpile and minify JS files and output to dist
|
||||
gulp.task('babel', () => {
|
||||
return gulp.src(['public/js/*.js', '!public/js/*.min.js'])
|
||||
.pipe(babel({
|
||||
presets: [
|
||||
['@babel/preset-env', {
|
||||
targets: '>0.25%, not dead, ios>=9'
|
||||
}]
|
||||
],
|
||||
}))
|
||||
.pipe(minify({
|
||||
removeConsole: true,
|
||||
builtIns: false
|
||||
@@ -17,40 +44,31 @@ gulp.task('minify-js', () => {
|
||||
.pipe(gulp.dest('dist/js'));
|
||||
});
|
||||
|
||||
gulp.task('less', () => {
|
||||
return gulp.src('public/css/*.less')
|
||||
.pipe(less())
|
||||
.pipe(gulp.dest('public/css'));
|
||||
});
|
||||
|
||||
// Minify CSS and output to dist
|
||||
gulp.task('minify-css', () => {
|
||||
return gulp.src('public/css/*.css')
|
||||
.pipe(minifyCss())
|
||||
.pipe(gulp.dest('dist/css'));
|
||||
});
|
||||
|
||||
gulp.task('copy-uikit-icons', () => {
|
||||
return gulp.src('node_modules/uikit/src/images/backgrounds/*.svg')
|
||||
.pipe(gulp.dest('public/img'));
|
||||
});
|
||||
|
||||
gulp.task('img', () => {
|
||||
return gulp.src('public/img/*')
|
||||
.pipe(gulp.dest('dist/img'));
|
||||
});
|
||||
|
||||
// Copy static files (including images) to dist
|
||||
gulp.task('copy-files', () => {
|
||||
return gulp.src('public/*.*')
|
||||
return gulp.src([
|
||||
'public/*.*',
|
||||
'public/img/*',
|
||||
'public/webfonts/*',
|
||||
'public/js/*.min.js'
|
||||
], {
|
||||
base: 'public'
|
||||
})
|
||||
.pipe(gulp.dest('dist'));
|
||||
});
|
||||
|
||||
gulp.task('default', gulp.parallel(
|
||||
gulp.series('copy-uikit-js', 'minify-js'),
|
||||
gulp.series('less', 'minify-css'),
|
||||
gulp.series('copy-uikit-icons', 'img'),
|
||||
'copy-files'
|
||||
));
|
||||
// Set up the public folder for development
|
||||
gulp.task('dev', gulp.parallel('node-modules-copy', 'less'));
|
||||
|
||||
gulp.task('dev', gulp.parallel(
|
||||
'copy-uikit-js', 'less', 'copy-uikit-icons'
|
||||
));
|
||||
// Set up the dist folder for deployment
|
||||
gulp.task('deploy', gulp.parallel('babel', 'minify-css', 'copy-files'));
|
||||
|
||||
// Default task
|
||||
gulp.task('default', gulp.series('dev', 'deploy'));
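
The reworked gulpfile splits the pipeline into a `dev` task that prepares `public/` for local work (copying UIkit images and Font Awesome webfonts, compiling Less) and a `deploy` task that fills `dist/` for release. A watch task is not part of this diff; the sketch below shows how one could sit on top of the tasks defined above. The `watch` name and the glob lists are illustrative assumptions, not taken from the repository.

```js
// Hypothetical addition (not in this diff): append to the gulpfile above to
// re-run the existing tasks whenever a source file changes during development.
gulp.task('watch', () => {
  // Recompile the Less bundles handled by the 'less' task
  gulp.watch(['public/css/*.less'], gulp.series('less'));
  // Re-transpile and minify scripts via the 'babel' task
  gulp.watch(['public/js/*.js', '!public/js/*.min.js'], gulp.series('babel'));
});
```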

migration/foreign_keys.6.cr (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
class ForeignKeys < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
-- add foreign key to tags
|
||||
ALTER TABLE tags RENAME TO tmp;
|
||||
|
||||
CREATE TABLE tags (
|
||||
id TEXT NOT NULL,
|
||||
tag TEXT NOT NULL,
|
||||
UNIQUE (id, tag),
|
||||
FOREIGN KEY (id) REFERENCES titles (id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO tags
|
||||
SELECT * FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
CREATE INDEX tags_id_idx ON tags (id);
|
||||
CREATE INDEX tags_tag_idx ON tags (tag);
|
||||
|
||||
-- add foreign key to thumbnails
|
||||
ALTER TABLE thumbnails RENAME TO tmp;
|
||||
|
||||
CREATE TABLE thumbnails (
|
||||
id TEXT NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
filename TEXT NOT NULL,
|
||||
mime TEXT NOT NULL,
|
||||
size INTEGER NOT NULL,
|
||||
FOREIGN KEY (id) REFERENCES ids (id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO thumbnails
|
||||
SELECT * FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
CREATE UNIQUE INDEX tn_index ON thumbnails (id);
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
-- remove foreign key from thumbnails
|
||||
ALTER TABLE thumbnails RENAME TO tmp;
|
||||
|
||||
CREATE TABLE thumbnails (
|
||||
id TEXT NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
filename TEXT NOT NULL,
|
||||
mime TEXT NOT NULL,
|
||||
size INTEGER NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO thumbnails
|
||||
SELECT * FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
CREATE UNIQUE INDEX tn_index ON thumbnails (id);
|
||||
|
||||
-- remove foreign key from tags
|
||||
ALTER TABLE tags RENAME TO tmp;
|
||||
|
||||
CREATE TABLE tags (
|
||||
id TEXT NOT NULL,
|
||||
tag TEXT NOT NULL,
|
||||
UNIQUE (id, tag)
|
||||
);
|
||||
|
||||
INSERT INTO tags
|
||||
SELECT * FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
CREATE INDEX tags_id_idx ON tags (id);
|
||||
CREATE INDEX tags_tag_idx ON tags (tag);
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/ids.2.cr (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
class CreateIds < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
CREATE TABLE IF NOT EXISTS ids (
|
||||
path TEXT NOT NULL,
|
||||
id TEXT NOT NULL,
|
||||
is_title INTEGER NOT NULL
|
||||
);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS path_idx ON ids (path);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS id_idx ON ids (id);
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
DROP TABLE ids;
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/ids_signature.7.cr (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
class IDSignature < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
ALTER TABLE ids ADD COLUMN signature TEXT;
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
-- remove signature column from ids
|
||||
ALTER TABLE ids RENAME TO tmp;
|
||||
|
||||
CREATE TABLE ids (
|
||||
path TEXT NOT NULL,
|
||||
id TEXT NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO ids
|
||||
SELECT path, id
|
||||
FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
-- recreate the indices
|
||||
CREATE UNIQUE INDEX path_idx ON ids (path);
|
||||
CREATE UNIQUE INDEX id_idx ON ids (id);
|
||||
|
||||
-- recreate the foreign key constraint on thumbnails
|
||||
ALTER TABLE thumbnails RENAME TO tmp;
|
||||
|
||||
CREATE TABLE thumbnails (
|
||||
id TEXT NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
filename TEXT NOT NULL,
|
||||
mime TEXT NOT NULL,
|
||||
size INTEGER NOT NULL,
|
||||
FOREIGN KEY (id) REFERENCES ids (id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO thumbnails
|
||||
SELECT * FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
CREATE UNIQUE INDEX tn_index ON thumbnails (id);
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/md_account.11.cr (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
class CreateMangaDexAccount < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
CREATE TABLE md_account (
|
||||
username TEXT NOT NULL PRIMARY KEY,
|
||||
token TEXT NOT NULL,
|
||||
expire INTEGER NOT NULL,
|
||||
FOREIGN KEY (username) REFERENCES users (username)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
DROP TABLE md_account;
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/relative_path.8.cr (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
class RelativePath < MG::Base
|
||||
def up : String
|
||||
base = Config.current.library_path
|
||||
# Escape single quotes in case the path contains them, and remove the
|
||||
# trailing slash (this is a mistake, fixed in DB version 10)
|
||||
base = base.gsub("'", "''").rstrip "/"
|
||||
|
||||
<<-SQL
|
||||
-- update the path column in ids to relative paths
|
||||
UPDATE ids
|
||||
SET path = REPLACE(path, '#{base}', '');
|
||||
|
||||
-- update the path column in titles to relative paths
|
||||
UPDATE titles
|
||||
SET path = REPLACE(path, '#{base}', '');
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
base = Config.current.library_path
|
||||
base = base.gsub("'", "''").rstrip "/"
|
||||
|
||||
<<-SQL
|
||||
-- update the path column in ids to absolute paths
|
||||
UPDATE ids
|
||||
SET path = '#{base}' || path;
|
||||
|
||||
-- update the path column in titles to absolute paths
|
||||
UPDATE titles
|
||||
SET path = '#{base}' || path;
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/relative_path_fix.10.cr (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
# In DB version 8, we replaced the absolute paths in DB with relative paths,
|
||||
# but we mistakenly left the starting slashes. This migration removes them.
|
||||
class RelativePathFix < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
-- remove leading slashes from the paths in ids
|
||||
UPDATE ids
|
||||
SET path = SUBSTR(path, 2, LENGTH(path) - 1)
|
||||
WHERE path LIKE '/%';
|
||||
|
||||
-- remove leading slashes from the paths in titles
|
||||
UPDATE titles
|
||||
SET path = SUBSTR(path, 2, LENGTH(path) - 1)
|
||||
WHERE path LIKE '/%';
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
-- add leading slashes to paths in ids
|
||||
UPDATE ids
|
||||
SET path = '/' || path
|
||||
WHERE path NOT LIKE '/%';
|
||||
|
||||
-- add leading slashes to paths in titles
|
||||
UPDATE titles
|
||||
SET path = '/' || path
|
||||
WHERE path NOT LIKE '/%';
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/tags.4.cr (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
class CreateTags < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
CREATE TABLE IF NOT EXISTS tags (
|
||||
id TEXT NOT NULL,
|
||||
tag TEXT NOT NULL,
|
||||
UNIQUE (id, tag)
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS tags_id_idx ON tags (id);
|
||||
CREATE INDEX IF NOT EXISTS tags_tag_idx ON tags (tag);
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
DROP TABLE tags;
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/thumbnails.3.cr (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
class CreateThumbnails < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
CREATE TABLE IF NOT EXISTS thumbnails (
|
||||
id TEXT NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
filename TEXT NOT NULL,
|
||||
mime TEXT NOT NULL,
|
||||
size INTEGER NOT NULL
|
||||
);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS tn_index ON thumbnails (id);
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
DROP TABLE thumbnails;
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/titles.5.cr (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
class CreateTitles < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
-- create titles
|
||||
CREATE TABLE titles (
|
||||
id TEXT NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
signature TEXT
|
||||
);
|
||||
CREATE UNIQUE INDEX titles_id_idx on titles (id);
|
||||
CREATE UNIQUE INDEX titles_path_idx on titles (path);
|
||||
|
||||
-- migrate data from ids to titles
|
||||
INSERT INTO titles
|
||||
SELECT id, path, null
|
||||
FROM ids
|
||||
WHERE is_title = 1;
|
||||
|
||||
DELETE FROM ids
|
||||
WHERE is_title = 1;
|
||||
|
||||
-- remove the is_title column from ids
|
||||
ALTER TABLE ids RENAME TO tmp;
|
||||
|
||||
CREATE TABLE ids (
|
||||
path TEXT NOT NULL,
|
||||
id TEXT NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO ids
|
||||
SELECT path, id
|
||||
FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
-- recreate the indices
|
||||
CREATE UNIQUE INDEX path_idx ON ids (path);
|
||||
CREATE UNIQUE INDEX id_idx ON ids (id);
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
-- insert the is_title column
|
||||
ALTER TABLE ids ADD COLUMN is_title INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
-- migrate data from titles to ids
|
||||
INSERT INTO ids
|
||||
SELECT path, id, 1
|
||||
FROM titles;
|
||||
|
||||
-- remove titles
|
||||
DROP TABLE titles;
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/unavailable.9.cr (new file, 94 lines)
@@ -0,0 +1,94 @@
|
||||
class UnavailableIDs < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
-- add unavailable column to ids
|
||||
ALTER TABLE ids ADD COLUMN unavailable INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
-- add unavailable column to titles
|
||||
ALTER TABLE titles ADD COLUMN unavailable INTEGER NOT NULL DEFAULT 0;
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
-- remove unavailable column from ids
|
||||
ALTER TABLE ids RENAME TO tmp;
|
||||
|
||||
CREATE TABLE ids (
|
||||
path TEXT NOT NULL,
|
||||
id TEXT NOT NULL,
|
||||
signature TEXT
|
||||
);
|
||||
|
||||
INSERT INTO ids
|
||||
SELECT path, id, signature
|
||||
FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
-- recreate the indices
|
||||
CREATE UNIQUE INDEX path_idx ON ids (path);
|
||||
CREATE UNIQUE INDEX id_idx ON ids (id);
|
||||
|
||||
-- recreate the foreign key constraint on thumbnails
|
||||
ALTER TABLE thumbnails RENAME TO tmp;
|
||||
|
||||
CREATE TABLE thumbnails (
|
||||
id TEXT NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
filename TEXT NOT NULL,
|
||||
mime TEXT NOT NULL,
|
||||
size INTEGER NOT NULL,
|
||||
FOREIGN KEY (id) REFERENCES ids (id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO thumbnails
|
||||
SELECT * FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
CREATE UNIQUE INDEX tn_index ON thumbnails (id);
|
||||
|
||||
-- remove unavailable column from titles
|
||||
ALTER TABLE titles RENAME TO tmp;
|
||||
|
||||
CREATE TABLE titles (
|
||||
id TEXT NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
signature TEXT
|
||||
);
|
||||
|
||||
INSERT INTO titles
|
||||
SELECT path, id, signature
|
||||
FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
-- recreate the indices
|
||||
CREATE UNIQUE INDEX titles_id_idx on titles (id);
|
||||
CREATE UNIQUE INDEX titles_path_idx on titles (path);
|
||||
|
||||
-- recreate the foreign key constraint on tags
|
||||
ALTER TABLE tags RENAME TO tmp;
|
||||
|
||||
CREATE TABLE tags (
|
||||
id TEXT NOT NULL,
|
||||
tag TEXT NOT NULL,
|
||||
UNIQUE (id, tag),
|
||||
FOREIGN KEY (id) REFERENCES titles (id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO tags
|
||||
SELECT * FROM tmp;
|
||||
|
||||
DROP TABLE tmp;
|
||||
|
||||
CREATE INDEX tags_id_idx ON tags (id);
|
||||
CREATE INDEX tags_tag_idx ON tags (tag);
|
||||
SQL
|
||||
end
|
||||
end
|
||||

migration/users.1.cr (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
class CreateUsers < MG::Base
|
||||
def up : String
|
||||
<<-SQL
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
username TEXT NOT NULL,
|
||||
password TEXT NOT NULL,
|
||||
token TEXT,
|
||||
admin INTEGER NOT NULL
|
||||
);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS username_idx ON users (username);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS token_idx ON users (token);
|
||||
SQL
|
||||
end
|
||||
|
||||
def down : String
|
||||
<<-SQL
|
||||
DROP TABLE users;
|
||||
SQL
|
||||
end
|
||||
end
|
||||

package.json (42 lines changed)
@@ -1,21 +1,25 @@
 {
-  "name": "mango",
-  "version": "1.0.0",
-  "main": "index.js",
-  "repository": "https://github.com/hkalexling/Mango.git",
-  "author": "Alex Ling <hkalexling@gmail.com>",
-  "license": "MIT",
-  "devDependencies": {
-    "gulp": "^4.0.2",
-    "gulp-babel-minify": "^0.5.1",
-    "gulp-less": "^4.0.1",
-    "gulp-minify-css": "^1.2.4",
-    "less": "^3.11.3"
-  },
-  "scripts": {
-    "uglify": "gulp"
-  },
-  "dependencies": {
-    "uikit": "^3.5.4"
-  }
+  "name": "mango",
+  "version": "1.0.0",
+  "main": "index.js",
+  "repository": "https://github.com/hkalexling/Mango.git",
+  "author": "Alex Ling <hkalexling@gmail.com>",
+  "license": "MIT",
+  "devDependencies": {
+    "@babel/preset-env": "^7.11.5",
+    "all-contributors-cli": "^6.19.0",
+    "gulp": "^4.0.2",
+    "gulp-babel": "^8.0.0",
+    "gulp-babel-minify": "^0.5.1",
+    "gulp-less": "^4.0.1",
+    "gulp-minify-css": "^1.2.4",
+    "less": "^3.11.3"
+  },
+  "scripts": {
+    "uglify": "gulp"
+  },
+  "dependencies": {
+    "@fortawesome/fontawesome-free": "^5.14.0",
+    "uikit": "^3.5.4"
+  }
 }
@@ -1,154 +0,0 @@
|
||||
.uk-alert-close {
|
||||
color: black !important;
|
||||
}
|
||||
|
||||
.uk-card-body {
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.uk-card-media-top {
|
||||
width: 100%;
|
||||
height: 250px;
|
||||
}
|
||||
|
||||
@media (min-width: 600px) {
|
||||
.uk-card-media-top {
|
||||
height: 300px;
|
||||
}
|
||||
}
|
||||
|
||||
.uk-card-media-top>img {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.uk-card-title {
|
||||
max-height: 3em;
|
||||
}
|
||||
|
||||
.acard:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.uk-list li:not(.nopointer) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#scan-status {
|
||||
cursor: auto;
|
||||
}
|
||||
|
||||
.reader-bg {
|
||||
background-color: black;
|
||||
}
|
||||
|
||||
.break-word {
|
||||
word-wrap: break-word;
|
||||
}
|
||||
|
||||
.uk-logo>img {
|
||||
height: 90px;
|
||||
width: 90px;
|
||||
}
|
||||
|
||||
.uk-search {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#selectable .ui-selecting {
|
||||
background: #EEE6B9;
|
||||
}
|
||||
|
||||
#selectable .ui-selected {
|
||||
background: #F4E487;
|
||||
}
|
||||
|
||||
.uk-light #selectable .ui-selecting {
|
||||
background: #5E5731;
|
||||
}
|
||||
|
||||
.uk-light #selectable .ui-selected {
|
||||
background: #9D9252;
|
||||
}
|
||||
|
||||
td>.uk-dropdown {
|
||||
white-space: pre-line;
|
||||
}
|
||||
|
||||
#edit-modal .uk-grid>div {
|
||||
height: 300px;
|
||||
}
|
||||
|
||||
#edit-modal #cover {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
#edit-modal #cover-upload {
|
||||
height: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
#edit-modal .uk-modal-body .uk-inline {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.item .uk-card-title {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.grayscale {
|
||||
filter: grayscale(100%);
|
||||
}
|
||||
|
||||
.uk-light .uk-navbar-dropdown,
|
||||
.uk-light .uk-modal-header,
|
||||
.uk-light .uk-modal-body,
|
||||
.uk-light .uk-modal-footer {
|
||||
background: #222;
|
||||
}
|
||||
|
||||
.uk-light .uk-dropdown {
|
||||
background: #333;
|
||||
}
|
||||
|
||||
.uk-light .uk-navbar-dropdown,
|
||||
.uk-light .uk-dropdown {
|
||||
color: #ccc;
|
||||
}
|
||||
|
||||
.uk-light .uk-nav-header,
|
||||
.uk-light .uk-description-list>dt {
|
||||
color: #555;
|
||||
}
|
||||
|
||||
[x-cloak] {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#select-bar-controls a {
|
||||
transform: scale(1.5, 1.5);
|
||||
}
|
||||
|
||||
#select-bar-controls a:hover {
|
||||
color: orange;
|
||||
}
|
||||
|
||||
#main-section {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
#totop-wrapper {
|
||||
position: absolute;
|
||||
top: 100vh;
|
||||
right: 2em;
|
||||
bottom: 0;
|
||||
}
|
||||
|
||||
#totop-wrapper a {
|
||||
position: fixed;
|
||||
position: sticky;
|
||||
top: calc(100vh - 5em);
|
||||
}
|
||||

public/css/mango.less (new file, 139 lines)
@@ -0,0 +1,139 @@
|
||||
// UIKit
|
||||
@import "./uikit.less";
|
||||
|
||||
// FontAwesome
|
||||
@import "../../node_modules/@fortawesome/fontawesome-free/less/fontawesome.less";
|
||||
@import "../../node_modules/@fortawesome/fontawesome-free/less/solid.less";
|
||||
|
||||
@font-face {
|
||||
src: url('@{fa-font-path}/fa-solid-900.woff2');
|
||||
src: url('@{fa-font-path}/fa-solid-900.woff2') format('woff2'),
|
||||
url('@{fa-font-path}/fa-solid-900.woff') format('woff');
|
||||
}
|
||||
|
||||
// Item cards
|
||||
.item .uk-card {
|
||||
cursor: pointer;
|
||||
.uk-card-media-top {
|
||||
width: 100%;
|
||||
height: 250px;
|
||||
@media (min-width: 600px) {
|
||||
height: 300px;
|
||||
}
|
||||
|
||||
img {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
object-fit: cover;
|
||||
|
||||
&.grayscale {
|
||||
filter: grayscale(100%);
|
||||
}
|
||||
}
|
||||
}
|
||||
.uk-card-body {
|
||||
padding: 20px;
|
||||
.uk-card-title {
|
||||
font-size: 1rem;
|
||||
}
|
||||
.uk-card-title:not(.free-height) {
|
||||
max-height: 3em;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// jQuery selectable
|
||||
#selectable {
|
||||
.ui-selecting {
|
||||
background: #EEE6B9;
|
||||
}
|
||||
.ui-selected {
|
||||
background: #F4E487;
|
||||
}
|
||||
.uk-light & {
|
||||
.ui-selecting {
|
||||
background: #5E5731;
|
||||
}
|
||||
.ui-selected {
|
||||
background: #9D9252;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Edit modal
|
||||
#edit-modal {
|
||||
.uk-grid > div {
|
||||
height: 300px;
|
||||
}
|
||||
#cover {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
#cover-upload {
|
||||
height: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
.uk-modal-body .uk-inline {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
// Dark theme
|
||||
.uk-light {
|
||||
.uk-modal-header,
|
||||
.uk-modal-body,
|
||||
.uk-modal-footer {
|
||||
background: #222;
|
||||
}
|
||||
.uk-navbar-dropdown,
|
||||
.uk-dropdown {
|
||||
color: #ccc;
|
||||
background: #333;
|
||||
}
|
||||
.uk-nav-header,
|
||||
.uk-description-list > dt {
|
||||
color: #555;
|
||||
}
|
||||
}
|
||||
|
||||
// Alpine magic
|
||||
[x-cloak] {
|
||||
display: none;
|
||||
}
|
||||
|
||||
// Batch select bar on title page
|
||||
#select-bar-controls {
|
||||
a {
|
||||
transform: scale(1.5, 1.5);
|
||||
|
||||
&:hover {
|
||||
color: orange;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Totop button
|
||||
#totop-wrapper {
|
||||
position: absolute;
|
||||
top: 100vh;
|
||||
right: 2em;
|
||||
bottom: 0;
|
||||
|
||||
a {
|
||||
position: fixed;
|
||||
position: sticky;
|
||||
top: calc(100vh - 5em);
|
||||
}
|
||||
}
|
||||
|
||||
// Misc
|
||||
.uk-alert-close {
|
||||
color: black !important;
|
||||
}
|
||||
.break-word {
|
||||
word-wrap: break-word;
|
||||
}
|
||||
.uk-search {
|
||||
width: 100%;
|
||||
}
|
||||

public/css/tags.less (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
@light-gray: #e5e5e5;
|
||||
@gray: #666666;
|
||||
@black: #141414;
|
||||
@blue: rgb(30, 135, 240);
|
||||
@white1: rgba(255, 255, 255, .1);
|
||||
@white2: rgba(255, 255, 255, .2);
|
||||
@white7: rgba(255, 255, 255, .7);
|
||||
|
||||
.select2-container--default {
|
||||
.select2-selection--multiple {
|
||||
border: 1px solid @light-gray;
|
||||
.select2-selection__choice,
|
||||
.select2-selection__choice__remove,
|
||||
.select2-selection__choice__remove:hover
|
||||
{
|
||||
background-color: @blue;
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 2px;
|
||||
}
|
||||
}
|
||||
.select2-dropdown {
|
||||
.select2-results__option--highlighted.select2-results__option--selectable {
|
||||
background-color: @blue;
|
||||
}
|
||||
.select2-results__option--selected:not(.select2-results__option--highlighted) {
|
||||
background-color: @light-gray
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.uk-light {
|
||||
.select2-container--default {
|
||||
.select2-selection {
|
||||
background-color: @white1;
|
||||
}
|
||||
.select2-selection--multiple {
|
||||
border: 1px solid @white2;
|
||||
.select2-selection__choice,
|
||||
.select2-selection__choice__remove,
|
||||
.select2-selection__choice__remove:hover
|
||||
{
|
||||
background-color: white;
|
||||
color: @gray;
|
||||
border: none;
|
||||
}
|
||||
.select2-search__field {
|
||||
color: @white7;
|
||||
}
|
||||
}
|
||||
}
|
||||
.select2-dropdown {
|
||||
background-color: @black;
|
||||
.select2-results__option--selected:not(.select2-results__option--highlighted) {
|
||||
background-color: @white2;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -43,3 +43,22 @@
|
||||
@internal-list-bullet-image: "../img/list-bullet.svg";
|
||||
@internal-accordion-open-image: "../img/accordion-open.svg";
|
||||
@internal-accordion-close-image: "../img/accordion-close.svg";
|
||||
|
||||
.hook-card-default() {
|
||||
.uk-light & {
|
||||
background: @card-secondary-background;
|
||||
color: @card-secondary-color;
|
||||
}
|
||||
}
|
||||
|
||||
.hook-card-default-title() {
|
||||
.uk-light & {
|
||||
color: @card-secondary-title-color;
|
||||
}
|
||||
}
|
||||
|
||||
.hook-card-default-hover() {
|
||||
.uk-light & {
|
||||
background-color: @card-secondary-hover-background;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,40 +1,55 @@
|
||||
let scanning = false;
|
||||
const component = () => {
|
||||
return {
|
||||
progress: 1.0,
|
||||
generating: false,
|
||||
scanning: false,
|
||||
scanTitles: 0,
|
||||
scanMs: -1,
|
||||
themeSetting: '',
|
||||
|
||||
const scan = () => {
|
||||
scanning = true;
|
||||
$('#scan-status > div').removeAttr('hidden');
|
||||
$('#scan-status > span').attr('hidden', '');
|
||||
const color = $('#scan').css('color');
|
||||
$('#scan').css('color', 'gray');
|
||||
$.post(base_url + 'api/admin/scan', (data) => {
|
||||
const ms = data.milliseconds;
|
||||
const titles = data.titles;
|
||||
$('#scan-status > span').text('Scanned ' + titles + ' titles in ' + ms + 'ms');
|
||||
$('#scan-status > span').removeAttr('hidden');
|
||||
$('#scan').css('color', color);
|
||||
$('#scan-status > div').attr('hidden', '');
|
||||
scanning = false;
|
||||
});
|
||||
}
|
||||
init() {
|
||||
this.getProgress();
|
||||
setInterval(() => {
|
||||
this.getProgress();
|
||||
}, 5000);
|
||||
|
||||
String.prototype.capitalize = function() {
|
||||
return this.charAt(0).toUpperCase() + this.slice(1);
|
||||
}
|
||||
|
||||
$(() => {
|
||||
$('li').click((e) => {
|
||||
const url = $(e.currentTarget).attr('data-url');
|
||||
if (url) {
|
||||
$(location).attr('href', url);
|
||||
}
|
||||
});
|
||||
|
||||
const setting = loadThemeSetting();
|
||||
$('#theme-select').val(setting.capitalize());
|
||||
|
||||
$('#theme-select').change((e) => {
|
||||
const newSetting = $(e.currentTarget).val().toLowerCase();
|
||||
saveThemeSetting(newSetting);
|
||||
setTheme();
|
||||
});
|
||||
});
|
||||
const setting = loadThemeSetting();
|
||||
this.themeSetting = setting.charAt(0).toUpperCase() + setting.slice(1);
|
||||
},
|
||||
themeChanged(event) {
|
||||
const newSetting = $(event.currentTarget).val().toLowerCase();
|
||||
saveThemeSetting(newSetting);
|
||||
setTheme();
|
||||
},
|
||||
scan() {
|
||||
if (this.scanning) return;
|
||||
this.scanning = true;
|
||||
this.scanMs = -1;
|
||||
this.scanTitles = 0;
|
||||
$.post(`${base_url}api/admin/scan`)
|
||||
.then(data => {
|
||||
this.scanMs = data.milliseconds;
|
||||
this.scanTitles = data.titles;
|
||||
})
|
||||
.always(() => {
|
||||
this.scanning = false;
|
||||
});
|
||||
},
|
||||
generateThumbnails() {
|
||||
if (this.generating) return;
|
||||
this.generating = true;
|
||||
this.progress = 0.0;
|
||||
$.post(`${base_url}api/admin/generate_thumbnails`)
|
||||
.then(() => {
|
||||
this.getProgress()
|
||||
});
|
||||
},
|
||||
getProgress() {
|
||||
$.get(`${base_url}api/admin/thumbnail_progress`)
|
||||
.then(data => {
|
||||
this.progress = data.progress;
|
||||
this.generating = data.progress > 0;
|
||||
});
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
143 public/js/common.js Normal file
@@ -0,0 +1,143 @@
|
||||
/**
|
||||
* --- Alpine helper functions
|
||||
*/
|
||||
|
||||
/**
|
||||
* Set an alpine.js property
|
||||
*
|
||||
* @function setProp
|
||||
* @param {string} key - Key of the data property
|
||||
* @param {*} prop - The data property
|
||||
* @param {string} selector - The jQuery selector to the root element
|
||||
*/
|
||||
const setProp = (key, prop, selector = '#root') => {
|
||||
$(selector).get(0).__x.$data[key] = prop;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get an alpine.js property
|
||||
*
|
||||
* @function getProp
|
||||
* @param {string} key - Key of the data property
|
||||
* @param {string} selector - The jQuery selector to the root element
|
||||
* @return {*} The data property
|
||||
*/
|
||||
const getProp = (key, selector = '#root') => {
|
||||
return $(selector).get(0).__x.$data[key];
|
||||
};
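// A minimal usage sketch for the two helpers above, assuming a page whose
// Alpine root element is `#root` and whose component data defines a
// `loading` flag (the flag name here is only an illustration):
setProp('loading', true);              // write the property on the component
const nowLoading = getProp('loading'); // read it back; `nowLoading` is now true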
|
||||
|
||||
/**
|
||||
* --- Theme related functions
|
||||
* Note: In the comments below we treat "theme" and "theme setting"
|
||||
* differently. A theme can have only two values, either "dark" or
|
||||
* "light", while a theme setting can have the third value "system".
|
||||
*/
|
||||
|
||||
/**
|
||||
* Check if the system setting prefers dark theme.
|
||||
* from https://flaviocopes.com/javascript-detect-dark-mode/
|
||||
*
|
||||
* @function preferDarkMode
|
||||
* @return {bool}
|
||||
*/
|
||||
const preferDarkMode = () => {
|
||||
return window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||
};
|
||||
|
||||
/**
|
||||
* Check whether a given string represents a valid theme setting
|
||||
*
|
||||
* @function validThemeSetting
|
||||
* @param {string} theme - The string representing the theme setting
|
||||
* @return {bool}
|
||||
*/
|
||||
const validThemeSetting = (theme) => {
|
||||
return ['dark', 'light', 'system'].indexOf(theme) >= 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Load theme setting from local storage, or use 'light'
|
||||
*
|
||||
* @function loadThemeSetting
|
||||
* @return {string} A theme setting ('dark', 'light', or 'system')
|
||||
*/
|
||||
const loadThemeSetting = () => {
|
||||
let str = localStorage.getItem('theme');
|
||||
if (!str || !validThemeSetting(str)) str = 'system';
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Load the current theme (not theme setting)
|
||||
*
|
||||
* @function loadTheme
|
||||
* @return {string} The current theme to use ('dark' or 'light')
|
||||
*/
|
||||
const loadTheme = () => {
|
||||
let setting = loadThemeSetting();
|
||||
if (setting === 'system') {
|
||||
setting = preferDarkMode() ? 'dark' : 'light';
|
||||
}
|
||||
return setting;
|
||||
};
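// A quick sketch of the theme vs. theme setting distinction described above,
// assuming the stored setting is 'system' and the OS prefers dark mode:
saveThemeSetting('system');
loadThemeSetting(); // => 'system' (the stored theme setting)
loadTheme();        // => 'dark'   (the resolved theme that gets applied)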
|
||||
|
||||
/**
|
||||
* Save a theme setting
|
||||
*
|
||||
* @function saveThemeSetting
|
||||
* @param {string} setting - A theme setting
|
||||
*/
|
||||
const saveThemeSetting = setting => {
|
||||
if (!validThemeSetting(setting)) setting = 'system';
|
||||
localStorage.setItem('theme', setting);
|
||||
};
|
||||
|
||||
/**
|
||||
* Toggle the current theme. When the current theme setting is 'system', it
|
||||
* will be changed to either 'light' or 'dark'
|
||||
*
|
||||
* @function toggleTheme
|
||||
*/
|
||||
const toggleTheme = () => {
|
||||
const theme = loadTheme();
|
||||
const newTheme = theme === 'dark' ? 'light' : 'dark';
|
||||
saveThemeSetting(newTheme);
|
||||
setTheme(newTheme);
|
||||
};
|
||||
|
||||
/**
|
||||
* Apply a theme, or load a theme and then apply it
|
||||
*
|
||||
* @function setTheme
|
||||
* @param {string?} theme - (Optional) The theme to apply. When omitted, use
|
||||
* `loadTheme` to get a theme and apply it.
|
||||
*/
|
||||
const setTheme = (theme) => {
|
||||
if (!theme) theme = loadTheme();
|
||||
if (theme === 'dark') {
|
||||
$('html').css('background', 'rgb(20, 20, 20)');
|
||||
$('body').addClass('uk-light');
|
||||
$('.ui-widget-content').addClass('dark');
|
||||
} else {
|
||||
$('html').css('background', '');
|
||||
$('body').removeClass('uk-light');
|
||||
$('.ui-widget-content').removeClass('dark');
|
||||
}
|
||||
};
|
||||
|
||||
// do it before document is ready to prevent the initial flash of white on
|
||||
// most pages
|
||||
setTheme();
|
||||
$(() => {
|
||||
// hack for the reader page
|
||||
setTheme();
|
||||
|
||||
// on system dark mode setting change
|
||||
if (window.matchMedia) {
|
||||
window.matchMedia('(prefers-color-scheme: dark)')
|
||||
.addEventListener('change', event => {
|
||||
if (loadThemeSetting() === 'system')
|
||||
setTheme(event.matches ? 'dark' : 'light');
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -1,17 +1,26 @@
|
||||
const truncate = () => {
|
||||
$('.uk-card-title').each((i, e) => {
|
||||
$(e).dotdotdot({
|
||||
truncate: 'letter',
|
||||
watch: true,
|
||||
callback: (truncated) => {
|
||||
if (truncated) {
|
||||
$(e).attr('uk-tooltip', $(e).attr('data-title'));
|
||||
} else {
|
||||
$(e).removeAttr('uk-tooltip');
|
||||
}
|
||||
/**
|
||||
* Truncate a .uk-card-title element
|
||||
*
|
||||
* @function truncate
|
||||
* @param {object} e - The title element to truncate
|
||||
*/
|
||||
const truncate = (e) => {
|
||||
$(e).dotdotdot({
|
||||
truncate: 'letter',
|
||||
watch: true,
|
||||
callback: (truncated) => {
|
||||
if (truncated) {
|
||||
$(e).attr('uk-tooltip', $(e).attr('data-title'));
|
||||
} else {
|
||||
$(e).removeAttr('uk-tooltip');
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
truncate();
|
||||
$('.uk-card-title').each((i, e) => {
|
||||
// Truncate the title when it first enters the view
|
||||
$(e).one('inview', () => {
|
||||
truncate(e);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,145 +1,116 @@
|
||||
$(() => {
|
||||
$('input.uk-checkbox').each((i, e) => {
|
||||
$(e).change(() => {
|
||||
loadConfig();
|
||||
});
|
||||
});
|
||||
loadConfig();
|
||||
load();
|
||||
const component = () => {
|
||||
return {
|
||||
jobs: [],
|
||||
paused: undefined,
|
||||
loading: false,
|
||||
toggling: false,
|
||||
ws: undefined,
|
||||
|
||||
const intervalMS = 5000;
|
||||
setTimeout(() => {
|
||||
setInterval(() => {
|
||||
if (globalConfig.autoRefresh !== true) return;
|
||||
load();
|
||||
}, intervalMS);
|
||||
}, intervalMS);
|
||||
});
|
||||
var globalConfig = {};
|
||||
var loading = false;
|
||||
|
||||
const loadConfig = () => {
|
||||
globalConfig.autoRefresh = $('#auto-refresh').prop('checked');
|
||||
};
|
||||
const remove = (id) => {
|
||||
var url = base_url + 'api/admin/mangadex/queue/delete';
|
||||
if (id !== undefined)
|
||||
url += '?' + $.param({
|
||||
id: id
|
||||
});
|
||||
console.log(url);
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (!data.success && data.error) {
|
||||
alert('danger', `Failed to remove job from download queue. Error: ${data.error}`);
|
||||
return;
|
||||
wsConnect(secure = true) {
|
||||
const url = `${secure ? 'wss' : 'ws'}://${location.host}${base_url}api/admin/mangadex/queue`;
|
||||
console.log(`Connecting to ${url}`);
|
||||
this.ws = new WebSocket(url);
|
||||
this.ws.onmessage = event => {
|
||||
const data = JSON.parse(event.data);
|
||||
this.jobs = data.jobs;
|
||||
this.paused = data.paused;
|
||||
};
|
||||
this.ws.onclose = () => {
|
||||
if (this.ws.failed)
|
||||
return this.wsConnect(false);
|
||||
alert('danger', 'Socket connection closed');
|
||||
};
|
||||
this.ws.onerror = () => {
|
||||
if (secure)
|
||||
return this.ws.failed = true;
|
||||
alert('danger', 'Socket connection failed');
|
||||
};
|
||||
},
|
||||
init() {
|
||||
this.wsConnect();
|
||||
this.load();
|
||||
},
|
||||
load() {
|
||||
this.loading = true;
|
||||
$.ajax({
|
||||
type: 'GET',
|
||||
url: base_url + 'api/admin/mangadex/queue',
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (!data.success && data.error) {
|
||||
alert('danger', `Failed to fetch download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
this.jobs = data.jobs;
|
||||
this.paused = data.paused;
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to fetch download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
this.loading = false;
|
||||
});
|
||||
},
|
||||
jobAction(action, event) {
|
||||
let url = `${base_url}api/admin/mangadex/queue/${action}`;
|
||||
if (event) {
|
||||
const id = event.currentTarget.closest('tr').id.split('-')[1];
|
||||
url = `${url}?${$.param({
|
||||
id: id
|
||||
})}`;
|
||||
}
|
||||
load();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to remove job from download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
const refresh = (id) => {
|
||||
var url = base_url + 'api/admin/mangadex/queue/retry';
|
||||
if (id !== undefined)
|
||||
url += '?' + $.param({
|
||||
id: id
|
||||
});
|
||||
console.log(url);
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (!data.success && data.error) {
|
||||
alert('danger', `Failed to restart download job. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
load();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to restart download job. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
const toggle = () => {
|
||||
$('#pause-resume-btn').attr('disabled', '');
|
||||
const paused = $('#pause-resume-btn').text() === 'Resume download';
|
||||
const action = paused ? 'resume' : 'pause';
|
||||
const url = `${base_url}api/admin/mangadex/queue/${action}`;
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to ${action} download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
load();
|
||||
$('#pause-resume-btn').removeAttr('disabled');
|
||||
});
|
||||
};
|
||||
const load = () => {
|
||||
if (loading) return;
|
||||
loading = true;
|
||||
console.log('fetching');
|
||||
$.ajax({
|
||||
type: 'GET',
|
||||
url: base_url + 'api/admin/mangadex/queue',
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (!data.success && data.error) {
|
||||
alert('danger', `Failed to fetch download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
console.log(data);
|
||||
const btnText = data.paused ? "Resume download" : "Pause download";
|
||||
$('#pause-resume-btn').text(btnText);
|
||||
$('#pause-resume-btn').removeAttr('hidden');
|
||||
const rows = data.jobs.map(obj => {
|
||||
var cls = 'label ';
|
||||
if (obj.status === 'Pending')
|
||||
console.log(url);
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (!data.success && data.error) {
|
||||
alert('danger', `Failed to ${action} job from download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
this.load();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to ${action} job from download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
},
|
||||
toggle() {
|
||||
this.toggling = true;
|
||||
const action = this.paused ? 'resume' : 'pause';
|
||||
const url = `${base_url}api/admin/mangadex/queue/${action}`;
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to ${action} download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
this.load();
|
||||
this.toggling = false;
|
||||
});
|
||||
},
|
||||
statusClass(status) {
|
||||
let cls = 'label ';
|
||||
switch (status) {
|
||||
case 'Pending':
|
||||
cls += 'label-pending';
|
||||
if (obj.status === 'Completed')
|
||||
break;
|
||||
case 'Completed':
|
||||
cls += 'label-success';
|
||||
if (obj.status === 'Error')
|
||||
break;
|
||||
case 'Error':
|
||||
cls += 'label-danger';
|
||||
if (obj.status === 'MissingPages')
|
||||
break;
|
||||
case 'MissingPages':
|
||||
cls += 'label-warning';
|
||||
|
||||
const info = obj.status_message.length > 0 ? '<span uk-icon="info"></span>' : '';
|
||||
const statusSpan = `<span class="${cls}">${obj.status} ${info}</span>`;
|
||||
const dropdown = obj.status_message.length > 0 ? `<div uk-dropdown>${obj.status_message}</div>` : '';
|
||||
const retryBtn = obj.status_message.length > 0 ? `<a onclick="refresh('${obj.id}')" uk-icon="refresh"></a>` : '';
|
||||
return `<tr id="chapter-${obj.id}">
|
||||
<td>${obj.plugin_id ? obj.title : `<a href="${baseURL}/chapter/${obj.id}">${obj.title}</a>`}</td>
|
||||
<td>${obj.plugin_id ? obj.manga_title : `<a href="${baseURL}/manga/${obj.manga_id}">${obj.manga_title}</a>`}</td>
|
||||
<td>${obj.success_count}/${obj.pages}</td>
|
||||
<td>${moment(obj.time).fromNow()}</td>
|
||||
<td>${statusSpan} ${dropdown}</td>
|
||||
<td>${obj.plugin_id || ""}</td>
|
||||
<td>
|
||||
<a onclick="remove('${obj.id}')" uk-icon="trash"></a>
|
||||
${retryBtn}
|
||||
</td>
|
||||
</tr>`;
|
||||
});
|
||||
|
||||
const tbody = `<tbody>${rows.join('')}</tbody>`;
|
||||
$('tbody').remove();
|
||||
$('table').append(tbody);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to fetch download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
loading = false;
|
||||
});
|
||||
break;
|
||||
}
|
||||
return cls;
|
||||
}
|
||||
};
|
||||
};
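// A rough sketch of how the statusClass helper above maps job statuses to
// label classes (covering the four cases in the switch; `queueSketch` is a
// hypothetical local name):
const queueSketch = component();
queueSketch.statusClass('Pending');      // => 'label label-pending'
queueSketch.statusClass('Completed');    // => 'label label-success'
queueSketch.statusClass('Error');        // => 'label label-danger'
queueSketch.statusClass('MissingPages'); // => 'label label-warning'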
|
||||
|
||||
@@ -1,305 +0,0 @@
|
||||
$(() => {
|
||||
$('#search-input').keypress(event => {
|
||||
if (event.which === 13) {
|
||||
search();
|
||||
}
|
||||
});
|
||||
$('.filter-field').each((i, ele) => {
|
||||
$(ele).change(() => {
|
||||
buildTable();
|
||||
});
|
||||
});
|
||||
});
|
||||
const selectAll = () => {
|
||||
$('tbody > tr').each((i, e) => {
|
||||
$(e).addClass('ui-selected');
|
||||
});
|
||||
};
|
||||
const unselect = () => {
|
||||
$('tbody > tr').each((i, e) => {
|
||||
$(e).removeClass('ui-selected');
|
||||
});
|
||||
};
|
||||
const download = () => {
|
||||
const selected = $('tbody > tr.ui-selected');
|
||||
if (selected.length === 0) return;
|
||||
UIkit.modal.confirm(`Download ${selected.length} selected chapters?`).then(() => {
|
||||
$('#download-btn').attr('hidden', '');
|
||||
$('#download-spinner').removeAttr('hidden');
|
||||
const ids = selected.map((i, e) => {
|
||||
return $(e).find('td').first().text();
|
||||
}).get();
|
||||
const chapters = globalChapters.filter(c => ids.indexOf(c.id) >= 0);
|
||||
console.log(ids);
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: base_url + 'api/admin/mangadex/download',
|
||||
data: JSON.stringify({
|
||||
chapters: chapters
|
||||
}),
|
||||
contentType: "application/json",
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
console.log(data);
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to add chapters to the download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
const successCount = parseInt(data.success);
|
||||
const failCount = parseInt(data.fail);
|
||||
UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. Proceed to the download manager?`).then(() => {
|
||||
window.location.href = base_url + 'admin/downloads';
|
||||
});
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
$('#download-spinner').attr('hidden', '');
|
||||
$('#download-btn').removeAttr('hidden');
|
||||
});
|
||||
});
|
||||
};
|
||||
const toggleSpinner = () => {
|
||||
var attr = $('#spinner').attr('hidden');
|
||||
if (attr) {
|
||||
$('#spinner').removeAttr('hidden');
|
||||
$('#search-btn').attr('hidden', '');
|
||||
} else {
|
||||
$('#search-btn').removeAttr('hidden');
|
||||
$('#spinner').attr('hidden', '');
|
||||
}
|
||||
searching = !searching;
|
||||
};
|
||||
var searching = false;
|
||||
var globalChapters;
|
||||
const search = () => {
|
||||
if (searching) {
|
||||
return;
|
||||
}
|
||||
$('#manga-details').attr('hidden', '');
|
||||
$('#filter-form').attr('hidden', '');
|
||||
$('table').attr('hidden', '');
|
||||
$('#selection-controls').attr('hidden', '');
|
||||
$('#filter-notification').attr('hidden', '');
|
||||
toggleSpinner();
|
||||
const input = $('input').val();
|
||||
|
||||
if (input === "") {
|
||||
toggleSpinner();
|
||||
return;
|
||||
}
|
||||
|
||||
var int_id = -1;
|
||||
|
||||
try {
|
||||
const path = new URL(input).pathname;
|
||||
const match = /\/title\/([0-9]+)/.exec(path);
|
||||
int_id = parseInt(match[1]);
|
||||
} catch (e) {
|
||||
int_id = parseInt(input);
|
||||
}
|
||||
|
||||
if (int_id <= 0 || isNaN(int_id)) {
|
||||
alert('danger', 'Please make sure you are using a valid manga ID or manga URL from Mangadex.');
|
||||
toggleSpinner();
|
||||
return;
|
||||
}
|
||||
|
||||
$.getJSON(`${base_url}api/admin/mangadex/manga/${int_id}`)
|
||||
.done((data) => {
|
||||
if (data.error) {
|
||||
alert('danger', 'Failed to get manga info. Error: ' + data.error);
|
||||
return;
|
||||
}
|
||||
|
||||
const cover = baseURL + data.cover_url;
|
||||
$('#cover').attr("src", cover);
|
||||
$('#title').text("Title: " + data.title);
|
||||
$('#artist').text("Artist: " + data.artist);
|
||||
$('#author').text("Author: " + data.author);
|
||||
|
||||
$('#manga-details').removeAttr('hidden');
|
||||
|
||||
console.log(data.chapters);
|
||||
globalChapters = data.chapters;
|
||||
|
||||
let langs = new Set();
|
||||
let group_names = new Set();
|
||||
data.chapters.forEach(chp => {
|
||||
Object.entries(chp.groups).forEach(([k, v]) => {
|
||||
group_names.add(k);
|
||||
});
|
||||
langs.add(chp.language);
|
||||
});
|
||||
|
||||
const comp = (a, b) => {
|
||||
var ai;
|
||||
var bi;
|
||||
try {
|
||||
ai = parseFloat(a);
|
||||
} catch (e) {}
|
||||
try {
|
||||
bi = parseFloat(b);
|
||||
} catch (e) {}
|
||||
if (typeof ai === 'undefined') return -1;
|
||||
if (typeof bi === 'undefined') return 1;
|
||||
if (ai < bi) return 1;
|
||||
if (ai > bi) return -1;
|
||||
return 0;
|
||||
};
|
||||
|
||||
langs = [...langs].sort();
|
||||
group_names = [...group_names].sort();
|
||||
|
||||
langs.unshift('All');
|
||||
group_names.unshift('All');
|
||||
|
||||
$('select#lang-select').append(langs.map(e => `<option>${e}</option>`).join(''));
|
||||
$('select#group-select').append(group_names.map(e => `<option>${e}</option>`).join(''));
|
||||
|
||||
$('#filter-form').removeAttr('hidden');
|
||||
|
||||
buildTable();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to get manga info. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
toggleSpinner();
|
||||
});
|
||||
};
|
||||
const parseRange = str => {
|
||||
const regex = /^[\t ]*(?:(?:(<|<=|>|>=)[\t ]*([0-9]+))|(?:([0-9]+))|(?:([0-9]+)[\t ]*-[\t ]*([0-9]+))|(?:[\t ]*))[\t ]*$/m;
|
||||
const matches = str.match(regex);
|
||||
var num;
|
||||
|
||||
if (!matches) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
} else if (typeof matches[1] !== 'undefined' && typeof matches[2] !== 'undefined') {
|
||||
// e.g., <= 30
|
||||
num = parseInt(matches[2]);
|
||||
if (isNaN(num)) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
}
|
||||
switch (matches[1]) {
|
||||
case '<':
|
||||
return [null, num - 1];
|
||||
case '<=':
|
||||
return [null, num];
|
||||
case '>':
|
||||
return [num + 1, null];
|
||||
case '>=':
|
||||
return [num, null];
|
||||
}
|
||||
} else if (typeof matches[3] !== 'undefined') {
|
||||
// a single number
|
||||
num = parseInt(matches[3]);
|
||||
if (isNaN(num)) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
}
|
||||
return [num, num];
|
||||
} else if (typeof matches[4] !== 'undefined' && typeof matches[5] !== 'undefined') {
|
||||
// e.g., 10 - 23
|
||||
num = parseInt(matches[4]);
|
||||
const n2 = parseInt(matches[5]);
|
||||
if (isNaN(num) || isNaN(n2) || num > n2) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
}
|
||||
return [num, n2];
|
||||
} else {
|
||||
// empty or space only
|
||||
return [null, null];
|
||||
}
|
||||
};
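// A sketch of the input forms parseRange above accepts, and the
// [lower, upper] bounds it is expected to return for each:
parseRange('<= 30');   // => [null, 30]
parseRange('> 12');    // => [13, null]
parseRange('7');       // => [7, 7]
parseRange('10 - 23'); // => [10, 23]
parseRange('   ');     // => [null, null]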
|
||||
const getFilters = () => {
|
||||
const filters = {};
|
||||
$('.uk-select').each((i, ele) => {
|
||||
const id = $(ele).attr('id');
|
||||
const by = id.split('-')[0];
|
||||
const choice = $(ele).val();
|
||||
filters[by] = choice;
|
||||
});
|
||||
filters.volume = parseRange($('#volume-range').val());
|
||||
filters.chapter = parseRange($('#chapter-range').val());
|
||||
return filters;
|
||||
};
|
||||
const buildTable = () => {
|
||||
$('table').attr('hidden', '');
|
||||
$('#selection-controls').attr('hidden', '');
|
||||
$('#filter-notification').attr('hidden', '');
|
||||
console.log('rebuilding table');
|
||||
const filters = getFilters();
|
||||
console.log('filters:', filters);
|
||||
var chapters = globalChapters.slice();
|
||||
Object.entries(filters).forEach(([k, v]) => {
|
||||
if (v === 'All') return;
|
||||
if (k === 'group') {
|
||||
chapters = chapters.filter(c => {
|
||||
unescaped_groups = Object.entries(c.groups).map(([g, id]) => unescapeHTML(g));
|
||||
return unescaped_groups.indexOf(v) >= 0;
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (k === 'lang') {
|
||||
chapters = chapters.filter(c => c.language === v);
|
||||
return;
|
||||
}
|
||||
const lb = parseFloat(v[0]);
|
||||
const ub = parseFloat(v[1]);
|
||||
if (isNaN(lb) && isNaN(ub)) return;
|
||||
chapters = chapters.filter(c => {
|
||||
const val = parseFloat(c[k]);
|
||||
if (isNaN(val)) return false;
|
||||
if (isNaN(lb))
|
||||
return val <= ub;
|
||||
else if (isNaN(ub))
|
||||
return val >= lb;
|
||||
else
|
||||
return val >= lb && val <= ub;
|
||||
});
|
||||
});
|
||||
console.log('filtered chapters:', chapters);
|
||||
$('#count-text').text(`${chapters.length} chapters found`);
|
||||
|
||||
const chaptersLimit = 1000;
|
||||
if (chapters.length > chaptersLimit) {
|
||||
$('#filter-notification').text(`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters in this manga. Please use the filter options above to narrow down your search.`);
|
||||
$('#filter-notification').removeAttr('hidden');
|
||||
return;
|
||||
}
|
||||
|
||||
const inner = chapters.map(chp => {
|
||||
const group_str = Object.entries(chp.groups).map(([k, v]) => {
|
||||
return `<a href="${baseURL }/group/${v}">${k}</a>`;
|
||||
}).join(' | ');
|
||||
return `<tr class="ui-widget-content">
|
||||
<td><a href="${baseURL}/chapter/${chp.id}">${chp.id}</a></td>
|
||||
<td>${chp.title}</td>
|
||||
<td>${chp.language}</td>
|
||||
<td>${group_str}</td>
|
||||
<td>${chp.volume}</td>
|
||||
<td>${chp.chapter}</td>
|
||||
<td>${moment.unix(chp.time).fromNow()}</td>
|
||||
</tr>`;
|
||||
}).join('');
|
||||
const tbody = `<tbody id="selectable">${inner}</tbody>`;
|
||||
$('tbody').remove();
|
||||
$('table').append(tbody);
|
||||
$('table').removeAttr('hidden');
|
||||
$("#selectable").selectable({
|
||||
filter: 'tr'
|
||||
});
|
||||
$('#selection-controls').removeAttr('hidden');
|
||||
};
|
||||
|
||||
const unescapeHTML = (str) => {
|
||||
var elt = document.createElement("span");
|
||||
elt.innerHTML = str;
|
||||
return elt.innerText;
|
||||
};
|
||||
5 public/js/fontawesome.min.js vendored
File diff suppressed because one or more lines are too long
60 public/js/missing-items.js Normal file
@@ -0,0 +1,60 @@
|
||||
const component = () => {
|
||||
return {
|
||||
empty: true,
|
||||
titles: [],
|
||||
entries: [],
|
||||
loading: true,
|
||||
|
||||
load() {
|
||||
this.loading = true;
|
||||
this.request('GET', `${base_url}api/admin/titles/missing`, data => {
|
||||
this.titles = data.titles;
|
||||
this.request('GET', `${base_url}api/admin/entries/missing`, data => {
|
||||
this.entries = data.entries;
|
||||
this.loading = false;
|
||||
this.empty = this.entries.length === 0 && this.titles.length === 0;
|
||||
});
|
||||
});
|
||||
},
|
||||
rm(event) {
|
||||
const rawID = event.currentTarget.closest('tr').id;
|
||||
const [type, id] = rawID.split('-');
|
||||
const url = `${base_url}api/admin/${type === 'title' ? 'titles' : 'entries'}/missing/${id}`;
|
||||
this.request('DELETE', url, () => {
|
||||
this.load();
|
||||
});
|
||||
},
|
||||
rmAll() {
|
||||
UIkit.modal.confirm('Are you sure? All metadata associated with these items, including their tags and thumbnails, will be deleted from the database.', {
|
||||
labels: {
|
||||
ok: 'Yes, delete them',
|
||||
cancel: 'Cancel'
|
||||
}
|
||||
}).then(() => {
|
||||
this.request('DELETE', `${base_url}api/admin/titles/missing`, () => {
|
||||
this.request('DELETE', `${base_url}api/admin/entries/missing`, () => {
|
||||
this.load();
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
request(method, url, cb) {
|
||||
console.log(url);
|
||||
$.ajax({
|
||||
type: method,
|
||||
url: url,
|
||||
contentType: 'application/json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to ${method} ${url}. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
if (cb) cb(data);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to ${method} ${url}. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
}
|
||||
};
|
||||
};
|
||||
@@ -33,14 +33,13 @@ const search = () => {
|
||||
if (searching)
|
||||
return;
|
||||
|
||||
const query = $('#search-input').val();
|
||||
const query = $.param({
|
||||
query: $('#search-input').val(),
|
||||
plugin: pid
|
||||
});
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: base_url + 'api/admin/plugin/list',
|
||||
data: JSON.stringify({
|
||||
query: query,
|
||||
plugin: pid
|
||||
}),
|
||||
type: 'GET',
|
||||
url: `${base_url}api/admin/plugin/list?${query}`,
|
||||
contentType: "application/json",
|
||||
dataType: 'json'
|
||||
})
|
||||
@@ -127,9 +126,7 @@ const download = () => {
|
||||
}
|
||||
const successCount = parseInt(data.success);
|
||||
const failCount = parseInt(data.fail);
|
||||
UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. Proceed to the download manager?`).then(() => {
|
||||
window.location.href = base_url + 'admin/downloads';
|
||||
});
|
||||
alert('success', `${successCount} of ${successCount + failCount} chapters added to the download queue. You can view and manage your download queue on the <a href="${base_url}admin/downloads">download manager page</a>.`);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
|
||||
@@ -1,161 +1,324 @@
|
||||
$(() => {
|
||||
getPages();
|
||||
const readerComponent = () => {
|
||||
return {
|
||||
loading: true,
|
||||
mode: 'continuous', // Can be 'continuous', 'height' or 'width'
|
||||
msg: 'Loading the web reader. Please wait...',
|
||||
alertClass: 'uk-alert-primary',
|
||||
items: [],
|
||||
curItem: {},
|
||||
enableFlipAnimation: true,
|
||||
flipAnimation: null,
|
||||
longPages: false,
|
||||
lastSavedPage: page,
|
||||
selectedIndex: 0, // 0: not selected; 1: the first page
|
||||
margin: 30,
|
||||
preloadLookahead: 3,
|
||||
|
||||
$('#page-select').change(() => {
|
||||
const p = parseInt($('#page-select').val());
|
||||
toPage(p);
|
||||
});
|
||||
});
|
||||
/**
|
||||
* Initialize the component by fetching the page dimensions
|
||||
*/
|
||||
init(nextTick) {
|
||||
$.get(`${base_url}api/dimensions/${tid}/${eid}`)
|
||||
.then(data => {
|
||||
if (!data.success && data.error)
|
||||
throw new Error(data.error);
|
||||
const dimensions = data.dimensions;
|
||||
|
||||
/**
|
||||
* Set an alpine.js property
|
||||
*
|
||||
* @function setProp
|
||||
* @param {string} key - Key of the data property
|
||||
* @param {*} prop - The data property
|
||||
*/
|
||||
const setProp = (key, prop) => {
|
||||
$('#root').get(0).__x.$data[key] = prop;
|
||||
};
|
||||
this.items = dimensions.map((d, i) => {
|
||||
return {
|
||||
id: i + 1,
|
||||
url: `${base_url}api/page/${tid}/${eid}/${i+1}`,
|
||||
width: d.width,
|
||||
height: d.height,
|
||||
};
|
||||
});
|
||||
|
||||
/**
|
||||
* Get dimension of the pages in the entry from the API and update the view
|
||||
*/
|
||||
const getPages = () => {
|
||||
$.get(`${base_url}api/dimensions/${tid}/${eid}`)
|
||||
.then(data => {
|
||||
if (!data.success && data.error)
|
||||
throw new Error(resp.error);
|
||||
const dimensions = data.dimensions;
|
||||
const avgRatio = this.items.reduce((acc, cur) => {
|
||||
return acc + cur.height / cur.width
|
||||
}, 0) / this.items.length;
|
||||
|
||||
const items = dimensions.map((d, i) => {
|
||||
return {
|
||||
id: i + 1,
|
||||
url: `${base_url}api/page/${tid}/${eid}/${i+1}`,
|
||||
width: d.width,
|
||||
height: d.height
|
||||
};
|
||||
});
|
||||
console.log(avgRatio);
|
||||
this.longPages = avgRatio > 2;
|
||||
this.loading = false;
|
||||
this.mode = localStorage.getItem('mode') || 'continuous';
|
||||
|
||||
setProp('items', items);
|
||||
setProp('loading', false);
|
||||
// Here we save a copy of this.mode, and use the copy as
|
||||
// the mode-select value. This is because `updateMode`
|
||||
// might change this.mode and make it `height` or `width`,
|
||||
// which are not available in mode-select
|
||||
const mode = this.mode;
|
||||
this.updateMode(this.mode, page, nextTick);
|
||||
$('#mode-select').val(mode);
|
||||
|
||||
waitForPage(items.length, () => {
|
||||
toPage(page);
|
||||
setupScroller();
|
||||
});
|
||||
})
|
||||
.catch(e => {
|
||||
const errMsg = `Failed to get the page dimensions. ${e}`;
|
||||
console.error(e);
|
||||
setProp('alertClass', 'uk-alert-danger');
|
||||
setProp('msg', errMsg);
|
||||
})
|
||||
};
|
||||
const savedMargin = localStorage.getItem('margin');
|
||||
if (savedMargin) {
|
||||
this.margin = savedMargin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Jump to a specific page
|
||||
*
|
||||
* @function toPage
|
||||
* @param {number} idx - One-based index of the page
|
||||
*/
|
||||
const toPage = (idx) => {
|
||||
$(`#${idx}`).get(0).scrollIntoView(true);
|
||||
UIkit.modal($('#modal-sections')).hide();
|
||||
};
|
||||
// Preload Images
|
||||
this.preloadLookahead = +(localStorage.getItem('preloadLookahead') ?? 3);
|
||||
const limit = Math.min(page + this.preloadLookahead, this.items.length + 1);
|
||||
for (let idx = page + 1; idx <= limit; idx++) {
|
||||
this.preloadImage(this.items[idx - 1].url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a page exists every 100ms. If so, invoke the callback function.
|
||||
*
|
||||
* @function waitForPage
|
||||
* @param {number} idx - One-based index of the page
|
||||
* @param {function} cb - Callback function
|
||||
*/
|
||||
const waitForPage = (idx, cb) => {
|
||||
if ($(`#${idx}`).length > 0) return cb();
|
||||
setTimeout(() => {
|
||||
waitForPage(idx, cb)
|
||||
}, 100);
|
||||
};
|
||||
const savedFlipAnimation = localStorage.getItem('enableFlipAnimation');
|
||||
this.enableFlipAnimation = savedFlipAnimation === null || savedFlipAnimation === 'true';
|
||||
})
|
||||
.catch(e => {
|
||||
const errMsg = `Failed to get the page dimensions. ${e}`;
|
||||
console.error(e);
|
||||
this.alertClass = 'uk-alert-danger';
|
||||
this.msg = errMsg;
|
||||
})
|
||||
},
|
||||
/**
|
||||
* Preload an image, which is expected to be cached
|
||||
*/
|
||||
preloadImage(url) {
|
||||
(new Image()).src = url;
|
||||
},
|
||||
/**
|
||||
* Handles the `change` event for the page selector
|
||||
*/
|
||||
pageChanged() {
|
||||
const p = parseInt($('#page-select').val());
|
||||
this.toPage(p);
|
||||
},
|
||||
/**
|
||||
* Handles the `change` event for the mode selector
|
||||
*
|
||||
* @param {function} nextTick - Alpine $nextTick magic property
|
||||
*/
|
||||
modeChanged(nextTick) {
|
||||
const mode = $('#mode-select').val();
|
||||
const curIdx = parseInt($('#page-select').val());
|
||||
|
||||
/**
|
||||
* Show the control modal
|
||||
*
|
||||
* @function showControl
|
||||
* @param {object} event - The onclick event that triggers the function
|
||||
*/
|
||||
const showControl = (event) => {
|
||||
const idx = parseInt($(event.currentTarget).attr('id'));
|
||||
const pageCount = $('#page-select > option').length;
|
||||
const progressText = `Progress: ${idx}/${pageCount} (${(idx/pageCount * 100).toFixed(1)}%)`;
|
||||
$('#progress-label').text(progressText);
|
||||
$('#page-select').val(idx);
|
||||
UIkit.modal($('#modal-sections')).show();
|
||||
}
|
||||
this.updateMode(mode, curIdx, nextTick);
|
||||
},
|
||||
/**
|
||||
* Handles the window `resize` event
|
||||
*/
|
||||
resized() {
|
||||
if (this.mode === 'continuous') return;
|
||||
|
||||
/**
|
||||
* Redirect to a URL
|
||||
*
|
||||
* @function redirect
|
||||
* @param {string} url - The target URL
|
||||
*/
|
||||
const redirect = (url) => {
|
||||
window.location.replace(url);
|
||||
}
|
||||
const wideScreen = $(window).width() > $(window).height();
|
||||
this.mode = wideScreen ? 'height' : 'width';
|
||||
},
|
||||
/**
|
||||
* Handles the window `keydown` event
|
||||
*
|
||||
* @param {Event} event - The triggering event
|
||||
*/
|
||||
keyHandler(event) {
|
||||
if (this.mode === 'continuous') return;
|
||||
|
||||
/**
|
||||
* Replace the address bar history and save the reading progress if necessary
|
||||
*
|
||||
* @function replaceHistory
|
||||
* @param {number} idx - One-based index of the current page
|
||||
*/
|
||||
const replaceHistory = (idx) => {
|
||||
const ary = window.location.pathname.split('/');
|
||||
ary[ary.length - 1] = idx;
|
||||
ary.shift(); // remove leading `/`
|
||||
ary.unshift(window.location.origin);
|
||||
const url = ary.join('/');
|
||||
saveProgress(idx);
|
||||
history.replaceState(null, "", url);
|
||||
}
|
||||
if (event.key === 'ArrowLeft' || event.key === 'k')
|
||||
this.flipPage(false);
|
||||
if (event.key === 'ArrowRight' || event.key === 'j')
|
||||
this.flipPage(true);
|
||||
},
|
||||
/**
|
||||
* Flips to the next or the previous page
|
||||
*
|
||||
* @param {bool} isNext - Whether we are going to the next page
|
||||
*/
|
||||
flipPage(isNext) {
|
||||
const idx = parseInt(this.curItem.id);
|
||||
const newIdx = idx + (isNext ? 1 : -1);
|
||||
|
||||
/**
|
||||
* Set up the scroll handler that calls `replaceHistory` when an image
|
||||
* enters the view port
|
||||
*
|
||||
* @function setupScroller
|
||||
*/
|
||||
const setupScroller = () => {
|
||||
$('#root img').each((idx, el) => {
|
||||
$(el).on('inview', (event, inView) => {
|
||||
if (inView) {
|
||||
const current = $(event.currentTarget).attr('id');
|
||||
replaceHistory(current);
|
||||
if (newIdx <= 0 || newIdx > this.items.length) return;
|
||||
|
||||
if (newIdx + this.preloadLookahead < this.items.length + 1) {
|
||||
this.preloadImage(this.items[newIdx + this.preloadLookahead - 1].url);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
let lastSavedPage = page;
|
||||
this.toPage(newIdx);
|
||||
|
||||
/**
|
||||
* Update the backend reading progress if the current page is more than
|
||||
* five pages away from the last saved page
|
||||
*
|
||||
* @function saveProgress
|
||||
* @param {number} idx - One-based index of the page
|
||||
*/
|
||||
const saveProgress = (idx) => {
|
||||
if (Math.abs(idx - lastSavedPage) < 5) return;
|
||||
lastSavedPage = idx;
|
||||
if (this.enableFlipAnimation) {
|
||||
if (isNext)
|
||||
this.flipAnimation = 'right';
|
||||
else
|
||||
this.flipAnimation = 'left';
|
||||
}
|
||||
|
||||
const url = `${base_url}api/progress/${tid}/${idx}?${$.param({entry: eid})}`;
|
||||
$.post(url)
|
||||
.then(data => {
|
||||
if (data.error) throw new Error(data.error);
|
||||
})
|
||||
.catch(e => {
|
||||
console.error(e);
|
||||
alert('danger', e);
|
||||
});
|
||||
};
|
||||
setTimeout(() => {
|
||||
this.flipAnimation = null;
|
||||
}, 500);
|
||||
|
||||
this.replaceHistory(newIdx);
|
||||
},
|
||||
/**
|
||||
* Jumps to a specific page
|
||||
*
|
||||
* @param {number} idx - One-based index of the page
|
||||
*/
|
||||
toPage(idx) {
|
||||
if (this.mode === 'continuous') {
|
||||
$(`#${idx}`).get(0).scrollIntoView(true);
|
||||
} else {
|
||||
if (idx >= 1 && idx <= this.items.length) {
|
||||
this.curItem = this.items[idx - 1];
|
||||
}
|
||||
}
|
||||
this.replaceHistory(idx);
|
||||
UIkit.modal($('#modal-sections')).hide();
|
||||
},
|
||||
/**
|
||||
* Replace the address bar history and save the reading progress if necessary
|
||||
*
|
||||
* @param {number} idx - One-based index of the page
|
||||
*/
|
||||
replaceHistory(idx) {
|
||||
const ary = window.location.pathname.split('/');
|
||||
ary[ary.length - 1] = idx;
|
||||
ary.shift(); // remove leading `/`
|
||||
ary.unshift(window.location.origin);
|
||||
const url = ary.join('/');
|
||||
this.saveProgress(idx);
|
||||
history.replaceState(null, "", url);
|
||||
},
|
||||
/**
|
||||
* Updates the backend reading progress if:
|
||||
* 1) the current page is more than five pages away from the last
|
||||
* saved page, or
|
||||
* 2) the average height/width ratio of the pages is over 2, or
|
||||
* 3) the current page is the first page, or
|
||||
* 4) the current page is the last page
|
||||
*
|
||||
* @param {number} idx - One-based index of the page
|
||||
* @param {function} cb - Callback
|
||||
*/
|
||||
saveProgress(idx, cb) {
|
||||
idx = parseInt(idx);
|
||||
if (Math.abs(idx - this.lastSavedPage) >= 5 ||
|
||||
this.longPages ||
|
||||
idx === 1 || idx === this.items.length
|
||||
) {
|
||||
this.lastSavedPage = idx;
|
||||
console.log('saving progress', idx);
|
||||
|
||||
const url = `${base_url}api/progress/${tid}/${idx}?${$.param({eid: eid})}`;
|
||||
$.ajax({
|
||||
method: 'PUT',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.error)
|
||||
alert('danger', data.error);
|
||||
if (cb) cb();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Updates the reader mode
|
||||
*
|
||||
* @param {string} mode - Either `continuous` or `paged`
|
||||
* @param {number} targetPage - The one-based index of the target page
|
||||
* @param {function} nextTick - Alpine $nextTick magic property
|
||||
*/
|
||||
updateMode(mode, targetPage, nextTick) {
|
||||
localStorage.setItem('mode', mode);
|
||||
|
||||
// The mode to be put into the `mode` prop. It can't be `screen`
|
||||
let propMode = mode;
|
||||
|
||||
if (mode === 'paged') {
|
||||
const wideScreen = $(window).width() > $(window).height();
|
||||
propMode = wideScreen ? 'height' : 'width';
|
||||
}
|
||||
|
||||
this.mode = propMode;
|
||||
|
||||
if (mode === 'continuous') {
|
||||
nextTick(() => {
|
||||
this.setupScroller();
|
||||
});
|
||||
}
|
||||
|
||||
nextTick(() => {
|
||||
this.toPage(targetPage);
|
||||
});
|
||||
},
|
||||
/**
|
||||
* Shows the control modal
|
||||
*
|
||||
* @param {Event} event - The triggering event
|
||||
*/
|
||||
showControl(event) {
|
||||
const idx = event.currentTarget.id;
|
||||
this.selectedIndex = idx;
|
||||
UIkit.modal($('#modal-sections')).show();
|
||||
},
|
||||
/**
|
||||
* Redirects to a URL
|
||||
*
|
||||
* @param {string} url - The target URL
|
||||
*/
|
||||
redirect(url) {
|
||||
window.location.replace(url);
|
||||
},
|
||||
/**
|
||||
* Set up the scroll handler that calls `replaceHistory` when an image
|
||||
* enters the view port
|
||||
*/
|
||||
setupScroller() {
|
||||
if (this.mode !== 'continuous') return;
|
||||
$('img').each((idx, el) => {
|
||||
$(el).on('inview', (event, inView) => {
|
||||
if (inView) {
|
||||
const current = $(event.currentTarget).attr('id');
|
||||
|
||||
this.curItem = this.items[current - 1];
|
||||
this.replaceHistory(current);
|
||||
}
|
||||
});
|
||||
});
|
||||
},
|
||||
/**
|
||||
* Marks progress as 100% and jumps to the next entry
|
||||
*
|
||||
* @param {string} nextUrl - URL of the next entry
|
||||
*/
|
||||
nextEntry(nextUrl) {
|
||||
this.saveProgress(this.items.length, () => {
|
||||
this.redirect(nextUrl);
|
||||
});
|
||||
},
|
||||
/**
|
||||
* Exits the reader, and sets the reading progress to 100%
|
||||
*
|
||||
* @param {string} exitUrl - The exit URL
|
||||
*/
|
||||
exitReader(exitUrl) {
|
||||
this.saveProgress(this.items.length, () => {
|
||||
this.redirect(exitUrl);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Handles the `change` event for the entry selector
|
||||
*/
|
||||
entryChanged() {
|
||||
const id = $('#entry-select').val();
|
||||
this.redirect(`${base_url}reader/${tid}/${id}`);
|
||||
},
|
||||
|
||||
marginChanged() {
|
||||
localStorage.setItem('margin', this.margin);
|
||||
this.toPage(this.selectedIndex);
|
||||
},
|
||||
|
||||
preloadLookaheadChanged() {
|
||||
localStorage.setItem('preloadLookahead', this.preloadLookahead);
|
||||
},
|
||||
|
||||
enableFlipAnimationChanged() {
|
||||
localStorage.setItem('enableFlipAnimation', this.enableFlipAnimation);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
5 public/js/solid.min.js vendored
File diff suppressed because one or more lines are too long
82 public/js/subscription.js Normal file
@@ -0,0 +1,82 @@
|
||||
const component = () => {
|
||||
return {
|
||||
available: undefined,
|
||||
subscriptions: [],
|
||||
|
||||
init() {
|
||||
$.getJSON(`${base_url}api/admin/mangadex/expires`)
|
||||
.done((data) => {
|
||||
if (data.error) {
|
||||
alert('danger', 'Failed to check MangaDex integration status. Error: ' + data.error);
|
||||
return;
|
||||
}
|
||||
this.available = Boolean(data.expires && data.expires > Math.floor(Date.now() / 1000));
|
||||
|
||||
if (this.available) this.getSubscriptions();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to check MangaDex integration status. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
},
|
||||
|
||||
getSubscriptions() {
|
||||
$.getJSON(`${base_url}api/admin/mangadex/subscriptions`)
|
||||
.done(data => {
|
||||
if (data.error) {
|
||||
alert('danger', 'Failed to get subscriptions. Error: ' + data.error);
|
||||
return;
|
||||
}
|
||||
this.subscriptions = data.subscriptions;
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to get subscriptions. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
},
|
||||
|
||||
rm(event) {
|
||||
const id = event.currentTarget.parentNode.getAttribute('data-id');
|
||||
$.ajax({
|
||||
type: 'DELETE',
|
||||
url: `${base_url}api/admin/mangadex/subscriptions/${id}`,
|
||||
contentType: 'application/json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to delete subscription. Error: ${data.error}`);
|
||||
}
|
||||
this.getSubscriptions();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to delete subscription. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
},
|
||||
|
||||
check(event) {
|
||||
const id = event.currentTarget.parentNode.getAttribute('data-id');
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: `${base_url}api/admin/mangadex/subscriptions/check/${id}`,
|
||||
contentType: 'application/json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to check subscription. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
alert('success', 'Mango is now checking the subscription for updates. This might take a while, but you can safely leave the page.');
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to check subscription. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
},
|
||||
|
||||
formatRange(min, max) {
|
||||
if (!isNaN(min) && isNaN(max)) return `≥ ${min}`;
|
||||
if (isNaN(min) && !isNaN(max)) return `≤ ${max}`;
|
||||
if (isNaN(min) && isNaN(max)) return 'All';
|
||||
|
||||
if (min === max) return `= ${min}`;
|
||||
return `${min} - ${max}`;
|
||||
}
|
||||
};
|
||||
};
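// Assumed behaviour of the formatRange helper above, as used when rendering
// a subscription's volume/chapter filter ranges (`subSketch` is a
// hypothetical local name):
const subSketch = component();
subSketch.formatRange(5, NaN);   // => '≥ 5'
subSketch.formatRange(NaN, 10);  // => '≤ 10'
subSketch.formatRange(NaN, NaN); // => 'All'
subSketch.formatRange(3, 3);     // => '= 3'
subSketch.formatRange(1, 9);     // => '1 - 9'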
|
||||
@@ -1,72 +0,0 @@
|
||||
// https://flaviocopes.com/javascript-detect-dark-mode/
|
||||
const preferDarkMode = () => {
|
||||
return window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||
};
|
||||
|
||||
const validThemeSetting = (theme) => {
|
||||
return ['dark', 'light', 'system'].indexOf(theme) >= 0;
|
||||
};
|
||||
|
||||
// dark / light / system
|
||||
const loadThemeSetting = () => {
|
||||
let str = localStorage.getItem('theme');
|
||||
if (!str || !validThemeSetting(str)) str = 'light';
|
||||
return str;
|
||||
};
|
||||
|
||||
// dark / light
|
||||
const loadTheme = () => {
|
||||
let setting = loadThemeSetting();
|
||||
if (setting === 'system') {
|
||||
setting = preferDarkMode() ? 'dark' : 'light';
|
||||
}
|
||||
return setting;
|
||||
};
|
||||
|
||||
const saveThemeSetting = setting => {
|
||||
if (!validThemeSetting(setting)) setting = 'light';
|
||||
localStorage.setItem('theme', setting);
|
||||
};
|
||||
|
||||
// when toggled, Auto will be changed to light or dark
|
||||
const toggleTheme = () => {
|
||||
const theme = loadTheme();
|
||||
const newTheme = theme === 'dark' ? 'light' : 'dark';
|
||||
saveThemeSetting(newTheme);
|
||||
setTheme(newTheme);
|
||||
};
|
||||
|
||||
const setTheme = (theme) => {
|
||||
if (!theme) theme = loadTheme();
|
||||
if (theme === 'dark') {
|
||||
$('html').css('background', 'rgb(20, 20, 20)');
|
||||
$('body').addClass('uk-light');
|
||||
$('.uk-card').addClass('uk-card-secondary');
|
||||
$('.uk-card').removeClass('uk-card-default');
|
||||
$('.ui-widget-content').addClass('dark');
|
||||
} else {
|
||||
$('html').css('background', '');
|
||||
$('body').removeClass('uk-light');
|
||||
$('.uk-card').removeClass('uk-card-secondary');
|
||||
$('.uk-card').addClass('uk-card-default');
|
||||
$('.ui-widget-content').removeClass('dark');
|
||||
}
|
||||
};
|
||||
|
||||
// do it before document is ready to prevent the initial flash of white on
|
||||
// most pages
|
||||
setTheme();
|
||||
|
||||
$(() => {
|
||||
// hack for the reader page
|
||||
setTheme();
|
||||
|
||||
// on system dark mode setting change
|
||||
if (window.matchMedia) {
|
||||
window.matchMedia('(prefers-color-scheme: dark)')
|
||||
.addEventListener('change', event => {
|
||||
if (loadThemeSetting() === 'system')
|
||||
setTheme(event.matches ? 'dark' : 'light');
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -55,7 +55,7 @@ function showModal(encodedPath, pages, percentage, encodedeTitle, encodedEntryTi
|
||||
|
||||
$('#modal-edit-btn').attr('onclick', `edit("${entryID}")`);
|
||||
|
||||
$('#modal-download-btn').attr('href', `/opds/download/${titleID}/${entryID}`);
|
||||
$('#modal-download-btn').attr('href', `${base_url}api/download/${titleID}/${entryID}`);
|
||||
|
||||
UIkit.modal($('#modal')).show();
|
||||
}
|
||||
@@ -63,18 +63,27 @@ function showModal(encodedPath, pages, percentage, encodedeTitle, encodedEntryTi
|
||||
const updateProgress = (tid, eid, page) => {
|
||||
let url = `${base_url}api/progress/${tid}/${page}`
|
||||
const query = $.param({
|
||||
entry: eid
|
||||
eid: eid
|
||||
});
|
||||
if (eid)
|
||||
url += `?${query}`;
|
||||
$.post(url, (data) => {
|
||||
if (data.success) {
|
||||
location.reload();
|
||||
} else {
|
||||
error = data.error;
|
||||
alert('danger', error);
|
||||
}
|
||||
});
|
||||
|
||||
$.ajax({
|
||||
method: 'PUT',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.success) {
|
||||
location.reload();
|
||||
} else {
|
||||
error = data.error;
|
||||
alert('danger', error);
|
||||
}
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
|
||||
const renameSubmit = (name, eid) => {
|
||||
@@ -89,14 +98,14 @@ const renameSubmit = (name, eid) => {
|
||||
}
|
||||
|
||||
const query = $.param({
|
||||
entry: eid
|
||||
eid: eid
|
||||
});
|
||||
let url = `${base_url}api/admin/display_name/${titleId}/${name}`;
|
||||
if (eid)
|
||||
url += `?${query}`;
|
||||
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
type: 'PUT',
|
||||
url: url,
|
||||
contentType: "application/json",
|
||||
dataType: 'json'
|
||||
@@ -131,6 +140,7 @@ const edit = (eid) => {
|
||||
|
||||
const displayNameField = $('#display-name-field');
|
||||
displayNameField.attr('value', displayName);
|
||||
console.log(displayNameField);
|
||||
displayNameField.keyup(event => {
|
||||
if (event.keyCode === 13) {
|
||||
renameSubmit(displayNameField.val(), eid);
|
||||
@@ -150,10 +160,10 @@ const setupUpload = (eid) => {
|
||||
const bar = $('#upload-progress').get(0);
|
||||
const titleId = upload.attr('data-title-id');
|
||||
const queryObj = {
|
||||
title: titleId
|
||||
tid: titleId
|
||||
};
|
||||
if (eid)
|
||||
queryObj['entry'] = eid;
|
||||
queryObj['eid'] = eid;
|
||||
const query = $.param(queryObj);
|
||||
const url = `${base_url}api/admin/upload/cover?${query}`;
|
||||
console.log(url);
|
||||
@@ -218,9 +228,9 @@ const selectedIDs = () => {
|
||||
const bulkProgress = (action, el) => {
|
||||
const tid = $(el).attr('data-id');
|
||||
const ids = selectedIDs();
|
||||
const url = `${base_url}api/bulk-progress/${action}/${tid}`;
|
||||
const url = `${base_url}api/bulk_progress/${action}/${tid}`;
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
type: 'PUT',
|
||||
url: url,
|
||||
contentType: "application/json",
|
||||
dataType: 'json',
|
||||
@@ -242,3 +252,85 @@ const bulkProgress = (action, el) => {
|
||||
deselectAll();
|
||||
});
|
||||
};
|
||||
|
||||
const tagsComponent = () => {
|
||||
return {
|
||||
isAdmin: false,
|
||||
tags: [],
|
||||
tid: $('.upload-field').attr('data-title-id'),
|
||||
loading: true,
|
||||
|
||||
load(admin) {
|
||||
this.isAdmin = admin;
|
||||
|
||||
$('.tag-select').select2({
|
||||
tags: true,
|
||||
placeholder: this.isAdmin ? 'Tag the title' : 'No tags found',
|
||||
disabled: !this.isAdmin,
|
||||
templateSelection(state) {
|
||||
const a = document.createElement('a');
|
||||
a.setAttribute('href', `${base_url}tags/${encodeURIComponent(state.text)}`);
|
||||
a.setAttribute('class', 'uk-link-reset');
|
||||
a.onclick = event => {
|
||||
event.stopPropagation();
|
||||
};
|
||||
a.innerText = state.text;
|
||||
return a;
|
||||
}
|
||||
});
|
||||
|
||||
this.request(`${base_url}api/tags`, 'GET', (data) => {
|
||||
const allTags = data.tags;
|
||||
const url = `${base_url}api/tags/${this.tid}`;
|
||||
this.request(url, 'GET', data => {
|
||||
this.tags = data.tags;
|
||||
allTags.forEach(t => {
|
||||
const op = new Option(t, t, false, this.tags.indexOf(t) >= 0);
|
||||
$('.tag-select').append(op);
|
||||
});
|
||||
$('.tag-select').on('select2:select', e => {
|
||||
this.onAdd(e);
|
||||
});
|
||||
$('.tag-select').on('select2:unselect', e => {
|
||||
this.onDelete(e);
|
||||
});
|
||||
$('.tag-select').on('change', () => {
|
||||
this.onChange();
|
||||
});
|
||||
$('.tag-select').trigger('change');
|
||||
this.loading = false;
|
||||
});
|
||||
});
|
||||
},
|
||||
onChange() {
|
||||
this.tags = $('.tag-select').select2('data').map(o => o.text);
|
||||
},
|
||||
onAdd(event) {
|
||||
const tag = event.params.data.text;
|
||||
const url = `${base_url}api/admin/tags/${this.tid}/${encodeURIComponent(tag)}`;
|
||||
this.request(url, 'PUT');
|
||||
},
|
||||
onDelete(event) {
|
||||
const tag = event.params.data.text;
|
||||
const url = `${base_url}api/admin/tags/${this.tid}/${encodeURIComponent(tag)}`;
|
||||
this.request(url, 'DELETE');
|
||||
},
|
||||
request(url, method, cb) {
|
||||
$.ajax({
|
||||
url: url,
|
||||
method: method,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.success) {
|
||||
if (cb) cb(data);
|
||||
} else {
|
||||
alert('danger', data.error);
|
||||
}
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
function remove(username) {
|
||||
$.post(base_url + 'api/admin/user/delete/' + username, function(data) {
|
||||
if (data.success) {
|
||||
location.reload();
|
||||
}
|
||||
else {
|
||||
error = data.error;
|
||||
alert('danger', error);
|
||||
}
|
||||
});
|
||||
}
|
||||
const remove = (username) => {
|
||||
$.ajax({
|
||||
url: `${base_url}api/admin/user/delete/${username}`,
|
||||
type: 'DELETE',
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.success)
|
||||
location.reload();
|
||||
else
|
||||
alert('danger', data.error);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to delete the user. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
|
||||
78 shard.lock
@@ -1,62 +1,78 @@
|
||||
version: 1.0
|
||||
version: 2.0
|
||||
shards:
|
||||
ameba:
|
||||
github: crystal-ameba/ameba
|
||||
version: 0.12.1
|
||||
git: https://github.com/crystal-ameba/ameba.git
|
||||
version: 0.14.3
|
||||
|
||||
archive:
|
||||
github: hkalexling/archive.cr
|
||||
version: 0.4.0
|
||||
git: https://github.com/hkalexling/archive.cr.git
|
||||
version: 0.5.0
|
||||
|
||||
baked_file_system:
|
||||
github: schovi/baked_file_system
|
||||
version: 0.9.8
|
||||
git: https://github.com/schovi/baked_file_system.git
|
||||
version: 0.10.0
|
||||
|
||||
clim:
|
||||
github: at-grandpa/clim
|
||||
version: 0.12.0
|
||||
git: https://github.com/at-grandpa/clim.git
|
||||
version: 0.17.1
|
||||
|
||||
db:
|
||||
github: crystal-lang/crystal-db
|
||||
version: 0.9.0
|
||||
git: https://github.com/crystal-lang/crystal-db.git
|
||||
version: 0.10.1
|
||||
|
||||
duktape:
|
||||
github: jessedoyle/duktape.cr
|
||||
version: 0.20.0
|
||||
git: https://github.com/jessedoyle/duktape.cr.git
|
||||
version: 1.0.0
|
||||
|
||||
exception_page:
|
||||
github: crystal-loot/exception_page
|
||||
version: 0.1.4
|
||||
git: https://github.com/crystal-loot/exception_page.git
|
||||
version: 0.1.5
|
||||
|
||||
http_proxy:
|
||||
github: mamantoha/http_proxy
|
||||
version: 0.7.1
|
||||
git: https://github.com/mamantoha/http_proxy.git
|
||||
version: 0.8.0
|
||||
|
||||
image_size:
|
||||
github: hkalexling/image_size.cr
|
||||
version: 0.1.1
|
||||
git: https://github.com/hkalexling/image_size.cr.git
|
||||
version: 0.5.0
|
||||
|
||||
kemal:
|
||||
github: kemalcr/kemal
|
||||
version: 0.26.1
|
||||
git: https://github.com/kemalcr/kemal.git
|
||||
version: 1.0.0
|
||||
|
||||
kemal-session:
|
||||
github: kemalcr/kemal-session
|
||||
version: 0.12.1
|
||||
git: https://github.com/kemalcr/kemal-session.git
|
||||
version: 1.0.0
|
||||
|
||||
kilt:
|
||||
github: jeromegn/kilt
|
||||
version: 0.4.0
|
||||
git: https://github.com/jeromegn/kilt.git
|
||||
version: 0.4.1
|
||||
|
||||
koa:
|
||||
git: https://github.com/hkalexling/koa.git
|
||||
version: 0.8.0
|
||||
|
||||
mg:
|
||||
git: https://github.com/hkalexling/mg.git
|
||||
version: 0.5.0+git.commit.697e46e27cde8c3969346e228e372db2455a6264
|
||||
|
||||
myhtml:
|
||||
github: kostya/myhtml
|
||||
version: 1.5.1
|
||||
git: https://github.com/kostya/myhtml.git
|
||||
version: 1.5.8
|
||||
|
||||
open_api:
|
||||
git: https://github.com/hkalexling/open_api.cr.git
|
||||
version: 1.2.1+git.commit.1d3c55dd5534c6b0af18964d031858a08515553a
|
||||
|
||||
radix:
|
||||
github: luislavena/radix
|
||||
version: 0.3.9
|
||||
git: https://github.com/luislavena/radix.git
|
||||
version: 0.4.1
|
||||
|
||||
sqlite3:
|
||||
github: crystal-lang/crystal-sqlite3
|
||||
version: 0.16.0
|
||||
git: https://github.com/crystal-lang/crystal-sqlite3.git
|
||||
version: 0.18.0
|
||||
|
||||
tallboy:
|
||||
git: https://github.com/epoch/tallboy.git
|
||||
version: 0.9.3+git.commit.9be1510bb0391c95e92f1b288f3afb429a73caa6
|
||||
|
||||
|
||||
12 shard.yml
@@ -1,5 +1,5 @@
|
||||
name: mango
|
||||
version: 0.12.0
|
||||
version: 0.24.0
|
||||
|
||||
authors:
|
||||
- Alex Ling <hkalexling@gmail.com>
|
||||
@@ -8,7 +8,7 @@ targets:
|
||||
mango:
|
||||
main: src/mango.cr
|
||||
|
||||
crystal: 0.34.0
|
||||
crystal: 1.0.0
|
||||
|
||||
license: MIT
|
||||
|
||||
@@ -29,10 +29,16 @@ dependencies:
|
||||
github: at-grandpa/clim
|
||||
duktape:
|
||||
github: jessedoyle/duktape.cr
|
||||
version: ~> 0.20.0
|
||||
myhtml:
|
||||
github: kostya/myhtml
|
||||
http_proxy:
|
||||
github: mamantoha/http_proxy
|
||||
image_size:
|
||||
github: hkalexling/image_size.cr
|
||||
koa:
|
||||
github: hkalexling/koa
|
||||
tallboy:
|
||||
github: epoch/tallboy
|
||||
branch: master
|
||||
mg:
|
||||
github: hkalexling/mg
|
||||
|
||||
@@ -40,11 +40,6 @@ describe Rule do
    rule.render({"a" => "a", "b" => "b"}).should eq "a"
  end

  it "allows `|` outside of patterns" do
    rule = Rule.new "hello|world"
    rule.render({} of String => String).should eq "hello|world"
  end

  it "raises on escaped characters" do
    expect_raises Exception do
      Rule.new "hello/world"
@@ -69,8 +64,13 @@ describe Rule do
    rule.render({} of String => String).should eq "testing"
  end

  it "escapes slash" do
    rule = Rule.new "{id}"
    rule.render({"id" => "/hello/world"}).should eq "_hello_world"
  it "escapes illegal characters" do
    rule = Rule.new "{a}"
    rule.render({"a" => "/?<>:*|\"^"}).should eq "_________"
  end

  it "strips trailing spaces and dots" do
    rule = Rule.new "hello. world. .."
    rule.render({} of String => String).should eq "hello. world"
  end
end
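The specs above pin down the renaming rules fairly completely. A minimal usage sketch, assuming the `Rename::Rule` API exercised by these tests (the require path is illustrative, not the project's actual layout):

require "./src/rename" # assumed path

# Literal text outside of `{}` patterns is kept as-is, including `|`.
rule = Rename::Rule.new "hello|world"
puts rule.render({} of String => String) # => "hello|world"

# Values substituted into a pattern have illegal filename characters
# replaced with underscores.
rule = Rename::Rule.new "{id}"
puts rule.render({"id" => "/hello/world"}) # => "_hello_world"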
@@ -8,9 +8,7 @@ describe Storage do
  end

  it "deletes user" do
    with_storage do |storage|
      storage.delete_user "admin"
    end
    with_storage &.delete_user "admin"
  end

  it "creates new user" do
@@ -21,7 +21,7 @@ describe "compare_numerically" do
  it "sorts like the stack exchange post" do
    ary = ["2", "12", "200000", "1000000", "a", "a12", "b2", "text2",
           "text2a", "text2a2", "text2a12", "text2ab", "text12", "text12a"]
    ary.reverse.sort { |a, b|
    ary.reverse.sort! { |a, b|
      compare_numerically a, b
    }.should eq ary
  end
@@ -29,18 +29,45 @@
  # https://github.com/hkalexling/Mango/issues/22
  it "handles numbers larger than Int32" do
    ary = ["14410155591588.jpg", "21410155591588.png", "104410155591588.jpg"]
    ary.reverse.sort { |a, b|
    ary.reverse.sort! { |a, b|
      compare_numerically a, b
    }.should eq ary
  end
end

describe "is_supported_file" do
  it "returns true when the filename has a supported extension" do
    filename = "manga.cbz"
    is_supported_file(filename).should eq true
  end

  it "returns false when the filename does not have a supported extension" do
    filename = "info.json"
    is_supported_file(filename).should eq false
  end

  it "is case insensitive" do
    filename = "manga.ZiP"
    is_supported_file(filename).should eq true
  end
end

describe "chapter_sort" do
  it "sorts correctly" do
    ary = ["Vol.1 Ch.01", "Vol.1 Ch.02", "Vol.2 Ch. 2.5", "Ch. 3", "Ch.04"]
    sorter = ChapterSorter.new ary
    ary.reverse.sort do |a, b|
    ary.reverse.sort! do |a, b|
      sorter.compare a, b
    end.should eq ary
  end
end

describe "sanitize_filename" do
  it "returns a random string for empty sanitized string" do
    sanitize_filename("..").should_not eq sanitize_filename("..")
  end
  it "sanitizes correctly" do
    sanitize_filename(".. \n\v.\rマンゴー/|*()<[1/2] 3.14 hello world ")
      .should eq "マンゴー_()[1_2] 3.14 hello world"
  end
end
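For reference, the behaviour pinned down by the `is_supported_file` specs amounts to a case-insensitive extension check. A minimal sketch, assuming the same extension list the old scanner used (the constant name here is made up for illustration):

# Assumed list; the actual implementation may support more formats.
SUPPORTED_FILE_EXTNAMES = [".zip", ".cbz", ".rar", ".cbr"]

def is_supported_file(path : String) : Bool
  # Case-insensitive, as required by the "is case insensitive" spec above.
  SUPPORTED_FILE_EXTNAMES.includes? File.extname(path).downcase
end

is_supported_file "manga.cbz" # => true
is_supported_file "manga.ZiP" # => true
is_supported_file "info.json" # => false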
@@ -1,13 +1,13 @@
require "zip"
require "compress/zip"
require "archive"

# A unified class to handle all supported archive formats. It uses the ::Zip
# module in crystal standard library if the target file is a zip archive.
# Otherwise it uses `archive.cr`.
# A unified class to handle all supported archive formats. It uses the
# Compress::Zip module in crystal standard library if the target file is
# a zip archive. Otherwise it uses `archive.cr`.
class ArchiveFile
  def initialize(@filename : String)
    if [".cbz", ".zip"].includes? File.extname filename
      @archive_file = Zip::File.new filename
      @archive_file = Compress::Zip::File.new filename
    else
      @archive_file = Archive::File.new filename
    end
@@ -20,16 +20,16 @@ class ArchiveFile
  end

  def close
    if @archive_file.is_a? Zip::File
      @archive_file.as(Zip::File).close
    if @archive_file.is_a? Compress::Zip::File
      @archive_file.as(Compress::Zip::File).close
    end
  end

  # Lists all file entries
  def entries
    ary = [] of Zip::File::Entry | Archive::Entry
    ary = [] of Compress::Zip::File::Entry | Archive::Entry
    @archive_file.entries.map do |e|
      if (e.is_a? Zip::File::Entry && e.file?) ||
      if (e.is_a? Compress::Zip::File::Entry && e.file?) ||
         (e.is_a? Archive::Entry && e.info.file?)
        ary.push e
      end
@@ -37,8 +37,8 @@ class ArchiveFile
    ary
  end

  def read_entry(e : Zip::File::Entry | Archive::Entry) : Bytes?
    if e.is_a? Zip::File::Entry
  def read_entry(e : Compress::Zip::File::Entry | Archive::Entry) : Bytes?
    if e.is_a? Compress::Zip::File::Entry
      data = nil
      e.open do |io|
        slice = Bytes.new e.uncompressed_size
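A short usage sketch of the class after the Compress::Zip migration (the file path is illustrative; `entries`, `read_entry` and `close` are the methods shown above, and `filename` is the entry attribute used elsewhere in this changeset):

archive = ArchiveFile.new "/path/to/manga.cbz"
archive.entries.each do |e|
  bytes = archive.read_entry e
  puts "#{e.filename}: #{bytes.try &.size} bytes"
end
archive.close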
@@ -1,41 +0,0 @@
|
||||
Arabic,sa
|
||||
Bengali,bd
|
||||
Bulgarian,bg
|
||||
Burmese,mm
|
||||
Catalan,ct
|
||||
Chinese (Simp),cn
|
||||
Chinese (Trad),hk
|
||||
Czech,cz
|
||||
Danish,dk
|
||||
Dutch,nl
|
||||
English,gb
|
||||
Filipino,ph
|
||||
Finnish,fi
|
||||
French,fr
|
||||
German,de
|
||||
Greek,gr
|
||||
Hebrew,il
|
||||
Hindi,in
|
||||
Hungarian,hu
|
||||
Indonesian,id
|
||||
Italian,it
|
||||
Japanese,jp
|
||||
Korean,kr
|
||||
Lithuanian,lt
|
||||
Malay,my
|
||||
Mongolian,mn
|
||||
Other,
|
||||
Persian,ir
|
||||
Polish,pl
|
||||
Portuguese (Br),br
|
||||
Portuguese (Pt),pt
|
||||
Romanian,ro
|
||||
Russian,ru
|
||||
Serbo-Croatian,rs
|
||||
Spanish (Es),es
|
||||
Spanish (LATAM),mx
|
||||
Swedish,se
|
||||
Thai,th
|
||||
Turkish,tr
|
||||
Ukrainian,ua
|
||||
Vietnames,vn
|
||||
|
@@ -5,25 +5,35 @@ class Config
|
||||
|
||||
@[YAML::Field(ignore: true)]
|
||||
property path : String = ""
|
||||
property host : String = "0.0.0.0"
|
||||
property port : Int32 = 9000
|
||||
property base_url : String = "/"
|
||||
property session_secret : String = "mango-session-secret"
|
||||
property library_path : String = File.expand_path "~/mango/library",
|
||||
home: true
|
||||
property library_cache_path = File.expand_path "~/mango/library.yml.gz",
|
||||
home: true
|
||||
property db_path : String = File.expand_path "~/mango/mango.db", home: true
|
||||
@[YAML::Field(key: "scan_interval_minutes")]
|
||||
property scan_interval : Int32 = 5
|
||||
property scan_interval_minutes : Int32 = 5
|
||||
property thumbnail_generation_interval_hours : Int32 = 24
|
||||
property log_level : String = "info"
|
||||
property upload_path : String = File.expand_path "~/mango/uploads",
|
||||
home: true
|
||||
property plugin_path : String = File.expand_path "~/mango/plugins",
|
||||
home: true
|
||||
property download_timeout_seconds : Int32 = 30
|
||||
property cache_enabled = false
|
||||
property cache_size_mbs = 50
|
||||
property cache_log_enabled = true
|
||||
property disable_login = false
|
||||
property default_username = ""
|
||||
property auth_proxy_header_name = ""
|
||||
property mangadex = Hash(String, String | Int32).new
|
||||
|
||||
@[YAML::Field(ignore: true)]
|
||||
@mangadex_defaults = {
|
||||
"base_url" => "https://mangadex.org",
|
||||
"api_url" => "https://mangadex.org/api",
|
||||
"api_url" => "https://api.mangadex.org/v2",
|
||||
"download_wait_seconds" => 5,
|
||||
"download_retries" => 4,
|
||||
"download_queue_db_path" => File.expand_path("~/mango/queue.db",
|
||||
@@ -47,9 +57,9 @@ class Config
|
||||
cfg_path = File.expand_path path, home: true
|
||||
if File.exists? cfg_path
|
||||
config = self.from_yaml File.read cfg_path
|
||||
config.preprocess
|
||||
config.path = path
|
||||
config.fill_defaults
|
||||
config.preprocess
|
||||
return config
|
||||
end
|
||||
puts "The config file #{cfg_path} does not exist. " \
|
||||
@@ -83,5 +93,28 @@ class Config
|
||||
unless base_url.ends_with? "/"
|
||||
@base_url += "/"
|
||||
end
|
||||
if disable_login && default_username.empty?
|
||||
raise "Login is disabled, but default username is not set. " \
|
||||
"Please set a default username"
|
||||
end
|
||||
|
||||
# `Logger.default` is not available yet
|
||||
Log.setup :debug
|
||||
unless mangadex["api_url"] =~ /\/v2/
|
||||
Log.warn { "It looks like you are using the deprecated MangaDex API " \
|
||||
"v1 in your config file. Please update it to " \
|
||||
"https://api.mangadex.org/v2 to suppress this warning." }
|
||||
mangadex["api_url"] = "https://api.mangadex.org/v2"
|
||||
end
|
||||
if mangadex["api_url"] =~ /\/api\/v2/
|
||||
Log.warn { "It looks like you are using the outdated MangaDex API " \
|
||||
"url (mangadex.org/api/v2) in your config file. Please " \
|
||||
"update it to https://api.mangadex.org/v2 to suppress this " \
|
||||
"warning." }
|
||||
mangadex["api_url"] = "https://api.mangadex.org/v2"
|
||||
end
|
||||
|
||||
mangadex["api_url"] = mangadex["api_url"].to_s.rstrip "/"
|
||||
mangadex["base_url"] = mangadex["base_url"].to_s.rstrip "/"
|
||||
end
|
||||
end
|
||||
|
||||
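The two MangaDex URL checks added above can be read as one small normalisation step. A simplified sketch of the same logic, without the log warnings (this is not the exact code):

def normalize_mangadex_api_url(url : String) : String
  # A URL without /v2 is the deprecated v1 API.
  url = "https://api.mangadex.org/v2" unless url =~ /\/v2/
  # mangadex.org/api/v2 is the outdated host for the v2 API.
  url = "https://api.mangadex.org/v2" if url =~ /\/api\/v2/
  url.rstrip "/"
end

normalize_mangadex_api_url "https://mangadex.org/api"    # => "https://api.mangadex.org/v2"
normalize_mangadex_api_url "https://mangadex.org/api/v2" # => "https://api.mangadex.org/v2"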
@@ -11,24 +11,25 @@ class AuthHandler < Kemal::Handler
|
||||
"You have to login with proper credentials"
|
||||
HEADER_LOGIN_REQUIRED = "Basic realm=\"Login Required\""
|
||||
|
||||
def initialize(@storage : Storage)
|
||||
end
|
||||
|
||||
def require_basic_auth(env)
|
||||
env.response.status_code = 401
|
||||
env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED
|
||||
env.response.print AUTH_MESSAGE
|
||||
call_next env
|
||||
end
|
||||
|
||||
def require_auth(env)
|
||||
env.session.string "callback", env.request.path
|
||||
redirect env, "/login"
|
||||
end
|
||||
|
||||
def validate_token(env)
|
||||
token = env.session.string? "token"
|
||||
!token.nil? && @storage.verify_token token
|
||||
!token.nil? && Storage.default.verify_token token
|
||||
end
|
||||
|
||||
def validate_token_admin(env)
|
||||
token = env.session.string? "token"
|
||||
!token.nil? && @storage.verify_admin token
|
||||
!token.nil? && Storage.default.verify_admin token
|
||||
end
|
||||
|
||||
def validate_auth_header(env)
|
||||
@@ -49,44 +50,54 @@ class AuthHandler < Kemal::Handler
|
||||
def verify_user(value)
|
||||
username, password = Base64.decode_string(value[BASIC.size + 1..-1])
|
||||
.split(":")
|
||||
@storage.verify_user username, password
|
||||
Storage.default.verify_user username, password
|
||||
end
|
||||
|
||||
def handle_opds_auth(env)
|
||||
if validate_token(env) || validate_auth_header(env)
|
||||
call_next env
|
||||
else
|
||||
env.response.status_code = 401
|
||||
env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED
|
||||
env.response.print AUTH_MESSAGE
|
||||
end
|
||||
end
|
||||
|
||||
def handle_auth(env)
|
||||
if request_path_startswith(env, ["/login", "/logout"]) ||
|
||||
def call(env)
|
||||
# Skip all authentication if requesting /login, /logout, /api/login,
|
||||
# or a static file
|
||||
if request_path_startswith(env, ["/login", "/logout", "/api/login"]) ||
|
||||
requesting_static_file env
|
||||
return call_next(env)
|
||||
end
|
||||
|
||||
unless validate_token env
|
||||
env.session.string "callback", env.request.path
|
||||
return redirect env, "/login"
|
||||
# Check user is logged in
|
||||
if validate_token env
|
||||
# Skip if the request has a valid token
|
||||
elsif Config.current.disable_login
|
||||
# Check default username if login is disabled
|
||||
unless Storage.default.username_exists Config.current.default_username
|
||||
Logger.warn "Default username #{Config.current.default_username} " \
|
||||
"does not exist"
|
||||
return require_auth env
|
||||
end
|
||||
elsif !Config.current.auth_proxy_header_name.empty?
|
||||
# Check auth proxy if present
|
||||
username = env.request.headers[Config.current.auth_proxy_header_name]?
|
||||
unless username && Storage.default.username_exists username
|
||||
Logger.warn "Header #{Config.current.auth_proxy_header_name} unset " \
|
||||
"or is not a valid username"
|
||||
return require_auth env
|
||||
end
|
||||
elsif request_path_startswith env, ["/opds"]
|
||||
# Check auth header if requesting an opds page
|
||||
unless validate_auth_header env
|
||||
return require_basic_auth env
|
||||
end
|
||||
else
|
||||
return require_auth env
|
||||
end
|
||||
|
||||
if request_path_startswith env, ["/admin", "/api/admin", "/download"]
|
||||
unless validate_token_admin env
|
||||
# Check admin access when requesting an admin page
|
||||
if request_path_startswith env, %w(/admin /api/admin /download)
|
||||
unless is_admin? env
|
||||
env.response.status_code = 403
|
||||
return send_error_page "HTTP 403: You are not authorized to visit " \
|
||||
"#{env.request.path}"
|
||||
end
|
||||
end
|
||||
|
||||
# Let the request go through if it passes the above checks
|
||||
call_next env
|
||||
end
|
||||
|
||||
def call(env)
|
||||
if request_path_startswith env, ["/opds"]
|
||||
handle_opds_auth env
|
||||
else
|
||||
handle_auth env
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
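The refactored `call` delegates the admin check to an `is_admin?` helper that is not part of this hunk. A plausible sketch, assuming it follows the same precedence as the login checks above (session token, then disabled login, then the auth proxy header); both the helper body and the `username_is_admin` query are assumptions, not code from this diff:

def is_admin?(env) : Bool
  # A valid admin session token wins outright.
  return true if validate_token_admin env

  # With login disabled, the configured default username decides.
  if Config.current.disable_login && !Config.current.default_username.empty?
    # username_is_admin is an assumed Storage query.
    return Storage.default.username_is_admin Config.current.default_username
  end

  # Otherwise fall back to the auth proxy header, if one is configured.
  header = Config.current.auth_proxy_header_name
  if !header.empty? && (username = env.request.headers[header]?)
    return Storage.default.username_is_admin username
  end

  false
end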
@@ -23,7 +23,7 @@ class StaticHandler < Kemal::Handler
|
||||
|
||||
slice = Bytes.new file.size
|
||||
file.read slice
|
||||
return send_file env, slice, file.mime_type
|
||||
return send_file env, slice, MIME.from_filename file.path
|
||||
end
|
||||
call_next env
|
||||
end
|
||||
|
||||
188
src/library/cache.cr
Normal file
188
src/library/cache.cr
Normal file
@@ -0,0 +1,188 @@
|
||||
require "digest"
|
||||
|
||||
require "./entry"
|
||||
require "./types"
|
||||
|
||||
# Base class for an entry in the LRU cache.
|
||||
# There are two ways to use it:
|
||||
# 1. Use it as it is by instantiating with the appropriate `SaveT` and
|
||||
# `ReturnT`. Note that in this case, `SaveT` and `ReturnT` must be the
|
||||
# same type. That is, the input value will be stored as it is without
|
||||
# any transformation.
|
||||
# 2. You can also subclass it and provide custom implementations for
|
||||
# `to_save_t` and `to_return_t`. This allows you to transform and store
|
||||
# the input value to a different type. See `SortedEntriesCacheEntry` as
|
||||
# an example.
|
||||
private class CacheEntry(SaveT, ReturnT)
|
||||
getter key : String, atime : Time
|
||||
|
||||
@value : SaveT
|
||||
|
||||
def initialize(@key : String, value : ReturnT)
|
||||
@atime = @ctime = Time.utc
|
||||
@value = self.class.to_save_t value
|
||||
end
|
||||
|
||||
def value
|
||||
@atime = Time.utc
|
||||
self.class.to_return_t @value
|
||||
end
|
||||
|
||||
def self.to_save_t(value : ReturnT)
|
||||
value
|
||||
end
|
||||
|
||||
def self.to_return_t(value : SaveT)
|
||||
value
|
||||
end
|
||||
|
||||
def instance_size
|
||||
instance_sizeof(CacheEntry(SaveT, ReturnT)) + # sizeof itself
|
||||
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
|
||||
@value.instance_size
|
||||
end
|
||||
end
|
||||
|
||||
class SortedEntriesCacheEntry < CacheEntry(Array(String), Array(Entry))
|
||||
def self.to_save_t(value : Array(Entry))
|
||||
value.map &.id
|
||||
end
|
||||
|
||||
def self.to_return_t(value : Array(String))
|
||||
ids_to_entries value
|
||||
end
|
||||
|
||||
private def self.ids_to_entries(ids : Array(String))
|
||||
e_map = Library.default.deep_entries.to_h { |entry| {entry.id, entry} }
|
||||
entries = [] of Entry
|
||||
begin
|
||||
ids.each do |id|
|
||||
entries << e_map[id]
|
||||
end
|
||||
return entries if ids.size == entries.size
|
||||
rescue
|
||||
end
|
||||
end
|
||||
|
||||
def instance_size
|
||||
instance_sizeof(SortedEntriesCacheEntry) + # sizeof itself
|
||||
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
|
||||
@value.size * (instance_sizeof(String) + sizeof(String)) +
|
||||
@value.sum(&.bytesize) # elements in Array(String)
|
||||
end
|
||||
|
||||
def self.gen_key(book_id : String, username : String,
|
||||
entries : Array(Entry), opt : SortOptions?)
|
||||
entries_sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
|
||||
user_context = opt && opt.method == SortMethod::Progress ? username : ""
|
||||
sig = Digest::SHA1.hexdigest (book_id + entries_sig + user_context +
|
||||
(opt ? opt.to_tuple.to_s : "nil"))
|
||||
"#{sig}:sorted_entries"
|
||||
end
|
||||
end
|
||||
|
||||
class String
|
||||
def instance_size
|
||||
instance_sizeof(String) + bytesize
|
||||
end
|
||||
end
|
||||
|
||||
struct Tuple(*T)
|
||||
def instance_size
|
||||
sizeof(T) + # total size of non-reference types
|
||||
self.sum do |e|
|
||||
next 0 unless e.is_a? Reference
|
||||
if e.responds_to? :instance_size
|
||||
e.instance_size
|
||||
else
|
||||
instance_sizeof(typeof(e))
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
alias CacheableType = Array(Entry) | String | Tuple(String, Int32)
|
||||
alias CacheEntryType = SortedEntriesCacheEntry |
|
||||
CacheEntry(String, String) |
|
||||
CacheEntry(Tuple(String, Int32), Tuple(String, Int32))
|
||||
|
||||
def generate_cache_entry(key : String, value : CacheableType)
|
||||
if value.is_a? Array(Entry)
|
||||
SortedEntriesCacheEntry.new key, value
|
||||
else
|
||||
CacheEntry(typeof(value), typeof(value)).new key, value
|
||||
end
|
||||
end
|
||||
|
||||
# LRU Cache
|
||||
class LRUCache
|
||||
@@limit : Int128 = Int128.new 0
|
||||
@@should_log = true
|
||||
# key => entry
|
||||
@@cache = {} of String => CacheEntryType
|
||||
|
||||
def self.enabled
|
||||
Config.current.cache_enabled
|
||||
end
|
||||
|
||||
def self.init
|
||||
cache_size = Config.current.cache_size_mbs
|
||||
@@limit = Int128.new cache_size * 1024 * 1024 if enabled
|
||||
@@should_log = Config.current.cache_log_enabled
|
||||
end
|
||||
|
||||
def self.get(key : String)
|
||||
return unless enabled
|
||||
entry = @@cache[key]?
|
||||
if @@should_log
|
||||
Logger.debug "LRUCache #{entry.nil? ? "miss" : "hit"} #{key}"
|
||||
end
|
||||
return entry.value unless entry.nil?
|
||||
end
|
||||
|
||||
def self.set(cache_entry : CacheEntryType)
|
||||
return unless enabled
|
||||
key = cache_entry.key
|
||||
@@cache[key] = cache_entry
|
||||
Logger.debug "LRUCache cached #{key}" if @@should_log
|
||||
remove_least_recent_access
|
||||
end
|
||||
|
||||
def self.invalidate(key : String)
|
||||
return unless enabled
|
||||
@@cache.delete key
|
||||
end
|
||||
|
||||
def self.print
|
||||
return unless @@should_log
|
||||
sum = @@cache.sum { |_, entry| entry.instance_size }
|
||||
Logger.debug "---- LRU Cache ----"
|
||||
Logger.debug "Size: #{sum} Bytes"
|
||||
Logger.debug "List:"
|
||||
@@cache.each do |k, v|
|
||||
Logger.debug "#{k} | #{v.atime} | #{v.instance_size}"
|
||||
end
|
||||
Logger.debug "-------------------"
|
||||
end
|
||||
|
||||
private def self.is_cache_full
|
||||
sum = @@cache.sum { |_, entry| entry.instance_size }
|
||||
sum > @@limit
|
||||
end
|
||||
|
||||
private def self.remove_least_recent_access
|
||||
if @@should_log && is_cache_full
|
||||
Logger.debug "Removing entries from LRUCache"
|
||||
end
|
||||
while is_cache_full && @@cache.size > 0
|
||||
min_tuple = @@cache.min_by { |_, entry| entry.atime }
|
||||
min_key = min_tuple[0]
|
||||
min_entry = min_tuple[1]
|
||||
|
||||
Logger.debug " \
|
||||
Target: #{min_key}, \
|
||||
Last Access Time: #{min_entry.atime}" if @@should_log
|
||||
invalidate min_key
|
||||
end
|
||||
end
|
||||
end
|
||||
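Putting the new module together, a typical round trip through `LRUCache` looks roughly like this (the key and the cached tuple are illustrative values):

LRUCache.init # reads cache_enabled / cache_size_mbs / cache_log_enabled from Config

key = "some-title-id:alice:progress_sum" # hypothetical key
LRUCache.set generate_cache_entry key, {"sig", 42} # caches a Tuple(String, Int32)

cached = LRUCache.get key
if cached.is_a? Tuple(String, Int32)
  puts "cached progress sum: #{cached[1]}"
end

LRUCache.invalidate key # drop the entry, e.g. after the progress changes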
@@ -1,22 +1,26 @@
|
||||
require "image_size"
|
||||
require "yaml"
|
||||
|
||||
class Entry
|
||||
property zip_path : String, book : Title, title : String,
|
||||
include YAML::Serializable
|
||||
|
||||
getter zip_path : String, book : Title, title : String,
|
||||
size : String, pages : Int32, id : String, encoded_path : String,
|
||||
encoded_title : String, mtime : Time, err_msg : String?
|
||||
|
||||
def initialize(@zip_path, @book, storage)
|
||||
def initialize(@zip_path, @book)
|
||||
storage = Storage.default
|
||||
@encoded_path = URI.encode @zip_path
|
||||
@title = File.basename @zip_path, File.extname @zip_path
|
||||
@encoded_title = URI.encode @title
|
||||
@size = (File.size @zip_path).humanize_bytes
|
||||
id = storage.get_id @zip_path, false
|
||||
id = storage.get_entry_id @zip_path, File.signature(@zip_path)
|
||||
if id.nil?
|
||||
id = random_str
|
||||
storage.insert_id({
|
||||
path: @zip_path,
|
||||
id: id,
|
||||
is_title: false,
|
||||
storage.insert_entry_id({
|
||||
path: @zip_path,
|
||||
id: id,
|
||||
signature: File.signature(@zip_path).to_s,
|
||||
})
|
||||
end
|
||||
@id = id
|
||||
@@ -45,17 +49,20 @@ class Entry
|
||||
file.close
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
{% for str in ["zip_path", "title", "size", "id",
|
||||
"encoded_path", "encoded_title"] %}
|
||||
def build_json(*, slim = false)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for str in ["zip_path", "title", "size", "id"] %}
|
||||
json.field {{str}}, @{{str.id}}
|
||||
{% end %}
|
||||
json.field "title_id", @book.id
|
||||
json.field "display_name", @book.display_name @title
|
||||
json.field "cover_url", cover_url
|
||||
json.field "pages" { json.number @pages }
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
json.field "title_id", @book.id
|
||||
json.field "pages" { json.number @pages }
|
||||
unless slim
|
||||
json.field "display_name", @book.display_name @title
|
||||
json.field "cover_url", cover_url
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -69,9 +76,17 @@ class Entry
|
||||
|
||||
def cover_url
|
||||
return "#{Config.current.base_url}img/icon.png" if @err_msg
|
||||
url = "#{Config.current.base_url}api/page/#{@book.id}/#{@id}/1"
|
||||
TitleInfo.new @book.dir do |info|
|
||||
info_url = info.entry_cover_url[@title]?
|
||||
|
||||
unless @book.entry_cover_url_cache
|
||||
TitleInfo.new @book.dir do |info|
|
||||
@book.entry_cover_url_cache = info.entry_cover_url
|
||||
end
|
||||
end
|
||||
entry_cover_url = @book.entry_cover_url_cache
|
||||
|
||||
url = "#{Config.current.base_url}api/cover/#{@book.id}/#{@id}"
|
||||
if entry_cover_url
|
||||
info_url = entry_cover_url[@title]?
|
||||
unless info_url.nil? || info_url.empty?
|
||||
url = File.join Config.current.base_url, info_url
|
||||
end
|
||||
@@ -86,7 +101,7 @@ class Entry
|
||||
SUPPORTED_IMG_TYPES.includes? \
|
||||
MIME.from_filename? e.filename
|
||||
}
|
||||
.sort { |a, b|
|
||||
.sort! { |a, b|
|
||||
compare_numerically a.filename, b.filename
|
||||
}
|
||||
yield file, entries
|
||||
@@ -118,8 +133,8 @@ class Entry
|
||||
"width" => size.width,
|
||||
"height" => size.height,
|
||||
}
|
||||
rescue
|
||||
Logger.warn "Failed to read page #{i} of entry #{@id}"
|
||||
rescue e
|
||||
Logger.warn "Failed to read page #{i} of entry #{zip_path}. #{e}"
|
||||
sizes << {"width" => 1000_i32, "height" => 1000_i32}
|
||||
end
|
||||
end
|
||||
@@ -134,10 +149,11 @@ class Entry
|
||||
entries[idx + 1]
|
||||
end
|
||||
|
||||
def previous_entry
|
||||
idx = @book.entries.index self
|
||||
def previous_entry(username)
|
||||
entries = @book.sorted_entries username
|
||||
idx = entries.index self
|
||||
return nil if idx.nil? || idx == 0
|
||||
@book.entries[idx - 1]
|
||||
entries[idx - 1]
|
||||
end
|
||||
|
||||
def date_added
|
||||
@@ -157,6 +173,16 @@ class Entry
|
||||
# For backward compatibility with v0.1.0, we save entry titles
|
||||
# instead of IDs in info.json
|
||||
def save_progress(username, page)
|
||||
LRUCache.invalidate "#{@book.id}:#{username}:progress_sum"
|
||||
@book.parents.each do |parent|
|
||||
LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
|
||||
end
|
||||
[false, true].each do |ascend|
|
||||
sorted_entries_cache_key = SortedEntriesCacheEntry.gen_key @book.id,
|
||||
username, @book.entries, SortOptions.new(SortMethod::Progress, ascend)
|
||||
LRUCache.invalidate sorted_entries_cache_key
|
||||
end
|
||||
|
||||
TitleInfo.new @book.dir do |info|
|
||||
if info.progress[username]?.nil?
|
||||
info.progress[username] = {@title => page}
|
||||
@@ -207,4 +233,33 @@ class Entry
|
||||
def started?(username)
|
||||
load_progress(username) > 0
|
||||
end
|
||||
|
||||
def generate_thumbnail : Image?
|
||||
return if @err_msg
|
||||
|
||||
img = read_page(1).not_nil!
|
||||
begin
|
||||
size = ImageSize.get img.data
|
||||
if size.height > size.width
|
||||
thumbnail = ImageSize.resize img.data, width: 200
|
||||
else
|
||||
thumbnail = ImageSize.resize img.data, height: 300
|
||||
end
|
||||
img.data = thumbnail
|
||||
img.size = thumbnail.size
|
||||
unless img.mime == "image/webp"
|
||||
# image_size.cr resizes non-webp images to jpg
|
||||
img.mime = "image/jpeg"
|
||||
end
|
||||
Storage.default.save_thumbnail @id, img
|
||||
rescue e
|
||||
Logger.warn "Failed to generate thumbnail for file #{@zip_path}. #{e}"
|
||||
end
|
||||
|
||||
img
|
||||
end
|
||||
|
||||
def get_thumbnail : Image?
|
||||
Storage.default.get_thumbnail @id
|
||||
end
|
||||
end
|
||||
|
||||
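The new `build_json(slim:)` flag lets API routes drop the fields that require extra lookups. Assuming `entry` is an `Entry` built as above, the difference is:

# Full payload: includes display_name, cover_url and mtime.
full_json = entry.build_json

# Slim payload: only zip_path, title, size, id, title_id and pages, which
# avoids the info.json / cover-URL lookups for every entry in a listing.
slim_json = entry.build_json slim: true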
@@ -1,27 +1,77 @@
|
||||
class Library
|
||||
property dir : String, title_ids : Array(String), scan_interval : Int32,
|
||||
include YAML::Serializable
|
||||
|
||||
getter dir : String, title_ids : Array(String),
|
||||
title_hash : Hash(String, Title)
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
register_mime_types
|
||||
def save_instance
|
||||
path = Config.current.library_cache_path
|
||||
Logger.debug "Caching library to #{path}"
|
||||
|
||||
writer = Compress::Gzip::Writer.new path,
|
||||
Compress::Gzip::BEST_COMPRESSION
|
||||
writer.write self.to_yaml.to_slice
|
||||
writer.close
|
||||
end
|
||||
|
||||
def self.load_instance
|
||||
path = Config.current.library_cache_path
|
||||
return unless File.exists? path
|
||||
|
||||
Logger.debug "Loading cached library from #{path}"
|
||||
|
||||
begin
|
||||
Compress::Gzip::Reader.open path do |content|
|
||||
@@default = Library.from_yaml content
|
||||
end
|
||||
Library.default.register_jobs
|
||||
rescue e
|
||||
Logger.error e
|
||||
end
|
||||
end
|
||||
|
||||
def initialize
|
||||
@dir = Config.current.library_path
|
||||
@scan_interval = Config.current.scan_interval
|
||||
# explicitly initialize @titles to bypass the compiler check. it will
|
||||
# be filled with actual Titles in the `scan` call below
|
||||
@title_ids = [] of String
|
||||
@title_hash = {} of String => Title
|
||||
|
||||
return scan if @scan_interval < 1
|
||||
spawn do
|
||||
loop do
|
||||
start = Time.local
|
||||
scan
|
||||
ms = (Time.local - start).total_milliseconds
|
||||
Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
|
||||
sleep @scan_interval * 60
|
||||
@entries_count = 0
|
||||
@thumbnails_count = 0
|
||||
|
||||
register_jobs
|
||||
end
|
||||
|
||||
protected def register_jobs
|
||||
register_mime_types
|
||||
|
||||
scan_interval = Config.current.scan_interval_minutes
|
||||
if scan_interval < 1
|
||||
scan
|
||||
else
|
||||
spawn do
|
||||
loop do
|
||||
start = Time.local
|
||||
scan
|
||||
ms = (Time.local - start).total_milliseconds
|
||||
Logger.debug "Library initialized in #{ms}ms"
|
||||
sleep scan_interval.minutes
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
thumbnail_interval = Config.current.thumbnail_generation_interval_hours
|
||||
unless thumbnail_interval < 1
|
||||
spawn do
|
||||
loop do
|
||||
# Wait for scan to complete (in most cases)
|
||||
sleep 1.minutes
|
||||
generate_thumbnails
|
||||
sleep thumbnail_interval.hours
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -33,47 +83,31 @@ class Library
|
||||
def sorted_titles(username, opt : SortOptions? = nil)
|
||||
if opt.nil?
|
||||
opt = SortOptions.from_info_json @dir, username
|
||||
else
|
||||
TitleInfo.new @dir do |info|
|
||||
info.sort_by[username] = opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
# This is a hack to bypass a compiler bug
|
||||
ary = titles
|
||||
|
||||
case opt.not_nil!.method
|
||||
when .time_modified?
|
||||
ary.sort! { |a, b| (a.mtime <=> b.mtime).or \
|
||||
compare_numerically a.title, b.title }
|
||||
when .progress?
|
||||
ary.sort! do |a, b|
|
||||
(a.load_percentage(username) <=> b.load_percentage(username)).or \
|
||||
compare_numerically a.title, b.title
|
||||
end
|
||||
else
|
||||
unless opt.method.auto?
|
||||
Logger.warn "Unknown sorting method #{opt.not_nil!.method}. Using " \
|
||||
"Auto instead"
|
||||
end
|
||||
ary.sort! { |a, b| compare_numerically a.title, b.title }
|
||||
end
|
||||
|
||||
ary.reverse! unless opt.not_nil!.ascend
|
||||
|
||||
ary
|
||||
# Helper function from src/util/util.cr
|
||||
sort_titles titles, opt.not_nil!, username
|
||||
end
|
||||
|
||||
def deep_titles
|
||||
titles + titles.map { |t| t.deep_titles }.flatten
|
||||
titles + titles.flat_map &.deep_titles
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
json.field "dir", @dir
|
||||
json.field "titles" do
|
||||
json.raw self.titles.to_json
|
||||
def deep_entries
|
||||
titles.flat_map &.deep_entries
|
||||
end
|
||||
|
||||
def build_json(*, slim = false, depth = -1)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
json.field "dir", @dir
|
||||
json.field "titles" do
|
||||
json.array do
|
||||
self.titles.each do |title|
|
||||
json.raw title.build_json(slim: slim, depth: depth)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -87,6 +121,7 @@ class Library
|
||||
end
|
||||
|
||||
def scan
|
||||
start = Time.local
|
||||
unless Dir.exists? @dir
|
||||
Logger.info "The library directory #{@dir} does not exist. " \
|
||||
"Attempting to create it"
|
||||
@@ -95,14 +130,36 @@ class Library
|
||||
|
||||
storage = Storage.new auto_close: false
|
||||
|
||||
examine_context : ExamineContext = {
|
||||
cached_contents_signature: {} of String => String,
|
||||
deleted_title_ids: [] of String,
|
||||
deleted_entry_ids: [] of String,
|
||||
}
|
||||
|
||||
@title_ids.select! do |title_id|
|
||||
title = @title_hash[title_id]
|
||||
existence = title.examine examine_context
|
||||
unless existence
|
||||
examine_context["deleted_title_ids"].concat [title_id] +
|
||||
title.deep_titles.map &.id
|
||||
examine_context["deleted_entry_ids"].concat title.deep_entries.map &.id
|
||||
end
|
||||
existence
|
||||
end
|
||||
remained_title_dirs = @title_ids.map { |id| title_hash[id].dir }
|
||||
examine_context["deleted_title_ids"].each do |title_id|
|
||||
@title_hash.delete title_id
|
||||
end
|
||||
|
||||
cache = examine_context["cached_contents_signature"]
|
||||
(Dir.entries @dir)
|
||||
.select { |fn| !fn.starts_with? "." }
|
||||
.map { |fn| File.join @dir, fn }
|
||||
.select { |path| !(remained_title_dirs.includes? path) }
|
||||
.select { |path| File.directory? path }
|
||||
.map { |path| Title.new path, "", storage, self }
|
||||
.map { |path| Title.new path, "", cache }
|
||||
.select { |title| !(title.entries.empty? && title.titles.empty?) }
|
||||
.sort { |a, b| a.title <=> b.title }
|
||||
.tap { |_| @title_ids.clear }
|
||||
.sort! { |a, b| a.title <=> b.title }
|
||||
.each do |title|
|
||||
@title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
@@ -111,19 +168,27 @@ class Library
|
||||
storage.bulk_insert_ids
|
||||
storage.close
|
||||
|
||||
Logger.debug "Scan completed"
|
||||
ms = (Time.local - start).total_milliseconds
|
||||
Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
|
||||
|
||||
Storage.default.mark_unavailable examine_context["deleted_entry_ids"],
|
||||
examine_context["deleted_title_ids"]
|
||||
|
||||
spawn do
|
||||
save_instance
|
||||
end
|
||||
end
|
||||
|
||||
def get_continue_reading_entries(username)
|
||||
cr_entries = deep_titles
|
||||
.map { |t| t.get_last_read_entry username }
|
||||
.map(&.get_last_read_entry username)
|
||||
# Select elements with type `Entry` from the array and ignore all `Nil`s
|
||||
.select(Entry)[0...ENTRIES_IN_HOME_SECTIONS]
|
||||
.map { |e|
|
||||
# Get the last read time of the entry. If it hasn't been started, get
|
||||
# the last read time of the previous entry
|
||||
last_read = e.load_last_read username
|
||||
pe = e.previous_entry
|
||||
pe = e.previous_entry username
|
||||
if last_read.nil? && pe
|
||||
last_read = pe.load_last_read username
|
||||
end
|
||||
@@ -152,14 +217,14 @@ class Library
|
||||
recently_added = [] of RA
|
||||
last_date_added = nil
|
||||
|
||||
titles.map { |t| t.deep_entries_with_date_added }.flatten
|
||||
.select { |e| e[:date_added] > 1.month.ago }
|
||||
.sort { |a, b| b[:date_added] <=> a[:date_added] }
|
||||
titles.flat_map(&.deep_entries_with_date_added)
|
||||
.select(&.[:date_added].> 1.month.ago)
|
||||
.sort! { |a, b| b[:date_added] <=> a[:date_added] }
|
||||
.each do |e|
|
||||
break if recently_added.size > 12
|
||||
last = recently_added.last?
|
||||
if last && e[:entry].book.id == last[:entry].book.id &&
|
||||
(e[:date_added] - last_date_added.not_nil!).duration < 1.day
|
||||
(e[:date_added] - last_date_added.not_nil!).abs < 1.day
|
||||
# A NamedTuple is immutable, so we have to cast it to a Hash first
|
||||
last_hash = last.to_h
|
||||
count = last_hash[:grouped_count].as(Int32)
|
||||
@@ -190,8 +255,54 @@ class Library
|
||||
# If we use `deep_titles`, the start reading section might include `Vol. 2`
|
||||
# when the user hasn't started `Vol. 1` yet
|
||||
titles
|
||||
.select { |t| t.load_percentage(username) == 0 }
|
||||
.select(&.load_percentage(username).== 0)
|
||||
.sample(ENTRIES_IN_HOME_SECTIONS)
|
||||
.shuffle
|
||||
.shuffle!
|
||||
end
|
||||
|
||||
def thumbnail_generation_progress
|
||||
return 0 if @entries_count == 0
|
||||
@thumbnails_count / @entries_count
|
||||
end
|
||||
|
||||
def generate_thumbnails
|
||||
if @thumbnails_count > 0
|
||||
Logger.debug "Thumbnail generation in progress"
|
||||
return
|
||||
end
|
||||
|
||||
Logger.info "Starting thumbnail generation"
|
||||
entries = deep_titles.flat_map(&.deep_entries).reject &.err_msg
|
||||
@entries_count = entries.size
|
||||
@thumbnails_count = 0
|
||||
|
||||
# Report generation progress regularly
|
||||
spawn do
|
||||
loop do
|
||||
unless @thumbnails_count == 0
|
||||
Logger.debug "Thumbnail generation progress: " \
|
||||
"#{(thumbnail_generation_progress * 100).round 1}%"
|
||||
end
|
||||
# Generation is completed. We reset the count to 0 to allow subsequent
|
||||
# calls to the function, and break from the loop to stop the progress
|
||||
# report fiber
|
||||
if thumbnail_generation_progress.to_i == 1
|
||||
@thumbnails_count = 0
|
||||
break
|
||||
end
|
||||
sleep 10.seconds
|
||||
end
|
||||
end
|
||||
|
||||
entries.each do |e|
|
||||
unless e.get_thumbnail
|
||||
e.generate_thumbnail
|
||||
# Sleep after each generation to minimize the impact on disk IO
|
||||
# and CPU
|
||||
sleep 1.seconds
|
||||
end
|
||||
@thumbnails_count += 1
|
||||
end
|
||||
Logger.info "Thumbnail generation finished"
|
||||
end
|
||||
end
|
||||
|
||||
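The library cache introduced above is simply a gzipped YAML dump of the serializable fields. The underlying pattern, reduced to a self-contained sketch with a toy type and an illustrative path, looks like this:

require "compress/gzip"
require "yaml"

class Snapshot
  include YAML::Serializable
  getter ids : Array(String)

  def initialize(@ids)
  end
end

path = "/tmp/snapshot.yml.gz" # illustrative path

# Save: serialize to YAML, then gzip with maximum compression.
writer = Compress::Gzip::Writer.new path, Compress::Gzip::BEST_COMPRESSION
writer.write Snapshot.new(["a", "b"]).to_yaml.to_slice
writer.close

# Load: gunzip and deserialize back into an object.
restored = nil
Compress::Gzip::Reader.open path do |gzip|
  restored = Snapshot.from_yaml gzip
end
puts restored.not_nil!.ids # => ["a", "b"]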
@@ -1,22 +1,38 @@
|
||||
require "digest"
|
||||
require "../archive"
|
||||
|
||||
class Title
|
||||
property dir : String, parent_id : String, title_ids : Array(String),
|
||||
entries : Array(Entry), title : String, id : String,
|
||||
encoded_title : String, mtime : Time
|
||||
include YAML::Serializable
|
||||
|
||||
def initialize(@dir : String, @parent_id, storage,
|
||||
@library : Library)
|
||||
id = storage.get_id @dir, true
|
||||
getter dir : String, parent_id : String, title_ids : Array(String),
|
||||
entries : Array(Entry), title : String, id : String,
|
||||
encoded_title : String, mtime : Time, signature : UInt64,
|
||||
entry_cover_url_cache : Hash(String, String)?
|
||||
setter entry_cover_url_cache : Hash(String, String)?
|
||||
|
||||
@[YAML::Field(ignore: true)]
|
||||
@entry_display_name_cache : Hash(String, String)?
|
||||
@[YAML::Field(ignore: true)]
|
||||
@entry_cover_url_cache : Hash(String, String)?
|
||||
@[YAML::Field(ignore: true)]
|
||||
@cached_display_name : String?
|
||||
@[YAML::Field(ignore: true)]
|
||||
@cached_cover_url : String?
|
||||
|
||||
def initialize(@dir : String, @parent_id, cache = {} of String => String)
|
||||
storage = Storage.default
|
||||
@signature = Dir.signature dir
|
||||
id = storage.get_title_id dir, signature
|
||||
if id.nil?
|
||||
id = random_str
|
||||
storage.insert_id({
|
||||
path: @dir,
|
||||
id: id,
|
||||
is_title: true,
|
||||
storage.insert_title_id({
|
||||
path: dir,
|
||||
id: id,
|
||||
signature: signature.to_s,
|
||||
})
|
||||
end
|
||||
@id = id
|
||||
@contents_signature = Dir.contents_signature dir, cache
|
||||
@title = File.basename dir
|
||||
@encoded_title = URI.encode @title
|
||||
@title_ids = [] of String
|
||||
@@ -27,53 +43,178 @@ class Title
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dir, fn
|
||||
if File.directory? path
|
||||
title = Title.new path, @id, storage, library
|
||||
title = Title.new path, @id, cache
|
||||
next if title.entries.size == 0 && title.titles.size == 0
|
||||
@library.title_hash[title.id] = title
|
||||
Library.default.title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
next
|
||||
end
|
||||
if [".zip", ".cbz", ".rar", ".cbr"].includes? File.extname path
|
||||
entry = Entry.new path, self, storage
|
||||
if is_supported_file path
|
||||
entry = Entry.new path, self
|
||||
@entries << entry if entry.pages > 0 || entry.err_msg
|
||||
end
|
||||
end
|
||||
|
||||
mtimes = [@mtime]
|
||||
mtimes += @title_ids.map { |e| @library.title_hash[e].mtime }
|
||||
mtimes += @entries.map { |e| e.mtime }
|
||||
mtimes += @title_ids.map { |e| Library.default.title_hash[e].mtime }
|
||||
mtimes += @entries.map &.mtime
|
||||
@mtime = mtimes.max
|
||||
|
||||
@title_ids.sort! do |a, b|
|
||||
compare_numerically @library.title_hash[a].title,
|
||||
@library.title_hash[b].title
|
||||
compare_numerically Library.default.title_hash[a].title,
|
||||
Library.default.title_hash[b].title
|
||||
end
|
||||
sorter = ChapterSorter.new @entries.map { |e| e.title }
|
||||
sorter = ChapterSorter.new @entries.map &.title
|
||||
@entries.sort! do |a, b|
|
||||
sorter.compare a.title, b.title
|
||||
end
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
{% for str in ["dir", "title", "id", "encoded_title"] %}
|
||||
# Utility method used in library rescanning.
|
||||
# - When the title does not exist on the file system anymore, return false
|
||||
# and let it be deleted from the library instance
|
||||
# - When the title exists, but its contents signature is now different from
|
||||
# the cache, it means some of its content (nested titles or entries)
|
||||
# has been added, deleted, or renamed. In this case we update its
|
||||
# contents signature and instance variables
|
||||
# - When the title exists and its contents signature is still the same, we
|
||||
# return true so it can be reused without rescanning
|
||||
def examine(context : ExamineContext) : Bool
|
||||
return false unless Dir.exists? @dir
|
||||
contents_signature = Dir.contents_signature @dir,
|
||||
context["cached_contents_signature"]
|
||||
return true if @contents_signature == contents_signature
|
||||
|
||||
@contents_signature = contents_signature
|
||||
@signature = Dir.signature @dir
|
||||
storage = Storage.default
|
||||
id = storage.get_title_id dir, signature
|
||||
if id.nil?
|
||||
id = random_str
|
||||
storage.insert_title_id({
|
||||
path: dir,
|
||||
id: id,
|
||||
signature: signature.to_s,
|
||||
})
|
||||
end
|
||||
@id = id
|
||||
@mtime = File.info(@dir).modification_time
|
||||
|
||||
previous_titles_size = @title_ids.size
|
||||
@title_ids.select! do |title_id|
|
||||
title = Library.default.get_title! title_id
|
||||
existence = title.examine context
|
||||
unless existence
|
||||
context["deleted_title_ids"].concat [title_id] +
|
||||
title.deep_titles.map &.id
|
||||
context["deleted_entry_ids"].concat title.deep_entries.map &.id
|
||||
end
|
||||
existence
|
||||
end
|
||||
remained_title_dirs = @title_ids.map do |title_id|
|
||||
title = Library.default.get_title! title_id
|
||||
title.dir
|
||||
end
|
||||
|
||||
previous_entries_size = @entries.size
|
||||
@entries.select! do |entry|
|
||||
existence = File.exists? entry.zip_path
|
||||
Fiber.yield
|
||||
context["deleted_entry_ids"] << entry.id unless existence
|
||||
existence
|
||||
end
|
||||
remained_entry_zip_paths = @entries.map &.zip_path
|
||||
|
||||
is_titles_added = false
|
||||
is_entries_added = false
|
||||
Dir.entries(dir).each do |fn|
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dir, fn
|
||||
if File.directory? path
|
||||
next if remained_title_dirs.includes? path
|
||||
title = Title.new path, @id, context["cached_contents_signature"]
|
||||
next if title.entries.size == 0 && title.titles.size == 0
|
||||
Library.default.title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
is_titles_added = true
|
||||
next
|
||||
end
|
||||
if is_supported_file path
|
||||
next if remained_entry_zip_paths.includes? path
|
||||
entry = Entry.new path, self
|
||||
if entry.pages > 0 || entry.err_msg
|
||||
@entries << entry
|
||||
is_entries_added = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
mtimes = [@mtime]
|
||||
mtimes += @title_ids.map { |e| Library.default.title_hash[e].mtime }
|
||||
mtimes += @entries.map &.mtime
|
||||
@mtime = mtimes.max
|
||||
|
||||
if is_titles_added || previous_titles_size != @title_ids.size
|
||||
@title_ids.sort! do |a, b|
|
||||
compare_numerically Library.default.title_hash[a].title,
|
||||
Library.default.title_hash[b].title
|
||||
end
|
||||
end
|
||||
if is_entries_added || previous_entries_size != @entries.size
|
||||
sorter = ChapterSorter.new @entries.map &.title
|
||||
@entries.sort! do |a, b|
|
||||
sorter.compare a.title, b.title
|
||||
end
|
||||
end
|
||||
|
||||
true
|
||||
end
|
||||
|
||||
alias SortContext = NamedTuple(username: String, opt: SortOptions)
|
||||
|
||||
def build_json(*, slim = false, depth = -1,
|
||||
sort_context : SortContext? = nil)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for str in ["dir", "title", "id"] %}
|
||||
json.field {{str}}, @{{str.id}}
|
||||
{% end %}
|
||||
json.field "display_name", display_name
|
||||
json.field "cover_url", cover_url
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
json.field "titles" do
|
||||
json.raw self.titles.to_json
|
||||
end
|
||||
json.field "entries" do
|
||||
json.raw @entries.to_json
|
||||
end
|
||||
json.field "parents" do
|
||||
json.array do
|
||||
self.parents.each do |title|
|
||||
json.object do
|
||||
json.field "title", title.title
|
||||
json.field "id", title.id
|
||||
json.field "signature" { json.number @signature }
|
||||
unless slim
|
||||
json.field "display_name", display_name
|
||||
json.field "cover_url", cover_url
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
end
|
||||
unless depth == 0
|
||||
json.field "titles" do
|
||||
json.array do
|
||||
self.titles.each do |title|
|
||||
json.raw title.build_json(slim: slim,
|
||||
depth: depth > 0 ? depth - 1 : depth)
|
||||
end
|
||||
end
|
||||
end
|
||||
json.field "entries" do
|
||||
json.array do
|
||||
_entries = if sort_context
|
||||
sorted_entries sort_context[:username],
|
||||
sort_context[:opt]
|
||||
else
|
||||
@entries
|
||||
end
|
||||
_entries.each do |entry|
|
||||
json.raw entry.build_json(slim: slim)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
json.field "parents" do
|
||||
json.array do
|
||||
self.parents.each do |title|
|
||||
json.object do
|
||||
json.field "title", title.title
|
||||
json.field "id", title.id
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -82,45 +223,71 @@ class Title
|
||||
end
|
||||
|
||||
def titles
|
||||
@title_ids.map { |tid| @library.get_title! tid }
|
||||
@title_ids.map { |tid| Library.default.get_title! tid }
|
||||
end
|
||||
|
||||
# Get all entries, including entries in nested titles
|
||||
def deep_entries
|
||||
return @entries if title_ids.empty?
|
||||
@entries + titles.map { |t| t.deep_entries }.flatten
|
||||
@entries + titles.flat_map &.deep_entries
|
||||
end
|
||||
|
||||
def deep_titles
|
||||
return [] of Title if titles.empty?
|
||||
titles + titles.map { |t| t.deep_titles }.flatten
|
||||
titles + titles.flat_map &.deep_titles
|
||||
end
|
||||
|
||||
def parents
|
||||
ary = [] of Title
|
||||
tid = @parent_id
|
||||
while !tid.empty?
|
||||
title = @library.get_title! tid
|
||||
title = Library.default.get_title! tid
|
||||
ary << title
|
||||
tid = title.parent_id
|
||||
end
|
||||
ary.reverse
|
||||
end
|
||||
|
||||
def size
|
||||
@entries.size + @title_ids.size
|
||||
# Returns a string that describes the content of the title
|
||||
# e.g., - 3 titles and 1 entry
|
||||
# - 4 entries
|
||||
# - 1 title
|
||||
def content_label
|
||||
ary = [] of String
|
||||
tsize = titles.size
|
||||
esize = entries.size
|
||||
|
||||
ary << "#{tsize} #{tsize > 1 ? "titles" : "title"}" if tsize > 0
|
||||
ary << "#{esize} #{esize > 1 ? "entries" : "entry"}" if esize > 0
|
||||
ary.join " and "
|
||||
end
|
||||
|
||||
def tags
|
||||
Storage.default.get_title_tags @id
|
||||
end
|
||||
|
||||
def add_tag(tag)
|
||||
Storage.default.add_tag @id, tag
|
||||
end
|
||||
|
||||
def delete_tag(tag)
|
||||
Storage.default.delete_tag @id, tag
|
||||
end
|
||||
|
||||
def get_entry(eid)
|
||||
@entries.find { |e| e.id == eid }
|
||||
@entries.find &.id.== eid
|
||||
end
|
||||
|
||||
def display_name
|
||||
cached_display_name = @cached_display_name
|
||||
return cached_display_name unless cached_display_name.nil?
|
||||
|
||||
dn = @title
|
||||
TitleInfo.new @dir do |info|
|
||||
info_dn = info.display_name
|
||||
dn = info_dn unless info_dn.empty?
|
||||
end
|
||||
@cached_display_name = dn
|
||||
dn
|
||||
end
|
||||
|
||||
@@ -129,17 +296,22 @@ class Title
|
||||
end
|
||||
|
||||
def display_name(entry_name)
|
||||
dn = entry_name
|
||||
TitleInfo.new @dir do |info|
|
||||
info_dn = info.entry_display_name[entry_name]?
|
||||
unless info_dn.nil? || info_dn.empty?
|
||||
dn = info_dn
|
||||
unless @entry_display_name_cache
|
||||
TitleInfo.new @dir do |info|
|
||||
@entry_display_name_cache = info.entry_display_name
|
||||
end
|
||||
end
|
||||
|
||||
dn = entry_name
|
||||
info_dn = @entry_display_name_cache.not_nil![entry_name]?
|
||||
unless info_dn.nil? || info_dn.empty?
|
||||
dn = info_dn
|
||||
end
|
||||
dn
|
||||
end
|
||||
|
||||
def set_display_name(dn)
|
||||
@cached_display_name = dn
|
||||
TitleInfo.new @dir do |info|
|
||||
info.display_name = dn
|
||||
info.save
|
||||
@@ -149,11 +321,15 @@ class Title
|
||||
def set_display_name(entry_name : String, dn)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.entry_display_name[entry_name] = dn
|
||||
@entry_display_name_cache = info.entry_display_name
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
def cover_url
|
||||
cached_cover_url = @cached_cover_url
|
||||
return cached_cover_url unless cached_cover_url.nil?
|
||||
|
||||
url = "#{Config.current.base_url}img/icon.png"
|
||||
readable_entries = @entries.select &.err_msg.nil?
|
||||
if readable_entries.size > 0
|
||||
@@ -165,10 +341,12 @@ class Title
|
||||
url = File.join Config.current.base_url, info_url
|
||||
end
|
||||
end
|
||||
@cached_cover_url = url
|
||||
url
|
||||
end
|
||||
|
||||
def set_cover_url(url : String)
|
||||
@cached_cover_url = url
|
||||
TitleInfo.new @dir do |info|
|
||||
info.cover_url = url
|
||||
info.save
|
||||
@@ -178,6 +356,7 @@ class Title
|
||||
def set_cover_url(entry_name : String, url : String)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.entry_cover_url[entry_name] = url
|
||||
@entry_cover_url_cache = info.entry_cover_url
|
||||
info.save
|
||||
end
|
||||
end
|
||||
@@ -187,29 +366,30 @@ class Title
|
||||
@entries.each do |e|
|
||||
e.save_progress username, e.pages
|
||||
end
|
||||
titles.each do |t|
|
||||
t.read_all username
|
||||
end
|
||||
titles.each &.read_all username
|
||||
end
|
||||
|
||||
# Set the reading progress of all entries and nested libraries to 0%
|
||||
def unread_all(username)
|
||||
@entries.each do |e|
|
||||
e.save_progress username, 0
|
||||
end
|
||||
titles.each do |t|
|
||||
t.unread_all username
|
||||
end
|
||||
@entries.each &.save_progress(username, 0)
|
||||
titles.each &.unread_all username
|
||||
end
|
||||
|
||||
def deep_read_page_count(username) : Int32
|
||||
load_progress_for_all_entries(username).sum +
|
||||
titles.map { |t| t.deep_read_page_count username }.flatten.sum
|
||||
key = "#{@id}:#{username}:progress_sum"
|
||||
sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
|
||||
cached_sum = LRUCache.get key
|
||||
return cached_sum[1] if cached_sum.is_a? Tuple(String, Int32) &&
|
||||
cached_sum[0] == sig
|
||||
sum = load_progress_for_all_entries(username, nil, true).sum +
|
||||
titles.flat_map(&.deep_read_page_count username).sum
|
||||
LRUCache.set generate_cache_entry key, {sig, sum}
|
||||
sum
|
||||
end
|
||||
|
||||
def deep_total_page_count : Int32
|
||||
entries.map { |e| e.pages }.sum +
|
||||
titles.map { |t| t.deep_total_page_count }.flatten.sum
|
||||
entries.sum(&.pages) +
|
||||
titles.flat_map(&.deep_total_page_count).sum
|
||||
end
|
||||
|
||||
def load_percentage(username)
|
||||
@@ -258,13 +438,12 @@ class Title
|
||||
# use the default (auto, ascending)
|
||||
# When `opt` is not nil, it saves the options to info.json
|
||||
def sorted_entries(username, opt : SortOptions? = nil)
|
||||
cache_key = SortedEntriesCacheEntry.gen_key @id, username, @entries, opt
|
||||
cached_entries = LRUCache.get cache_key
|
||||
return cached_entries if cached_entries.is_a? Array(Entry)
|
||||
|
||||
if opt.nil?
|
||||
opt = SortOptions.from_info_json @dir, username
|
||||
else
|
||||
TitleInfo.new @dir do |info|
|
||||
info.sort_by[username] = opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
case opt.not_nil!.method
|
||||
@@ -281,13 +460,13 @@ class Title
|
||||
ary = @entries.zip(percentage_ary)
|
||||
.sort { |a_tp, b_tp| (a_tp[1] <=> b_tp[1]).or \
|
||||
compare_numerically a_tp[0].title, b_tp[0].title }
|
||||
.map { |tp| tp[0] }
|
||||
.map &.[0]
|
||||
else
|
||||
unless opt.method.auto?
|
||||
Logger.warn "Unknown sorting method #{opt.not_nil!.method}. Using " \
|
||||
"Auto instead"
|
||||
end
|
||||
sorter = ChapterSorter.new @entries.map { |e| e.title }
|
||||
sorter = ChapterSorter.new @entries.map &.title
|
||||
ary = @entries.sort do |a, b|
|
||||
sorter.compare(a.title, b.title).or \
|
||||
compare_numerically a.title, b.title
|
||||
@@ -296,6 +475,7 @@ class Title
|
||||
|
||||
ary.reverse! unless opt.not_nil!.ascend
|
||||
|
||||
LRUCache.set generate_cache_entry cache_key, ary
|
||||
ary
|
||||
end
|
||||
|
||||
@@ -353,13 +533,24 @@ class Title
|
||||
{entry: e, date_added: da_ary[i]}
|
||||
end
|
||||
return zip if title_ids.empty?
|
||||
zip + titles.map { |t| t.deep_entries_with_date_added }.flatten
|
||||
zip + titles.flat_map &.deep_entries_with_date_added
|
||||
end
|
||||
|
||||
def bulk_progress(action, ids : Array(String), username)
|
||||
LRUCache.invalidate "#{@id}:#{username}:progress_sum"
|
||||
parents.each do |parent|
|
||||
LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
|
||||
end
|
||||
[false, true].each do |ascend|
|
||||
sorted_entries_cache_key =
|
||||
SortedEntriesCacheEntry.gen_key @id, username, @entries,
|
||||
SortOptions.new(SortMethod::Progress, ascend)
|
||||
LRUCache.invalidate sorted_entries_cache_key
|
||||
end
|
||||
|
||||
selected_entries = ids
|
||||
.map { |id|
|
||||
@entries.find { |e| e.id == id }
|
||||
@entries.find &.id.==(id)
|
||||
}
|
||||
.select(Entry)
|
||||
|
||||
|
||||
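`content_label` above summarises a title's contents for the UI, as its doc comment describes. Illustrative calls, assuming a `title` instance:

title.content_label # => "3 titles and 1 entry" (3 nested titles, 1 entry)
title.content_label # => "4 entries"            (a flat title with 4 entries)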
@@ -1,4 +1,12 @@
|
||||
SUPPORTED_IMG_TYPES = ["image/jpeg", "image/png", "image/webp"]
|
||||
SUPPORTED_IMG_TYPES = %w(
|
||||
image/jpeg
|
||||
image/png
|
||||
image/webp
|
||||
image/apng
|
||||
image/avif
|
||||
image/gif
|
||||
image/svg+xml
|
||||
)
|
||||
|
||||
enum SortMethod
|
||||
Auto
|
||||
@@ -57,6 +65,16 @@ struct Image
|
||||
|
||||
def initialize(@data, @mime, @filename, @size)
|
||||
end
|
||||
|
||||
def self.from_db(res : DB::ResultSet)
|
||||
img = Image.allocate
|
||||
res.read String
|
||||
img.data = res.read Bytes
|
||||
img.filename = res.read String
|
||||
img.mime = res.read String
|
||||
img.size = res.read Int32
|
||||
img
|
||||
end
|
||||
end
|
||||
|
||||
class TitleInfo
|
||||
@@ -78,6 +96,18 @@ class TitleInfo
|
||||
@@mutex_hash = {} of String => Mutex
|
||||
|
||||
def self.new(dir, &)
|
||||
key = "#{dir}:info.json"
|
||||
info = LRUCache.get key
|
||||
if info.is_a? String
|
||||
begin
|
||||
instance = TitleInfo.from_json info
|
||||
instance.dir = dir
|
||||
yield instance
|
||||
return
|
||||
rescue
|
||||
end
|
||||
end
|
||||
|
||||
if @@mutex_hash[dir]?
|
||||
mutex = @@mutex_hash[dir]
|
||||
else
|
||||
@@ -91,6 +121,7 @@ class TitleInfo
|
||||
instance = TitleInfo.from_json File.read json_path
|
||||
end
|
||||
instance.dir = dir
|
||||
LRUCache.set generate_cache_entry key, instance.to_json
|
||||
yield instance
|
||||
end
|
||||
end
|
||||
@@ -98,5 +129,12 @@ class TitleInfo
|
||||
def save
|
||||
json_path = File.join @dir, "info.json"
|
||||
File.write json_path, self.to_pretty_json
|
||||
key = "#{@dir}:info.json"
|
||||
LRUCache.set generate_cache_entry key, self.to_json
|
||||
end
|
||||
end
|
||||
|
||||
alias ExamineContext = NamedTuple(
|
||||
cached_contents_signature: Hash(String, String),
|
||||
deleted_title_ids: Array(String),
|
||||
deleted_entry_ids: Array(String))
|
||||
|
||||
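The expanded `SUPPORTED_IMG_TYPES` list is used to filter archive entries by MIME type, with the same check already shown in the page-reading code above. A small illustration (filenames are made up; the less common types rely on the project's `register_mime_types` having run):

pages = ["001.png", "002.jpg", "notes.txt"]
pages.select do |fn|
  # MIME.from_filename? returns nil for unknown extensions, which simply
  # fails the includes? check below.
  SUPPORTED_IMG_TYPES.includes? MIME.from_filename? fn
end
# => ["001.png", "002.jpg"]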
@@ -6,29 +6,17 @@ class Logger
|
||||
SEVERITY_IDS = [0, 4, 5, 2, 3]
|
||||
COLORS = [:light_cyan, :light_red, :red, :light_yellow, :light_magenta]
|
||||
|
||||
getter raw_log = Log.for ""
|
||||
|
||||
@@severity : Log::Severity = :info
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
level = Config.current.log_level
|
||||
{% begin %}
|
||||
case level.downcase
|
||||
when "off"
|
||||
@@severity = :none
|
||||
{% for lvl, i in LEVELS %}
|
||||
when {{lvl}}
|
||||
@@severity = Log::Severity.new SEVERITY_IDS[{{i}}]
|
||||
{% end %}
|
||||
else
|
||||
raise "Unknown log level #{level}"
|
||||
end
|
||||
{% end %}
|
||||
|
||||
@log = Log.for("")
|
||||
|
||||
@@severity = Logger.get_severity
|
||||
@backend = Log::IOBackend.new
|
||||
@backend.formatter = ->(entry : Log::Entry, io : IO) do
|
||||
|
||||
format_proc = ->(entry : Log::Entry, io : IO) do
|
||||
color = :default
|
||||
{% begin %}
|
||||
case entry.severity.label.to_s().downcase
|
||||
@@ -45,12 +33,36 @@ class Logger
|
||||
io << entry.message
|
||||
end
|
||||
|
||||
Log.builder.bind "*", @@severity, @backend
|
||||
@backend.formatter = Log::Formatter.new &format_proc
|
||||
|
||||
Log.setup do |c|
|
||||
c.bind "*", @@severity, @backend
|
||||
c.bind "db.*", :error, @backend
|
||||
end
|
||||
end
|
||||
|
||||
def self.get_severity(level = "") : Log::Severity
|
||||
if level.empty?
|
||||
level = Config.current.log_level
|
||||
end
|
||||
{% begin %}
|
||||
case level.downcase
|
||||
when "off"
|
||||
return Log::Severity::None
|
||||
{% for lvl, i in LEVELS %}
|
||||
when {{lvl}}
|
||||
return Log::Severity.new SEVERITY_IDS[{{i}}]
|
||||
{% end %}
|
||||
else
|
||||
raise "Unknown log level #{level}"
|
||||
end
|
||||
{% end %}
|
||||
end
|
||||
|
||||
# Ignores @@severity and always log msg
|
||||
def log(msg)
|
||||
@backend.write Log::Entry.new "", Log::Severity::None, msg, nil
|
||||
@backend.write Log::Entry.new "", Log::Severity::None, msg,
|
||||
Log::Metadata.empty, nil
|
||||
end
|
||||
|
||||
def self.log(msg)
|
||||
@@ -59,7 +71,7 @@ class Logger
|
||||
|
||||
{% for lvl in LEVELS %}
|
||||
def {{lvl.id}}(msg)
|
||||
@log.{{lvl.id}} { msg }
|
||||
raw_log.{{lvl.id}} { msg }
|
||||
end
|
||||
def self.{{lvl.id}}(msg)
|
||||
default.not_nil!.{{lvl.id}} msg
|
||||
|
||||
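The logger now routes everything through the standard `Log` module. The binding pattern used above, shown on its own with illustrative severities and sources:

require "log"

backend = Log::IOBackend.new

Log.setup do |c|
  # All sources log at the configured severity...
  c.bind "*", Log::Severity::Info, backend
  # ...but the noisy db.* sources are restricted to errors only.
  c.bind "db.*", Log::Severity::Error, backend
end

Log.for("db.pool").info { "dropped" } # filtered out by the db.* binding
Log.for("library").info { "kept" }    # written to the IO backend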
@@ -1,217 +0,0 @@
|
||||
require "json"
|
||||
require "csv"
|
||||
require "../rename"
|
||||
|
||||
macro string_properties(names)
|
||||
{% for name in names %}
|
||||
property {{name.id}} = ""
|
||||
{% end %}
|
||||
end
|
||||
|
||||
macro parse_strings_from_json(names)
|
||||
{% for name in names %}
|
||||
@{{name.id}} = obj[{{name}}].as_s
|
||||
{% end %}
|
||||
end
|
||||
|
||||
macro properties_to_hash(names)
|
||||
{
|
||||
{% for name in names %}
|
||||
"{{name.id}}" => @{{name.id}}.to_s,
|
||||
{% end %}
|
||||
}
|
||||
end
|
||||
|
||||
module MangaDex
|
||||
class Chapter
|
||||
string_properties ["lang_code", "title", "volume", "chapter"]
|
||||
property manga : Manga
|
||||
property time = Time.local
|
||||
property id : String
|
||||
property full_title = ""
|
||||
property language = ""
|
||||
property pages = [] of {String, String} # filename, url
|
||||
property groups = [] of {Int32, String} # group_id, group_name
|
||||
|
||||
def initialize(@id, json_obj : JSON::Any, @manga,
|
||||
lang : Hash(String, String))
|
||||
self.parse_json json_obj, lang
|
||||
end
|
||||
|
||||
def to_info_json
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for name in ["id", "title", "volume", "chapter",
|
||||
"language", "full_title"] %}
|
||||
json.field {{name}}, @{{name.id}}
|
||||
{% end %}
|
||||
json.field "time", @time.to_unix.to_s
|
||||
json.field "manga_title", @manga.title
|
||||
json.field "manga_id", @manga.id
|
||||
json.field "groups" do
|
||||
json.object do
|
||||
@groups.each do |gid, gname|
|
||||
json.field gname, gid
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_json(obj, lang)
|
||||
parse_strings_from_json ["lang_code", "title", "volume",
|
||||
"chapter"]
|
||||
language = lang[@lang_code]?
|
||||
@language = language if language
|
||||
@time = Time.unix obj["timestamp"].as_i
|
||||
suffixes = ["", "_2", "_3"]
|
||||
suffixes.each do |s|
|
||||
gid = obj["group_id#{s}"].as_i
|
||||
next if gid == 0
|
||||
gname = obj["group_name#{s}"].as_s
|
||||
@groups << {gid, gname}
|
||||
end
|
||||
|
||||
rename_rule = Rename::Rule.new \
|
||||
Config.current.mangadex["chapter_rename_rule"].to_s
|
||||
@full_title = rename rename_rule
|
||||
rescue e
|
||||
raise "failed to parse json: #{e}"
|
||||
end
|
||||
|
||||
def rename(rule : Rename::Rule)
|
||||
hash = properties_to_hash ["id", "title", "volume", "chapter",
|
||||
"lang_code", "language", "pages"]
|
||||
hash["groups"] = @groups.map { |g| g[1] }.join ","
|
||||
rule.render hash
|
||||
end
|
||||
end
|
||||
|
||||
class Manga
|
||||
string_properties ["cover_url", "description", "title", "author", "artist"]
|
||||
property chapters = [] of Chapter
|
||||
property id : String
|
||||
|
||||
def initialize(@id, json_obj : JSON::Any)
|
||||
self.parse_json json_obj
|
||||
end
|
||||
|
||||
def to_info_json(with_chapters = true)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for name in ["id", "title", "description", "author", "artist",
|
||||
"cover_url"] %}
|
||||
json.field {{name}}, @{{name.id}}
|
||||
{% end %}
|
||||
if with_chapters
|
||||
json.field "chapters" do
|
||||
json.array do
|
||||
@chapters.each do |c|
|
||||
json.raw c.to_info_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_json(obj)
|
||||
parse_strings_from_json ["cover_url", "description", "title", "author",
|
||||
"artist"]
|
||||
rescue e
|
||||
raise "failed to parse json: #{e}"
|
||||
end
|
||||
|
||||
def rename(rule : Rename::Rule)
|
||||
rule.render properties_to_hash ["id", "title", "author", "artist"]
|
||||
end
|
||||
end
|
||||
|
||||
class API
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
@base_url = Config.current.mangadex["api_url"].to_s ||
|
||||
"https://mangadex.org/api/"
|
||||
@lang = {} of String => String
|
||||
CSV.each_row {{read_file "src/assets/lang_codes.csv"}} do |row|
|
||||
@lang[row[1]] = row[0]
|
||||
end
|
||||
end
|
||||
|
||||
def get(url)
|
||||
headers = HTTP::Headers{
|
||||
"User-agent" => "Mangadex.cr",
|
||||
}
|
||||
res = HTTP::Client.get url, headers
|
||||
raise "Failed to get #{url}. [#{res.status_code}] " \
|
||||
"#{res.status_message}" if !res.success?
|
||||
JSON.parse res.body
|
||||
end
|
||||
|
||||
def get_manga(id)
|
||||
obj = self.get File.join @base_url, "manga/#{id}"
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
begin
|
||||
manga = Manga.new id, obj["manga"]
|
||||
obj["chapter"].as_h.map do |k, v|
|
||||
chapter = Chapter.new k, v, manga, @lang
|
||||
manga.chapters << chapter
|
||||
end
|
||||
manga
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
end
|
||||
|
||||
def get_chapter(chapter : Chapter)
|
||||
obj = self.get File.join @base_url, "chapter/#{chapter.id}"
|
||||
if obj["status"]? == "external"
|
||||
raise "This chapter is hosted on an external site " \
|
||||
"#{obj["external"]?}, and Mango does not support " \
|
||||
"external chapters."
|
||||
end
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
begin
|
||||
server = obj["server"].as_s
|
||||
hash = obj["hash"].as_s
|
||||
chapter.pages = obj["page_array"].as_a.map do |fn|
|
||||
{
|
||||
fn.as_s,
|
||||
"#{server}#{hash}/#{fn.as_s}",
|
||||
}
|
||||
end
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
end
|
||||
|
||||
def get_chapter(id : String)
|
||||
obj = self.get File.join @base_url, "chapter/#{id}"
|
||||
if obj["status"]? == "external"
|
||||
raise "This chapter is hosted on an external site " \
|
||||
"#{obj["external"]?}, and Mango does not support " \
|
||||
"external chapters."
|
||||
end
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
manga_id = ""
|
||||
begin
|
||||
manga_id = obj["manga_id"].as_i.to_s
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
manga = self.get_manga manga_id
|
||||
chapter = manga.chapters.find { |c| c.id == id }.not_nil!
|
||||
self.get_chapter chapter
|
||||
chapter
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,156 +0,0 @@
|
||||
require "./api"
|
||||
require "zip"
|
||||
|
||||
module MangaDex
|
||||
class PageJob
|
||||
property success = false
|
||||
property url : String
|
||||
property filename : String
|
||||
property writer : Zip::Writer
|
||||
property tries_remaning : Int32
|
||||
|
||||
def initialize(@url, @filename, @writer, @tries_remaning)
|
||||
end
|
||||
end
|
||||
|
||||
class Downloader < Queue::Downloader
|
||||
@wait_seconds : Int32 = Config.current.mangadex["download_wait_seconds"]
|
||||
.to_i32
|
||||
@retries : Int32 = Config.current.mangadex["download_retries"].to_i32
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
@api = API.default
|
||||
super
|
||||
end
|
||||
|
||||
def pop : Queue::Job?
|
||||
job = nil
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@queue.path}" do |db|
|
||||
begin
|
||||
db.query_one "select * from queue where id not like '%-%' " \
|
||||
"and (status = 0 or status = 1) " \
|
||||
"order by time limit 1" do |res|
|
||||
job = Queue::Job.from_query_result res
|
||||
end
|
||||
rescue
|
||||
end
|
||||
end
|
||||
end
|
||||
job
|
||||
end
|
||||
|
||||
private def download(job : Queue::Job)
|
||||
@downloading = true
|
||||
@queue.set_status Queue::JobStatus::Downloading, job
|
||||
begin
|
||||
chapter = @api.get_chapter(job.id)
|
||||
rescue e
|
||||
Logger.error e
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
unless e.message.nil?
|
||||
@queue.add_message e.message.not_nil!, job
|
||||
end
|
||||
@downloading = false
|
||||
return
|
||||
end
|
||||
@queue.set_pages chapter.pages.size, job
|
||||
lib_dir = @library_path
|
||||
rename_rule = Rename::Rule.new \
|
||||
Config.current.mangadex["manga_rename_rule"].to_s
|
||||
manga_dir = File.join lib_dir, chapter.manga.rename rename_rule
|
||||
unless File.exists? manga_dir
|
||||
Dir.mkdir_p manga_dir
|
||||
end
|
||||
zip_path = File.join manga_dir, "#{job.title}.cbz.part"
|
||||
|
||||
# Find the number of digits needed to store the number of pages
|
||||
len = Math.log10(chapter.pages.size).to_i + 1
|
||||
|
||||
writer = Zip::Writer.new zip_path
|
||||
# Create a buffered channel. It works as an FIFO queue
|
||||
channel = Channel(PageJob).new chapter.pages.size
|
||||
spawn do
|
||||
chapter.pages.each_with_index do |tuple, i|
|
||||
fn, url = tuple
|
||||
ext = File.extname fn
|
||||
fn = "#{i.to_s.rjust len, '0'}#{ext}"
|
||||
page_job = PageJob.new url, fn, writer, @retries
|
||||
Logger.debug "Downloading #{url}"
|
||||
loop do
|
||||
sleep @wait_seconds.seconds
|
||||
download_page page_job
|
||||
break if page_job.success ||
|
||||
page_job.tries_remaning <= 0
|
||||
page_job.tries_remaning -= 1
|
||||
Logger.warn "Failed to download page #{url}. " \
|
||||
"Retrying... Remaining retries: " \
|
||||
"#{page_job.tries_remaning}"
|
||||
end
|
||||
|
||||
channel.send page_job
|
||||
end
|
||||
end
|
||||
|
||||
spawn do
|
||||
page_jobs = [] of PageJob
|
||||
chapter.pages.size.times do
|
||||
page_job = channel.receive
|
||||
Logger.debug "[#{page_job.success ? "success" : "failed"}] " \
|
||||
"#{page_job.url}"
|
||||
page_jobs << page_job
|
||||
if page_job.success
|
||||
@queue.add_success job
|
||||
else
|
||||
@queue.add_fail job
|
||||
msg = "Failed to download page #{page_job.url}"
|
||||
@queue.add_message msg, job
|
||||
Logger.error msg
|
||||
end
|
||||
end
|
||||
fail_count = page_jobs.count { |j| !j.success }
|
||||
Logger.debug "Download completed. " \
|
||||
"#{fail_count}/#{page_jobs.size} failed"
|
||||
writer.close
|
||||
filename = File.join File.dirname(zip_path), File.basename(zip_path,
|
||||
".part")
|
||||
File.rename zip_path, filename
|
||||
Logger.debug "cbz File created at #{filename}"
|
||||
|
||||
zip_exception = validate_archive filename
|
||||
if !zip_exception.nil?
|
||||
@queue.add_message "The downloaded archive is corrupted. " \
|
||||
"Error: #{zip_exception}", job
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
elsif fail_count > 0
|
||||
@queue.set_status Queue::JobStatus::MissingPages, job
|
||||
else
|
||||
@queue.set_status Queue::JobStatus::Completed, job
|
||||
end
|
||||
@downloading = false
|
||||
end
|
||||
end
|
||||
|
||||
private def download_page(job : PageJob)
|
||||
Logger.debug "downloading #{job.url}"
|
||||
headers = HTTP::Headers{
|
||||
"User-agent" => "Mangadex.cr",
|
||||
}
|
||||
begin
|
||||
HTTP::Client.get job.url, headers do |res|
|
||||
unless res.success?
|
||||
raise "Failed to download page #{job.url}. " \
|
||||
"[#{res.status_code}] #{res.status_message}"
|
||||
end
|
||||
job.writer.add job.filename, res.body_io
|
||||
end
|
||||
job.success = true
|
||||
rescue e
|
||||
Logger.error e
|
||||
job.success = false
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
38
src/mango.cr
@@ -2,12 +2,12 @@ require "./config"
require "./queue"
require "./server"
require "./main_fiber"
require "./mangadex/*"
require "./plugin/*"
require "option_parser"
require "clim"
require "./plugin/*"
require "tallboy"

MANGO_VERSION = "0.12.0"
MANGO_VERSION = "0.24.0"

# From http://www.network-science.de/ascii/
BANNER = %{
@@ -53,11 +53,22 @@ class CLI < Clim
ARGV.clear

Config.load(opts.config).set_current
MangaDex::Downloader.default

# Initialize main components
LRUCache.init
Storage.default
Queue.default
Library.load_instance
Library.default
Plugin::Downloader.default

spawn do
Server.new.start
begin
Server.new.start
rescue e
Logger.fatal e
Process.exit 1
end
end

MainFiber.start_and_block
@@ -105,18 +116,13 @@ class CLI < Clim
password.not_nil!, opts.admin
when "list"
users = storage.list_users
name_length = users.map(&.[0].size).max? || 0
l_cell_width = ["username".size, name_length].max
r_cell_width = "admin access".size
header = " #{"username".ljust l_cell_width} | admin access "
puts "-" * header.size
puts header
puts "-" * header.size
users.each do |name, admin|
puts " #{name.ljust l_cell_width} | " \
"#{admin.to_s.ljust r_cell_width} "
table = Tallboy.table do
header ["username", "admin access"]
users.each do |name, admin|
row [name, admin]
end
end
puts "-" * header.size
puts table
when nil
puts opts.help_string
else

@@ -23,11 +23,6 @@ class Plugin
job
end

private def process_filename(str)
return "_" if str == ".."
str.gsub "/", "_"
end

private def download(job : Queue::Job)
@downloading = true
@queue.set_status Queue::JobStatus::Downloading, job
@@ -42,8 +37,8 @@ class Plugin

pages = info["pages"].as_i

manga_title = process_filename job.manga_title
chapter_title = process_filename info["title"].as_s
manga_title = sanitize_filename job.manga_title
chapter_title = sanitize_filename info["title"].as_s

@queue.set_pages pages, job
lib_dir = @library_path
@@ -53,7 +48,7 @@ class Plugin
end

zip_path = File.join manga_dir, "#{chapter_title}.cbz.part"
writer = Zip::Writer.new zip_path
writer = Compress::Zip::Writer.new zip_path
rescue e
@queue.set_status Queue::JobStatus::Error, job
unless e.message.nil?
@@ -66,7 +61,9 @@ class Plugin
fail_count = 0

while page = plugin.next_page
fn = process_filename page["filename"].as_s
break unless @queue.exists? job

fn = sanitize_filename page["filename"].as_s
url = page["url"].as_s
headers = HTTP::Headers.new

@@ -109,6 +106,12 @@ class Plugin
end
end

unless @queue.exists? job
Logger.debug "Download cancelled"
@downloading = false
return
end

Logger.debug "Download completed. #{fail_count}/#{pages} failed"
writer.close
filename = File.join File.dirname(zip_path), File.basename(zip_path,

@@ -117,7 +117,7 @@ class Plugin
def initialize(id : String)
Plugin.build_info_ary

@info = @@info_ary.find { |i| i.id == id }
@info = @@info_ary.find &.id.== id
if @info.nil?
raise Error.new "Plugin with ID #{id} not found"
end
@@ -257,6 +257,48 @@ class Plugin
end
sbx.put_prop_string -2, "get"

sbx.push_proc LibDUK::VARARGS do |ptr|
env = Duktape::Sandbox.new ptr
url = env.require_string 0
body = env.require_string 1

headers = HTTP::Headers.new

if env.get_top == 3
env.enum 2, LibDUK::Enum::OwnPropertiesOnly
while env.next -1, true
key = env.require_string -2
val = env.require_string -1
headers.add key, val
env.pop_2
end
end

res = HTTP::Client.post url, headers, body

env.push_object

env.push_int res.status_code
env.put_prop_string -2, "status_code"

env.push_string res.body
env.put_prop_string -2, "body"

env.push_object
res.headers.each do |k, v|
if v.size == 1
env.push_string v[0]
else
env.push_string v.join ","
end
env.put_prop_string -2, k
end
env.put_prop_string -2, "headers"

env.call_success
end
sbx.put_prop_string -2, "post"

sbx.push_proc 2 do |ptr|
env = Duktape::Sandbox.new ptr
html = env.require_string 0

19
src/queue.cr
@@ -196,6 +196,21 @@ class Queue
self.delete job.id
end

def exists?(id : String)
res = false
MainFiber.run do
DB.open "sqlite3://#{@path}" do |db|
res = db.query_one "select count(*) from queue where id = (?)", id,
as: Bool
end
end
res
end

def exists?(job : Job)
self.exists? job.id
end

def delete_status(status : JobStatus)
MainFiber.run do
DB.open "sqlite3://#{@path}" do |db|
@@ -288,12 +303,12 @@ class Queue
end

def pause
@downloaders.each { |d| d.stopped = true }
@downloaders.each &.stopped=(true)
@paused = true
end

def resume
@downloaders.each { |d| d.stopped = false }
@downloaders.each &.stopped=(false)
@paused = false
end

@@ -35,15 +35,15 @@ module Rename

class Group < Base(Pattern | String)
def render(hash : VHash)
return "" if @ary.select(&.is_a? Pattern)
return "" if @ary.select(Pattern)
.any? &.as(Pattern).render(hash).empty?
@ary.map do |e|
@ary.join do |e|
if e.is_a? Pattern
e.render hash
else
e
end
end.join
end
end
end

@@ -129,19 +129,23 @@ module Rename
end

def render(hash : VHash)
str = @ary.map do |e|
str = @ary.join do |e|
if e.is_a? String
e
else
e.render hash
end
end.join.strip
end.strip
post_process str
end

# Post-processes the generated file/folder name
# - Handles the rare case where the string is `..`
# - Removes trailing spaces and periods
# - Replace illegal characters with `_`
private def post_process(str)
return "_" if str == ".."
str.gsub "/", "_"
str.rstrip(" .").gsub /[\/?<>\\:*|"^]/, "_"
end
end
end

@@ -1,13 +1,14 @@
require "./router"

class AdminRouter < Router
struct AdminRouter
def initialize
get "/admin" do |env|
storage = Storage.default
missing_count = storage.missing_titles.size +
storage.missing_entries.size
layout "admin"
end

get "/admin/user" do |env|
users = @context.storage.list_users
users = Storage.default.list_users
username = get_username env
layout "user"
end
@@ -32,11 +33,11 @@ class AdminRouter < Router
# would not contain `admin`
admin = !env.params.body["admin"]?.nil?

@context.storage.new_user username, password, admin
Storage.default.new_user username, password, admin

redirect env, "/admin/user"
rescue e
@context.error e
Logger.error e
redirect_url = URI.new \
path: "/admin/user/edit",
query: hash_to_query({"error" => e.message})
@@ -51,12 +52,12 @@ class AdminRouter < Router
admin = !env.params.body["admin"]?.nil?
original_username = env.params.url["original_username"]

@context.storage.update_user \
Storage.default.update_user \
original_username, username, password, admin

redirect env, "/admin/user"
rescue e
@context.error e
Logger.error e
redirect_url = URI.new \
path: "/admin/user/edit",
query: hash_to_query({"username" => original_username, \
@@ -68,5 +69,9 @@ class AdminRouter < Router
mangadex_base_url = Config.current.mangadex["base_url"]
layout "download-manager"
end

get "/admin/missing" do |env|
layout "missing-items"
end
end
end

@@ -1,16 +1,100 @@
|
||||
require "./router"
|
||||
require "../mangadex/*"
|
||||
require "../upload"
|
||||
require "koa"
|
||||
require "digest"
|
||||
|
||||
struct APIRouter
|
||||
@@api_json : String?
|
||||
|
||||
API_VERSION = "0.1.0"
|
||||
|
||||
macro s(fields)
|
||||
{
|
||||
{% for field in fields %}
|
||||
{{field}} => String,
|
||||
{% end %}
|
||||
}
|
||||
end
|
||||
|
||||
class APIRouter < Router
|
||||
def initialize
|
||||
Koa.init "Mango API", version: API_VERSION, desc: <<-MD
|
||||
# A Word of Caution
|
||||
|
||||
This API was designed for internal use only, and the design doesn't comply with the resources convention of a RESTful API. Because of this, most of the API endpoints listed here will soon be updated and removed in future versions of Mango, so use them at your own risk!
|
||||
|
||||
# Authentication
|
||||
|
||||
All endpoints except `/api/login` require authentication. After logging in, your session ID would be stored as a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate the API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
|
||||
|
||||
# Terminologies
|
||||
|
||||
- Entry: An entry is a `cbz`/`cbr` file in your library. Depending on how you organize your manga collection, an entry can contain a chapter, a volume or even an entire manga.
|
||||
- Title: A title contains a list of entries and optionally some sub-titles. For example, you can have a title to store a manga, and it contains a list of sub-titles representing the volumes in the manga. Each sub-title would then contain a list of entries representing the chapters in the volume.
|
||||
- Library: The library is a collection of top-level titles, and it does not contain entries (though the titles do). A Mango instance can only have one library.
|
||||
MD
|
||||
|
||||
Koa.cookie_auth "cookie", "mango-sessid-#{Config.current.port}"
|
||||
Koa.define_tag "admin", desc: <<-MD
|
||||
These are the admin endpoints only accessible for users with admin access. A non-admin user will get HTTP 403 when calling the endpoints.
|
||||
MD
|
||||
|
||||
Koa.schema "entry", {
|
||||
"pages" => Int32,
|
||||
"mtime" => Int64,
|
||||
}.merge(s %w(zip_path title size id title_id display_name cover_url)),
|
||||
desc: "An entry in a book"
|
||||
|
||||
Koa.schema "title", {
|
||||
"mtime" => Int64,
|
||||
"entries" => ["entry"],
|
||||
"titles" => ["title"],
|
||||
"parents" => [String],
|
||||
}.merge(s %w(dir title id display_name cover_url)),
|
||||
desc: "A manga title (a collection of entries and sub-titles)"
|
||||
|
||||
Koa.schema "result", {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
}
|
||||
|
||||
Koa.describe "Authenticates a user", <<-MD
|
||||
After successful login, the cookie `mango-sessid-#{Config.current.port}` will contain a valid session ID that can be used for subsequent requests
|
||||
MD
|
||||
Koa.body schema: {
|
||||
"username" => String,
|
||||
"password" => String,
|
||||
}
|
||||
Koa.tag "users"
|
||||
post "/api/login" do |env|
|
||||
begin
|
||||
username = env.params.json["username"].as String
|
||||
password = env.params.json["password"].as String
|
||||
token = Storage.default.verify_user(username, password).not_nil!
|
||||
|
||||
env.session.string "token", token
|
||||
"Authenticated"
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 403
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns a page in a manga entry"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.path "eid", desc: "Entry ID"
|
||||
Koa.path "page", schema: Int32, desc: "The page number to return (starts from 1)"
|
||||
Koa.response 200, schema: Bytes, media_type: "image/*"
|
||||
Koa.response 500, "Page not found or not readable"
|
||||
Koa.response 304, "Page not modified (only available when `If-None-Match` is set)"
|
||||
Koa.tag "reader"
|
||||
get "/api/page/:tid/:eid/:page" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
page = env.params.url["page"].to_i
|
||||
prev_e_tag = env.request.headers["If-None-Match"]?
|
||||
|
||||
title = @context.library.get_title tid
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||
@@ -18,48 +102,165 @@ class APIRouter < Router
|
||||
raise "Failed to load page #{page} of " \
|
||||
"`#{title.title}/#{entry.title}`" if img.nil?
|
||||
|
||||
send_img env, img
|
||||
e_tag = Digest::SHA1.hexdigest img.data
|
||||
if prev_e_tag == e_tag
|
||||
env.response.status_code = 304
|
||||
""
|
||||
else
|
||||
env.response.headers["ETag"] = e_tag
|
||||
env.response.headers["Cache-Control"] = "public, max-age=86400"
|
||||
send_img env, img
|
||||
end
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
get "/api/book/:tid" do |env|
|
||||
Koa.describe "Returns the cover image of a manga entry"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.path "eid", desc: "Entry ID"
|
||||
Koa.response 200, schema: Bytes, media_type: "image/*"
|
||||
Koa.response 304, "Page not modified (only available when `If-None-Match` is set)"
|
||||
Koa.response 500, "Page not found or not readable"
|
||||
Koa.tag "library"
|
||||
get "/api/cover/:tid/:eid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
title = @context.library.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
eid = env.params.url["eid"]
|
||||
prev_e_tag = env.request.headers["If-None-Match"]?
|
||||
|
||||
send_json env, title.to_json
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||
|
||||
img = entry.get_thumbnail || entry.read_page 1
|
||||
raise "Failed to get cover of `#{title.title}/#{entry.title}`" \
|
||||
if img.nil?
|
||||
|
||||
e_tag = Digest::SHA1.hexdigest img.data
|
||||
if prev_e_tag == e_tag
|
||||
env.response.status_code = 304
|
||||
""
|
||||
else
|
||||
env.response.headers["ETag"] = e_tag
|
||||
send_img env, img
|
||||
end
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
get "/api/book" do |env|
|
||||
send_json env, @context.library.to_json
|
||||
Koa.describe "Returns the book with title `tid`", <<-MD
|
||||
- Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
|
||||
- Supply the `depth` query parameter to control the depth of nested titles to return.
|
||||
- When `depth` is 1, returns the top-level titles and sub-titles/entries one level in them
|
||||
- When `depth` is 0, returns the top-level titles without their sub-titles/entries
|
||||
- When `depth` is N, returns the top-level titles and sub-titles/entries N levels in them
|
||||
- When `depth` is negative, returns the entire library
|
||||
MD
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.query "slim"
|
||||
Koa.query "depth"
|
||||
Koa.query "sort", desc: "Sorting option for entries. Can be one of 'auto', 'title', 'progress', 'time_added' and 'time_modified'"
|
||||
Koa.query "ascend", desc: "Sorting direction for entries. Set to 0 for the descending order. Doesn't work without specifying 'sort'"
|
||||
Koa.response 200, schema: "title"
|
||||
Koa.response 404, "Title not found"
|
||||
Koa.tag "library"
|
||||
get "/api/book/:tid" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.new
|
||||
get_sort_opt
|
||||
|
||||
tid = env.params.url["tid"]
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
|
||||
slim = !env.params.query["slim"]?.nil?
|
||||
depth = env.params.query["depth"]?.try(&.to_i?) || -1
|
||||
|
||||
send_json env, title.build_json(slim: slim, depth: depth,
|
||||
sort_context: {username: username,
|
||||
opt: sort_opt})
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 404
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the entire library with all titles and entries", <<-MD
|
||||
- Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
|
||||
- Supply the `dpeth` query parameter to control the depth of nested titles to return.
|
||||
- When `depth` is 1, returns the requested title and sub-titles/entries one level in it
|
||||
- When `depth` is 0, returns the requested title without its sub-titles/entries
|
||||
- When `depth` is N, returns the requested title and sub-titles/entries N levels in it
|
||||
- When `depth` is negative, returns the requested title and all sub-titles/entries in it
|
||||
MD
|
||||
Koa.query "slim"
|
||||
Koa.query "depth"
|
||||
Koa.response 200, schema: {
|
||||
"dir" => String,
|
||||
"titles" => ["title"],
|
||||
}
|
||||
Koa.tag "library"
|
||||
get "/api/library" do |env|
|
||||
slim = !env.params.query["slim"]?.nil?
|
||||
depth = env.params.query["depth"]?.try(&.to_i?) || -1
|
||||
|
||||
send_json env, Library.default.build_json(slim: slim, depth: depth)
|
||||
end
|
||||
|
||||
Koa.describe "Triggers a library scan"
|
||||
Koa.tags ["admin", "library"]
|
||||
Koa.response 200, schema: {
|
||||
"milliseconds" => Float64,
|
||||
"titles" => Int32,
|
||||
}
|
||||
post "/api/admin/scan" do |env|
|
||||
start = Time.utc
|
||||
@context.library.scan
|
||||
Library.default.scan
|
||||
ms = (Time.utc - start).total_milliseconds
|
||||
send_json env, {
|
||||
"milliseconds" => ms,
|
||||
"titles" => @context.library.titles.size,
|
||||
"titles" => Library.default.titles.size,
|
||||
}.to_json
|
||||
end
|
||||
|
||||
post "/api/admin/user/delete/:username" do |env|
|
||||
Koa.describe "Returns the thumbnail generation progress between 0 and 1"
|
||||
Koa.tags ["admin", "library"]
|
||||
Koa.response 200, schema: {
|
||||
"progress" => Float64,
|
||||
}
|
||||
get "/api/admin/thumbnail_progress" do |env|
|
||||
send_json env, {
|
||||
"progress" => Library.default.thumbnail_generation_progress,
|
||||
}.to_json
|
||||
end
|
||||
|
||||
Koa.describe "Triggers a thumbnail generation"
|
||||
Koa.tags ["admin", "library"]
|
||||
post "/api/admin/generate_thumbnails" do |env|
|
||||
spawn do
|
||||
Library.default.generate_thumbnails
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Deletes a user with `username`"
|
||||
Koa.tags ["admin", "users"]
|
||||
Koa.response 200, schema: "result"
|
||||
delete "/api/admin/user/delete/:username" do |env|
|
||||
begin
|
||||
username = env.params.url["username"]
|
||||
@context.storage.delete_user username
|
||||
Storage.default.delete_user username
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -69,13 +270,25 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
post "/api/progress/:title/:page" do |env|
|
||||
Koa.describe "Updates the reading progress of an entry or the whole title for the current user", <<-MD
|
||||
When `eid` is provided, sets the reading progress of the entry to `page`.
|
||||
|
||||
When `eid` is omitted, updates the progress of the entire title. Specifically:
|
||||
|
||||
- if `page` is 0, marks the entire title as unread
|
||||
- otherwise, marks the entire title as read
|
||||
MD
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.query "eid", desc: "Entry ID", required: false
|
||||
Koa.path "page", desc: "The new page number indicating the progress"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tag "progress"
|
||||
put "/api/progress/:tid/:page" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
title = (@context.library.get_title env.params.url["title"])
|
||||
.not_nil!
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
page = env.params.url["page"].to_i
|
||||
entry_id = env.params.query["entry"]?
|
||||
entry_id = env.params.query["eid"]?
|
||||
|
||||
if !entry_id.nil?
|
||||
entry = title.get_entry(entry_id).not_nil!
|
||||
@@ -87,7 +300,7 @@ class APIRouter < Router
|
||||
title.read_all username
|
||||
end
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -97,10 +310,18 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
post "/api/bulk-progress/:action/:title" do |env|
|
||||
Koa.describe "Updates the reading progress of multiple entries in a title"
|
||||
Koa.path "action", desc: "The action to perform. Can be either `read` or `unread`"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.body schema: {
|
||||
"ids" => [String],
|
||||
}, desc: "An array of entry IDs"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tag "progress"
|
||||
put "/api/bulk_progress/:action/:tid" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
title = (@context.library.get_title env.params.url["title"]).not_nil!
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
action = env.params.url["action"]
|
||||
ids = env.params.json["ids"].as(Array).map &.as_s
|
||||
|
||||
@@ -109,7 +330,7 @@ class APIRouter < Router
|
||||
end
|
||||
title.bulk_progress action, ids, username
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -119,12 +340,20 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
post "/api/admin/display_name/:title/:name" do |env|
|
||||
Koa.describe "Sets the display name of a title or an entry", <<-MD
|
||||
When `eid` is provided, apply the display name to the entry. Otherwise, apply the display name to the title identified by `tid`.
|
||||
MD
|
||||
Koa.tags ["admin", "library"]
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.query "eid", desc: "Entry ID", required: false
|
||||
Koa.path "name", desc: "The new display name"
|
||||
Koa.response 200, schema: "result"
|
||||
put "/api/admin/display_name/:tid/:name" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["title"])
|
||||
title = (Library.default.get_title env.params.url["tid"])
|
||||
.not_nil!
|
||||
name = env.params.url["name"]
|
||||
entry = env.params.query["entry"]?
|
||||
entry = env.params.query["eid"]?
|
||||
if entry.nil?
|
||||
title.set_display_name name
|
||||
else
|
||||
@@ -132,7 +361,7 @@ class APIRouter < Router
|
||||
title.set_display_name eobj.not_nil!.title, name
|
||||
end
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -142,51 +371,42 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
get "/api/admin/mangadex/manga/:id" do |env|
|
||||
begin
|
||||
id = env.params.url["id"]
|
||||
api = MangaDex::API.default
|
||||
manga = api.get_manga id
|
||||
send_json env, manga.to_info_json
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {"error" => e.message}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
post "/api/admin/mangadex/download" do |env|
|
||||
begin
|
||||
chapters = env.params.json["chapters"].as(Array).map { |c| c.as_h }
|
||||
jobs = chapters.map { |chapter|
|
||||
Queue::Job.new(
|
||||
chapter["id"].as_s,
|
||||
chapter["manga_id"].as_s,
|
||||
chapter["full_title"].as_s,
|
||||
chapter["manga_title"].as_s,
|
||||
Queue::JobStatus::Pending,
|
||||
Time.unix chapter["time"].as_s.to_i
|
||||
)
|
||||
}
|
||||
inserted_count = @context.queue.push jobs
|
||||
send_json env, {
|
||||
"success": inserted_count,
|
||||
"fail": jobs.size - inserted_count,
|
||||
}.to_json
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {"error" => e.message}.to_json
|
||||
ws "/api/admin/mangadex/queue" do |socket, env|
|
||||
interval_raw = env.params.query["interval"]?
|
||||
interval = (interval_raw.to_i? if interval_raw) || 5
|
||||
loop do
|
||||
socket.send({
|
||||
"jobs" => Queue.default.get_all.reverse,
|
||||
"paused" => Queue.default.paused?,
|
||||
}.to_json)
|
||||
sleep interval.seconds
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the current download queue", <<-MD
|
||||
On error, returns a JSON that contains the error message in the `error` field.
|
||||
MD
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"paused" => Bool?,
|
||||
"jobs?" => [{
|
||||
"pages" => Int32,
|
||||
"success_count" => Int32,
|
||||
"fail_count" => Int32,
|
||||
"time" => Int64,
|
||||
}.merge(s %w(id manga_id title manga_title status_message status))],
|
||||
}
|
||||
get "/api/admin/mangadex/queue" do |env|
|
||||
begin
|
||||
jobs = @context.queue.get_all
|
||||
send_json env, {
|
||||
"jobs" => jobs,
|
||||
"paused" => @context.queue.paused?,
|
||||
"jobs" => Queue.default.get_all.reverse,
|
||||
"paused" => Queue.default.paused?,
|
||||
"success" => true,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -194,6 +414,19 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Perform an action on a download job or all jobs in the queue", <<-MD
|
||||
The `action` parameter can be `delete`, `retry`, `pause` or `resume`.
|
||||
|
||||
When `action` is `pause` or `resume`, pauses or resumes the download queue, respectively.
|
||||
|
||||
When `action` is set to `delete`, the behavior depends on `id`. If `id` is provided, deletes the specific job identified by the ID. Otherwise, deletes all **completed** jobs in the queue.
|
||||
|
||||
When `action` is set to `retry`, the behavior depends on `id`. If `id` is provided, restarts the job identified by the ID. Otherwise, retries all jobs in the `Error` or `MissingPages` status in the queue.
|
||||
MD
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.path "action", desc: "The action to perform. It should be one of the followins: `delete`, `retry`, `pause` and `resume`."
|
||||
Koa.query "id", required: false, desc: "A job ID"
|
||||
Koa.response 200, schema: "result"
|
||||
post "/api/admin/mangadex/queue/:action" do |env|
|
||||
begin
|
||||
action = env.params.url["action"]
|
||||
@@ -201,26 +434,27 @@ class APIRouter < Router
|
||||
case action
|
||||
when "delete"
|
||||
if id.nil?
|
||||
@context.queue.delete_status Queue::JobStatus::Completed
|
||||
Queue.default.delete_status Queue::JobStatus::Completed
|
||||
else
|
||||
@context.queue.delete id
|
||||
Queue.default.delete id
|
||||
end
|
||||
when "retry"
|
||||
if id.nil?
|
||||
@context.queue.reset
|
||||
Queue.default.reset
|
||||
else
|
||||
@context.queue.reset id
|
||||
Queue.default.reset id
|
||||
end
|
||||
when "pause"
|
||||
@context.queue.pause
|
||||
Queue.default.pause
|
||||
when "resume"
|
||||
@context.queue.resume
|
||||
Queue.default.resume
|
||||
else
|
||||
raise "Unknown queue action #{action}"
|
||||
end
|
||||
|
||||
send_json env, {"success" => true}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -228,6 +462,24 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Uploads a file to the server", <<-MD
|
||||
Currently the only supported value for the `target` parameter is `cover`.
|
||||
|
||||
### Cover
|
||||
|
||||
Uploads a cover image for a title or an entry.
|
||||
|
||||
Query parameters:
|
||||
- `tid`: A title ID
|
||||
- `eid`: (Optional) An entry ID
|
||||
|
||||
When `eid` is omitted, the new cover image will be applied to the title. Otherwise, applies the image to the specified entry.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.body media_type: "multipart/form-data", schema: {
|
||||
"file" => Bytes,
|
||||
}
|
||||
Koa.response 200, schema: "result"
|
||||
post "/api/admin/upload/:target" do |env|
|
||||
begin
|
||||
target = env.params.url["target"]
|
||||
@@ -242,9 +494,9 @@ class APIRouter < Router
|
||||
|
||||
case target
|
||||
when "cover"
|
||||
title_id = env.params.query["title"]
|
||||
entry_id = env.params.query["entry"]?
|
||||
title = @context.library.get_title(title_id).not_nil!
|
||||
title_id = env.params.query["tid"]
|
||||
entry_id = env.params.query["eid"]?
|
||||
title = Library.default.get_title(title_id).not_nil!
|
||||
|
||||
unless SUPPORTED_IMG_TYPES.includes? \
|
||||
MIME.from_filename? filename
|
||||
@@ -275,6 +527,7 @@ class APIRouter < Router
|
||||
|
||||
raise "No part with name `file` found"
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -282,10 +535,23 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
post "/api/admin/plugin/list" do |env|
|
||||
Koa.describe "Lists the chapters in a title from a plugin"
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.query "plugin", schema: String
|
||||
Koa.query "query", schema: String
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"chapters?" => [{
|
||||
"id" => String,
|
||||
"title" => String,
|
||||
}],
|
||||
"title" => String?,
|
||||
}
|
||||
get "/api/admin/plugin/list" do |env|
|
||||
begin
|
||||
query = env.params.json["query"].as String
|
||||
plugin = Plugin.new env.params.json["plugin"].as String
|
||||
query = env.params.query["query"].as String
|
||||
plugin = Plugin.new env.params.query["plugin"].as String
|
||||
|
||||
json = plugin.list_chapters query
|
||||
chapters = json["chapters"]
|
||||
@@ -297,6 +563,7 @@ class APIRouter < Router
|
||||
"title" => title,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -304,6 +571,20 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Adds a list of chapters from a plugin to the download queue"
|
||||
Koa.tags ["admin", "downloader"]
|
||||
Koa.body schema: {
|
||||
"plugin" => String,
|
||||
"title" => String,
|
||||
"chapters" => [{
|
||||
"id" => String,
|
||||
"title" => String,
|
||||
}],
|
||||
}
|
||||
Koa.response 200, schema: {
|
||||
"success" => Int32,
|
||||
"fail" => Int32,
|
||||
}
|
||||
post "/api/admin/plugin/download" do |env|
|
||||
begin
|
||||
plugin = Plugin.new env.params.json["plugin"].as String
|
||||
@@ -320,12 +601,13 @@ class APIRouter < Router
|
||||
Time.utc
|
||||
)
|
||||
}
|
||||
inserted_count = @context.queue.push jobs
|
||||
inserted_count = Queue.default.push jobs
|
||||
send_json env, {
|
||||
"success": inserted_count,
|
||||
"fail": jobs.size - inserted_count,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
@@ -333,27 +615,311 @@ class APIRouter < Router
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the image dimensions of all pages in an entry"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.path "eid", desc: "An entry ID"
|
||||
Koa.tag "reader"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"dimensions?" => [{
|
||||
"width" => Int32,
|
||||
"height" => Int32,
|
||||
}],
|
||||
}
|
||||
Koa.response 304, "Not modified (only available when `If-None-Match` is set)"
|
||||
get "/api/dimensions/:tid/:eid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
prev_e_tag = env.request.headers["If-None-Match"]?
|
||||
|
||||
title = @context.library.get_title tid
|
||||
title = Library.default.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||
|
||||
sizes = entry.page_dimensions
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"dimensions" => sizes,
|
||||
}.to_json
|
||||
file_hash = Digest::SHA1.hexdigest (entry.zip_path + entry.mtime.to_s)
|
||||
e_tag = "W/#{file_hash}"
|
||||
if e_tag == prev_e_tag
|
||||
env.response.status_code = 304
|
||||
""
|
||||
else
|
||||
sizes = entry.page_dimensions
|
||||
env.response.headers["ETag"] = e_tag
|
||||
env.response.headers["Cache-Control"] = "public, max-age=86400"
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"dimensions" => sizes,
|
||||
}.to_json
|
||||
end
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Downloads an entry"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.path "eid", desc: "An entry ID"
|
||||
Koa.response 200, schema: Bytes
|
||||
Koa.response 404, "Entry not found"
|
||||
Koa.tags ["library", "reader"]
|
||||
get "/api/download/:tid/:eid" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
entry = (title.get_entry env.params.url["eid"]).not_nil!
|
||||
|
||||
send_attachment env, entry.zip_path
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Gets the tags of a title"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"tags" => [String?],
|
||||
}
|
||||
Koa.tags ["library", "tags"]
|
||||
get "/api/tags/:tid" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
tags = title.tags
|
||||
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"tags" => tags,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns all tags"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"tags" => [String?],
|
||||
}
|
||||
Koa.tags ["library", "tags"]
|
||||
get "/api/tags" do |env|
|
||||
begin
|
||||
tags = Storage.default.list_tags
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"tags" => tags,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Adds a new tag to a title"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library", "tags"]
|
||||
put "/api/admin/tags/:tid/:tag" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
tag = env.params.url["tag"]
|
||||
|
||||
title.add_tag tag
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Deletes a tag from a title"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library", "tags"]
|
||||
delete "/api/admin/tags/:tid/:tag" do |env|
|
||||
begin
|
||||
title = (Library.default.get_title env.params.url["tid"]).not_nil!
|
||||
tag = env.params.url["tag"]
|
||||
|
||||
title.delete_tag tag
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Lists all missing titles"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"titles?" => [{
|
||||
"path" => String,
|
||||
"id" => String,
|
||||
"signature" => String,
|
||||
}],
|
||||
}
|
||||
Koa.tags ["admin", "library"]
|
||||
get "/api/admin/titles/missing" do |env|
|
||||
begin
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
"titles" => Storage.default.missing_titles,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Lists all missing entries"
|
||||
Koa.response 200, schema: {
|
||||
"success" => Bool,
|
||||
"error" => String?,
|
||||
"entries?" => [{
|
||||
"path" => String,
|
||||
"id" => String,
|
||||
"signature" => String,
|
||||
}],
|
||||
}
|
||||
Koa.tags ["admin", "library"]
|
||||
get "/api/admin/entries/missing" do |env|
|
||||
begin
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
"entries" => Storage.default.missing_entries,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Deletes all missing titles"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/titles/missing" do |env|
|
||||
begin
|
||||
Storage.default.delete_missing_title
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Deletes all missing entries"
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/entries/missing" do |env|
|
||||
begin
|
||||
Storage.default.delete_missing_entry
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Deletes a missing title identified by `tid`", <<-MD
|
||||
Does nothing if the given `tid` is not found or if the title is not missing.
|
||||
MD
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/titles/missing/:tid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
Storage.default.delete_missing_title tid
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Deletes a missing entry identified by `eid`", <<-MD
|
||||
Does nothing if the given `eid` is not found or if the entry is not missing.
|
||||
MD
|
||||
Koa.response 200, schema: "result"
|
||||
Koa.tags ["admin", "library"]
|
||||
delete "/api/admin/entries/missing/:eid" do |env|
|
||||
begin
|
||||
eid = env.params.url["eid"]
|
||||
Storage.default.delete_missing_entry eid
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"error" => nil,
|
||||
}.to_json
|
||||
rescue e
|
||||
Logger.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
doc = Koa.generate
|
||||
@@api_json = doc.to_json if doc
|
||||
|
||||
get "/openapi.json" do |env|
|
||||
if @@api_json
|
||||
send_json env, @@api_json
|
||||
else
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
require "./router"
|
||||
|
||||
class MainRouter < Router
|
||||
struct MainRouter
|
||||
def initialize
|
||||
get "/login" do |env|
|
||||
base_url = Config.current.base_url
|
||||
@@ -11,7 +9,7 @@ class MainRouter < Router
|
||||
begin
|
||||
env.session.delete_string "token"
|
||||
rescue e
|
||||
@context.error "Error when attempting to log out: #{e}"
|
||||
Logger.error "Error when attempting to log out: #{e}"
|
||||
ensure
|
||||
redirect env, "/login"
|
||||
end
|
||||
@@ -21,7 +19,7 @@ class MainRouter < Router
|
||||
begin
|
||||
username = env.params.body["username"]
|
||||
password = env.params.body["password"]
|
||||
token = @context.storage.verify_user(username, password).not_nil!
|
||||
token = Storage.default.verify_user(username, password).not_nil!
|
||||
|
||||
env.session.string "token", token
|
||||
|
||||
@@ -32,7 +30,8 @@ class MainRouter < Router
|
||||
else
|
||||
redirect env, "/"
|
||||
end
|
||||
rescue
|
||||
rescue e
|
||||
Logger.error e
|
||||
redirect env, "/login"
|
||||
end
|
||||
end
|
||||
@@ -41,43 +40,38 @@ class MainRouter < Router
|
||||
begin
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.from_info_json @context.library.dir, username
|
||||
get_sort_opt
|
||||
sort_opt = SortOptions.from_info_json Library.default.dir, username
|
||||
get_and_save_sort_opt Library.default.dir
|
||||
|
||||
titles = @context.library.sorted_titles username, sort_opt
|
||||
titles = Library.default.sorted_titles username, sort_opt
|
||||
percentage = titles.map &.load_percentage username
|
||||
|
||||
layout "library"
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
|
||||
get "/book/:title" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["title"]).not_nil!
|
||||
title = (Library.default.get_title env.params.url["title"]).not_nil!
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.from_info_json title.dir, username
|
||||
get_sort_opt
|
||||
get_and_save_sort_opt title.dir
|
||||
|
||||
entries = title.sorted_entries username, sort_opt
|
||||
|
||||
percentage = title.load_percentage_for_all_entries username, sort_opt
|
||||
title_percentage = title.titles.map &.load_percentage username
|
||||
|
||||
layout "title"
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
|
||||
get "/download" do |env|
|
||||
mangadex_base_url = Config.current.mangadex["base_url"]
|
||||
layout "download"
|
||||
end
|
||||
|
||||
get "/download/plugins" do |env|
|
||||
begin
|
||||
id = env.params.query["plugin"]?
|
||||
@@ -92,7 +86,7 @@ class MainRouter < Router
|
||||
|
||||
layout "plugin-download"
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
@@ -100,18 +94,64 @@ class MainRouter < Router
|
||||
get "/" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
continue_reading = @context
|
||||
.library.get_continue_reading_entries username
|
||||
recently_added = @context.library.get_recently_added_entries username
|
||||
start_reading = @context.library.get_start_reading_titles username
|
||||
titles = @context.library.titles
|
||||
new_user = !titles.any? { |t| t.load_percentage(username) > 0 }
|
||||
continue_reading = Library.default
|
||||
.get_continue_reading_entries username
|
||||
recently_added = Library.default.get_recently_added_entries username
|
||||
start_reading = Library.default.get_start_reading_titles username
|
||||
titles = Library.default.titles
|
||||
new_user = !titles.any? &.load_percentage(username).> 0
|
||||
empty_library = titles.size == 0
|
||||
layout "home"
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
|
||||
get "/tags/:tag" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
tag = env.params.url["tag"]
|
||||
|
||||
sort_opt = SortOptions.new
|
||||
get_sort_opt
|
||||
|
||||
title_ids = Storage.default.get_tag_titles tag
|
||||
|
||||
raise "Tag #{tag} not found" if title_ids.empty?
|
||||
|
||||
titles = title_ids.map { |id| Library.default.get_title id }
|
||||
.select Title
|
||||
|
||||
titles = sort_titles titles, sort_opt, username
|
||||
percentage = titles.map &.load_percentage username
|
||||
|
||||
layout "tag"
|
||||
rescue e
|
||||
Logger.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
|
||||
get "/tags" do |env|
|
||||
tags = Storage.default.list_tags.map do |tag|
|
||||
{
|
||||
tag: tag,
|
||||
encoded_tag: URI.encode_www_form(tag, space_to_plus: false),
|
||||
count: Storage.default.get_tag_titles(tag).size,
|
||||
}
|
||||
end
|
||||
# Sort by :count reversly, and then sort by :tag
|
||||
tags.sort! do |a, b|
|
||||
(b[:count] <=> a[:count]).or(a[:tag] <=> b[:tag])
|
||||
end
|
||||
|
||||
layout "tags"
|
||||
end
|
||||
|
||||
get "/api" do |env|
|
||||
base_url = Config.current.base_url
|
||||
render "src/views/api.html.ecr"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,30 +1,16 @@
require "./router"

class OPDSRouter < Router
struct OPDSRouter
def initialize
get "/opds" do |env|
titles = @context.library.titles
titles = Library.default.titles
render_xml "src/views/opds/index.xml.ecr"
end

get "/opds/book/:title_id" do |env|
begin
title = @context.library.get_title(env.params.url["title_id"]).not_nil!
title = Library.default.get_title(env.params.url["title_id"]).not_nil!
render_xml "src/views/opds/title.xml.ecr"
rescue e
@context.error e
env.response.status_code = 404
end
end

get "/opds/download/:title/:entry" do |env|
begin
title = (@context.library.get_title env.params.url["title"]).not_nil!
entry = (title.get_entry env.params.url["entry"]).not_nil!

send_attachment env, entry.zip_path
rescue e
@context.error e
Logger.error e
env.response.status_code = 404
end
end
|
||||
@@ -1,25 +1,23 @@
|
||||
require "./router"
|
||||
|
||||
class ReaderRouter < Router
|
||||
struct ReaderRouter
|
||||
def initialize
|
||||
get "/reader/:title/:entry" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
|
||||
title = (@context.library.get_title env.params.url["title"]).not_nil!
|
||||
title = (Library.default.get_title env.params.url["title"]).not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
|
||||
next layout "reader-error" if entry.err_msg
|
||||
|
||||
# load progress
|
||||
page = entry.load_progress username
|
||||
page_idx = [1, entry.load_progress username].max
|
||||
|
||||
# start from page 1 if the user has finished reading the entry
|
||||
page = 1 if entry.finished? username
|
||||
page_idx = 1 if entry.finished? username
|
||||
|
||||
redirect env, "/reader/#{title.id}/#{entry.id}/#{page}"
|
||||
redirect env, "/reader/#{title.id}/#{entry.id}/#{page_idx}"
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
@@ -30,22 +28,31 @@ class ReaderRouter < Router
|
||||
|
||||
username = get_username env
|
||||
|
||||
title = (@context.library.get_title env.params.url["title"]).not_nil!
|
||||
title = (Library.default.get_title env.params.url["title"]).not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
page = env.params.url["page"].to_i
|
||||
raise "" if page > entry.pages || page <= 0
|
||||
|
||||
sort_opt = SortOptions.from_info_json title.dir, username
|
||||
get_sort_opt
|
||||
entries = title.sorted_entries username, sort_opt
|
||||
|
||||
page_idx = env.params.url["page"].to_i
|
||||
if page_idx > entry.pages || page_idx <= 0
|
||||
raise "Page #{page_idx} not found."
|
||||
end
|
||||
|
||||
exit_url = "#{base_url}book/#{title.id}"
|
||||
|
||||
next_entry_url = nil
|
||||
next_entry = entry.next_entry username
|
||||
unless next_entry.nil?
|
||||
next_entry_url = "#{base_url}reader/#{title.id}/#{next_entry.id}"
|
||||
next_entry_url = entry.next_entry(username).try do |e|
|
||||
"#{base_url}reader/#{title.id}/#{e.id}"
|
||||
end
|
||||
|
||||
previous_entry_url = entry.previous_entry(username).try do |e|
|
||||
"#{base_url}reader/#{title.id}/#{e.id}"
|
||||
end
|
||||
|
||||
render "src/views/reader.html.ecr"
|
||||
rescue e
|
||||
@context.error e
|
||||
Logger.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
class Router
|
||||
@context : Context = Context.default
|
||||
end
|
||||
@@ -5,34 +5,8 @@ require "./handlers/*"
|
||||
require "./util/*"
|
||||
require "./routes/*"
|
||||
|
||||
class Context
|
||||
property library : Library
|
||||
property storage : Storage
|
||||
property queue : Queue
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
@storage = Storage.default
|
||||
@library = Library.default
|
||||
@queue = Queue.default
|
||||
end
|
||||
|
||||
{% for lvl in Logger::LEVELS %}
|
||||
def {{lvl.id}}(msg)
|
||||
Logger.{{lvl.id}} msg
|
||||
end
|
||||
{% end %}
|
||||
end
|
||||
|
||||
class Server
|
||||
@context : Context = Context.default
|
||||
|
||||
def initialize
|
||||
error 403 do |env|
|
||||
message = "HTTP 403: You are not authorized to visit #{env.request.path}"
|
||||
layout "message"
|
||||
end
|
||||
error 404 do |env|
|
||||
message = "HTTP 404: Mango cannot find the page #{env.request.path}"
|
||||
layout "message"
|
||||
@@ -53,11 +27,11 @@ class Server
|
||||
|
||||
Kemal.config.logging = false
|
||||
add_handler LogHandler.new
|
||||
add_handler AuthHandler.new @context.storage
|
||||
add_handler AuthHandler.new
|
||||
add_handler UploadHandler.new Config.current.upload_path
|
||||
{% if flag?(:release) %}
|
||||
# when building for release, embed the static files in the binary
|
||||
@context.debug "We are in release mode. Using embedded static files."
|
||||
Logger.debug "We are in release mode. Using embedded static files."
|
||||
serve_static false
|
||||
add_handler StaticHandler.new
|
||||
{% end %}
|
||||
@@ -71,10 +45,11 @@ class Server
|
||||
end
|
||||
|
||||
def start
|
||||
@context.debug "Starting Kemal server"
|
||||
Logger.debug "Starting Kemal server"
|
||||
{% if flag?(:release) %}
|
||||
Kemal.config.env = "production"
|
||||
{% end %}
|
||||
Kemal.config.host_binding = Config.current.host
|
||||
Kemal.config.port = Config.current.port
|
||||
Kemal.run
|
||||
end
|
||||
|
||||
src/storage.cr (393 changed lines)
@@ -3,6 +3,8 @@ require "crypto/bcrypt"
|
||||
require "uuid"
|
||||
require "base64"
|
||||
require "./util/*"
|
||||
require "mg"
|
||||
require "../migration/*"
|
||||
|
||||
def hash_password(pw)
|
||||
Crypto::Bcrypt::Password.create(pw).to_s
|
||||
@@ -13,13 +15,16 @@ def verify_password(hash, pw)
|
||||
end
|
||||
|
||||
class Storage
|
||||
@@insert_entry_ids = [] of IDTuple
|
||||
@@insert_title_ids = [] of IDTuple
|
||||
|
||||
@path : String
|
||||
@db : DB::Database?
|
||||
@insert_ids = [] of IDTuple
|
||||
|
||||
alias IDTuple = NamedTuple(path: String,
|
||||
alias IDTuple = NamedTuple(
|
||||
path: String,
|
||||
id: String,
|
||||
is_title: Bool)
|
||||
signature: String?)
|
||||
|
||||
use_default
|
||||
|
||||
@@ -29,39 +34,20 @@ class Storage
|
||||
dir = File.dirname @path
|
||||
unless Dir.exists? dir
|
||||
Logger.info "The DB directory #{dir} does not exist. " \
|
||||
"Attepmting to create it"
|
||||
"Attempting to create it"
|
||||
Dir.mkdir_p dir
|
||||
end
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
# We create the `ids` table first. Even if the user has an
|
||||
# early version installed and has the `user` table only,
|
||||
# we will still be able to create `ids`
|
||||
db.exec "create table ids" \
|
||||
"(path text, id text, is_title integer)"
|
||||
db.exec "create unique index path_idx on ids (path)"
|
||||
db.exec "create unique index id_idx on ids (id)"
|
||||
|
||||
db.exec "create table users" \
|
||||
"(username text, password text, token text, admin integer)"
|
||||
MG::Migration.new(db, log: Logger.default.raw_log).migrate
|
||||
rescue e
|
||||
unless e.message.not_nil!.ends_with? "already exists"
|
||||
Logger.fatal "Error when checking tables in DB: #{e}"
|
||||
raise e
|
||||
end
|
||||
|
||||
# If the DB is initialized through CLI but no user is added, we need
|
||||
# to create the admin user when first starting the app
|
||||
user_count = db.query_one "select count(*) from users", as: Int32
|
||||
init_admin if init_user && user_count == 0
|
||||
else
|
||||
Logger.debug "Creating DB file at #{@path}"
|
||||
db.exec "create unique index username_idx on users (username)"
|
||||
db.exec "create unique index token_idx on users (token)"
|
||||
|
||||
init_admin if init_user
|
||||
Logger.fatal "DB migration failed. #{e}"
|
||||
raise e
|
||||
end
|
||||
|
||||
user_count = db.query_one "select count(*) from users", as: Int32
|
||||
init_admin if init_user && user_count == 0
|
||||
end
|
||||
unless @auto_close
|
||||
@db = DB.open "sqlite3://#{@path}"
|
||||
@@ -81,13 +67,37 @@ class Storage
|
||||
private def get_db(&block : DB::Database ->)
|
||||
if @db.nil?
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "PRAGMA foreign_keys = 1"
|
||||
yield db
|
||||
end
|
||||
else
|
||||
@db.not_nil!.exec "PRAGMA foreign_keys = 1"
|
||||
yield @db.not_nil!
|
||||
end
|
||||
end
|
||||
|
||||
def username_exists(username)
|
||||
exists = false
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
exists = db.query_one("select count(*) from users where " \
|
||||
"username = (?)", username, as: Int32) > 0
|
||||
end
|
||||
end
|
||||
exists
|
||||
end
|
||||
|
||||
def username_is_admin(username)
|
||||
is_admin = false
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
is_admin = db.query_one("select admin from users where " \
|
||||
"username = (?)", username, as: Int32) > 0
|
||||
end
|
||||
end
|
||||
is_admin
|
||||
end
|
||||
|
||||
def verify_user(username, password)
|
||||
out_token = nil
|
||||
MainFiber.run do
|
||||
@@ -214,35 +224,340 @@ class Storage
|
||||
end
|
||||
end
|
||||
|
||||
def get_id(path, is_title)
|
||||
def get_title_id(path, signature)
|
||||
id = nil
|
||||
path = Path.new(path).relative_to(Config.current.library_path).to_s
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
id = db.query_one? "select id from ids where path = (?)", path,
|
||||
as: {String}
|
||||
# First attempt to find the matching title in DB using BOTH path
|
||||
# and signature
|
||||
id = db.query_one? "select id from titles where path = (?) and " \
|
||||
"signature = (?) and unavailable = 0",
|
||||
path, signature.to_s, as: String
|
||||
|
||||
should_update = id.nil?
|
||||
# If it fails, try to match using the path only. This could happen
|
||||
# for example when a new entry is added to the title
|
||||
id ||= db.query_one? "select id from titles where path = (?)", path,
|
||||
as: String
|
||||
|
||||
# If it still fails, we will have to rely on the signature values.
|
||||
# This could happen when the user moved or renamed the title, or
|
||||
# a folder containing the title
|
||||
unless id
|
||||
# If there are multiple rows with the same signature (this could
|
||||
# happen simply by bad luck, or when the user copied a title),
|
||||
# pick the row that has the most similar path to the given path
|
||||
rows = [] of Tuple(String, String)
|
||||
db.query "select id, path from titles where signature = (?)",
|
||||
signature.to_s do |rs|
|
||||
rs.each do
|
||||
rows << {rs.read(String), rs.read(String)}
|
||||
end
|
||||
end
|
||||
row = rows.max_by?(&.[1].components_similarity(path))
|
||||
id = row[0] if row
|
||||
end
|
||||
|
||||
# At this point, `id` would still be nil if there's no row matching
|
||||
# either the path or the signature
|
||||
|
||||
# If we did identify a matching title, save the path and signature
|
||||
# values back to the DB
|
||||
if id && should_update
|
||||
db.exec "update titles set path = (?), signature = (?), " \
|
||||
"unavailable = 0 where id = (?)", path, signature.to_s, id
|
||||
end
|
||||
end
|
||||
end
|
||||
id
|
||||
end
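The comments above describe a three-step fallback: exact path-plus-signature match, then path only, then signature with the most similar path. A minimal standalone sketch of that order, using an in-memory array instead of SQLite (all names here are illustrative, not part of the codebase):

record Row, id : String, path : String, signature : String

# Count matching trailing path components, as String#components_similarity does.
def path_similarity(a : String, b : String) : Float64
  s, l = [Path[a].parts, Path[b].parts].sort_by! &.size
  s.reverse.zip(l.reverse).count { |x, y| x == y } / s.size
end

def locate(rows : Array(Row), path : String, signature : String) : String?
  # 1. Nothing moved or changed: path and signature both match.
  rows.find { |r| r.path == path && r.signature == signature }.try { |r| return r.id }
  # 2. Path matches but the signature changed (e.g. a new entry was added).
  rows.find { |r| r.path == path }.try { |r| return r.id }
  # 3. Signature matches but the path changed (title moved or renamed):
  #    prefer the row whose stored path looks most like the new one.
  rows.select { |r| r.signature == signature }
    .max_by? { |r| path_similarity r.path, path }
    .try &.id
end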
|
||||
|
||||
def insert_id(tp : IDTuple)
|
||||
@insert_ids << tp
|
||||
# See the comments in `#get_title_id` for how this method works.
|
||||
def get_entry_id(path, signature)
|
||||
id = nil
|
||||
path = Path.new(path).relative_to(Config.current.library_path).to_s
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
id = db.query_one? "select id from ids where path = (?) and " \
|
||||
"signature = (?) and unavailable = 0",
|
||||
path, signature.to_s, as: String
|
||||
|
||||
should_update = id.nil?
|
||||
id ||= db.query_one? "select id from ids where path = (?)", path,
|
||||
as: String
|
||||
|
||||
unless id
|
||||
rows = [] of Tuple(String, String)
|
||||
db.query "select id, path from ids where signature = (?)",
|
||||
signature.to_s do |rs|
|
||||
rs.each do
|
||||
rows << {rs.read(String), rs.read(String)}
|
||||
end
|
||||
end
|
||||
row = rows.max_by?(&.[1].components_similarity(path))
|
||||
id = row[0] if row
|
||||
end
|
||||
|
||||
if id && should_update
|
||||
db.exec "update ids set path = (?), signature = (?), " \
|
||||
"unavailable = 0 where id = (?)", path, signature.to_s, id
|
||||
end
|
||||
end
|
||||
end
|
||||
id
|
||||
end
|
||||
|
||||
def insert_entry_id(tp)
|
||||
@@insert_entry_ids << tp
|
||||
end
|
||||
|
||||
def insert_title_id(tp)
|
||||
@@insert_title_ids << tp
|
||||
end
|
||||
|
||||
def bulk_insert_ids
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.transaction do |tx|
|
||||
@insert_ids.each do |tp|
|
||||
tx.connection.exec "insert into ids values (?, ?, ?)", tp[:path],
|
||||
tp[:id], tp[:is_title] ? 1 : 0
|
||||
db.transaction do |tran|
|
||||
conn = tran.connection
|
||||
@@insert_title_ids.each do |tp|
|
||||
path = Path.new(tp[:path])
|
||||
.relative_to(Config.current.library_path).to_s
|
||||
conn.exec "insert into titles (id, path, signature, " \
|
||||
"unavailable) values (?, ?, ?, 0)",
|
||||
tp[:id], path, tp[:signature].to_s
|
||||
end
|
||||
@@insert_entry_ids.each do |tp|
|
||||
path = Path.new(tp[:path])
|
||||
.relative_to(Config.current.library_path).to_s
|
||||
conn.exec "insert into ids (id, path, signature, " \
|
||||
"unavailable) values (?, ?, ?, 0)",
|
||||
tp[:id], path, tp[:signature].to_s
|
||||
end
|
||||
end
|
||||
end
|
||||
@insert_ids.clear
|
||||
@@insert_entry_ids.clear
|
||||
@@insert_title_ids.clear
|
||||
end
|
||||
end
|
||||
|
||||
def save_thumbnail(id : String, img : Image)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.exec "insert into thumbnails values (?, ?, ?, ?, ?)", id, img.data,
|
||||
img.filename, img.mime, img.size
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_thumbnail(id : String) : Image?
|
||||
img = nil
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query_one? "select * from thumbnails where id = (?)", id do |res|
|
||||
img = Image.from_db res
|
||||
end
|
||||
end
|
||||
end
|
||||
img
|
||||
end
|
||||
|
||||
def get_title_tags(id : String) : Array(String)
|
||||
tags = [] of String
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query "select tag from tags where id = (?) order by tag", id do |rs|
|
||||
rs.each do
|
||||
tags << rs.read String
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
tags
|
||||
end
|
||||
|
||||
def get_tag_titles(tag : String) : Array(String)
|
||||
tids = [] of String
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query "select id from tags where tag = (?)", tag do |rs|
|
||||
rs.each do
|
||||
tids << rs.read String
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
tids
|
||||
end
|
||||
|
||||
def list_tags : Array(String)
|
||||
tags = [] of String
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query "select distinct tag from tags natural join titles " \
|
||||
"where unavailable = 0" do |rs|
|
||||
rs.each do
|
||||
tags << rs.read String
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
tags
|
||||
end
|
||||
|
||||
def add_tag(id : String, tag : String)
|
||||
err = nil
|
||||
MainFiber.run do
|
||||
begin
|
||||
get_db do |db|
|
||||
db.exec "insert into tags values (?, ?)", id, tag
|
||||
end
|
||||
rescue e
|
||||
err = e
|
||||
end
|
||||
end
|
||||
raise err.not_nil! if err
|
||||
end
|
||||
|
||||
def delete_tag(id : String, tag : String)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.exec "delete from tags where id = (?) and tag = (?)", id, tag
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Mark titles and entries that no longer exist on the file system as
|
||||
# unavailable. By supplying `ids_candidates` and `titles_candidates`, it
|
||||
# only checks the existence of the candidate titles/entries to speed up
|
||||
# the process.
|
||||
def mark_unavailable(ids_candidates : Array(String)?,
|
||||
titles_candidates : Array(String)?)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
# Detect dangling entry IDs
|
||||
trash_ids = [] of String
|
||||
query = "select path, id from ids where unavailable = 0"
|
||||
unless ids_candidates.nil?
|
||||
query += " and id in (#{ids_candidates.join "," { |i| "'#{i}'" }})"
|
||||
end
|
||||
db.query query do |rs|
|
||||
rs.each do
|
||||
path = rs.read String
|
||||
fullpath = Path.new(path).expand(Config.current.library_path).to_s
|
||||
trash_ids << rs.read String unless File.exists? fullpath
|
||||
end
|
||||
end
|
||||
|
||||
unless trash_ids.empty?
|
||||
Logger.debug "Marking #{trash_ids.size} entries as unavailable"
|
||||
end
|
||||
db.exec "update ids set unavailable = 1 where id in " \
|
||||
"(#{trash_ids.join "," { |i| "'#{i}'" }})"
|
||||
|
||||
# Detect dangling title IDs
|
||||
trash_titles = [] of String
|
||||
query = "select path, id from titles where unavailable = 0"
|
||||
unless titles_candidates.nil?
|
||||
query += " and id in (#{titles_candidates.join "," { |i| "'#{i}'" }})"
|
||||
end
|
||||
db.query query do |rs|
|
||||
rs.each do
|
||||
path = rs.read String
|
||||
fullpath = Path.new(path).expand(Config.current.library_path).to_s
|
||||
trash_titles << rs.read String unless Dir.exists? fullpath
|
||||
end
|
||||
end
|
||||
|
||||
unless trash_titles.empty?
|
||||
Logger.debug "Marking #{trash_titles.size} titles as unavailable"
|
||||
end
|
||||
db.exec "update titles set unavailable = 1 where id in " \
|
||||
"(#{trash_titles.join "," { |i| "'#{i}'" }})"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private def get_missing(tablename)
|
||||
ary = [] of IDTuple
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query "select id, path, signature from #{tablename} " \
|
||||
"where unavailable = 1" do |rs|
|
||||
rs.each do
|
||||
ary << {
|
||||
id: rs.read(String),
|
||||
path: rs.read(String),
|
||||
signature: rs.read(String?),
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
ary
|
||||
end
|
||||
|
||||
private def delete_missing(tablename, id : String? = nil)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
if id
|
||||
db.exec "delete from #{tablename} where id = (?) " \
|
||||
"and unavailable = 1", id
|
||||
else
|
||||
db.exec "delete from #{tablename} where unavailable = 1"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def missing_entries
|
||||
get_missing "ids"
|
||||
end
|
||||
|
||||
def missing_titles
|
||||
get_missing "titles"
|
||||
end
|
||||
|
||||
def delete_missing_entry(id = nil)
|
||||
delete_missing "ids", id
|
||||
end
|
||||
|
||||
def delete_missing_title(id = nil)
|
||||
delete_missing "titles", id
|
||||
end
|
||||
|
||||
def save_md_token(username : String, token : String, expire : Time)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
count = db.query_one "select count(*) from md_account where " \
|
||||
"username = (?)", username, as: Int64
|
||||
if count == 0
|
||||
db.exec "insert into md_account values (?, ?, ?)", username, token,
|
||||
expire.to_unix
|
||||
else
|
||||
db.exec "update md_account set token = (?), expire = (?) " \
|
||||
"where username = (?)", token, expire.to_unix, username
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_md_token(username) : Tuple(String?, Time?)
|
||||
token = nil
|
||||
expires = nil
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query_one? "select token, expire from md_account where " \
|
||||
"username = (?)", username do |res|
|
||||
token = res.read String
|
||||
expires = Time.unix res.read Int64
|
||||
end
|
||||
end
|
||||
end
|
||||
{token, expires}
|
||||
end
|
||||
|
||||
def close
|
||||
MainFiber.run do
|
||||
unless @db.nil?
|
||||
|
||||
src/subscription.cr (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
require "db"
|
||||
require "json"
|
||||
|
||||
struct Subscription
|
||||
include DB::Serializable
|
||||
include JSON::Serializable
|
||||
|
||||
getter id : Int64 = 0
|
||||
getter username : String
|
||||
getter manga_id : Int64
|
||||
property language : String?
|
||||
property group_id : Int64?
|
||||
property min_volume : Int64?
|
||||
property max_volume : Int64?
|
||||
property min_chapter : Int64?
|
||||
property max_chapter : Int64?
|
||||
@[DB::Field(key: "last_checked")]
|
||||
@[JSON::Field(key: "last_checked")]
|
||||
@raw_last_checked : Int64
|
||||
@[DB::Field(key: "created_at")]
|
||||
@[JSON::Field(key: "created_at")]
|
||||
@raw_created_at : Int64
|
||||
|
||||
def last_checked : Time
|
||||
Time.unix @raw_last_checked
|
||||
end
|
||||
|
||||
def created_at : Time
|
||||
Time.unix @raw_created_at
|
||||
end
|
||||
|
||||
def initialize(@manga_id, @username)
|
||||
@raw_created_at = Time.utc.to_unix
|
||||
@raw_last_checked = Time.utc.to_unix
|
||||
end
|
||||
|
||||
private def in_range?(value : String, lowerbound : Int64?,
|
||||
upperbound : Int64?) : Bool
|
||||
lb = lowerbound.try &.to_f64
|
||||
ub = upperbound.try &.to_f64
|
||||
|
||||
return true if lb.nil? && ub.nil?
|
||||
|
||||
v = value.to_f64?
|
||||
return false unless v
|
||||
|
||||
if lb.nil?
|
||||
v <= ub.not_nil!
|
||||
elsif ub.nil?
|
||||
v >= lb.not_nil!
|
||||
else
|
||||
v >= lb.not_nil! && v <= ub.not_nil!
|
||||
end
|
||||
end
|
||||
|
||||
def match?(chapter : MangaDex::Chapter) : Bool
|
||||
if chapter.manga_id != manga_id ||
|
||||
(language && chapter.language != language) ||
|
||||
(group_id && !chapter.groups.map(&.id).includes? group_id)
|
||||
return false
|
||||
end
|
||||
|
||||
in_range?(chapter.volume, min_volume, max_volume) &&
|
||||
in_range?(chapter.chapter, min_chapter, max_chapter)
|
||||
end
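For the volume/chapter filters above, a missing bound is treated as open-ended and a non-numeric value (for example an "Extra" chapter) never satisfies a bound. A standalone sketch of that rule with sample values, mirroring the private in_range? helper:

def within?(value : String, lowerbound : Int64?, upperbound : Int64?) : Bool
  lb = lowerbound.try &.to_f64
  ub = upperbound.try &.to_f64
  return true if lb.nil? && ub.nil? # no bounds: everything matches
  v = value.to_f64?
  return false unless v # non-numeric chapter/volume never matches a bound
  if lb.nil?
    v <= ub.not_nil!
  elsif ub.nil?
    v >= lb
  else
    v >= lb && v <= ub
  end
end

within? "12.5", 10_i64, nil  # => true  (only a lower bound)
within? "Extra", nil, 20_i64 # => false (not numeric)
within? "7", 10_i64, 20_i64  # => false (below the range)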
|
||||
|
||||
def check_for_updates : Int32
|
||||
Logger.debug "Checking updates for subscription with ID #{id}"
|
||||
jobs = [] of Queue::Job
|
||||
get_client(username).user.updates_after last_checked do |chapter|
|
||||
next unless match? chapter
|
||||
jobs << chapter.to_job
|
||||
end
|
||||
Storage.default.update_subscription_last_checked id
|
||||
count = Queue.default.push jobs
|
||||
Logger.debug "#{count}/#{jobs.size} updates added to the queue"
|
||||
count
|
||||
rescue e
|
||||
Logger.error "Error occurred when checking updates for " \
|
||||
"subscription with ID #{id}. #{e}"
|
||||
0
|
||||
end
|
||||
end
|
||||
@@ -73,7 +73,7 @@ class ChapterSorter
|
||||
.select do |key|
|
||||
keys[key].count >= str_ary.size / 2
|
||||
end
|
||||
.sort do |a_key, b_key|
|
||||
.sort! do |a_key, b_key|
|
||||
a = keys[a_key]
|
||||
b = keys[b_key]
|
||||
# Sort keys by the number of times they appear
|
||||
|
||||
@@ -11,7 +11,7 @@ end
|
||||
def split_by_alphanumeric(str)
|
||||
arr = [] of String
|
||||
str.scan(/([^\d\n\r]*)(\d*)([^\d\n\r]*)/) do |match|
|
||||
arr += match.captures.select { |s| s != "" }
|
||||
arr += match.captures.select &.!= ""
|
||||
end
|
||||
arr
|
||||
end
|
||||
|
||||
@@ -5,7 +5,7 @@ require "http_proxy"
|
||||
module HTTP
|
||||
class Client
|
||||
private def self.exec(uri : URI, tls : TLSContext = nil)
|
||||
Logger.debug "Using monkey-patched HTTP::Client"
|
||||
Logger.debug "Setting proxy"
|
||||
previous_def uri, tls do |client, path|
|
||||
client.set_proxy get_proxy uri
|
||||
yield client, path
|
||||
@@ -35,7 +35,8 @@ private def env_to_proxy(key : String) : HTTP::Proxy::Client?
|
||||
|
||||
begin
|
||||
uri = URI.parse val
|
||||
HTTP::Proxy::Client.new uri.hostname.not_nil!, uri.port.not_nil!
|
||||
HTTP::Proxy::Client.new uri.hostname.not_nil!, uri.port.not_nil!,
|
||||
username: uri.user, password: uri.password
|
||||
rescue
|
||||
nil
|
||||
end
|
||||
|
||||
src/util/signature.cr (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
require "./util"
|
||||
|
||||
class File
|
||||
abstract struct Info
|
||||
def inode : UInt64
|
||||
@stat.st_ino.to_u64
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the signature of the file at filename.
|
||||
# When it is not a supported file, returns 0. Otherwise, uses the inode
|
||||
# number as its signature. On most file systems, the inode number is
|
||||
# preserved even when the file is renamed, moved or edited.
|
||||
# Some cases that would cause the inode number to change:
|
||||
# - Reboot/remount on some file systems
|
||||
# - Replaced with a copied file
|
||||
# - Moved to a different device
|
||||
# Since we are also using the relative paths to match ids, we won't lose
|
||||
# information as long as the above changes do not happen together with
|
||||
# a file/folder rename, with no library scan in between.
|
||||
def self.signature(filename) : UInt64
|
||||
if is_supported_file filename
|
||||
File.info(filename).inode
|
||||
else
|
||||
0u64
|
||||
end
|
||||
end
|
||||
end
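A short usage sketch of the file signature (the paths are hypothetical and the returned number depends on the file system):

# A supported archive is signed with its inode number, so renaming or
# moving it within the same file system keeps the signature stable.
File.signature "/library/Some Title/Vol 1.cbz" # => 9315921 (for example)
# Anything else (covers, info.json, ...) gets 0 and is ignored.
File.signature "/library/Some Title/cover.jpg" # => 0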
|
||||
|
||||
class Dir
|
||||
# Returns the signature of the directory at dirname. See the comments for
|
||||
# `File.signature` for more information.
|
||||
def self.signature(dirname) : UInt64
|
||||
signatures = [File.info(dirname).inode]
|
||||
self.open dirname do |dir|
|
||||
dir.entries.each do |fn|
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dirname, fn
|
||||
if File.directory? path
|
||||
signatures << Dir.signature path
|
||||
else
|
||||
_sig = File.signature path
|
||||
# Only add its signature value to `signatures` when it is a
|
||||
# supported file
|
||||
signatures << _sig if _sig > 0
|
||||
end
|
||||
end
|
||||
end
|
||||
Digest::CRC32.checksum(signatures.sort.join).to_u64
|
||||
end
|
||||
|
||||
# Returns the contents signature of the directory at dirname for checking
|
||||
# whether to rescan.
|
||||
# Rescan conditions:
|
||||
# - When a file is added, moved, removed, or renamed (including in nested
|
||||
# directories)
|
||||
def self.contents_signature(dirname, cache = {} of String => String) : String
|
||||
return cache[dirname] if cache[dirname]?
|
||||
Fiber.yield
|
||||
signatures = [] of String
|
||||
self.open dirname do |dir|
|
||||
dir.entries.sort.each do |fn|
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dirname, fn
|
||||
if File.directory? path
|
||||
signatures << Dir.contents_signature path, cache
|
||||
else
|
||||
# Only add its signature value to `signatures` when it is a
|
||||
# supported file
|
||||
signatures << fn if is_supported_file fn
|
||||
end
|
||||
Fiber.yield
|
||||
end
|
||||
end
|
||||
hash = Digest::SHA1.hexdigest(signatures.join)
|
||||
cache[dirname] = hash
|
||||
hash
|
||||
end
|
||||
end
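A sketch of how the contents signature is meant to be used between scans (the directory name is hypothetical); the optional cache only memoizes lookups within a single scan:

cache = {} of String => String
before = Dir.contents_signature "/library/Some Title", cache
# ... files under the title are added, removed or renamed ...
after = Dir.contents_signature "/library/Some Title", {} of String => String
needs_rescan = before != after # rescan when the digest changed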
|
||||
@@ -1,7 +1,8 @@
|
||||
IMGS_PER_PAGE = 5
|
||||
ENTRIES_IN_HOME_SECTIONS = 8
|
||||
UPLOAD_URL_PREFIX = "/uploads"
|
||||
STATIC_DIRS = ["/css", "/js", "/img", "/favicon.ico"]
|
||||
STATIC_DIRS = %w(/css /js /img /webfonts /favicon.ico /robots.txt)
|
||||
SUPPORTED_FILE_EXTNAMES = [".zip", ".cbz", ".rar", ".cbr"]
|
||||
|
||||
def random_str
|
||||
UUID.random.to_s.gsub "-", ""
|
||||
@@ -22,15 +23,32 @@ end
|
||||
|
||||
def register_mime_types
|
||||
{
|
||||
# Comic Archives
|
||||
".zip" => "application/zip",
|
||||
".rar" => "application/x-rar-compressed",
|
||||
".cbz" => "application/vnd.comicbook+zip",
|
||||
".cbr" => "application/vnd.comicbook-rar",
|
||||
|
||||
# Favicon
|
||||
".ico" => "image/x-icon",
|
||||
|
||||
# FontAwesome fonts
|
||||
".woff" => "font/woff",
|
||||
".woff2" => "font/woff2",
|
||||
|
||||
# Supported image formats. JPG, PNG, GIF, WebP, and SVG are already
|
||||
# defined by Crystal in `MIME.DEFAULT_TYPES`
|
||||
".apng" => "image/apng",
|
||||
".avif" => "image/avif",
|
||||
}.each do |k, v|
|
||||
MIME.register k, v
|
||||
end
|
||||
end
|
||||
|
||||
def is_supported_file(path)
|
||||
SUPPORTED_FILE_EXTNAMES.includes? File.extname(path).downcase
|
||||
end
|
||||
|
||||
struct Int
|
||||
def or(other : Int)
|
||||
if self == 0
|
||||
@@ -61,3 +79,68 @@ class String
|
||||
self.chars.all? { |c| c.alphanumeric? || c == '_' }
|
||||
end
|
||||
end
|
||||
|
||||
def env_is_true?(key : String) : Bool
|
||||
val = ENV[key.upcase]? || ENV[key.downcase]?
|
||||
return false unless val
|
||||
val.downcase.in? "1", "true"
|
||||
end
|
||||
|
||||
def sort_titles(titles : Array(Title), opt : SortOptions, username : String)
|
||||
ary = titles
|
||||
|
||||
case opt.method
|
||||
when .time_modified?
|
||||
ary.sort! { |a, b| (a.mtime <=> b.mtime).or \
|
||||
compare_numerically a.title, b.title }
|
||||
when .progress?
|
||||
ary.sort! do |a, b|
|
||||
(a.load_percentage(username) <=> b.load_percentage(username)).or \
|
||||
compare_numerically a.title, b.title
|
||||
end
|
||||
else
|
||||
unless opt.method.auto?
|
||||
Logger.warn "Unknown sorting method #{opt.not_nil!.method}. Using " \
|
||||
"Auto instead"
|
||||
end
|
||||
ary.sort! { |a, b| compare_numerically a.title, b.title }
|
||||
end
|
||||
|
||||
ary.reverse! unless opt.not_nil!.ascend
|
||||
|
||||
ary
|
||||
end
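A hedged usage sketch of the helper above, assuming the `SortOptions.new(method, ascend)` form used by the `get_sort_opt` macro later in this diff and a hypothetical username:

opt = SortOptions.new "progress", false # most-read titles first
sorted = sort_titles Library.default.titles, opt, "alice"
sorted.each { |title| puts title.title }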
|
||||
|
||||
class String
  # Returns the similarity (in [0, 1]) of two paths.
  # For the two paths, separate them into arrays of components, count the
  # number of matching components backwards, and divide the count by the
  # number of components of the shorter path.
  def components_similarity(other : String) : Float64
    s, l = [self, other]
      .map { |str| Path.new(str).parts }
      .sort_by! &.size

    match = s.reverse.zip(l.reverse).count { |a, b| a == b }
    match / s.size
  end
end
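Two quick worked examples of the definition above, assuming the reopened String class is loaded:

# The shorter path has 2 components; comparing backwards, both match.
"Manga A/Vol 1".components_similarity("moved/Manga A/Vol 1") # => 1.0
# Only the last of the 2 components matches.
"Manga A/Vol 2".components_similarity("Manga B/Vol 2") # => 0.5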
|
||||
|
||||
# Does the following:
|
||||
# - turns space-like characters into normal whitespace ( )
|
||||
# - strips and collapses spaces
|
||||
# - removes ASCII control characters
|
||||
# - replaces slashes (/) with underscores (_)
|
||||
# - removes leading dots (.)
|
||||
# - removes the following special characters: \:*?"<>|
|
||||
#
|
||||
# If the sanitized string is empty, returns a random string instead.
|
||||
def sanitize_filename(str : String) : String
|
||||
sanitized = str
|
||||
.gsub(/\s+/, " ")
|
||||
.strip
|
||||
.gsub(/\//, "_")
|
||||
.gsub(/^[\.\s]+/, "")
|
||||
.gsub(/[\177\000-\031\\:\*\?\"<>\|]/, "")
|
||||
sanitized.size > 0 ? sanitized : random_str
|
||||
end
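A few illustrative inputs and outputs for the rules listed above:

sanitize_filename "  Vol. 1:  <Special>  " # => "Vol. 1 Special"
sanitize_filename "a/b\\c"                 # => "a_bc"
sanitize_filename "..."                    # => a random string (the input reduces to empty)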
|
||||
|
||||
@@ -1,30 +1,60 @@
|
||||
# Web related helper functions/macros
|
||||
|
||||
def is_admin?(env) : Bool
|
||||
is_admin = false
|
||||
if !Config.current.auth_proxy_header_name.empty? ||
|
||||
Config.current.disable_login
|
||||
is_admin = Storage.default.username_is_admin get_username env
|
||||
end
|
||||
|
||||
# The token (if exists) takes precedence over other authentication methods.
|
||||
if token = env.session.string? "token"
|
||||
is_admin = Storage.default.verify_admin token
|
||||
end
|
||||
|
||||
is_admin
|
||||
end
|
||||
|
||||
macro layout(name)
|
||||
base_url = Config.current.base_url
|
||||
is_admin = is_admin? env
|
||||
begin
|
||||
is_admin = false
|
||||
if token = env.session.string? "token"
|
||||
is_admin = @context.storage.verify_admin token
|
||||
end
|
||||
page = {{name}}
|
||||
render "src/views/#{{{name}}}.html.ecr", "src/views/layout.html.ecr"
|
||||
rescue e
|
||||
message = e.to_s
|
||||
@context.error message
|
||||
Logger.error message
|
||||
page = "Error"
|
||||
render "src/views/message.html.ecr", "src/views/layout.html.ecr"
|
||||
end
|
||||
end
|
||||
|
||||
macro send_error_page(msg)
|
||||
message = {{msg}}
|
||||
base_url = Config.current.base_url
|
||||
is_admin = is_admin? env
|
||||
page = "Error"
|
||||
html = render "src/views/message.html.ecr", "src/views/layout.html.ecr"
|
||||
send_file env, html.to_slice, "text/html"
|
||||
end
|
||||
|
||||
macro send_img(env, img)
|
||||
send_file {{env}}, {{img}}.data, {{img}}.mime
|
||||
end
|
||||
|
||||
macro get_username(env)
|
||||
# if the request gets here, it has gone through the auth handler, and
|
||||
# we can be sure that a valid token exists, so we can use not_nil! here
|
||||
token = env.session.string "token"
|
||||
(@context.storage.verify_token token).not_nil!
|
||||
begin
|
||||
token = env.session.string "token"
|
||||
(Storage.default.verify_token token).not_nil!
|
||||
rescue e
|
||||
if Config.current.disable_login
|
||||
Config.current.default_username
|
||||
elsif (header = Config.current.auth_proxy_header_name) && !header.empty?
|
||||
env.request.headers[header]
|
||||
else
|
||||
raise e
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def send_json(env, json)
|
||||
@@ -42,16 +72,11 @@ def redirect(env, path)
|
||||
end
|
||||
|
||||
def hash_to_query(hash)
|
||||
hash.map { |k, v| "#{k}=#{v}" }.join("&")
|
||||
hash.join "&" { |k, v| "#{k}=#{v}" }
|
||||
end
|
||||
|
||||
def request_path_startswith(env, ary)
|
||||
ary.each do |prefix|
|
||||
if env.request.path.starts_with? prefix
|
||||
return true
|
||||
end
|
||||
end
|
||||
false
|
||||
ary.any? { |prefix| env.request.path.starts_with? prefix }
|
||||
end
|
||||
|
||||
def requesting_static_file(env)
|
||||
@@ -81,3 +106,40 @@ macro get_sort_opt
|
||||
sort_opt = SortOptions.new sort_method, is_ascending
|
||||
end
|
||||
end
|
||||
|
||||
macro get_and_save_sort_opt(dir)
|
||||
sort_method = env.params.query["sort"]?
|
||||
|
||||
if sort_method
|
||||
is_ascending = true
|
||||
|
||||
ascend = env.params.query["ascend"]?
|
||||
if ascend && ascend.to_i? == 0
|
||||
is_ascending = false
|
||||
end
|
||||
|
||||
sort_opt = SortOptions.new sort_method, is_ascending
|
||||
|
||||
TitleInfo.new {{dir}} do |info|
|
||||
info.sort_by[username] = sort_opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
end
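For reference, a request like the following is what the macro above parses and persists (the route, title and username are hypothetical):

# GET /book/abc123?sort=progress&ascend=0
#   sort_method  = "progress"
#   is_ascending = false
#   -> sort_opt = SortOptions.new "progress", false
#   -> written to the title's info.json under sort_by["alice"]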
|
||||
|
||||
module HTTP
|
||||
class Client
|
||||
private def self.exec(uri : URI, tls : TLSContext = nil)
|
||||
previous_def uri, tls do |client, path|
|
||||
if client.tls? && env_is_true? "DISABLE_SSL_VERIFICATION"
|
||||
Logger.debug "Disabling SSL verification"
|
||||
client.tls.verify_mode = OpenSSL::SSL::VerifyMode::NONE
|
||||
end
|
||||
Logger.debug "Setting read timeout"
|
||||
client.read_timeout = Config.current.download_timeout_seconds.seconds
|
||||
Logger.debug "Requesting #{uri}"
|
||||
yield client, path
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,15 +1,33 @@
|
||||
<ul class="uk-list uk-list-large uk-list-divider">
|
||||
<li data-url="<%= base_url %>admin/user">User Managerment</li>
|
||||
<li onclick="if(!scanning){scan()}">
|
||||
<span id="scan">Scan Library Files</span>
|
||||
<span id="scan-status" class="uk-align-right">
|
||||
<div uk-spinner hidden></div>
|
||||
<span hidden></span>
|
||||
</span>
|
||||
<ul class="uk-list uk-list-large uk-list-divider" x-data="component()" x-init="init()">
|
||||
<li><a class="uk-link-reset" href="<%= base_url %>admin/user">User Management</a></li>
|
||||
<li>
|
||||
<a class="uk-link-reset" href="<%= base_url %>admin/missing">Missing Items</a>
|
||||
<% if missing_count > 0 %>
|
||||
<div class="uk-align-right">
|
||||
<span class="uk-badge"><%= missing_count %></span>
|
||||
</div>
|
||||
<% end %>
|
||||
</li>
|
||||
<li class="nopointer">
|
||||
<li>
|
||||
<a class="uk-link-reset" @click="scan()">
|
||||
<span :style="`${scanning ? 'color:grey' : ''}`">Scan Library Files</span>
|
||||
<div class="uk-align-right">
|
||||
<div uk-spinner x-show="scanning"></div>
|
||||
<span x-show="!scanning && scanMs > 0" x-text="`Scanned ${scanTitles} titles in ${scanMs}ms`"></span>
|
||||
</div>
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a class="uk-link-reset" @click="generateThumbnails()">
|
||||
<span :style="`${generating ? 'color:grey' : ''}`">Generate Thumbnails</span>
|
||||
<div class="uk-align-right">
|
||||
<span x-show="generating && progress > 0" x-text="`${(progress * 100).toFixed(2)}%`"></span>
|
||||
</div>
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<span>Theme</span>
|
||||
<select id="theme-select" class="uk-select uk-align-right uk-width-1-3@m uk-width-1-2">
|
||||
<select id="theme-select" class="uk-select uk-align-right uk-width-1-3@m uk-width-1-2" :value="themeSetting" @change="themeChanged($event)">
|
||||
<option>Dark</option>
|
||||
<option>Light</option>
|
||||
<option>System</option>
|
||||
|
||||
src/views/api.html.ecr (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <meta name="X-UA-Compatible" content="IE=edge">
  <title>Mango API Documentation</title>
  <meta name="description" content="Mango - Manga Server and Web Reader">
  <meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body>
  <redoc spec-url="<%= base_url %>openapi.json"></redoc>
  <script src="https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js"></script>
</body>
</html>
|
||||
@@ -35,7 +35,7 @@
|
||||
onclick="location='<%= base_url %>book/<%= item.id %>'"
|
||||
<% end %>>
|
||||
|
||||
<div class="uk-card uk-card-default" x-data="{selected: false, hover: false, disabled: true}" :class="{selected: selected}"
|
||||
<div class="uk-card uk-card-default" x-data="{selected: false, hover: false, disabled: true, selecting: false}" :class="{selected: selected}" @count.window="selecting = $event.detail.count > 0"
|
||||
<% if page == "title" && item.is_a?(Entry) && item.err_msg.nil? %>
|
||||
x-init="disabled = false"
|
||||
<% end %>>
|
||||
@@ -45,6 +45,7 @@
|
||||
class="grayscale"
|
||||
<% end %>>
|
||||
<div class="uk-overlay-primary uk-position-cover" x-show="!disabled && (selected || hover)">
|
||||
<div class="uk-height-1-1 uk-width-1-1" x-show="selecting" @click.stop="selected = !selected; $dispatch(selected ? 'add' : 'remove')"></div>
|
||||
<div class="uk-position-center">
|
||||
<span class="fas fa-check-circle fa-3x" @click.stop="selected = !selected; $dispatch(selected ? 'add' : 'remove')" :style="`color:${selected && 'orange'};`"></span>
|
||||
</div>
|
||||
@@ -75,7 +76,7 @@
|
||||
<% end %>
|
||||
<% if item.is_a? Title %>
|
||||
<% if grouped_count == 1 %>
|
||||
<p class="uk-text-meta"><%= item.size %> entries</p>
|
||||
<p class="uk-text-meta"><%= item.content_label %></p>
|
||||
<% else %>
|
||||
<p class="uk-text-meta"><%= grouped_count %> new entries</p>
|
||||
<% end %>
|
||||
|
||||
src/views/components/dots.html.ecr (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jQuery.dotdotdot/4.0.11/dotdotdot.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/protonet-jquery.inview/1.1.2/jquery.inview.min.js"></script>
|
||||
<script src="<%= base_url %>js/dots.js"></script>
|
||||
@@ -1,15 +1,15 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="X-UA-Compatible" content="IE=edge">
|
||||
<title>Mango</title>
|
||||
<title>Mango - <%= page.split("-").map(&.capitalize).join(" ") %></title>
|
||||
<meta name="description" content="Mango - Manga Server and Web Reader">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="stylesheet" href="<%= base_url %>css/uikit.css" />
|
||||
<link rel="stylesheet" href="<%= base_url %>css/mango.css" />
|
||||
<link rel="icon" href="<%= base_url %>favicon.ico">
|
||||
<script defer src="<%= base_url %>js/fontawesome.min.js"></script>
|
||||
<script defer src="<%= base_url %>js/solid.min.js"></script>
|
||||
|
||||
<script src="https://polyfill.io/v3/polyfill.min.js?features=MutationObserver%2Cdefault%2CmatchMedia&flats=gated"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/gh/alpinejs/alpine@v2.5.0/dist/alpine.min.js" defer></script>
|
||||
<script src="<%= base_url %>js/theme.js"></script>
|
||||
<script type="module" src="https://cdn.jsdelivr.net/gh/alpinejs/alpine@v2.8.0/dist/alpine.min.js"></script>
|
||||
<script nomodule src="https://cdn.jsdelivr.net/gh/alpinejs/alpine@v2.8.0/dist/alpine-ie11.min.js" defer></script>
|
||||
<script src="<%= base_url %>js/common.js"></script>
|
||||
</head>
|
||||
|
||||
src/views/components/jquery-ui.html.ecr (new file, 1 line)
@@ -0,0 +1 @@
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
|
||||
src/views/components/moment.html.ecr (new file, 1 line)
@@ -0,0 +1 @@
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
src/views/components/uikit.html.ecr (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
<script src="https://cdn.jsdelivr.net/npm/uikit@3.5.9/dist/js/uikit.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/uikit@3.5.9/dist/js/uikit-icons.min.js"></script>
|
||||
@@ -1,33 +1,69 @@
|
||||
<div class="uk-margin">
|
||||
<div id="actions" class="uk-margin">
|
||||
<button class="uk-button uk-button-default" onclick="remove()">Delete Completed Tasks</button>
|
||||
<button class="uk-button uk-button-default" onclick="refresh()">Retry Failed Tasks</button>
|
||||
<button class="uk-button uk-button-default" onclick="load()">Refresh Queue</button>
|
||||
<button class="uk-button uk-button-default" onclick="toggle()" id="pause-resume-btn" hidden></button>
|
||||
</div>
|
||||
<div id="config" class="uk-margin">
|
||||
<label><input id="auto-refresh" class="uk-checkbox" type="checkbox" checked> Auto Refresh</label>
|
||||
<div x-data="component()" x-init="init()">
|
||||
<div class="uk-margin">
|
||||
<button class="uk-button uk-button-default" @click="jobAction('delete')">Delete Completed Tasks</button>
|
||||
<button class="uk-button uk-button-default" @click="jobAction('retry')">Retry Failed Tasks</button>
|
||||
<button class="uk-button uk-button-default" @click="load()" :disabled="loading">Refresh Queue</button>
|
||||
<button class="uk-button uk-button-default" x-show="paused !== undefined" x-text="paused ? 'Resume Download' : 'Pause Download'" @click="toggle()" :disabled="toggling"></button>
|
||||
</div>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Chapter</th>
|
||||
<th>Manga</th>
|
||||
<th>Progress</th>
|
||||
<th>Time</th>
|
||||
<th>Status</th>
|
||||
<th>Plugin</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<template x-for="job in jobs" :key="job">
|
||||
<tr :id="`chapter-${job.id}`">
|
||||
|
||||
<template x-if="job.plugin_id">
|
||||
<td x-text="job.title"></td>
|
||||
</template>
|
||||
<template x-if="!job.plugin_id">
|
||||
<td><a :href="`<%= mangadex_base_url %>/chapter/${job.id}`" x-text="job.title"></a></td>
|
||||
</template>
|
||||
|
||||
<template x-if="job.plugin_id">
|
||||
<td x-text="job.manga_title"></td>
|
||||
</template>
|
||||
<template x-if="!job.plugin_id">
|
||||
<td><a :href="`<%= mangadex_base_url %>/manga/${job.manga_id}`" x-text="job.manga_title"></a></td>
|
||||
</template>
|
||||
|
||||
<td x-text="`${job.success_count}/${job.pages}`"></td>
|
||||
<td x-text="`${moment(job.time).fromNow()}`"></td>
|
||||
|
||||
<td>
|
||||
<span :class="statusClass(job.status)" x-text="job.status"></span>
|
||||
<template x-if="job.status_message.length > 0">
|
||||
<div class="uk-inline">
|
||||
<span uk-icon="info"></span>
|
||||
<div uk-dropdown x-text="job.status_message" style="white-space: pre-line;"></div>
|
||||
</div>
|
||||
</template>
|
||||
</td>
|
||||
|
||||
<td x-text="`${job.plugin_id || ''}`"></td>
|
||||
<td>
|
||||
<a @click="jobAction('delete', $event)" uk-icon="trash" uk-tooltip="Delete"></a>
|
||||
<template x-if="job.status_message.length > 0">
|
||||
<a @click="jobAction('retry', $event)" uk-icon="refresh" uk-tooltip="Retry"></a>
|
||||
</template>
|
||||
</td>
|
||||
</tr>
|
||||
</template>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Chapter</th>
|
||||
<th>Manga</th>
|
||||
<th>Progress</th>
|
||||
<th>Time</th>
|
||||
<th>Status</th>
|
||||
<th>Plugin</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script>
|
||||
var baseURL = "<%= mangadex_base_url %>".replace(/\/$/, "");
|
||||
</script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
<%= render_component "moment" %>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/download-manager.js"></script>
|
||||
<% end %>
|
||||
|
||||
@@ -1,83 +1,162 @@
|
||||
<h2 class=uk-title>Download from MangaDex</h2>
|
||||
<div class="uk-grid-small" uk-grid>
|
||||
<div class="uk-width-3-4">
|
||||
<input id="search-input" class="uk-input" type="text" placeholder="MangaDex manga ID or URL">
|
||||
<div x-data="downloadComponent()" x-init="init()">
|
||||
<div class="uk-grid-small" uk-grid style="margin-bottom:40px;">
|
||||
<div class="uk-width-expand">
|
||||
<input class="uk-input" type="text" :placeholder="searchAvailable ? 'Search MangaDex or enter a manga ID/URL' : 'MangaDex manga ID or URL'" x-model="searchInput" @keydown.enter.debounce="search()">
|
||||
</div>
|
||||
<div class="uk-width-auto">
|
||||
<div uk-spinner class="uk-align-center" x-show="loading" x-cloak></div>
|
||||
<button class="uk-button uk-button-default" x-show="!loading" @click="search()">Search</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-width-1-4">
|
||||
<div id="spinner" uk-spinner class="uk-align-center" hidden></div>
|
||||
<button id="search-btn" class="uk-button uk-button-default" onclick="search()">Search</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class"uk-grid-small" uk-grid hidden id="manga-details">
|
||||
<div class="uk-width-1-4@s">
|
||||
<img id="cover">
|
||||
</div>
|
||||
<div class="uk-width-1-4@s">
|
||||
<p id="title"></p>
|
||||
<p id="artist"></p>
|
||||
<p id="author"></p>
|
||||
</div>
|
||||
<div id="filter-form" class="uk-form-stacked uk-width-1-2@s" hidden>
|
||||
<p class="uk-text-lead uk-margin-remove-bottom">Filter Chapters</p>
|
||||
<p class="uk-text-meta uk-margin-remove-top" id="count-text"></p>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="lang-select">Language</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" id="lang-select">
|
||||
</select>
|
||||
|
||||
<template x-if="mangaAry">
|
||||
<div>
|
||||
<p x-show="mangaAry.length === 0">No matching manga found.</p>
|
||||
|
||||
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
|
||||
<template x-for="manga in mangaAry" :key="manga.id">
|
||||
<div class="item" :data-id="manga.id" @click="chooseManga(manga)">
|
||||
<div class="uk-card uk-card-default">
|
||||
<div class="uk-card-media-top uk-inline">
|
||||
<img uk-img :data-src="manga.mainCover">
|
||||
</div>
|
||||
<div class="uk-card-body">
|
||||
<h3 class="uk-card-title break-word uk-margin-remove-bottom free-height" x-text="manga.title"></h3>
|
||||
<p class="uk-text-meta" x-text="`ID: ${manga.id}`"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="group-select">Group</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" id="group-select">
|
||||
</select>
|
||||
</template>
|
||||
|
||||
<div x-show="data && data.chapters" x-cloak>
|
||||
<div class="uk-grid-small" uk-grid>
|
||||
<div class="uk-width-1-4@s">
|
||||
<img :src="data.mainCover">
|
||||
</div>
|
||||
<div class="uk-width-1-4@s">
|
||||
<p>Title: <a :href="`<%= mangadex_base_url %>/manga/${data.id}`" x-text="data.title"></a></p>
|
||||
<p x-text="`Artist: ${data.artist}`"></p>
|
||||
<p x-text="`Author: ${data.author}`"></p>
|
||||
</div>
|
||||
<div class="uk-form-stacked uk-width-1-2@s" id="filters">
|
||||
<p class="uk-text-lead uk-margin-remove-bottom">Filter Chapters</p>
|
||||
<p class="uk-text-meta uk-margin-remove-top" x-text="`${chapters.length} chapters found`"></p>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Language</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" x-model="langChoice" @change="filtersUpdated()">
|
||||
<template x-for="lang in languages" :key="lang">
|
||||
<option x-text="lang"></option>
|
||||
</template>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Group</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" x-model="groupChoice" @change="filtersUpdated()">
|
||||
<template x-for="group in groups" :key="group">
|
||||
<option x-text="group"></option>
|
||||
</template>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Volume</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="volumeRange" @keydown.enter="filtersUpdated()">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label">Chapter</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="chapterRange" @keydown.enter="filtersUpdated()">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="volume-range">Volume</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" id="volume-range" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty.">
|
||||
<div class="uk-margin">
|
||||
<button class="uk-button uk-button-default" @click="selectAll()">Select All</button>
|
||||
<button class="uk-button uk-button-default" @click="clearSelection()">Clear Selections</button>
|
||||
<button class="uk-button uk-button-primary" @click="download()" x-show="!addingToDownload">Download Selected</button>
|
||||
<div uk-spinner class="uk-margin-left" x-show="addingToDownload"></div>
|
||||
</div>
|
||||
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="chapter-range">Chapter</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" id="chapter-range" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty.">
|
||||
<p x-text="`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters. Please use the filter options above to narrow down your search.`" x-show="chapters.length > chaptersLimit"></p>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto" x-show="chapters.length <= chaptersLimit">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>Title</th>
|
||||
<th>Language</th>
|
||||
<th>Group</th>
|
||||
<th>Volume</th>
|
||||
<th>Chapter</th>
|
||||
<th>Timestamp</th>
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
<template x-if="chapters.length <= chaptersLimit">
|
||||
<tbody id="selectable">
|
||||
<template x-for="chp in chapters" :key="chp">
|
||||
<tr class="ui-widget-content">
|
||||
<td><a :href="`<%= mangadex_base_url %>/chapter/${chp.id}`" x-text="chp.id"></a></td>
|
||||
<td x-text="chp.title"></td>
|
||||
<td x-text="chp.language"></td>
|
||||
<td>
|
||||
<template x-for="grp in Object.entries(chp.groups)">
|
||||
<div>
|
||||
<a :href="`<%= mangadex_base_url %>/group/${grp[1]}`" x-text="grp[0]"></a>
|
||||
</div>
|
||||
</template>
|
||||
</td>
|
||||
<td x-text="chp.volume"></td>
|
||||
<td x-text="chp.chapter"></td>
|
||||
<td x-text="`${moment.unix(chp.timestamp).fromNow()}`"></td>
|
||||
</tr>
|
||||
</template>
|
||||
</tbody>
|
||||
</template>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div id="modal" class="uk-flex-top" uk-modal="container: false">
|
||||
<div class="uk-modal-dialog uk-margin-auto-vertical">
|
||||
<button class="uk-modal-close-default" type="button" uk-close></button>
|
||||
<div class="uk-modal-header">
|
||||
<h3 class="uk-modal-title break-word" x-text="candidateManga.title"></h3>
|
||||
</div>
|
||||
<div class="uk-modal-body">
|
||||
<div class="uk-grid">
|
||||
<div class="uk-width-1-3@s">
|
||||
<img uk-img data-width data-height :src="candidateManga.mainCover" style="width:100%;margin-bottom:10px;">
|
||||
<a :href="`<%= mangadex_base_url %>/manga/${candidateManga.id}`" x-text="`ID: ${candidateManga.id}`" class="uk-link-muted"></a>
|
||||
</div>
|
||||
<div class="uk-width-2-3@s" uk-overflow-auto>
|
||||
<p x-text="candidateManga.description"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-modal-footer">
|
||||
<button class="uk-button uk-button-primary" type="button" @click="confirmManga(candidateManga.id)">Choose</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="selection-controls" class="uk-margin" hidden>
|
||||
<div class="uk-margin">
|
||||
<button class="uk-button uk-button-default" onclick="selectAll()">Select All</button>
|
||||
<button class="uk-button uk-button-default" onclick="unselect()">Clear Selections</button>
|
||||
<button class="uk-button uk-button-primary" id="download-btn" onclick="download()">Download Selected</button>
|
||||
<div id="download-spinner" uk-spinner class="uk-margin-left" hidden></div>
|
||||
</div>
|
||||
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
|
||||
</div>
|
||||
<p id="filter-notification" hidden></p>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto" hidden>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>Title</th>
|
||||
<th>Language</th>
|
||||
<th>Group</th>
|
||||
<th>Volume</th>
|
||||
<th>Chapter</th>
|
||||
<th>Timestamp</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script>
|
||||
var baseURL = "<%= mangadex_base_url %>".replace(/\/$/, "");
|
||||
</script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
|
||||
<%= render_component "moment" %>
|
||||
<%= render_component "jquery-ui" %>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/download.js"></script>
|
||||
<% end %>
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<dd>Update <code>config.yml</code> located at: <code><%= Config.current.path %></code></dd>
|
||||
<dt style="font-weight: 500;">Can't see your files yet?</dt>
|
||||
<dd>
|
||||
You must wait <%= Config.current.scan_interval %> minutes for the library scan to complete
|
||||
You must wait <%= Config.current.scan_interval_minutes %> minutes for the library scan to complete
|
||||
<% if is_admin %>
|
||||
, or manually re-scan from <a href="<%= base_url %>admin">Admin</a>
|
||||
<% end %>.
|
||||
@@ -77,8 +77,7 @@
|
||||
<%- end -%>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jQuery.dotdotdot/4.0.11/dotdotdot.js"></script>
|
||||
<script src="<%= base_url %>js/dots.js"></script>
|
||||
<%= render_component "dots" %>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/title.js"></script>
|
||||
<% end %>
|
||||
|
||||
@@ -11,12 +11,12 @@
<ul class="uk-nav-parent-icon uk-nav-primary uk-nav-center uk-margin-auto-vertical" uk-nav>
<li><a href="<%= base_url %>">Home</a></li>
<li><a href="<%= base_url %>library">Library</a></li>
<li><a href="<%= base_url %>tags">Tags</a></li>
<% if is_admin %>
<li><a href="<%= base_url %>admin">Admin</a></li>
<li class="uk-parent">
<a href="#">Download</a>
<ul class="uk-nav-sub">
<li><a href="<%= base_url %>download">MangaDex</a></li>
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
</ul>
@@ -36,10 +36,11 @@
<div class="uk-navbar-toggle" uk-navbar-toggle-icon="uk-navbar-toggle-icon" uk-toggle="target: #mobile-nav"></div>
</div>
<div class="uk-navbar-left uk-visible@s">
<a class="uk-navbar-item uk-logo" href="<%= base_url %>"><img src="<%= base_url %>img/icon.png"></a>
<a class="uk-navbar-item uk-logo" href="<%= base_url %>"><img src="<%= base_url %>img/icon.png" style="width:90px;height:90px;"></a>
<ul class="uk-navbar-nav">
<li><a href="<%= base_url %>">Home</a></li>
<li><a href="<%= base_url %>library">Library</a></li>
<li><a href="<%= base_url %>tags">Tags</a></li>
<% if is_admin %>
<li><a href="<%= base_url %>admin">Admin</a></li>
<li>
@@ -47,7 +48,6 @@
<div class="uk-navbar-dropdown">
<ul class="uk-nav uk-navbar-dropdown-nav">
<li class="uk-nav-header">Source</li>
<li><a href="<%= base_url %>download">MangaDex</a></li>
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
<li class="uk-nav-divider"></li>
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
@@ -67,7 +67,7 @@
</div>
<div class="uk-section uk-section-small">
</div>
<div class="uk-section uk-section-small" id="main-section">
<div class="uk-section uk-section-small" style="position:relative;">
<div class="uk-container uk-container-small">
<div id="alert"></div>
<%= content %>
@@ -80,9 +80,7 @@
setTheme();
const base_url = "<%= base_url %>";
</script>
<script src="<%= base_url %>js/uikit.min.js"></script>
<script src="<%= base_url %>js/uikit-icons.min.js"></script>

<%= render_component "uikit" %>
<%= yield_content "script" %>
</body>
@@ -24,8 +24,7 @@
</div>

<% content_for "script" do %>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jQuery.dotdotdot/4.0.11/dotdotdot.js"></script>
<script src="<%= base_url %>js/dots.js"></script>
<%= render_component "dots" %>
<script src="<%= base_url %>js/search.js"></script>
<script src="<%= base_url %>js/sort-items.js"></script>
<% end %>

@@ -1,6 +1,7 @@
<!DOCTYPE html>
<html>

<% page = "Login" %>
<%= render_component "head" %>

<body>
@@ -29,8 +30,7 @@
<script>
setTheme();
</script>
<script src="<%= base_url %>js/uikit.min.js"></script>
<script src="<%= base_url %>js/uikit-icons.min.js"></script>
<%= render_component "uikit" %>
</body>

</html>
src/views/mangadex.html.ecr (new file, +39 lines)
@@ -0,0 +1,39 @@
<div x-data="component()" x-init="init()">
<h2 class="uk-title">Connect to MangaDex</h2>
<div class"uk-grid-small" uk-grid x-show="!loading" x-cloak>
<div class="uk-width-1-2@s" x-show="!expires">
<p>This step is optional but highly recommended if you are using the MangaDex downloader. Connecting to MangaDex allows you to:</p>
<ul>
<li>Search MangaDex by search terms in addition to manga IDs</li>
<li>Automatically download new chapters when they are available (coming soon)</li>
</ul>
</div>

<div class="uk-width-1-2@s" x-show="expires">
<p>
<span x-show="!expired">You have logged in to MangaDex!</span>
<span x-show="expired">You have logged in to MangaDex but the token has expired.</span>
The expiration date of your token is <code x-text="moment.unix(expires).format('MMMM Do YYYY, HH:mm:ss')"></code>.
<span x-show="!expired">If the integration is not working, you</span>
<span x-show="expired">You</span>
can log in again and the token will be updated.
</p>
</div>

<div class="uk-width-1-2@s">
<div class="uk-margin">
<div class="uk-inline uk-width-1-1"><span class="uk-form-icon" uk-icon="icon:user"></span><input class="uk-input uk-form-large" type="text" x-model="username" @keydown.enter.debounce="login()"></div>
</div>
<div class="uk-margin">
<div class="uk-inline uk-width-1-1"><span class="uk-form-icon" uk-icon="icon:lock"></span><input class="uk-input uk-form-large" type="password" x-model="password" @keydown.enter.debounce="login()"></div>
</div>
<div class="uk-margin"><button class="uk-button uk-button-primary uk-button-large uk-width-1-1" @click="login()" :disabled="loggingIn">Login to MangaDex</button></div>
</div>
</div>
</div>

<% content_for "script" do %>
<%= render_component "moment" %>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/mangadex.js"></script>
<% end %>
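The view above expects `js/mangadex.js` to define the Alpine.js `component()` it binds to, with the state (`username`, `password`, `expires`, `expired`, `loading`, `loggingIn`) and methods (`init()`, `login()`) referenced by the markup. The following is a hedged sketch of what such a component could look like; the `api/admin/mangadex/...` paths are invented placeholders rather than Mango's real API, and `base_url` is the global constant set in the layout shown earlier.

```js
// Rough sketch of the Alpine.js component consumed by the template above.
// Property names mirror the x-model / x-show bindings in the view; the
// API paths are hypothetical placeholders, not Mango's actual endpoints.
function component() {
  return {
    loading: true,
    loggingIn: false,
    username: '',
    password: '',
    expires: null,  // Unix timestamp of the stored token's expiry, if any
    expired: false,

    setExpires(ts) {
      this.expires = ts;
      this.expired = ts ? moment.unix(ts).isBefore(moment()) : false;
    },

    async init() {
      try {
        // Ask the server whether a MangaDex token is already stored.
        const res = await fetch(`${base_url}api/admin/mangadex/expires`); // placeholder path
        const data = await res.json();
        this.setExpires(data.expires);
      } finally {
        this.loading = false;
      }
    },

    async login() {
      this.loggingIn = true;
      try {
        const res = await fetch(`${base_url}api/admin/mangadex/login`, { // placeholder path
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ username: this.username, password: this.password })
        });
        if (!res.ok) throw new Error(`login failed with status ${res.status}`);
        const data = await res.json();
        this.setExpires(data.expires);
      } catch (e) {
        console.error(e); // the real view reports errors via js/alert.js
      } finally {
        this.loggingIn = false;
      }
    }
  };
}
```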
src/views/missing-items.html.ecr (new file, +40 lines)
@@ -0,0 +1,40 @@
<div x-data="component()" x-init="load()" x-cloak x-show="!loading">
<p x-show="empty" class="uk-text-lead uk-text-center">No missing items found.</p>
<div x-show="!empty">
<p>The following items were present in your library, but now we can't find them anymore. If you deleted them mistakenly, try to recover the files or folders, put them back to where they were, and rescan the library. Otherwise, you can safely delete them and the associated metadata using the buttons below to free up database space.</p>
<button class="uk-button uk-button-danger" @click="rmAll()">Delete All</button>
<table class="uk-table uk-table-striped uk-overflow-auto">
<thead>
<tr>
<th>Type</th>
<th>Relative Path</th>
<th>ID</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
<template x-for="title in titles" :key="title">
<tr :id="`title-${title.id}`">
<td>Title</td>
<td x-text="title.path"></td>
<td x-text="title.id"></td>
<td><a @click="rm($event)" uk-icon="trash"></a></td>
</tr>
</template>
<template x-for="entry in entries" :key="entry">
<tr :id="`entry-${entry.id}`">
<td>Entry</td>
<td x-text="entry.path"></td>
<td x-text="entry.id"></td>
<td><a @click="rm($event)" uk-icon="trash"></a></td>
</tr>
</template>
</tbody>
</table>
</div>
</div>

<% content_for "script" do %>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/missing-items.js"></script>
<% end %>
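Likewise, `js/missing-items.js` is expected to supply the `component()` used above, with the `titles`, `entries`, `empty`, and `loading` state and the `load()`, `rm()`, and `rmAll()` methods the template binds to. A minimal sketch under the assumption of a hypothetical `api/admin/missing` endpoint (again, not Mango's real API):

```js
// Rough sketch of the Alpine.js component behind the missing-items view.
// State and method names follow the bindings in the template above; the
// API paths are hypothetical placeholders, not Mango's actual endpoints.
function component() {
  return {
    loading: true,
    empty: true,
    titles: [],
    entries: [],

    async load() {
      this.loading = true;
      const res = await fetch(`${base_url}api/admin/missing`); // placeholder path
      const data = await res.json();
      this.titles = data.titles;
      this.entries = data.entries;
      this.empty = this.titles.length === 0 && this.entries.length === 0;
      this.loading = false;
    },

    // Delete a single missing title or entry; the surrounding <tr> id
    // (e.g. `title-xxx` or `entry-xxx`) tells the server what to remove.
    async rm(event) {
      const row = event.currentTarget.closest('tr');
      await fetch(`${base_url}api/admin/missing/${row.id}`, { method: 'DELETE' }); // placeholder
      await this.load();
    },

    // Delete all missing items and their metadata in one go.
    async rmAll() {
      await fetch(`${base_url}api/admin/missing`, { method: 'DELETE' }); // placeholder
      await this.load();
    }
  };
}
```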
@@ -29,7 +29,7 @@
<link rel="http://opds-spec.org/image" href="<%= e.cover_url %>" />
<link rel="http://opds-spec.org/image/thumbnail" href="<%= e.cover_url %>" />

<link rel="http://opds-spec.org/acquisition" href="<%= base_url %>opds/download/<%= e.book.id %>/<%= e.id %>" title="Read" type="<%= MIME.from_filename e.zip_path %>" />
<link rel="http://opds-spec.org/acquisition" href="<%= base_url %>api/download/<%= e.book.id %>/<%= e.id %>" title="Read" type="<%= MIME.from_filename e.zip_path %>" />

<link type="text/html" rel="alternate" title="Read in Mango" href="<%= base_url %>reader/<%= e.book.id %>/<%= e.id %>" />
<link type="text/html" rel="alternate" title="Open in Mango" href="<%= base_url %>book/<%= e.book.id %>" />

@@ -56,8 +56,10 @@
<div id="download-spinner" uk-spinner class="uk-margin-left" hidden></div>
</div>
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
<table class="uk-table uk-table-striped uk-overflow-auto tablesorter">
</table>
<div class="uk-overflow-auto">
<table class="uk-table uk-table-striped tablesorter">
</table>
</div>
</div>
<% end %>

@@ -68,7 +70,7 @@
var pid = "<%= plugin.not_nil!.info.id %>";
</script>
<% end %>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
<%= render_component "jquery-ui" %>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.tablesorter/2.31.3/js/jquery.tablesorter.combined.min.js"></script>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/plugin-download.js"></script>
Some files were not shown because too many files have changed in this diff.