Mirror of https://github.com/hkalexling/Mango.git (synced 2026-01-24 00:03:14 -05:00)

Compare commits (540 commits)
9  .ameba.yml  Normal file
@@ -0,0 +1,9 @@
Lint/UselessAssign:
  Excluded:
    - src/routes/*
    - src/server.cr
Lint/UnusedArgument:
  Excluded:
    - src/routes/*
Metrics/CyclomaticComplexity:
  Enabled: false
2  .dockerignore  Normal file
@@ -0,0 +1,2 @@
node_modules
lib
5  .github/FUNDING.yml  vendored  Normal file
@@ -0,0 +1,5 @@
# These are supported funding model platforms

open_collective: mango
patreon: hkalexling
ko_fi: hkalexling
2  .github/ISSUE_TEMPLATE/bug_report.md  vendored
@@ -26,7 +26,7 @@ A clear and concise description of what you expected to happen.
- Mango Version [e.g. v0.1.0]

**Docker (if you are running Mango in a Docker container)**
- The `docker-compose.yml` file you are using
- The `docker-compose.yml` file you are using, or your `.env` file.

**Additional context**
Add any other context about the problem here. Add screenshots if applicable.
5  .github/ISSUE_TEMPLATE/feature_request.md  vendored
@@ -8,10 +8,13 @@ assignees: ''
---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
A clear and concise description of what the problem is. E.g. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe a small use-case for this feature request**
How would you imagine this to be used? What would be the advantage of this for the users of the application?

**Additional context**
Add any other context or screenshots about the feature request here.
BIN  .github/screenshots/dark.png  vendored  Normal file
Binary file not shown. (After: 598 KiB)
40  .github/workflows/build.yml  vendored  Normal file
@@ -0,0 +1,40 @@
name: Build

on:
  push:
    branches: [ master, dev ]
  pull_request:
    branches: [ master, dev ]

jobs:
  build:
    if: "!contains(github.event.head_commit.message, 'skip ci')"

    runs-on: ubuntu-latest
    container:
      image: crystallang/crystal:0.35.1-alpine

    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies
        run: apk add --no-cache yarn yaml sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
      - name: Build
        run: make static || make static
      - name: Linter
        run: make check
      - name: Run tests
        run: make test
      - name: Upload binary
        uses: actions/upload-artifact@v2
        with:
          name: mango
          path: mango
      - name: build arm32v7 object file
        run: make arm32v7 || make arm32v7
      - name: build arm64v8 object file
        run: make arm64v8 || make arm64v8
      - name: Upload object files
        uses: actions/upload-artifact@v2
        with:
          name: object files
          path: ./*.o
19  .github/workflows/dockerhub.yml  vendored  Normal file
@@ -0,0 +1,19 @@
name: Publish Dockerhub
on:
  release:
    types: [published]
jobs:
  update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@master
      - name: Get release version
        id: get_version
        run: echo "RELEASE_VERSION=$(echo ${GITHUB_REF:10})" >> $GITHUB_ENV
      - name: Publish to Dockerhub
        uses: elgohr/Publish-Docker-Github-Action@master
        with:
          name: hkalexling/mango
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          tags: "latest,${{ env.RELEASE_VERSION }}"
6  .gitignore  vendored
@@ -7,4 +7,8 @@ node_modules
yarn.lock
dist
mango
docker-compose.yml
.env
*.md
public/css/uikit.css
public/img/*.svg
public/js/*.min.js
21  Dockerfile
@@ -1,18 +1,15 @@
FROM crystallang/crystal:0.32.0

RUN apt-get update && apt-get install -y curl

RUN curl -sL https://deb.nodesource.com/setup_10.x | bash -
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list

RUN apt-get update && apt-get install -y nodejs yarn libsqlite3-dev
FROM crystallang/crystal:0.35.1-alpine AS builder

WORKDIR /Mango

COPY . .
COPY package*.json .
RUN apk add --no-cache yarn yaml sqlite-static libarchive-dev libarchive-static acl-static expat-static zstd-static lz4-static bzip2-static libjpeg-turbo-dev libpng-dev tiff-dev
RUN make static || make static

RUN make && make install
FROM library/alpine

CMD ["mango"]
WORKDIR /

COPY --from=builder /Mango/mango .

CMD ["./mango"]
14  Dockerfile.arm32v7  Normal file
@@ -0,0 +1,14 @@
FROM arm32v7/ubuntu:18.04

RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev

RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.35.1 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.2.0 && make && cd ..

COPY mango-arm32v7.o .

RUN cc 'mango-arm32v7.o' -o 'mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/arm-linux-gnueabihf/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib

CMD ["./mango"]
14  Dockerfile.arm64v8  Normal file
@@ -0,0 +1,14 @@
FROM arm64v8/ubuntu:18.04

RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev

RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.35.1 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.2.0 && make && cd ..

COPY mango-arm64v8.o .

RUN cc 'mango-arm64v8.o' -o 'mango' -rdynamic -lxml2 -L/image_size.cr/ext/libwebp -lwebp -L/image_size.cr/ext/stbi -lstbi /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/aarch64-linux-gnu/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib

CMD ["./mango"]
27  Makefile
@@ -1,4 +1,4 @@
PREFIX=/usr/local
PREFIX ?= /usr/local
INSTALL_DIR=$(PREFIX)/bin

all: uglify | build
@@ -7,15 +7,36 @@ uglify:
    yarn
    yarn uglify

setup: libs
    yarn
    yarn gulp dev

build: libs
    crystal build src/mango.cr --release --progress
    crystal build src/mango.cr --release --progress --error-trace

static: uglify | libs
    crystal build src/mango.cr --release --progress --static --error-trace

libs:
    shards install
    shards install --production

run:
    crystal run src/mango.cr --error-trace

test:
    crystal spec

check:
    crystal tool format --check
    ./bin/ameba
    ./dev/linewidth.sh

arm32v7:
    crystal build src/mango.cr --release --progress --error-trace --cross-compile --target='arm-linux-gnueabihf' -o mango-arm32v7

arm64v8:
    crystal build src/mango.cr --release --progress --error-trace --cross-compile --target='aarch64-linux-gnu' -o mango-arm64v8

install:
    cp mango $(INSTALL_DIR)/mango
97  README.md
@@ -1,33 +1,47 @@


# Mango


[](https://www.patreon.com/hkalexling)  [](https://gitter.im/mango-cr/mango?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)

Mango is a self-hosted manga server and reader. Its features include

- Multi-user support
- Supports both `.zip` and `.cbz` formats
- OPDS support
- Dark/light mode switch
- Supported formats: `.cbz`, `.zip`, `.cbr` and `.rar`
- Supports nested folders in library
- Automatically stores reading progress
- Thumbnail generation
- Built-in [MangaDex](https://mangadex.org/) downloader
- Supports [plugins](https://github.com/hkalexling/mango-plugins) to download from third-party sites
- The web reader is responsive and works well on mobile, so there is no need for a mobile app
- All the static files are embedded in the binary, so the deployment process is easy and painless

Please check the [Wiki](https://github.com/hkalexling/Mango/wiki) for more information.

## Installation

### Pre-built Binary

1. Simply download the pre-built binary file `mango` for the latest [release](https://github.com/hkalexling/Mango/releases). All the dependencies are statically linked, and it should work with most Linux systems on amd64.
Simply download the pre-built binary file `mango` for the latest [release](https://github.com/hkalexling/Mango/releases). All the dependencies are statically linked, and it should work with most Linux systems on amd64.

### Docker

1. Make sure you have docker installed and running. You will also need `docker-compose`
2. Clone the repository
3. Copy `docker-compose.example.yml` to `docker-compose.yml`
4. Modify the `volumes` in `docker-compose.yml` to point the directories to desired locations on the host machine
3. Copy the `env.example` file to `.env`
4. Fill out the values in the `.env` file. Note that the main and config directories will be created if they don't already exist. The files in these folders will be owned by the root user
5. Run `docker-compose up`. This should build the docker image and start the container with Mango running inside
6. Head over to `localhost:9000` to log in
6. Head over to `localhost:9000` (or a different port if you changed it) to log in

### Docker (via Dockerhub)

The official docker images are available on [Dockerhub](https://hub.docker.com/r/hkalexling/mango).

### Build from source

1. Make sure you have Crystal, Node and Yarn installed. You might also need to install the development headers for `libsqlite3` and `libyaml`.
1. Make sure you have `crystal`, `shards` and `yarn` installed. You might also need to install the development headers of some libraries. Please see the [Dockerfile](https://github.com/hkalexling/Mango/blob/master/Dockerfile) for the full list of dependencies
2. Clone the repository
3. `make && sudo make install`
4. Start Mango by running the command `mango`
@@ -38,11 +52,21 @@ Mango is a self-hosted manga server and reader. Its features include
### CLI

```
Mango e-manga server/reader. Version 0.1.0
Mango - Manga Server and Web Reader. Version 0.17.1

    -v, --version              Show version
    -h, --help                 Show help
    -c PATH, --config=PATH     Path to the config file. Default is `~/.config/mango/config.yml`
Usage:

  mango [sub_command] [options]

Options:

  -c PATH, --config=PATH    Path to the config file [type:String]
  -h, --help                Show this help.
  -v, --version             Show version.

Sub Commands:

  admin    Run admin tools
```

### Config
@@ -52,29 +76,47 @@ The default config file location is `~/.config/mango/config.yml`. It might be di
```yaml
---
port: 9000
base_url: /
session_secret: mango-session-secret
library_path: ~/mango/library
db_path: ~/mango/mango.db
scan_interval_minutes: 5
thumbnail_generation_interval_hours: 24
db_optimization_interval_hours: 24
log_level: info
upload_path: ~/mango/uploads
plugin_path: ~/mango/plugins
download_timeout_seconds: 30
mangadex:
  base_url: https://mangadex.org
  api_url: https://mangadex.org/api
  download_wait_seconds: 5
  download_retries: 4
  download_queue_db_path: /home/alex_ling/mango/queue.db
  chapter_rename_rule: '[Vol.{volume} ][Ch.{chapter} ]{title|id}'
  manga_rename_rule: '{title}'
```

- `scan_interval_minutes` can be any non-negative integer. Setting it to `0` disables the periodic scan
- `scan_interval_minutes`, `thumbnail_generation_interval_hours` and `db_optimization_interval_hours` can be any non-negative integer. Setting them to `0` disables the periodic tasks
- `log_level` can be `debug`, `info`, `warn`, `error`, `fatal` or `off`. Setting it to `off` disables the logging

### Required Library Structure
### Library Structure

Please make sure that your library directory has the following structure:
You can organize your archive files in nested folders in the library directory. Here's an example:

```
.
├── Manga 1
│  └── Manga 1.cbz
│  ├── Volume 1.cbz
│  ├── Volume 2.cbz
│  ├── Volume 3.cbz
│  └── Volume 4.zip
└── Manga 2
    ├── Vol 0001.zip
    ├── Vol 0002.zip
    ├── Vol 0003.zip
    ├── Vol 0004.zip
    └── Vol 0005.zip
  └── Vol. 1
  └── Ch.1 - Ch.3
  ├── 1.zip
  ├── 2.zip
  └── 3.zip
```

### Initial Login
@@ -91,6 +133,10 @@ Title:


Dark mode:


Reader:

@@ -98,3 +144,14 @@ Reader:

Mobile UI:


## Sponsors

<a href="https://casinoshunter.com/online-casinos/"><img src="https://i.imgur.com/EJb3wBo.png" width="150" height="auto"></a>
<a href="https://www.browserstack.com/open-source"><img src="https://i.imgur.com/hGJUJXD.png" width="150" height="auto"></a>

## Contributors

Please check the [development guideline](https://github.com/hkalexling/Mango/wiki/Development) if you are interested in code contributions.

[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/0)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/1)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/2)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/3)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/4)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/5)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/6)[](https://sourcerer.io/fame/hkalexling/hkalexling/Mango/links/7)
5  dev/linewidth.sh  Executable file
@@ -0,0 +1,5 @@
#!/bin/sh

[ ! -z "$(grep '.\{80\}' --exclude-dir=lib --include="*.cr" -nr --color=always . | grep -v "routes/api.cr" | tee /dev/tty)" ] \
  && echo "The above lines exceed the 80 characters limit" \
  || exit 0
@@ -7,9 +7,9 @@ services:
      context: .
      dockerfile: ./Dockerfile
    expose:
      - 9000
      - ${PORT}
    ports:
      - 9000:9000
      - "${PORT}:9000"
    volumes:
      - ~/mango:/root/mango
      - ~/.config/mango:/root/.config/mango
      - ${MAIN_DIRECTORY_PATH}:/root/mango
      - ${CONFIG_DIRECTORY_PATH}:/root/.config/mango
10  env.example  Normal file
@@ -0,0 +1,10 @@
# Port that exposes the HTTP frontend
PORT=9000

# Path to the mango main directory
# This directory holds the database and the library files
MAIN_DIRECTORY_PATH=

# Path to the mango config directory
# This directory holds the mango configuration path
CONFIG_DIRECTORY_PATH=
67  gulpfile.js
@@ -1,27 +1,70 @@
const gulp = require('gulp');
const minify = require("gulp-babel-minify");
const babel = require('gulp-babel');
const minify = require('gulp-babel-minify');
const minifyCss = require('gulp-minify-css');
const less = require('gulp-less');

gulp.task('minify-js', () => {
  return gulp.src('public/js/*.js')
    .pipe(minify())
// Copy libraries from node_modules to public/js
gulp.task('copy-js', () => {
  return gulp.src([
    'node_modules/@fortawesome/fontawesome-free/js/fontawesome.min.js',
    'node_modules/@fortawesome/fontawesome-free/js/solid.min.js',
    'node_modules/uikit/dist/js/uikit.min.js',
    'node_modules/uikit/dist/js/uikit-icons.min.js'
  ])
    .pipe(gulp.dest('public/js'));
});

// Copy UIKit SVG icons to public/img
gulp.task('copy-uikit-icons', () => {
  return gulp.src('node_modules/uikit/src/images/backgrounds/*.svg')
    .pipe(gulp.dest('public/img'));
});

// Compile less
gulp.task('less', () => {
  return gulp.src('public/css/*.less')
    .pipe(less())
    .pipe(gulp.dest('public/css'));
});

// Transpile and minify JS files and output to dist
gulp.task('babel', () => {
  return gulp.src(['public/js/*.js', '!public/js/*.min.js'])
    .pipe(babel({
      presets: [
        ['@babel/preset-env', {
          targets: '>0.25%, not dead, ios>=9'
        }]
      ],
    }))
    .pipe(minify({
      removeConsole: true,
      builtIns: false
    }))
    .pipe(gulp.dest('dist/js'));
});

// Minify CSS and output to dist
gulp.task('minify-css', () => {
  return gulp.src('public/css/*.css')
    .pipe(minifyCss())
    .pipe(gulp.dest('dist/css'));
});

gulp.task('img', () => {
  return gulp.src('public/img/*')
    .pipe(gulp.dest('dist/img'));
});

gulp.task('favicon', () => {
  return gulp.src('public/favicon.ico')
// Copy static files (including images) to dist
gulp.task('copy-files', () => {
  return gulp.src(['public/img/*', 'public/*.*', 'public/js/*.min.js'], {
    base: 'public'
  })
    .pipe(gulp.dest('dist'));
});

gulp.task('default', gulp.parallel('minify-js', 'minify-css', 'img', 'favicon'));
// Set up the public folder for development
gulp.task('dev', gulp.parallel('copy-js', 'copy-uikit-icons', 'less'));

// Set up the dist folder for deployment
gulp.task('deploy', gulp.parallel('babel', 'minify-css', 'copy-files'));

// Default task
gulp.task('default', gulp.series('dev', 'deploy'));
36  package.json
@@ -1,16 +1,24 @@
{
  "name": "mango",
  "version": "1.0.0",
  "main": "index.js",
  "repository": "https://github.com/hkalexling/Mango.git",
  "author": "Alex Ling <hkalexling@gmail.com>",
  "license": "MIT",
  "devDependencies": {
    "gulp": "^4.0.2",
    "gulp-babel-minify": "^0.5.1",
    "gulp-minify-css": "^1.2.4"
  },
  "scripts": {
    "uglify": "gulp"
  }
  "name": "mango",
  "version": "1.0.0",
  "main": "index.js",
  "repository": "https://github.com/hkalexling/Mango.git",
  "author": "Alex Ling <hkalexling@gmail.com>",
  "license": "MIT",
  "devDependencies": {
    "@babel/preset-env": "^7.11.5",
    "gulp": "^4.0.2",
    "gulp-babel": "^8.0.0",
    "gulp-babel-minify": "^0.5.1",
    "gulp-less": "^4.0.1",
    "gulp-minify-css": "^1.2.4",
    "less": "^3.11.3"
  },
  "scripts": {
    "uglify": "gulp"
  },
  "dependencies": {
    "@fortawesome/fontawesome-free": "^5.14.0",
    "uikit": "^3.5.4"
  }
}
@@ -1,28 +1,154 @@
|
||||
.uk-alert-close {
|
||||
color: black !important;
|
||||
}
|
||||
|
||||
.uk-card-body {
|
||||
padding: 20px;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.uk-card-media-top {
|
||||
max-height: 350px;
|
||||
overflow: hidden;
|
||||
width: 100%;
|
||||
height: 250px;
|
||||
}
|
||||
.acard:hover {
|
||||
text-decoration: none;
|
||||
|
||||
@media (min-width: 600px) {
|
||||
.uk-card-media-top {
|
||||
height: 300px;
|
||||
}
|
||||
}
|
||||
.uk-list li {
|
||||
cursor: pointer;
|
||||
}
|
||||
.reader-bg {
|
||||
background-color: black;
|
||||
}
|
||||
#scan-status {
|
||||
cursor: auto;
|
||||
|
||||
.uk-card-media-top>img {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.uk-card-title {
|
||||
word-wrap: break-word;
|
||||
max-height: 3em;
|
||||
}
|
||||
.uk-logo > img {
|
||||
max-height: 90px;
|
||||
|
||||
.acard:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.uk-list li:not(.nopointer) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#scan-status {
|
||||
cursor: auto;
|
||||
}
|
||||
|
||||
.reader-bg {
|
||||
background-color: black;
|
||||
}
|
||||
|
||||
.break-word {
|
||||
word-wrap: break-word;
|
||||
}
|
||||
|
||||
.uk-logo>img {
|
||||
height: 90px;
|
||||
width: 90px;
|
||||
}
|
||||
|
||||
.uk-search {
|
||||
width: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#selectable .ui-selecting {
|
||||
background: #EEE6B9;
|
||||
}
|
||||
|
||||
#selectable .ui-selected {
|
||||
background: #F4E487;
|
||||
}
|
||||
|
||||
.uk-light #selectable .ui-selecting {
|
||||
background: #5E5731;
|
||||
}
|
||||
|
||||
.uk-light #selectable .ui-selected {
|
||||
background: #9D9252;
|
||||
}
|
||||
|
||||
td>.uk-dropdown {
|
||||
white-space: pre-line;
|
||||
}
|
||||
|
||||
#edit-modal .uk-grid>div {
|
||||
height: 300px;
|
||||
}
|
||||
|
||||
#edit-modal #cover {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
#edit-modal #cover-upload {
|
||||
height: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
#edit-modal .uk-modal-body .uk-inline {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.item .uk-card-title {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.grayscale {
|
||||
filter: grayscale(100%);
|
||||
}
|
||||
|
||||
.uk-light .uk-navbar-dropdown,
|
||||
.uk-light .uk-modal-header,
|
||||
.uk-light .uk-modal-body,
|
||||
.uk-light .uk-modal-footer {
|
||||
background: #222;
|
||||
}
|
||||
|
||||
.uk-light .uk-dropdown {
|
||||
background: #333;
|
||||
}
|
||||
|
||||
.uk-light .uk-navbar-dropdown,
|
||||
.uk-light .uk-dropdown {
|
||||
color: #ccc;
|
||||
}
|
||||
|
||||
.uk-light .uk-nav-header,
|
||||
.uk-light .uk-description-list>dt {
|
||||
color: #555;
|
||||
}
|
||||
|
||||
[x-cloak] {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#select-bar-controls a {
|
||||
transform: scale(1.5, 1.5);
|
||||
}
|
||||
|
||||
#select-bar-controls a:hover {
|
||||
color: orange;
|
||||
}
|
||||
|
||||
#main-section {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
#totop-wrapper {
|
||||
position: absolute;
|
||||
top: 100vh;
|
||||
right: 2em;
|
||||
bottom: 0;
|
||||
}
|
||||
|
||||
#totop-wrapper a {
|
||||
position: fixed;
|
||||
position: sticky;
|
||||
top: calc(100vh - 5em);
|
||||
}
|
||||
|
||||
45  public/css/uikit.less  Normal file
@@ -0,0 +1,45 @@
@import "node_modules/uikit/src/less/uikit.theme.less";

.label {
  display: inline-block;
  padding: @label-padding-vertical @label-padding-horizontal;
  background: @label-background;
  line-height: @label-line-height;
  font-size: @label-font-size;
  color: @label-color;
  vertical-align: middle;
  white-space: nowrap;
  .hook-label;
}

.label-success {
  background-color: @label-success-background;
  color: @label-success-color;
}

.label-warning {
  background-color: @label-warning-background;
  color: @label-warning-color;
}

.label-danger {
  background-color: @label-danger-background;
  color: @label-danger-color;
}

.label-pending {
  background-color: @global-secondary-background;
  color: @global-inverse-color;
}

@internal-divider-icon-image: "../img/divider-icon.svg";
@internal-form-select-image: "../img/form-select.svg";
@internal-form-datalist-image: "../img/form-datalist.svg";
@internal-form-radio-image: "../img/form-radio.svg";
@internal-form-checkbox-image: "../img/form-checkbox.svg";
@internal-form-checkbox-indeterminate-image: "../img/form-checkbox-indeterminate.svg";
@internal-nav-parent-close-image: "../img/nav-parent-close.svg";
@internal-nav-parent-open-image: "../img/nav-parent-open.svg";
@internal-list-bullet-image: "../img/list-bullet.svg";
@internal-accordion-open-image: "../img/accordion-open.svg";
@internal-accordion-close-image: "../img/accordion-close.svg";
Binary file not shown. (Before: 11 KiB, After: 11 KiB)
BIN  public/img/loading.gif  Normal file
Binary file not shown. (After: 272 KiB)
@@ -1,25 +1,68 @@
|
||||
var scanning = false;
|
||||
function scan() {
|
||||
scanning = true;
|
||||
$('#scan-status > div').removeAttr('hidden');
|
||||
$('#scan-status > span').attr('hidden', '');
|
||||
var color = $('#scan').css('color');
|
||||
$('#scan').css('color', 'gray');
|
||||
$.post('/api/admin/scan', function (data) {
|
||||
var ms = data.milliseconds;
|
||||
var titles = data.titles;
|
||||
$('#scan-status > span').text('Scanned ' + titles + ' titles in ' + ms + 'ms');
|
||||
$('#scan-status > span').removeAttr('hidden');
|
||||
$('#scan').css('color', color);
|
||||
$('#scan-status > div').attr('hidden', '');
|
||||
scanning = false;
|
||||
});
|
||||
}
|
||||
$(function() {
|
||||
$('li').click(function() {
|
||||
url = $(this).attr('data-url');
|
||||
if (url) {
|
||||
$(location).attr('href', url);
|
||||
}
|
||||
$(() => {
|
||||
const setting = loadThemeSetting();
|
||||
$('#theme-select').val(capitalize(setting));
|
||||
$('#theme-select').change((e) => {
|
||||
const newSetting = $(e.currentTarget).val().toLowerCase();
|
||||
saveThemeSetting(newSetting);
|
||||
setTheme();
|
||||
});
|
||||
|
||||
getProgress();
|
||||
setInterval(getProgress, 5000);
|
||||
});
|
||||
|
||||
/**
|
||||
* Capitalize String
|
||||
*
|
||||
* @function capitalize
|
||||
* @param {string} str - The string to be capitalized
|
||||
* @return {string} The capitalized string
|
||||
*/
|
||||
const capitalize = (str) => {
|
||||
return str.charAt(0).toUpperCase() + str.slice(1);
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the thumbnail generation progress from the API
|
||||
*
|
||||
* @function getProgress
|
||||
*/
|
||||
const getProgress = () => {
|
||||
$.get(`${base_url}api/admin/thumbnail_progress`)
|
||||
.then(data => {
|
||||
setProp('progress', data.progress);
|
||||
const generating = data.progress > 0
|
||||
setProp('generating', generating);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Trigger the thumbnail generation
|
||||
*
|
||||
* @function generateThumbnails
|
||||
*/
|
||||
const generateThumbnails = () => {
|
||||
setProp('generating', true);
|
||||
setProp('progress', 0.0);
|
||||
$.post(`${base_url}api/admin/generate_thumbnails`)
|
||||
.then(getProgress);
|
||||
};
|
||||
|
||||
/**
|
||||
* Trigger the scan
|
||||
*
|
||||
* @function scan
|
||||
*/
|
||||
const scan = () => {
|
||||
setProp('scanning', true);
|
||||
setProp('scanMs', -1);
|
||||
setProp('scanTitles', 0);
|
||||
$.post(`${base_url}api/admin/scan`)
|
||||
.then(data => {
|
||||
setProp('scanMs', data.milliseconds);
|
||||
setProp('scanTitles', data.titles);
|
||||
})
|
||||
.always(() => {
|
||||
setProp('scanning', false);
|
||||
});
|
||||
}
|
||||
|
||||
6  public/js/alert.js  Normal file
@@ -0,0 +1,6 @@
const alert = (level, text) => {
  $('#alert').empty();
  const html = `<div class="uk-alert-${level}" uk-alert><a class="uk-alert-close" uk-close></a><p>${text}</p></div>`;
  $('#alert').append(html);
  $("html, body").animate({ scrollTop: 0 });
};
147
public/js/common.js
Normal file
147
public/js/common.js
Normal file
@@ -0,0 +1,147 @@
|
||||
/**
|
||||
* --- Alpine helper functions
|
||||
*/
|
||||
|
||||
/**
|
||||
* Set an alpine.js property
|
||||
*
|
||||
* @function setProp
|
||||
* @param {string} key - Key of the data property
|
||||
* @param {*} prop - The data property
|
||||
* @param {string} selector - The jQuery selector to the root element
|
||||
*/
|
||||
const setProp = (key, prop, selector = '#root') => {
|
||||
$(selector).get(0).__x.$data[key] = prop;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get an alpine.js property
|
||||
*
|
||||
* @function getProp
|
||||
* @param {string} key - Key of the data property
|
||||
* @param {string} selector - The jQuery selector to the root element
|
||||
* @return {*} The data property
|
||||
*/
|
||||
const getProp = (key, selector = '#root') => {
|
||||
return $(selector).get(0).__x.$data[key];
|
||||
};
|
||||
|
||||
/**
|
||||
* --- Theme related functions
|
||||
* Note: In the comments below we treat "theme" and "theme setting"
|
||||
* differently. A theme can have only two values, either "dark" or
|
||||
* "light", while a theme setting can have the third value "system".
|
||||
*/
|
||||
|
||||
/**
|
||||
* Check if the system setting prefers dark theme.
|
||||
* from https://flaviocopes.com/javascript-detect-dark-mode/
|
||||
*
|
||||
* @function preferDarkMode
|
||||
* @return {bool}
|
||||
*/
|
||||
const preferDarkMode = () => {
|
||||
return window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||
};
|
||||
|
||||
/**
|
||||
* Check whether a given string represents a valid theme setting
|
||||
*
|
||||
* @function validThemeSetting
|
||||
* @param {string} theme - The string representing the theme setting
|
||||
* @return {bool}
|
||||
*/
|
||||
const validThemeSetting = (theme) => {
|
||||
return ['dark', 'light', 'system'].indexOf(theme) >= 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Load theme setting from local storage, or use 'light'
|
||||
*
|
||||
* @function loadThemeSetting
|
||||
* @return {string} A theme setting ('dark', 'light', or 'system')
|
||||
*/
|
||||
const loadThemeSetting = () => {
|
||||
let str = localStorage.getItem('theme');
|
||||
if (!str || !validThemeSetting(str)) str = 'system';
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Load the current theme (not theme setting)
|
||||
*
|
||||
* @function loadTheme
|
||||
* @return {string} The current theme to use ('dark' or 'light')
|
||||
*/
|
||||
const loadTheme = () => {
|
||||
let setting = loadThemeSetting();
|
||||
if (setting === 'system') {
|
||||
setting = preferDarkMode() ? 'dark' : 'light';
|
||||
}
|
||||
return setting;
|
||||
};
|
||||
|
||||
/**
|
||||
* Save a theme setting
|
||||
*
|
||||
* @function saveThemeSetting
|
||||
* @param {string} setting - A theme setting
|
||||
*/
|
||||
const saveThemeSetting = setting => {
|
||||
if (!validThemeSetting(setting)) setting = 'system';
|
||||
localStorage.setItem('theme', setting);
|
||||
};
|
||||
|
||||
/**
|
||||
* Toggle the current theme. When the current theme setting is 'system', it
|
||||
* will be changed to either 'light' or 'dark'
|
||||
*
|
||||
* @function toggleTheme
|
||||
*/
|
||||
const toggleTheme = () => {
|
||||
const theme = loadTheme();
|
||||
const newTheme = theme === 'dark' ? 'light' : 'dark';
|
||||
saveThemeSetting(newTheme);
|
||||
setTheme(newTheme);
|
||||
};
|
||||
|
||||
/**
|
||||
* Apply a theme, or load a theme and then apply it
|
||||
*
|
||||
* @function setTheme
|
||||
* @param {string?} theme - (Optional) The theme to apply. When omitted, use
|
||||
* `loadTheme` to get a theme and apply it.
|
||||
*/
|
||||
const setTheme = (theme) => {
|
||||
if (!theme) theme = loadTheme();
|
||||
if (theme === 'dark') {
|
||||
$('html').css('background', 'rgb(20, 20, 20)');
|
||||
$('body').addClass('uk-light');
|
||||
$('.uk-card').addClass('uk-card-secondary');
|
||||
$('.uk-card').removeClass('uk-card-default');
|
||||
$('.ui-widget-content').addClass('dark');
|
||||
} else {
|
||||
$('html').css('background', '');
|
||||
$('body').removeClass('uk-light');
|
||||
$('.uk-card').removeClass('uk-card-secondary');
|
||||
$('.uk-card').addClass('uk-card-default');
|
||||
$('.ui-widget-content').removeClass('dark');
|
||||
}
|
||||
};
|
||||
|
||||
// do it before document is ready to prevent the initial flash of white on
|
||||
// most pages
|
||||
setTheme();
|
||||
$(() => {
|
||||
// hack for the reader page
|
||||
setTheme();
|
||||
|
||||
// on system dark mode setting change
|
||||
if (window.matchMedia) {
|
||||
window.matchMedia('(prefers-color-scheme: dark)')
|
||||
.addEventListener('change', event => {
|
||||
if (loadThemeSetting() === 'system')
|
||||
setTheme(event.matches ? 'dark' : 'light');
|
||||
});
|
||||
}
|
||||
});
|
||||
26  public/js/dots.js  Normal file
@@ -0,0 +1,26 @@
/**
 * Truncate a .uk-card-title element
 *
 * @function truncate
 * @param {object} e - The title element to truncate
 */
const truncate = (e) => {
  $(e).dotdotdot({
    truncate: 'letter',
    watch: true,
    callback: (truncated) => {
      if (truncated) {
        $(e).attr('uk-tooltip', $(e).attr('data-title'));
      } else {
        $(e).removeAttr('uk-tooltip');
      }
    }
  });
};

$('.uk-card-title').each((i, e) => {
  // Truncate the title when it first enters the view
  $(e).one('inview', () => {
    truncate(e);
  });
});
124
public/js/download-manager.js
Normal file
124
public/js/download-manager.js
Normal file
@@ -0,0 +1,124 @@
|
||||
/**
|
||||
* Get the current queue and update the view
|
||||
*
|
||||
* @function load
|
||||
*/
|
||||
const load = () => {
|
||||
try {
|
||||
setProp('loading', true);
|
||||
} catch {}
|
||||
$.ajax({
|
||||
type: 'GET',
|
||||
url: base_url + 'api/admin/mangadex/queue',
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (!data.success && data.error) {
|
||||
alert('danger', `Failed to fetch download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
setProp('jobs', data.jobs);
|
||||
setProp('paused', data.paused);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to fetch download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
setProp('loading', false);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Perform an action on either a specific job or the entire queue
|
||||
*
|
||||
* @function jobAction
|
||||
* @param {string} action - The action to perform. Should be either 'delete' or 'retry'
|
||||
* @param {string?} id - (Optional) A job ID. When omitted, apply the action to the queue
|
||||
*/
|
||||
const jobAction = (action, id) => {
|
||||
let url = `${base_url}api/admin/mangadex/queue/${action}`;
|
||||
if (id !== undefined)
|
||||
url += '?' + $.param({
|
||||
id: id
|
||||
});
|
||||
console.log(url);
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (!data.success && data.error) {
|
||||
alert('danger', `Failed to ${action} job from download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
load();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to ${action} job from download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Pause/resume the download
|
||||
*
|
||||
* @function toggle
|
||||
*/
|
||||
const toggle = () => {
|
||||
setProp('toggling', true);
|
||||
const action = getProp('paused') ? 'resume' : 'pause';
|
||||
const url = `${base_url}api/admin/mangadex/queue/${action}`;
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to ${action} download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
load();
|
||||
setProp('toggling', false);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the uk-label class name for a given job status
|
||||
*
|
||||
* @function statusClass
|
||||
* @param {string} status - The job status
|
||||
* @return {string} The class name string
|
||||
*/
|
||||
const statusClass = status => {
|
||||
let cls = 'label ';
|
||||
switch (status) {
|
||||
case 'Pending':
|
||||
cls += 'label-pending';
|
||||
break;
|
||||
case 'Completed':
|
||||
cls += 'label-success';
|
||||
break;
|
||||
case 'Error':
|
||||
cls += 'label-danger';
|
||||
break;
|
||||
case 'MissingPages':
|
||||
cls += 'label-warning';
|
||||
break;
|
||||
}
|
||||
return cls;
|
||||
};
|
||||
|
||||
$(() => {
|
||||
const ws = new WebSocket(`ws://${location.host}/api/admin/mangadex/queue`);
|
||||
ws.onmessage = event => {
|
||||
const data = JSON.parse(event.data);
|
||||
setProp('jobs', data.jobs);
|
||||
setProp('paused', data.paused);
|
||||
};
|
||||
ws.onerror = err => {
|
||||
alert('danger', `Socket connection failed. Error: ${err}`);
|
||||
};
|
||||
ws.onclose = err => {
|
||||
alert('danger', 'Socket connection failed');
|
||||
};
|
||||
});
|
||||
305
public/js/download.js
Normal file
305
public/js/download.js
Normal file
@@ -0,0 +1,305 @@
|
||||
$(() => {
|
||||
$('#search-input').keypress(event => {
|
||||
if (event.which === 13) {
|
||||
search();
|
||||
}
|
||||
});
|
||||
$('.filter-field').each((i, ele) => {
|
||||
$(ele).change(() => {
|
||||
buildTable();
|
||||
});
|
||||
});
|
||||
});
|
||||
const selectAll = () => {
|
||||
$('tbody > tr').each((i, e) => {
|
||||
$(e).addClass('ui-selected');
|
||||
});
|
||||
};
|
||||
const unselect = () => {
|
||||
$('tbody > tr').each((i, e) => {
|
||||
$(e).removeClass('ui-selected');
|
||||
});
|
||||
};
|
||||
const download = () => {
|
||||
const selected = $('tbody > tr.ui-selected');
|
||||
if (selected.length === 0) return;
|
||||
UIkit.modal.confirm(`Download ${selected.length} selected chapters?`).then(() => {
|
||||
$('#download-btn').attr('hidden', '');
|
||||
$('#download-spinner').removeAttr('hidden');
|
||||
const ids = selected.map((i, e) => {
|
||||
return $(e).find('td').first().text();
|
||||
}).get();
|
||||
const chapters = globalChapters.filter(c => ids.indexOf(c.id) >= 0);
|
||||
console.log(ids);
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: base_url + 'api/admin/mangadex/download',
|
||||
data: JSON.stringify({
|
||||
chapters: chapters
|
||||
}),
|
||||
contentType: "application/json",
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
console.log(data);
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to add chapters to the download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
const successCount = parseInt(data.success);
|
||||
const failCount = parseInt(data.fail);
|
||||
UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. Proceed to the download manager?`).then(() => {
|
||||
window.location.href = base_url + 'admin/downloads';
|
||||
});
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
$('#download-spinner').attr('hidden', '');
|
||||
$('#download-btn').removeAttr('hidden');
|
||||
});
|
||||
});
|
||||
};
|
||||
const toggleSpinner = () => {
|
||||
var attr = $('#spinner').attr('hidden');
|
||||
if (attr) {
|
||||
$('#spinner').removeAttr('hidden');
|
||||
$('#search-btn').attr('hidden', '');
|
||||
} else {
|
||||
$('#search-btn').removeAttr('hidden');
|
||||
$('#spinner').attr('hidden', '');
|
||||
}
|
||||
searching = !searching;
|
||||
};
|
||||
var searching = false;
|
||||
var globalChapters;
|
||||
const search = () => {
|
||||
if (searching) {
|
||||
return;
|
||||
}
|
||||
$('#manga-details').attr('hidden', '');
|
||||
$('#filter-form').attr('hidden', '');
|
||||
$('table').attr('hidden', '');
|
||||
$('#selection-controls').attr('hidden', '');
|
||||
$('#filter-notification').attr('hidden', '');
|
||||
toggleSpinner();
|
||||
const input = $('input').val();
|
||||
|
||||
if (input === "") {
|
||||
toggleSpinner();
|
||||
return;
|
||||
}
|
||||
|
||||
var int_id = -1;
|
||||
|
||||
try {
|
||||
const path = new URL(input).pathname;
|
||||
const match = /\/(?:title|manga)\/([0-9]+)/.exec(path);
|
||||
int_id = parseInt(match[1]);
|
||||
} catch (e) {
|
||||
int_id = parseInt(input);
|
||||
}
|
||||
|
||||
if (int_id <= 0 || isNaN(int_id)) {
|
||||
alert('danger', 'Please make sure you are using a valid manga ID or manga URL from Mangadex.');
|
||||
toggleSpinner();
|
||||
return;
|
||||
}
|
||||
|
||||
$.getJSON(`${base_url}api/admin/mangadex/manga/${int_id}`)
|
||||
.done((data) => {
|
||||
if (data.error) {
|
||||
alert('danger', 'Failed to get manga info. Error: ' + data.error);
|
||||
return;
|
||||
}
|
||||
|
||||
const cover = baseURL + data.cover_url;
|
||||
$('#cover').attr("src", cover);
|
||||
$('#title').text("Title: " + data.title);
|
||||
$('#artist').text("Artist: " + data.artist);
|
||||
$('#author').text("Author: " + data.author);
|
||||
|
||||
$('#manga-details').removeAttr('hidden');
|
||||
|
||||
console.log(data.chapters);
|
||||
globalChapters = data.chapters;
|
||||
|
||||
let langs = new Set();
|
||||
let group_names = new Set();
|
||||
data.chapters.forEach(chp => {
|
||||
Object.entries(chp.groups).forEach(([k, v]) => {
|
||||
group_names.add(k);
|
||||
});
|
||||
langs.add(chp.language);
|
||||
});
|
||||
|
||||
const comp = (a, b) => {
|
||||
var ai;
|
||||
var bi;
|
||||
try {
|
||||
ai = parseFloat(a);
|
||||
} catch (e) {}
|
||||
try {
|
||||
bi = parseFloat(b);
|
||||
} catch (e) {}
|
||||
if (typeof ai === 'undefined') return -1;
|
||||
if (typeof bi === 'undefined') return 1;
|
||||
if (ai < bi) return 1;
|
||||
if (ai > bi) return -1;
|
||||
return 0;
|
||||
};
|
||||
|
||||
langs = [...langs].sort();
|
||||
group_names = [...group_names].sort();
|
||||
|
||||
langs.unshift('All');
|
||||
group_names.unshift('All');
|
||||
|
||||
$('select#lang-select').append(langs.map(e => `<option>${e}</option>`).join(''));
|
||||
$('select#group-select').append(group_names.map(e => `<option>${e}</option>`).join(''));
|
||||
|
||||
$('#filter-form').removeAttr('hidden');
|
||||
|
||||
buildTable();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to get manga info. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
toggleSpinner();
|
||||
});
|
||||
};
|
||||
const parseRange = str => {
|
||||
const regex = /^[\t ]*(?:(?:(<|<=|>|>=)[\t ]*([0-9]+))|(?:([0-9]+))|(?:([0-9]+)[\t ]*-[\t ]*([0-9]+))|(?:[\t ]*))[\t ]*$/m;
|
||||
const matches = str.match(regex);
|
||||
var num;
|
||||
|
||||
if (!matches) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
} else if (typeof matches[1] !== 'undefined' && typeof matches[2] !== 'undefined') {
|
||||
// e.g., <= 30
|
||||
num = parseInt(matches[2]);
|
||||
if (isNaN(num)) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
}
|
||||
switch (matches[1]) {
|
||||
case '<':
|
||||
return [null, num - 1];
|
||||
case '<=':
|
||||
return [null, num];
|
||||
case '>':
|
||||
return [num + 1, null];
|
||||
case '>=':
|
||||
return [num, null];
|
||||
}
|
||||
} else if (typeof matches[3] !== 'undefined') {
|
||||
// a single number
|
||||
num = parseInt(matches[3]);
|
||||
if (isNaN(num)) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
}
|
||||
return [num, num];
|
||||
} else if (typeof matches[4] !== 'undefined' && typeof matches[5] !== 'undefined') {
|
||||
// e.g., 10 - 23
|
||||
num = parseInt(matches[4]);
|
||||
const n2 = parseInt(matches[5]);
|
||||
if (isNaN(num) || isNaN(n2) || num > n2) {
|
||||
alert('danger', `Failed to parse filter input ${str}`);
|
||||
return [null, null];
|
||||
}
|
||||
return [num, n2];
|
||||
} else {
|
||||
// empty or space only
|
||||
return [null, null];
|
||||
}
|
||||
};
|
||||
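A quick sanity check of parseRange above; the expected pairs follow directly from its branches (a sketch, not part of the original file):
// parseRange('<= 30')   => [null, 30]
// parseRange('> 5')     => [6, null]
// parseRange('12')      => [12, 12]
// parseRange('10 - 23') => [10, 23]
// parseRange('   ')     => [null, null]
console.assert(JSON.stringify(parseRange('10 - 23')) === '[10,23]');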
const getFilters = () => {
|
||||
const filters = {};
|
||||
$('.uk-select').each((i, ele) => {
|
||||
const id = $(ele).attr('id');
|
||||
const by = id.split('-')[0];
|
||||
const choice = $(ele).val();
|
||||
filters[by] = choice;
|
||||
});
|
||||
filters.volume = parseRange($('#volume-range').val());
|
||||
filters.chapter = parseRange($('#chapter-range').val());
|
||||
return filters;
|
||||
};
|
||||
const buildTable = () => {
|
||||
$('table').attr('hidden', '');
|
||||
$('#selection-controls').attr('hidden', '');
|
||||
$('#filter-notification').attr('hidden', '');
|
||||
console.log('rebuilding table');
|
||||
const filters = getFilters();
|
||||
console.log('filters:', filters);
|
||||
var chapters = globalChapters.slice();
|
||||
Object.entries(filters).forEach(([k, v]) => {
|
||||
if (v === 'All') return;
|
||||
if (k === 'group') {
|
||||
chapters = chapters.filter(c => {
|
||||
const unescaped_groups = Object.entries(c.groups).map(([g, id]) => unescapeHTML(g));
|
||||
return unescaped_groups.indexOf(v) >= 0;
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (k === 'lang') {
|
||||
chapters = chapters.filter(c => c.language === v);
|
||||
return;
|
||||
}
|
||||
const lb = parseFloat(v[0]);
|
||||
const ub = parseFloat(v[1]);
|
||||
if (isNaN(lb) && isNaN(ub)) return;
|
||||
chapters = chapters.filter(c => {
|
||||
const val = parseFloat(c[k]);
|
||||
if (isNaN(val)) return false;
|
||||
if (isNaN(lb))
|
||||
return val <= ub;
|
||||
else if (isNaN(ub))
|
||||
return val >= lb;
|
||||
else
|
||||
return val >= lb && val <= ub;
|
||||
});
|
||||
});
|
||||
console.log('filtered chapters:', chapters);
|
||||
$('#count-text').text(`${chapters.length} chapters found`);
|
||||
|
||||
const chaptersLimit = 1000;
|
||||
if (chapters.length > chaptersLimit) {
|
||||
$('#filter-notification').text(`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters in this manga. Please use the filter options above to narrow down your search.`);
|
||||
$('#filter-notification').removeAttr('hidden');
|
||||
return;
|
||||
}
|
||||
|
||||
const inner = chapters.map(chp => {
|
||||
const group_str = Object.entries(chp.groups).map(([k, v]) => {
|
||||
return `<a href="${baseURL}/group/${v}">${k}</a>`;
|
||||
}).join(' | ');
|
||||
return `<tr class="ui-widget-content">
|
||||
<td><a href="${baseURL}/chapter/${chp.id}">${chp.id}</a></td>
|
||||
<td>${chp.title}</td>
|
||||
<td>${chp.language}</td>
|
||||
<td>${group_str}</td>
|
||||
<td>${chp.volume}</td>
|
||||
<td>${chp.chapter}</td>
|
||||
<td>${moment.unix(chp.time).fromNow()}</td>
|
||||
</tr>`;
|
||||
}).join('');
|
||||
const tbody = `<tbody id="selectable">${inner}</tbody>`;
|
||||
$('tbody').remove();
|
||||
$('table').append(tbody);
|
||||
$('table').removeAttr('hidden');
|
||||
$("#selectable").selectable({
|
||||
filter: 'tr'
|
||||
});
|
||||
$('#selection-controls').removeAttr('hidden');
|
||||
};
|
||||
|
||||
const unescapeHTML = (str) => {
|
||||
var elt = document.createElement("span");
|
||||
elt.innerHTML = str;
|
||||
return elt.innerText;
|
||||
};
|
||||
public/js/plugin-download.js (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
const loadPlugin = id => {
|
||||
localStorage.setItem('plugin', id);
|
||||
const url = `${location.protocol}//${location.host}${location.pathname}`;
|
||||
const newURL = `${url}?${$.param({
|
||||
plugin: id
|
||||
})}`;
|
||||
window.location.href = newURL;
|
||||
};
|
||||
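loadPlugin above persists the selected plugin ID and reloads the page with it in the query string; a sketch of the rebuilt URL (the plugin ID and host are hypothetical):
const exampleUrl = `${location.protocol}//${location.host}${location.pathname}`;
console.log(`${exampleUrl}?${$.param({ plugin: 'mangasee' })}`);
// e.g. https://mango.example.com/admin/plugin?plugin=mangasee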
|
||||
$(() => {
|
||||
var storedID = localStorage.getItem('plugin');
|
||||
if (storedID && storedID !== pid) {
|
||||
loadPlugin(storedID);
|
||||
} else {
|
||||
$('#controls').removeAttr('hidden');
|
||||
}
|
||||
|
||||
$('#search-input').keypress(event => {
|
||||
if (event.which === 13) {
|
||||
search();
|
||||
}
|
||||
});
|
||||
$('#plugin-select').val(pid);
|
||||
$('#plugin-select').change(() => {
|
||||
const id = $('#plugin-select').val();
|
||||
loadPlugin(id);
|
||||
});
|
||||
});
|
||||
|
||||
let mangaTitle = "";
|
||||
let searching = false;
|
||||
const search = () => {
|
||||
if (searching)
|
||||
return;
|
||||
|
||||
const query = $.param({
|
||||
query: $('#search-input').val(),
|
||||
plugin: pid
|
||||
});
|
||||
$.ajax({
|
||||
type: 'GET',
|
||||
url: `${base_url}api/admin/plugin/list?${query}`,
|
||||
contentType: "application/json",
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
console.log(data);
|
||||
if (data.error) {
|
||||
alert('danger', `Search failed. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
mangaTitle = data.title;
|
||||
$('#title-text').text(data.title);
|
||||
buildTable(data.chapters);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Search failed. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {});
|
||||
};
|
||||
|
||||
const buildTable = (chapters) => {
|
||||
$('#table').attr('hidden', '');
|
||||
$('table').empty();
|
||||
|
||||
const keys = Object.keys(chapters[0]).map(k => `<th>${k}</th>`).join('');
|
||||
const thead = `<thead><tr>${keys}</tr></thead>`;
|
||||
$('table').append(thead);
|
||||
|
||||
const rows = chapters.map(ch => {
|
||||
const tds = Object.values(ch).map(v => `<td>${v}</td>`).join('');
|
||||
return `<tr data-id="${ch.id}" data-title="${ch.title}">${tds}</tr>`;
|
||||
}).join('');
|
||||
const tbody = `<tbody id="selectable">${rows}</tbody>`;
|
||||
$('table').append(tbody);
|
||||
|
||||
$('#selectable').selectable({
|
||||
filter: 'tr'
|
||||
});
|
||||
|
||||
$('#table table').tablesorter();
|
||||
$('#table').removeAttr('hidden');
|
||||
};
|
||||
|
||||
const selectAll = () => {
|
||||
$('tbody > tr').each((i, e) => {
|
||||
$(e).addClass('ui-selected');
|
||||
});
|
||||
};
|
||||
|
||||
const unselect = () => {
|
||||
$('tbody > tr').each((i, e) => {
|
||||
$(e).removeClass('ui-selected');
|
||||
});
|
||||
};
|
||||
|
||||
const download = () => {
|
||||
const selected = $('tbody > tr.ui-selected');
|
||||
if (selected.length === 0) return;
|
||||
UIkit.modal.confirm(`Download ${selected.length} selected chapters?`).then(() => {
|
||||
$('#download-btn').attr('hidden', '');
|
||||
$('#download-spinner').removeAttr('hidden');
|
||||
const chapters = selected.map((i, e) => {
|
||||
return {
|
||||
id: $(e).attr('data-id'),
|
||||
title: $(e).attr('data-title')
|
||||
}
|
||||
}).get();
|
||||
console.log(chapters);
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: base_url + 'api/admin/plugin/download',
|
||||
data: JSON.stringify({
|
||||
plugin: pid,
|
||||
chapters: chapters,
|
||||
title: mangaTitle
|
||||
}),
|
||||
contentType: "application/json",
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
console.log(data);
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to add chapters to the download queue. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
const successCount = parseInt(data.success);
|
||||
const failCount = parseInt(data.fail);
|
||||
UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. Proceed to the download manager?`).then(() => {
|
||||
window.location.href = base_url + 'admin/downloads';
|
||||
});
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
$('#download-spinner').attr('hidden', '');
|
||||
$('#download-btn').removeAttr('hidden');
|
||||
});
|
||||
});
|
||||
};
|
||||
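For clarity, the request body assembled in download() above has this shape (values are illustrative only):
// POST <base_url>api/admin/plugin/download
// {
//   "plugin": "mangasee",
//   "chapters": [ { "id": "123", "title": "Chapter 1" } ],
//   "title": "Some Manga"
// }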
@@ -1,77 +1,293 @@
|
||||
$(function() {
|
||||
function bind() {
|
||||
var controller = new ScrollMagic.Controller();
|
||||
let lastSavedPage = page;
|
||||
let items = [];
|
||||
let longPages = false;
|
||||
|
||||
// replace history on scroll
|
||||
$('img').each(function(idx){
|
||||
var scene = new ScrollMagic.Scene({
|
||||
triggerElement: $(this).get(),
|
||||
triggerHook: 'onEnter',
|
||||
reverse: true
|
||||
})
|
||||
.addTo(controller)
|
||||
.on('enter', function(event){
|
||||
current = $(event.target.triggerElement()).attr('id');
|
||||
replaceHistory(current);
|
||||
})
|
||||
.on('leave', function(event){
|
||||
var prev = $(event.target.triggerElement()).prev();
|
||||
current = $(prev).attr('id');
|
||||
replaceHistory(current);
|
||||
});
|
||||
});
|
||||
$(() => {
|
||||
getPages();
|
||||
|
||||
// poor man's infinite scroll
|
||||
var scene = new ScrollMagic.Scene({
|
||||
triggerElement: $('.next-url').get(),
|
||||
triggerHook: 'onEnter',
|
||||
offset: -500
|
||||
})
|
||||
.addTo(controller)
|
||||
.on('enter', function(){
|
||||
var nextURL = $('.next-url').attr('href');
|
||||
$('.next-url').remove();
|
||||
if (!nextURL) {
|
||||
console.log('No .next-url found. Reached end of page');
|
||||
var lastURL = $('img').last().attr('id');
|
||||
// load the reader URL for the last page to update reading progress to 100%
|
||||
$.get(lastURL);
|
||||
$('#next-btn').removeAttr('hidden');
|
||||
return;
|
||||
}
|
||||
$('#hidden').load(encodeURI(nextURL) + ' .uk-container', function(res, status, xhr){
|
||||
if (status === 'error') console.log(xhr.statusText);
|
||||
if (status === 'success') {
|
||||
console.log(nextURL + ' loaded');
|
||||
// new page loaded to #hidden, we now append it
|
||||
$('.uk-section > .uk-container').append($('#hidden .uk-container').children());
|
||||
$('#hidden').empty();
|
||||
bind();
|
||||
}
|
||||
});
|
||||
});
|
||||
$('#page-select').change(() => {
|
||||
const p = parseInt($('#page-select').val());
|
||||
toPage(p);
|
||||
});
|
||||
|
||||
$('#mode-select').change(() => {
|
||||
const mode = $('#mode-select').val();
|
||||
const curIdx = parseInt($('#page-select').val());
|
||||
|
||||
updateMode(mode, curIdx);
|
||||
});
|
||||
});
|
||||
|
||||
$(window).resize(() => {
|
||||
const mode = getProp('mode');
|
||||
if (mode === 'continuous') return;
|
||||
|
||||
const wideScreen = $(window).width() > $(window).height();
|
||||
const propMode = wideScreen ? 'height' : 'width';
|
||||
setProp('mode', propMode);
|
||||
});
|
||||
|
||||
/**
|
||||
* Update the reader mode
|
||||
*
|
||||
* @function updateMode
|
||||
 * @param {string} mode - The mode. Can be one of the following:
|
||||
* {'continuous', 'paged', 'height', 'width'}
|
||||
* @param {number} targetPage - The one-based index of the target page
|
||||
*/
|
||||
const updateMode = (mode, targetPage) => {
|
||||
localStorage.setItem('mode', mode);
|
||||
|
||||
// The mode to be put into the `mode` prop. It can't be `screen`
|
||||
let propMode = mode;
|
||||
|
||||
if (mode === 'paged') {
|
||||
const wideScreen = $(window).width() > $(window).height();
|
||||
propMode = wideScreen ? 'height' : 'width';
|
||||
}
|
||||
|
||||
bind();
|
||||
});
|
||||
$('#page-select').change(function(){
|
||||
jumpTo(parseInt($('#page-select').val()));
|
||||
});
|
||||
function showControl(idx) {
|
||||
setProp('mode', propMode);
|
||||
|
||||
if (mode === 'continuous') {
|
||||
waitForPage(items.length, () => {
|
||||
setupScroller();
|
||||
});
|
||||
}
|
||||
|
||||
waitForPage(targetPage, () => {
|
||||
setTimeout(() => {
|
||||
toPage(targetPage);
|
||||
}, 100);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Get dimension of the pages in the entry from the API and update the view
|
||||
*/
|
||||
const getPages = () => {
|
||||
$.get(`${base_url}api/dimensions/${tid}/${eid}`)
|
||||
.then(data => {
|
||||
if (!data.success && data.error)
|
||||
throw new Error(data.error);
|
||||
const dimensions = data.dimensions;
|
||||
|
||||
items = dimensions.map((d, i) => {
|
||||
return {
|
||||
id: i + 1,
|
||||
url: `${base_url}api/page/${tid}/${eid}/${i+1}`,
|
||||
width: d.width,
|
||||
height: d.height
|
||||
};
|
||||
});
|
||||
|
||||
const avgRatio = items.reduce((acc, cur) => {
|
||||
return acc + cur.height / cur.width
|
||||
}, 0) / items.length;
|
||||
|
||||
console.log(avgRatio);
|
||||
longPages = avgRatio > 2;
|
||||
|
||||
setProp('items', items);
|
||||
setProp('loading', false);
|
||||
|
||||
const storedMode = localStorage.getItem('mode') || 'continuous';
|
||||
|
||||
setProp('mode', storedMode);
|
||||
updateMode(storedMode, page);
|
||||
$('#mode-select').val(storedMode);
|
||||
})
|
||||
.catch(e => {
|
||||
const errMsg = `Failed to get the page dimensions. ${e}`;
|
||||
console.error(e);
|
||||
setProp('alertClass', 'uk-alert-danger');
|
||||
setProp('msg', errMsg);
|
||||
})
|
||||
};
|
||||
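The long-page heuristic in getPages boils down to comparing the average height/width ratio against 2; a standalone sketch with made-up dimensions:
const sampleDims = [{ width: 800, height: 2400 }, { width: 800, height: 1800 }];
const sampleAvg = sampleDims.reduce((acc, d) => acc + d.height / d.width, 0) / sampleDims.length;
console.log(sampleAvg > 2); // true => treated as long (webtoon-style) pages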
|
||||
/**
|
||||
* Jump to a specific page
|
||||
*
|
||||
* @function toPage
|
||||
* @param {number} idx - One-based index of the page
|
||||
*/
|
||||
const toPage = (idx) => {
|
||||
const mode = getProp('mode');
|
||||
if (mode === 'continuous') {
|
||||
$(`#${idx}`).get(0).scrollIntoView(true);
|
||||
} else {
|
||||
if (idx >= 1 && idx <= items.length) {
|
||||
setProp('curItem', items[idx - 1]);
|
||||
}
|
||||
}
|
||||
replaceHistory(idx);
|
||||
UIkit.modal($('#modal-sections')).hide();
|
||||
};
|
||||
|
||||
/**
|
||||
* Check if a page exists every 100ms. If so, invoke the callback function.
|
||||
*
|
||||
* @function waitForPage
|
||||
* @param {number} idx - One-based index of the page
|
||||
* @param {function} cb - Callback function
|
||||
*/
|
||||
const waitForPage = (idx, cb) => {
|
||||
if ($(`#${idx}`).length > 0) return cb();
|
||||
setTimeout(() => {
|
||||
waitForPage(idx, cb)
|
||||
}, 100);
|
||||
};
|
||||
|
||||
/**
|
||||
* Show the control modal
|
||||
*
|
||||
* @function showControl
|
||||
 * @param {number} idx - One-based index of the current page
|
||||
*/
|
||||
const showControl = (idx) => {
|
||||
const pageCount = $('#page-select > option').length;
|
||||
const progressText = `Progress: ${idx}/${pageCount} (${(idx/pageCount * 100).toFixed(1)}%)`;
|
||||
$('#progress-label').text(progressText);
|
||||
$('#page-select').val(idx);
|
||||
UIkit.modal($('#modal-sections')).show();
|
||||
}
|
||||
function jumpTo(page) {
|
||||
var ary = window.location.pathname.split('/');
|
||||
ary[ary.length - 1] = page;
|
||||
ary.shift(); // remove leading `/`
|
||||
ary.unshift(window.location.origin);
|
||||
window.location.replace(ary.join('/'));
|
||||
}
|
||||
function replaceHistory(url) {
|
||||
history.replaceState(null, "", url);
|
||||
console.log('reading ' + url);
|
||||
}
|
||||
function redirect(url) {
|
||||
|
||||
/**
|
||||
* Redirect to a URL
|
||||
*
|
||||
* @function redirect
|
||||
* @param {string} url - The target URL
|
||||
*/
|
||||
const redirect = (url) => {
|
||||
window.location.replace(url);
|
||||
}
|
||||
|
||||
/**
|
||||
 * Replace the address bar history and save the reading progress if necessary
|
||||
*
|
||||
* @function replaceHistory
|
||||
* @param {number} idx - One-based index of the current page
|
||||
*/
|
||||
const replaceHistory = (idx) => {
|
||||
const ary = window.location.pathname.split('/');
|
||||
ary[ary.length - 1] = idx;
|
||||
ary.shift(); // remove leading `/`
|
||||
ary.unshift(window.location.origin);
|
||||
const url = ary.join('/');
|
||||
saveProgress(idx);
|
||||
history.replaceState(null, "", url);
|
||||
}
|
||||
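replaceHistory above only swaps the last path segment for the page index before calling history.replaceState; a sketch (the URL and IDs are illustrative):
const parts = '/reader/abcd/efgh/3'.split('/');
parts[parts.length - 1] = 7;
parts.shift();                              // drop the leading empty segment
parts.unshift('https://mango.example.com'); // hypothetical origin
console.log(parts.join('/'));               // https://mango.example.com/reader/abcd/efgh/7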
|
||||
/**
|
||||
* Set up the scroll handler that calls `replaceHistory` when an image
|
||||
* enters the view port
|
||||
*
|
||||
* @function setupScroller
|
||||
*/
|
||||
const setupScroller = () => {
|
||||
const mode = getProp('mode');
|
||||
if (mode !== 'continuous') return;
|
||||
$('#root img').each((idx, el) => {
|
||||
$(el).on('inview', (event, inView) => {
|
||||
if (inView) {
|
||||
const current = $(event.currentTarget).attr('id');
|
||||
|
||||
setProp('curItem', getProp('items')[current - 1]);
|
||||
replaceHistory(current);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Update the backend reading progress if:
|
||||
* 1) the current page is more than five pages away from the last
|
||||
* saved page, or
|
||||
* 2) the average height/width ratio of the pages is over 2, or
|
||||
* 3) the current page is the first page, or
|
||||
* 4) the current page is the last page
|
||||
*
|
||||
* @function saveProgress
|
||||
* @param {number} idx - One-based index of the page
|
||||
* @param {function} cb - Callback
|
||||
*/
|
||||
const saveProgress = (idx, cb) => {
|
||||
idx = parseInt(idx);
|
||||
if (Math.abs(idx - lastSavedPage) >= 5 ||
|
||||
longPages ||
|
||||
idx === 1 || idx === items.length
|
||||
) {
|
||||
lastSavedPage = idx;
|
||||
console.log('saving progress', idx);
|
||||
|
||||
const url = `${base_url}api/progress/${tid}/${idx}?${$.param({eid: eid})}`;
|
||||
$.ajax({
|
||||
method: 'PUT',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.error)
|
||||
alert('danger', data.error);
|
||||
if (cb) cb();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
}
|
||||
};
|
||||
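The four save conditions documented above can be read as a single predicate; a minimal sketch (shouldSave is a hypothetical helper, not part of the file):
const shouldSave = (idx, lastSaved, longPages, totalPages) =>
  Math.abs(idx - lastSaved) >= 5 || longPages || idx === 1 || idx === totalPages;
console.assert(shouldSave(6, 1, false, 100) === true);  // moved 5+ pages
console.assert(shouldSave(3, 1, false, 100) === false); // small move on normal pages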
|
||||
/**
|
||||
* Mark progress to 100% and redirect to the next entry
|
||||
* Used as the onclick handler for the "Next Entry" button
|
||||
*
|
||||
* @function nextEntry
|
||||
* @param {string} nextUrl - URL of the next entry
|
||||
*/
|
||||
const nextEntry = (nextUrl) => {
|
||||
saveProgress(items.length, () => {
|
||||
redirect(nextUrl);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Show the next or the previous page
|
||||
*
|
||||
* @function flipPage
|
||||
* @param {bool} isNext - Whether we are going to the next page
|
||||
*/
|
||||
const flipPage = (isNext) => {
|
||||
const curItem = getProp('curItem');
|
||||
const idx = parseInt(curItem.id);
|
||||
const delta = isNext ? 1 : -1;
|
||||
const newIdx = idx + delta;
|
||||
|
||||
toPage(newIdx);
|
||||
|
||||
if (isNext)
|
||||
setProp('flipAnimation', 'right');
|
||||
else
|
||||
setProp('flipAnimation', 'left');
|
||||
|
||||
setTimeout(() => {
|
||||
setProp('flipAnimation', null);
|
||||
}, 500);
|
||||
|
||||
replaceHistory(newIdx);
|
||||
saveProgress(newIdx);
|
||||
};
|
||||
|
||||
/**
|
||||
* Handle the global keydown events
|
||||
*
|
||||
* @function keyHandler
|
||||
* @param {event} event - The $event object
|
||||
*/
|
||||
const keyHandler = (event) => {
|
||||
const mode = getProp('mode');
|
||||
if (mode === 'continuous') return;
|
||||
|
||||
if (event.key === 'ArrowLeft' || event.key === 'k')
|
||||
flipPage(false);
|
||||
if (event.key === 'ArrowRight' || event.key === 'j')
|
||||
flipPage(true);
|
||||
};
|
||||
|
||||
public/js/sort-items.js (new file, 15 lines)
@@ -0,0 +1,15 @@
$(() => {
	$('#sort-select').change(() => {
		const sort = $('#sort-select').find(':selected').attr('id');
		const ary = sort.split('-');
		const by = ary[0];
		const dir = ary[1];

		const url = `${location.protocol}//${location.host}${location.pathname}`;
		const newURL = `${url}?${$.param({
			sort: by,
			ascend: dir === 'up' ? 1 : 0
		})}`;
		window.location.href = newURL;
	});
});
@@ -1,52 +1,254 @@
|
||||
$(() => {
|
||||
setupAcard();
|
||||
});
|
||||
|
||||
const setupAcard = () => {
|
||||
$('.acard.is_entry').click((e) => {
|
||||
if ($(e.target).hasClass('no-modal')) return;
|
||||
const card = $(e.target).closest('.acard');
|
||||
|
||||
showModal(
|
||||
$(card).attr('data-encoded-path'),
|
||||
parseInt($(card).attr('data-pages')),
|
||||
parseFloat($(card).attr('data-progress')),
|
||||
$(card).attr('data-encoded-book-title'),
|
||||
$(card).attr('data-encoded-title'),
|
||||
$(card).attr('data-book-id'),
|
||||
$(card).attr('data-id')
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
function showModal(encodedPath, pages, percentage, encodedeTitle, encodedEntryTitle, titleID, entryID) {
|
||||
const zipPath = decodeURIComponent(encodedPath);
|
||||
const title = decodeURIComponent(encodedeTitle);
|
||||
const entry = decodeURIComponent(encodedEntryTitle);
|
||||
$('#modal button, #modal a').each(function(){
|
||||
$('#modal button, #modal a').each(function() {
|
||||
$(this).removeAttr('hidden');
|
||||
});
|
||||
if (percentage === 0) {
|
||||
$('#continue-btn').attr('hidden', '');
|
||||
$('#unread-btn').attr('hidden', '');
|
||||
}
|
||||
else {
|
||||
} else if (percentage === 100) {
|
||||
$('#read-btn').attr('hidden', '');
|
||||
$('#continue-btn').attr('hidden', '');
|
||||
} else {
|
||||
$('#continue-btn').text('Continue from ' + percentage + '%');
|
||||
}
|
||||
if (percentage === 100) {
|
||||
$('#read-btn').attr('hidden', '');
|
||||
}
|
||||
$('#modal-title').text(title);
|
||||
|
||||
$('#modal-entry-title').find('span').text(entry);
|
||||
$('#modal-entry-title').next().attr('data-id', titleID);
|
||||
$('#modal-entry-title').next().attr('data-entry-id', entryID);
|
||||
$('#modal-entry-title').next().find('.title-rename-field').val(entry);
|
||||
$('#path-text').text(zipPath);
|
||||
$('#pages-text').text(pages + ' pages');
|
||||
|
||||
$('#beginning-btn').attr('href', '/reader/' + titleID + '/' + entryID + '/1');
|
||||
$('#continue-btn').attr('href', '/reader/' + titleID + '/' + entryID);
|
||||
$('#beginning-btn').attr('href', `${base_url}reader/${titleID}/${entryID}/1`);
|
||||
$('#continue-btn').attr('href', `${base_url}reader/${titleID}/${entryID}`);
|
||||
|
||||
$('#read-btn').click(function(){
|
||||
$('#read-btn').click(function() {
|
||||
updateProgress(titleID, entryID, pages);
|
||||
});
|
||||
$('#unread-btn').click(function(){
|
||||
$('#unread-btn').click(function() {
|
||||
updateProgress(titleID, entryID, 0);
|
||||
});
|
||||
|
||||
$('#modal-edit-btn').attr('onclick', `edit("${entryID}")`);
|
||||
|
||||
$('#modal-download-btn').attr('href', `${base_url}api/download/${titleID}/${entryID}`);
|
||||
|
||||
UIkit.modal($('#modal')).show();
|
||||
}
|
||||
function updateProgress(titleID, entryID, page) {
|
||||
$.post('/api/progress/' + titleID + '/' + entryID + '/' + page, function(data) {
|
||||
if (data.success) {
|
||||
|
||||
const updateProgress = (tid, eid, page) => {
|
||||
let url = `${base_url}api/progress/${tid}/${page}`
|
||||
const query = $.param({
|
||||
eid: eid
|
||||
});
|
||||
if (eid)
|
||||
url += `?${query}`;
|
||||
|
||||
$.ajax({
|
||||
method: 'PUT',
|
||||
url: url,
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.success) {
|
||||
location.reload();
|
||||
} else {
|
||||
error = data.error;
|
||||
alert('danger', error);
|
||||
}
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
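updateProgress above PUTs to the progress endpoint, appending ?eid= only when an entry ID is supplied; the resulting URLs look like this (IDs are hypothetical):
// entry-level: PUT <base_url>api/progress/<tid>/<page>?eid=<eid>
// title-level: PUT <base_url>api/progress/<tid>/<page>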
|
||||
const renameSubmit = (name, eid) => {
|
||||
const upload = $('.upload-field');
|
||||
const titleId = upload.attr('data-title-id');
|
||||
|
||||
console.log(name);
|
||||
|
||||
if (name.length === 0) {
|
||||
alert('danger', 'The display name should not be empty');
|
||||
return;
|
||||
}
|
||||
|
||||
const query = $.param({
|
||||
eid: eid
|
||||
});
|
||||
let url = `${base_url}api/admin/display_name/${titleId}/${name}`;
|
||||
if (eid)
|
||||
url += `?${query}`;
|
||||
|
||||
$.ajax({
|
||||
type: 'PUT',
|
||||
url: url,
|
||||
contentType: "application/json",
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to update display name. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
location.reload();
|
||||
}
|
||||
else {
|
||||
error = data.error;
|
||||
alert('danger', error);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to update display name. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
|
||||
const edit = (eid) => {
|
||||
const cover = $('#edit-modal #cover');
|
||||
let url = cover.attr('data-title-cover');
|
||||
let displayName = $('h2.uk-title > span').text();
|
||||
|
||||
if (eid) {
|
||||
const item = $(`#${eid}`);
|
||||
url = item.find('img').attr('data-src');
|
||||
displayName = item.find('.uk-card-title').attr('data-title');
|
||||
$('#title-progress-control').attr('hidden', '');
|
||||
} else {
|
||||
$('#title-progress-control').removeAttr('hidden');
|
||||
}
|
||||
|
||||
cover.attr('data-src', url);
|
||||
|
||||
const displayNameField = $('#display-name-field');
|
||||
displayNameField.attr('value', displayName);
|
||||
console.log(displayNameField);
|
||||
displayNameField.keyup(event => {
|
||||
if (event.keyCode === 13) {
|
||||
renameSubmit(displayNameField.val(), eid);
|
||||
}
|
||||
});
|
||||
}
|
||||
function alert(level, text) {
|
||||
hideAlert();
|
||||
var html = '<div class="uk-alert-' + level + '" uk-alert><a class="uk-alert-close" uk-close></a><p>' + text + '</p></div>';
|
||||
$('#alert').append(html);
|
||||
}
|
||||
function hideAlert() {
|
||||
$('#alert').empty();
|
||||
}
|
||||
displayNameField.siblings('a.uk-form-icon').click(() => {
|
||||
renameSubmit(displayNameField.val(), eid);
|
||||
});
|
||||
|
||||
setupUpload(eid);
|
||||
|
||||
UIkit.modal($('#edit-modal')).show();
|
||||
};
|
||||
|
||||
const setupUpload = (eid) => {
|
||||
const upload = $('.upload-field');
|
||||
const bar = $('#upload-progress').get(0);
|
||||
const titleId = upload.attr('data-title-id');
|
||||
const queryObj = {
|
||||
tid: titleId
|
||||
};
|
||||
if (eid)
|
||||
queryObj['eid'] = eid;
|
||||
const query = $.param(queryObj);
|
||||
const url = `${base_url}api/admin/upload/cover?${query}`;
|
||||
console.log(url);
|
||||
UIkit.upload('.upload-field', {
|
||||
url: url,
|
||||
name: 'file',
|
||||
error: (e) => {
|
||||
alert('danger', `Failed to upload cover image: ${e.toString()}`);
|
||||
},
|
||||
loadStart: (e) => {
|
||||
$(bar).removeAttr('hidden');
|
||||
bar.max = e.total;
|
||||
bar.value = e.loaded;
|
||||
},
|
||||
progress: (e) => {
|
||||
bar.max = e.total;
|
||||
bar.value = e.loaded;
|
||||
},
|
||||
loadEnd: (e) => {
|
||||
bar.max = e.total;
|
||||
bar.value = e.loaded;
|
||||
},
|
||||
completeAll: () => {
|
||||
$(bar).attr('hidden', '');
|
||||
location.reload();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const deselectAll = () => {
|
||||
$('.item .uk-card').each((i, e) => {
|
||||
const data = e.__x.$data;
|
||||
data['selected'] = false;
|
||||
});
|
||||
$('#select-bar')[0].__x.$data['count'] = 0;
|
||||
};
|
||||
|
||||
const selectAll = () => {
|
||||
let count = 0;
|
||||
$('.item .uk-card').each((i, e) => {
|
||||
const data = e.__x.$data;
|
||||
if (!data['disabled']) {
|
||||
data['selected'] = true;
|
||||
count++;
|
||||
}
|
||||
});
|
||||
$('#select-bar')[0].__x.$data['count'] = count;
|
||||
};
|
||||
|
||||
const selectedIDs = () => {
|
||||
const ary = [];
|
||||
$('.item .uk-card').each((i, e) => {
|
||||
const data = e.__x.$data;
|
||||
if (!data['disabled'] && data['selected']) {
|
||||
const item = $(e).closest('.item');
|
||||
ary.push($(item).attr('id'));
|
||||
}
|
||||
});
|
||||
return ary;
|
||||
};
|
||||
|
||||
const bulkProgress = (action, el) => {
|
||||
const tid = $(el).attr('data-id');
|
||||
const ids = selectedIDs();
|
||||
const url = `${base_url}api/bulk_progress/${action}/${tid}`;
|
||||
$.ajax({
|
||||
type: 'PUT',
|
||||
url: url,
|
||||
contentType: "application/json",
|
||||
dataType: 'json',
|
||||
data: JSON.stringify({
|
||||
ids: ids
|
||||
})
|
||||
})
|
||||
.done(data => {
|
||||
if (data.error) {
|
||||
alert('danger', `Failed to mark entries as ${action}. Error: ${data.error}`);
|
||||
return;
|
||||
}
|
||||
location.reload();
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to mark entries as ${action}. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
})
|
||||
.always(() => {
|
||||
deselectAll();
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1,16 +1,6 @@
|
||||
$(function(){
|
||||
var target = '/admin/user/edit';
|
||||
$(() => {
|
||||
var target = base_url + 'admin/user/edit';
|
||||
if (username) target += username;
|
||||
$('form').attr('action', target);
|
||||
|
||||
function alert(level, text) {
|
||||
hideAlert();
|
||||
var html = '<div class="uk-alert-' + level + '" uk-alert><a class="uk-alert-close" uk-close></a><p>' + text + '</p></div>';
|
||||
$('#alert').append(html);
|
||||
}
|
||||
function hideAlert() {
|
||||
$('#alert').empty();
|
||||
}
|
||||
|
||||
if (error) alert('danger', error);
|
||||
});
|
||||
|
||||
@@ -1,19 +1,16 @@
|
||||
function alert(level, text) {
|
||||
hideAlert();
|
||||
var html = '<div class="uk-alert-' + level + '" uk-alert><a class="uk-alert-close" uk-close></a><p>' + text + '</p></div>';
|
||||
$('#alert').append(html);
|
||||
}
|
||||
function hideAlert() {
|
||||
$('#alert').empty();
|
||||
}
|
||||
function remove(username) {
|
||||
$.post('/api/admin/user/delete/' + username, function(data) {
|
||||
if (data.success) {
|
||||
location.reload();
|
||||
}
|
||||
else {
|
||||
error = data.error;
|
||||
alert('danger', error);
|
||||
}
|
||||
});
|
||||
}
|
||||
const remove = (username) => {
|
||||
$.ajax({
|
||||
url: `${base_url}api/admin/user/delete/${username}`,
|
||||
type: 'DELETE',
|
||||
dataType: 'json'
|
||||
})
|
||||
.done(data => {
|
||||
if (data.success)
|
||||
location.reload();
|
||||
else
|
||||
alert('danger', data.error);
|
||||
})
|
||||
.fail((jqXHR, status) => {
|
||||
alert('danger', `Failed to delete the user. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
|
||||
});
|
||||
};
|
||||
|
||||
public/robots.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
User-agent: *
Disallow: /
shard.lock (82 changed lines)
@@ -1,30 +1,70 @@
|
||||
version: 1.0
|
||||
version: 2.0
|
||||
shards:
|
||||
db:
|
||||
github: crystal-lang/crystal-db
|
||||
version: 0.8.0
|
||||
ameba:
|
||||
git: https://github.com/crystal-ameba/ameba.git
|
||||
version: 0.12.1
|
||||
|
||||
exception_page:
|
||||
github: crystal-loot/exception_page
|
||||
version: 0.1.2
|
||||
|
||||
kemal:
|
||||
github: kemalcr/kemal
|
||||
version: 0.26.1
|
||||
|
||||
kemal-basic-auth:
|
||||
github: kemalcr/kemal-basic-auth
|
||||
version: 0.2.0
|
||||
|
||||
kilt:
|
||||
github: jeromegn/kilt
|
||||
archive:
|
||||
git: https://github.com/hkalexling/archive.cr.git
|
||||
version: 0.4.0
|
||||
|
||||
baked_file_system:
|
||||
git: https://github.com/schovi/baked_file_system.git
|
||||
version: 0.9.8+git.commit.fb3091b546797fbec3c25dc0e1e2cff60bb9033b
|
||||
|
||||
clim:
|
||||
git: https://github.com/at-grandpa/clim.git
|
||||
version: 0.12.0
|
||||
|
||||
db:
|
||||
git: https://github.com/crystal-lang/crystal-db.git
|
||||
version: 0.9.0
|
||||
|
||||
duktape:
|
||||
git: https://github.com/jessedoyle/duktape.cr.git
|
||||
version: 0.20.0
|
||||
|
||||
exception_page:
|
||||
git: https://github.com/crystal-loot/exception_page.git
|
||||
version: 0.1.4
|
||||
|
||||
http_proxy:
|
||||
git: https://github.com/mamantoha/http_proxy.git
|
||||
version: 0.7.1
|
||||
|
||||
image_size:
|
||||
git: https://github.com/hkalexling/image_size.cr.git
|
||||
version: 0.4.0
|
||||
|
||||
kemal:
|
||||
git: https://github.com/kemalcr/kemal.git
|
||||
version: 0.27.0
|
||||
|
||||
kemal-session:
|
||||
git: https://github.com/kemalcr/kemal-session.git
|
||||
version: 0.12.1
|
||||
|
||||
kilt:
|
||||
git: https://github.com/jeromegn/kilt.git
|
||||
version: 0.4.0
|
||||
|
||||
koa:
|
||||
git: https://github.com/hkalexling/koa.git
|
||||
version: 0.5.0
|
||||
|
||||
myhtml:
|
||||
git: https://github.com/kostya/myhtml.git
|
||||
version: 1.5.1
|
||||
|
||||
open_api:
|
||||
git: https://github.com/jreinert/open_api.cr.git
|
||||
version: 1.2.1+git.commit.95e4df2ca10b1fe88b8b35c62a18b06a10267b6c
|
||||
|
||||
radix:
|
||||
github: luislavena/radix
|
||||
git: https://github.com/luislavena/radix.git
|
||||
version: 0.3.9
|
||||
|
||||
sqlite3:
|
||||
github: crystal-lang/crystal-sqlite3
|
||||
version: 0.15.0
|
||||
git: https://github.com/crystal-lang/crystal-sqlite3.git
|
||||
version: 0.16.0
|
||||
|
||||
|
||||
shard.yml (24 changed lines)
@@ -1,5 +1,5 @@
|
||||
name: mango
|
||||
version: 0.1.0
|
||||
version: 0.17.1
|
||||
|
||||
authors:
|
||||
- Alex Ling <hkalexling@gmail.com>
|
||||
@@ -8,14 +8,34 @@ targets:
|
||||
mango:
|
||||
main: src/mango.cr
|
||||
|
||||
crystal: 0.32.1
|
||||
crystal: 0.35.1
|
||||
|
||||
license: MIT
|
||||
|
||||
dependencies:
|
||||
kemal:
|
||||
github: kemalcr/kemal
|
||||
kemal-session:
|
||||
github: kemalcr/kemal-session
|
||||
sqlite3:
|
||||
github: crystal-lang/crystal-sqlite3
|
||||
baked_file_system:
|
||||
github: schovi/baked_file_system
|
||||
version: 0.9.8+git.commit.fb3091b546797fbec3c25dc0e1e2cff60bb9033b
|
||||
archive:
|
||||
github: hkalexling/archive.cr
|
||||
ameba:
|
||||
github: crystal-ameba/ameba
|
||||
clim:
|
||||
github: at-grandpa/clim
|
||||
duktape:
|
||||
github: jessedoyle/duktape.cr
|
||||
version: ~> 0.20.0
|
||||
myhtml:
|
||||
github: kostya/myhtml
|
||||
http_proxy:
|
||||
github: mamantoha/http_proxy
|
||||
image_size:
|
||||
github: hkalexling/image_size.cr
|
||||
koa:
|
||||
github: hkalexling/koa
|
||||
|
||||
spec/asset/test-config.yml (new file, 2 lines)
@@ -0,0 +1,2 @@
---
port: 3000
spec/config_spec.cr (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
require "./spec_helper"
|
||||
|
||||
describe Config do
|
||||
it "creates config if it does not exist" do
|
||||
with_default_config do |_, path|
|
||||
File.exists?(path).should be_true
|
||||
end
|
||||
end
|
||||
|
||||
it "correctly loads config" do
|
||||
config = Config.load "spec/asset/test-config.yml"
|
||||
config.port.should eq 3000
|
||||
end
|
||||
end
|
||||
spec/rename_spec.cr (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
require "./spec_helper"
|
||||
require "../src/rename"
|
||||
|
||||
include Rename
|
||||
|
||||
describe Rule do
|
||||
it "raises on nested brackets" do
|
||||
expect_raises Exception do
|
||||
Rule.new "[[]]"
|
||||
end
|
||||
expect_raises Exception do
|
||||
Rule.new "{{}}"
|
||||
end
|
||||
end
|
||||
|
||||
it "raises on unclosed brackets" do
|
||||
expect_raises Exception do
|
||||
Rule.new "["
|
||||
end
|
||||
expect_raises Exception do
|
||||
Rule.new "{"
|
||||
end
|
||||
expect_raises Exception do
|
||||
Rule.new "[{]}"
|
||||
end
|
||||
end
|
||||
|
||||
it "raises when closing unopened brackets" do
|
||||
expect_raises Exception do
|
||||
Rule.new "]"
|
||||
end
|
||||
expect_raises Exception do
|
||||
Rule.new "[}"
|
||||
end
|
||||
end
|
||||
|
||||
it "handles `|` in patterns" do
|
||||
rule = Rule.new "{a|b|c}"
|
||||
rule.render({"b" => "b"}).should eq "b"
|
||||
rule.render({"a" => "a", "b" => "b"}).should eq "a"
|
||||
end
|
||||
|
||||
it "allows `|` outside of patterns" do
|
||||
rule = Rule.new "hello|world"
|
||||
rule.render({} of String => String).should eq "hello|world"
|
||||
end
|
||||
|
||||
it "raises on escaped characters" do
|
||||
expect_raises Exception do
|
||||
Rule.new "hello/world"
|
||||
end
|
||||
end
|
||||
|
||||
it "handles spaces in patterns" do
|
||||
rule = Rule.new "{ a }"
|
||||
rule.render({"a" => "a"}).should eq "a"
|
||||
end
|
||||
|
||||
it "strips leading and tailing spaces" do
|
||||
rule = Rule.new " hello "
|
||||
rule.render({"a" => "a"}).should eq "hello"
|
||||
end
|
||||
|
||||
it "renders a few examples correctly" do
|
||||
rule = Rule.new "[Ch. {chapter }] {title | id} testing"
|
||||
rule.render({"id" => "ID"}).should eq "ID testing"
|
||||
rule.render({"chapter" => "CH", "id" => "ID"})
|
||||
.should eq "Ch. CH ID testing"
|
||||
rule.render({} of String => String).should eq "testing"
|
||||
end
|
||||
|
||||
it "escapes slash" do
|
||||
rule = Rule.new "{id}"
|
||||
rule.render({"id" => "/hello/world"}).should eq "_hello_world"
|
||||
end
|
||||
end
|
||||
spec/spec_helper.cr (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
require "spec"
|
||||
require "../src/queue"
|
||||
require "../src/server"
|
||||
require "../src/config"
|
||||
require "../src/main_fiber"
|
||||
|
||||
class State
|
||||
@@hash = {} of String => String
|
||||
|
||||
def self.get(key)
|
||||
@@hash[key]?
|
||||
end
|
||||
|
||||
def self.get!(key)
|
||||
@@hash[key]
|
||||
end
|
||||
|
||||
def self.set(key, value)
|
||||
return if value.nil?
|
||||
@@hash[key] = value
|
||||
end
|
||||
|
||||
def self.reset
|
||||
@@hash.clear
|
||||
end
|
||||
end
|
||||
|
||||
def get_tempfile(name)
|
||||
path = State.get name
|
||||
if path.nil? || !File.exists? path
|
||||
file = File.tempfile name
|
||||
State.set name, file.path
|
||||
file
|
||||
else
|
||||
File.new path
|
||||
end
|
||||
end
|
||||
|
||||
def with_default_config
|
||||
temp_config = get_tempfile "mango-test-config"
|
||||
config = Config.load temp_config.path
|
||||
config.set_current
|
||||
yield config, temp_config.path
|
||||
temp_config.delete
|
||||
end
|
||||
|
||||
def with_storage
|
||||
with_default_config do
|
||||
temp_db = get_tempfile "mango-test-db"
|
||||
storage = Storage.new temp_db.path, false
|
||||
clear = yield storage, temp_db.path
|
||||
if clear == true
|
||||
temp_db.delete
|
||||
end
|
||||
end
|
||||
end
|
||||
spec/storage_spec.cr (new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
require "./spec_helper"
|
||||
|
||||
describe Storage do
|
||||
it "creates DB at given path" do
|
||||
with_storage do |_, path|
|
||||
File.exists?(path).should be_true
|
||||
end
|
||||
end
|
||||
|
||||
it "deletes user" do
|
||||
with_storage do |storage|
|
||||
storage.delete_user "admin"
|
||||
end
|
||||
end
|
||||
|
||||
it "creates new user" do
|
||||
with_storage do |storage|
|
||||
storage.new_user "user", "123456", false
|
||||
storage.new_user "admin", "123456", true
|
||||
end
|
||||
end
|
||||
|
||||
it "verifies username/password combination" do
|
||||
with_storage do |storage|
|
||||
user_token = storage.verify_user "user", "123456"
|
||||
admin_token = storage.verify_user "admin", "123456"
|
||||
user_token.should_not be_nil
|
||||
admin_token.should_not be_nil
|
||||
State.set "user_token", user_token
|
||||
State.set "admin_token", admin_token
|
||||
end
|
||||
end
|
||||
|
||||
it "rejects duplicate username" do
|
||||
with_storage do |storage|
|
||||
expect_raises SQLite3::Exception,
|
||||
"UNIQUE constraint failed: users.username" do
|
||||
storage.new_user "admin", "123456", true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it "verifies token" do
|
||||
with_storage do |storage|
|
||||
user_token = State.get! "user_token"
|
||||
user = storage.verify_token user_token
|
||||
user.should eq "user"
|
||||
end
|
||||
end
|
||||
|
||||
it "verfies admin token" do
|
||||
with_storage do |storage|
|
||||
admin_token = State.get! "admin_token"
|
||||
storage.verify_admin(admin_token).should be_true
|
||||
end
|
||||
end
|
||||
|
||||
it "rejects non-admin token" do
|
||||
with_storage do |storage|
|
||||
user_token = State.get! "user_token"
|
||||
storage.verify_admin(user_token).should be_false
|
||||
end
|
||||
end
|
||||
|
||||
it "updates user" do
|
||||
with_storage do |storage|
|
||||
storage.update_user "admin", "admin", "654321", true
|
||||
token = storage.verify_user "admin", "654321"
|
||||
admin_token = State.get! "admin_token"
|
||||
token.should eq admin_token
|
||||
end
|
||||
end
|
||||
|
||||
it "logs user out" do
|
||||
with_storage do |storage|
|
||||
user_token = State.get! "user_token"
|
||||
admin_token = State.get! "admin_token"
|
||||
storage.logout user_token
|
||||
storage.logout admin_token
|
||||
storage.verify_token(user_token).should be_nil
|
||||
storage.verify_token(admin_token).should be_nil
|
||||
end
|
||||
end
|
||||
|
||||
it "cleans up" do
|
||||
with_storage do
|
||||
true
|
||||
end
|
||||
State.reset
|
||||
end
|
||||
end
|
||||
spec/util_spec.cr (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
require "./spec_helper"
|
||||
|
||||
describe "compare_numerically" do
|
||||
it "sorts filenames with leading zeros correctly" do
|
||||
ary = ["010.jpg", "001.jpg", "002.png"]
|
||||
ary.sort! { |a, b|
|
||||
compare_numerically a, b
|
||||
}
|
||||
ary.should eq ["001.jpg", "002.png", "010.jpg"]
|
||||
end
|
||||
|
||||
it "sorts filenames without leading zeros correctly" do
|
||||
ary = ["10.jpg", "1.jpg", "0.png", "0100.jpg"]
|
||||
ary.sort! { |a, b|
|
||||
compare_numerically a, b
|
||||
}
|
||||
ary.should eq ["0.png", "1.jpg", "10.jpg", "0100.jpg"]
|
||||
end
|
||||
|
||||
# https://ux.stackexchange.com/a/95441
|
||||
it "sorts like the stack exchange post" do
|
||||
ary = ["2", "12", "200000", "1000000", "a", "a12", "b2", "text2",
|
||||
"text2a", "text2a2", "text2a12", "text2ab", "text12", "text12a"]
|
||||
ary.reverse.sort { |a, b|
|
||||
compare_numerically a, b
|
||||
}.should eq ary
|
||||
end
|
||||
|
||||
# https://github.com/hkalexling/Mango/issues/22
|
||||
it "handles numbers larger than Int32" do
|
||||
ary = ["14410155591588.jpg", "21410155591588.png", "104410155591588.jpg"]
|
||||
ary.reverse.sort { |a, b|
|
||||
compare_numerically a, b
|
||||
}.should eq ary
|
||||
end
|
||||
end
|
||||
|
||||
describe "chapter_sort" do
|
||||
it "sorts correctly" do
|
||||
ary = ["Vol.1 Ch.01", "Vol.1 Ch.02", "Vol.2 Ch. 2.5", "Ch. 3", "Ch.04"]
|
||||
sorter = ChapterSorter.new ary
|
||||
ary.reverse.sort do |a, b|
|
||||
sorter.compare a, b
|
||||
end.should eq ary
|
||||
end
|
||||
end
|
||||
src/archive.cr (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
require "compress/zip"
|
||||
require "archive"
|
||||
|
||||
# A unified class to handle all supported archive formats. It uses the
|
||||
# Compress::Zip module in crystal standard library if the target file is
|
||||
# a zip archive. Otherwise it uses `archive.cr`.
|
||||
class ArchiveFile
|
||||
def initialize(@filename : String)
|
||||
if [".cbz", ".zip"].includes? File.extname filename
|
||||
@archive_file = Compress::Zip::File.new filename
|
||||
else
|
||||
@archive_file = Archive::File.new filename
|
||||
end
|
||||
end
|
||||
|
||||
def self.open(filename : String, &)
|
||||
s = self.new filename
|
||||
yield s
|
||||
s.close
|
||||
end
|
||||
|
||||
def close
|
||||
if @archive_file.is_a? Compress::Zip::File
|
||||
@archive_file.as(Compress::Zip::File).close
|
||||
end
|
||||
end
|
||||
|
||||
# Lists all file entries
|
||||
def entries
|
||||
ary = [] of Compress::Zip::File::Entry | Archive::Entry
|
||||
@archive_file.entries.map do |e|
|
||||
if (e.is_a? Compress::Zip::File::Entry && e.file?) ||
|
||||
(e.is_a? Archive::Entry && e.info.file?)
|
||||
ary.push e
|
||||
end
|
||||
end
|
||||
ary
|
||||
end
|
||||
|
||||
def read_entry(e : Compress::Zip::File::Entry | Archive::Entry) : Bytes?
|
||||
if e.is_a? Compress::Zip::File::Entry
|
||||
data = nil
|
||||
e.open do |io|
|
||||
slice = Bytes.new e.uncompressed_size
|
||||
bytes_read = io.read_fully? slice
|
||||
data = slice if bytes_read
|
||||
end
|
||||
data
|
||||
else
|
||||
e.read
|
||||
end
|
||||
end
|
||||
|
||||
def check
|
||||
if @archive_file.is_a? Archive::File
|
||||
@archive_file.as(Archive::File).check
|
||||
end
|
||||
end
|
||||
end
|
||||
src/assets/lang_codes.csv (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
Arabic,sa
|
||||
Bengali,bd
|
||||
Bulgarian,bg
|
||||
Burmese,mm
|
||||
Catalan,ct
|
||||
Chinese (Simp),cn
|
||||
Chinese (Trad),hk
|
||||
Czech,cz
|
||||
Danish,dk
|
||||
Dutch,nl
|
||||
English,gb
|
||||
Filipino,ph
|
||||
Finnish,fi
|
||||
French,fr
|
||||
German,de
|
||||
Greek,gr
|
||||
Hebrew,il
|
||||
Hindi,in
|
||||
Hungarian,hu
|
||||
Indonesian,id
|
||||
Italian,it
|
||||
Japanese,jp
|
||||
Korean,kr
|
||||
Lithuanian,lt
|
||||
Malay,my
|
||||
Mongolian,mn
|
||||
Other,
|
||||
Persian,ir
|
||||
Polish,pl
|
||||
Portuguese (Br),br
|
||||
Portuguese (Pt),pt
|
||||
Romanian,ro
|
||||
Russian,ru
|
||||
Serbo-Croatian,rs
|
||||
Spanish (Es),es
|
||||
Spanish (LATAM),mx
|
||||
Swedish,se
|
||||
Thai,th
|
||||
Turkish,tr
|
||||
Ukrainian,ua
|
||||
Vietnamese,vn
|
||||
|
@@ -1,26 +0,0 @@
|
||||
require "kemal"
|
||||
require "./storage"
|
||||
require "./util"
|
||||
|
||||
class AuthHandler < Kemal::Handler
|
||||
def initialize(@storage : Storage)
|
||||
end
|
||||
|
||||
def call(env)
|
||||
return call_next(env) \
|
||||
if request_path_startswith env, ["/login", "/logout"]
|
||||
|
||||
cookie = env.request.cookies.find { |c| c.name == "token" }
|
||||
if cookie.nil? || ! @storage.verify_token cookie.value
|
||||
return env.redirect "/login"
|
||||
end
|
||||
|
||||
if request_path_startswith env, ["/admin", "/api/admin"]
|
||||
unless @storage.verify_admin cookie.value
|
||||
env.response.status_code = 403
|
||||
end
|
||||
end
|
||||
|
||||
call_next env
|
||||
end
|
||||
end
|
||||
src/config.cr (113 changed lines)
@@ -1,44 +1,89 @@
|
||||
require "yaml"
|
||||
|
||||
class Config
|
||||
include YAML::Serializable
|
||||
include YAML::Serializable
|
||||
|
||||
@[YAML::Field(key: "port")]
|
||||
property port : Int32 = 9000
|
||||
@[YAML::Field(ignore: true)]
|
||||
property path : String = ""
|
||||
property port : Int32 = 9000
|
||||
property base_url : String = "/"
|
||||
property session_secret : String = "mango-session-secret"
|
||||
property library_path : String = File.expand_path "~/mango/library",
|
||||
home: true
|
||||
property db_path : String = File.expand_path "~/mango/mango.db", home: true
|
||||
property scan_interval_minutes : Int32 = 5
|
||||
property thumbnail_generation_interval_hours : Int32 = 24
|
||||
property db_optimization_interval_hours : Int32 = 24
|
||||
property log_level : String = "info"
|
||||
property upload_path : String = File.expand_path "~/mango/uploads",
|
||||
home: true
|
||||
property plugin_path : String = File.expand_path "~/mango/plugins",
|
||||
home: true
|
||||
property download_timeout_seconds : Int32 = 30
|
||||
property mangadex = Hash(String, String | Int32).new
|
||||
|
||||
@[YAML::Field(key: "library_path")]
|
||||
property library_path : String = \
|
||||
File.expand_path "~/mango/library", home: true
|
||||
@[YAML::Field(ignore: true)]
|
||||
@mangadex_defaults = {
|
||||
"base_url" => "https://mangadex.org",
|
||||
"api_url" => "https://mangadex.org/api",
|
||||
"download_wait_seconds" => 5,
|
||||
"download_retries" => 4,
|
||||
"download_queue_db_path" => File.expand_path("~/mango/queue.db",
|
||||
home: true),
|
||||
"chapter_rename_rule" => "[Vol.{volume} ][Ch.{chapter} ]{title|id}",
|
||||
"manga_rename_rule" => "{title}",
|
||||
}
|
||||
|
||||
@[YAML::Field(key: "db_path")]
|
||||
property db_path : String = \
|
||||
File.expand_path "~/mango/mango.db", home: true
|
||||
@@singlet : Config?
|
||||
|
||||
@[YAML::Field(key: "scan_interval_minutes")]
|
||||
property scan_interval : Int32 = 5
|
||||
def self.current
|
||||
@@singlet.not_nil!
|
||||
end
|
||||
|
||||
@[YAML::Field(key: "log_level")]
|
||||
property log_level : String = "info"
|
||||
def set_current
|
||||
@@singlet = self
|
||||
end
|
||||
|
||||
def self.load(path : String?)
|
||||
path = "~/.config/mango/config.yml" if path.nil?
|
||||
cfg_path = File.expand_path path, home: true
|
||||
if File.exists? cfg_path
|
||||
return self.from_yaml File.read cfg_path
|
||||
end
|
||||
puts "The config file #{cfg_path} does not exist." \
|
||||
" Do you want mango to dump the default config there? [Y/n]"
|
||||
input = gets
|
||||
if input && input.downcase == "n"
|
||||
abort "Aborting..."
|
||||
end
|
||||
default = self.allocate
|
||||
cfg_dir = File.dirname cfg_path
|
||||
unless Dir.exists? cfg_dir
|
||||
Dir.mkdir_p cfg_dir
|
||||
end
|
||||
File.write cfg_path, default.to_yaml
|
||||
puts "The config file has been created at #{cfg_path}."
|
||||
default
|
||||
end
|
||||
def self.load(path : String?)
|
||||
path = "~/.config/mango/config.yml" if path.nil?
|
||||
cfg_path = File.expand_path path, home: true
|
||||
if File.exists? cfg_path
|
||||
config = self.from_yaml File.read cfg_path
|
||||
config.preprocess
|
||||
config.path = path
|
||||
config.fill_defaults
|
||||
return config
|
||||
end
|
||||
puts "The config file #{cfg_path} does not exist. " \
|
||||
"Dumping the default config there."
|
||||
default = self.allocate
|
||||
default.path = path
|
||||
default.fill_defaults
|
||||
cfg_dir = File.dirname cfg_path
|
||||
unless Dir.exists? cfg_dir
|
||||
Dir.mkdir_p cfg_dir
|
||||
end
|
||||
File.write cfg_path, default.to_yaml
|
||||
puts "The config file has been created at #{cfg_path}."
|
||||
default
|
||||
end
|
||||
|
||||
def fill_defaults
|
||||
{% for hash_name in ["mangadex"] %}
|
||||
@{{hash_name.id}}_defaults.map do |k, v|
|
||||
if @{{hash_name.id}}[k]?.nil?
|
||||
@{{hash_name.id}}[k] = v
|
||||
end
|
||||
end
|
||||
{% end %}
|
||||
end
|
||||
|
||||
def preprocess
|
||||
unless base_url.starts_with? "/"
|
||||
raise "base url (#{base_url}) should start with `/`"
|
||||
end
|
||||
unless base_url.ends_with? "/"
|
||||
@base_url += "/"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
require "./config"
|
||||
require "./library"
|
||||
require "./storage"
|
||||
require "./logger"
|
||||
|
||||
class Context
|
||||
property config : Config
|
||||
property library : Library
|
||||
property storage : Storage
|
||||
property logger : MLogger
|
||||
|
||||
def initialize(@config, @logger, @library, @storage)
|
||||
end
|
||||
|
||||
{% for lvl in LEVELS %}
|
||||
def {{lvl.id}}(msg)
|
||||
@logger.{{lvl.id}} msg
|
||||
end
|
||||
{% end %}
|
||||
end
|
||||
src/handlers/auth_handler.cr (new file, 92 lines)
@@ -0,0 +1,92 @@
|
||||
require "kemal"
|
||||
require "../storage"
|
||||
require "../util/*"
|
||||
|
||||
class AuthHandler < Kemal::Handler
|
||||
  # Some of the code is copied from kemalcr/kemal-basic-auth on GitHub
|
||||
|
||||
BASIC = "Basic"
|
||||
AUTH = "Authorization"
|
||||
AUTH_MESSAGE = "Could not verify your access level for that URL.\n" \
|
||||
"You have to login with proper credentials"
|
||||
HEADER_LOGIN_REQUIRED = "Basic realm=\"Login Required\""
|
||||
|
||||
def initialize(@storage : Storage)
|
||||
end
|
||||
|
||||
def require_basic_auth(env)
|
||||
env.response.status_code = 401
|
||||
env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED
|
||||
env.response.print AUTH_MESSAGE
|
||||
call_next env
|
||||
end
|
||||
|
||||
def validate_token(env)
|
||||
token = env.session.string? "token"
|
||||
!token.nil? && @storage.verify_token token
|
||||
end
|
||||
|
||||
def validate_token_admin(env)
|
||||
token = env.session.string? "token"
|
||||
!token.nil? && @storage.verify_admin token
|
||||
end
|
||||
|
||||
def validate_auth_header(env)
|
||||
if env.request.headers[AUTH]?
|
||||
if value = env.request.headers[AUTH]
|
||||
if value.size > 0 && value.starts_with?(BASIC)
|
||||
token = verify_user value
|
||||
return false if token.nil?
|
||||
|
||||
env.session.string "token", token
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
false
|
||||
end
|
||||
|
||||
def verify_user(value)
|
||||
username, password = Base64.decode_string(value[BASIC.size + 1..-1])
|
||||
.split(":")
|
||||
@storage.verify_user username, password
|
||||
end
|
||||
|
||||
def handle_opds_auth(env)
|
||||
if validate_token(env) || validate_auth_header(env)
|
||||
call_next env
|
||||
else
|
||||
env.response.status_code = 401
|
||||
env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED
|
||||
env.response.print AUTH_MESSAGE
|
||||
end
|
||||
end
|
||||
|
||||
def handle_auth(env)
|
||||
if request_path_startswith(env, ["/login", "/logout"]) ||
|
||||
requesting_static_file env
|
||||
return call_next(env)
|
||||
end
|
||||
|
||||
unless validate_token env
|
||||
env.session.string "callback", env.request.path
|
||||
return redirect env, "/login"
|
||||
end
|
||||
|
||||
if request_path_startswith env, ["/admin", "/api/admin", "/download"]
|
||||
unless validate_token_admin env
|
||||
env.response.status_code = 403
|
||||
end
|
||||
end
|
||||
|
||||
call_next env
|
||||
end
|
||||
|
||||
def call(env)
|
||||
if request_path_startswith env, ["/opds"]
|
||||
handle_opds_auth env
|
||||
else
|
||||
handle_auth env
|
||||
end
|
||||
end
|
||||
end
|
||||
src/handlers/log_handler.cr (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
require "kemal"
|
||||
require "../logger"
|
||||
|
||||
class LogHandler < Kemal::BaseLogHandler
|
||||
def call(env)
|
||||
elapsed_time = Time.measure { call_next env }
|
||||
elapsed_text = elapsed_text elapsed_time
|
||||
msg = "#{env.response.status_code} #{env.request.method}" \
|
||||
" #{env.request.resource} #{elapsed_text}"
|
||||
Logger.debug msg
|
||||
env
|
||||
end
|
||||
|
||||
def write(msg)
|
||||
Logger.debug msg
|
||||
end
|
||||
|
||||
private def elapsed_text(elapsed)
|
||||
millis = elapsed.total_milliseconds
|
||||
return "#{millis.round(2)}ms" if millis >= 1
|
||||
"#{(millis * 1000).round(2)}µs"
|
||||
end
|
||||
end
|
||||
src/handlers/static_handler.cr (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
require "baked_file_system"
|
||||
require "kemal"
|
||||
require "../util/*"
|
||||
|
||||
class FS
|
||||
extend BakedFileSystem
|
||||
{% if flag?(:release) %}
|
||||
{% if read_file? "#{__DIR__}/../../dist/favicon.ico" %}
|
||||
{% puts "baking ../../dist" %}
|
||||
bake_folder "../../dist"
|
||||
{% else %}
|
||||
{% puts "baking ../../public" %}
|
||||
bake_folder "../../public"
|
||||
{% end %}
|
||||
{% end %}
|
||||
end
|
||||
|
||||
class StaticHandler < Kemal::Handler
|
||||
def call(env)
|
||||
if requesting_static_file env
|
||||
file = FS.get? env.request.path
|
||||
return call_next env if file.nil?
|
||||
|
||||
slice = Bytes.new file.size
|
||||
file.read slice
|
||||
return send_file env, slice, MIME.from_filename file.path
|
||||
end
|
||||
call_next env
|
||||
end
|
||||
end
|
||||
src/handlers/upload_handler.cr (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
require "kemal"
|
||||
require "../util/*"
|
||||
|
||||
class UploadHandler < Kemal::Handler
|
||||
def initialize(@upload_dir : String)
|
||||
end
|
||||
|
||||
def call(env)
|
||||
unless request_path_startswith(env, [UPLOAD_URL_PREFIX]) &&
|
||||
env.request.method == "GET"
|
||||
return call_next env
|
||||
end
|
||||
|
||||
ary = env.request.path.split(File::SEPARATOR).select do |part|
|
||||
!part.empty?
|
||||
end
|
||||
ary[0] = @upload_dir
|
||||
path = File.join ary
|
||||
|
||||
if File.exists? path
|
||||
send_file env, path
|
||||
else
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
end
|
||||
src/library.cr (187 changed lines)
@@ -1,187 +0,0 @@
|
||||
require "zip"
|
||||
require "mime"
|
||||
require "json"
|
||||
require "uri"
|
||||
|
||||
struct Image
|
||||
property data : Bytes
|
||||
property mime : String
|
||||
property filename : String
|
||||
property size : Int32
|
||||
|
||||
def initialize(@data, @mime, @filename, @size)
|
||||
end
|
||||
end
|
||||
|
||||
class Entry
|
||||
JSON.mapping zip_path: String, book_title: String, title: String, \
|
||||
size: String, pages: Int32, cover_url: String, id: String, \
|
||||
title_id: String, encoded_path: String, encoded_title: String
|
||||
|
||||
def initialize(path, @book_title, @title_id, storage)
|
||||
@zip_path = path
|
||||
@encoded_path = URI.encode path
|
||||
@title = File.basename path, File.extname path
|
||||
@encoded_title = URI.encode @title
|
||||
@size = (File.size path).humanize_bytes
|
||||
@pages = Zip::File.new(path).entries
|
||||
.select { |e|
|
||||
["image/jpeg", "image/png"].includes? \
|
||||
MIME.from_filename? e.filename
|
||||
}
|
||||
.size
|
||||
@id = storage.get_id @zip_path, false
|
||||
@cover_url = "/api/page/#{@title_id}/#{@id}/1"
|
||||
end
|
||||
def read_page(page_num)
|
||||
Zip::File.open @zip_path do |file|
|
||||
page = file.entries
|
||||
.select { |e|
|
||||
["image/jpeg", "image/png"].includes? \
|
||||
MIME.from_filename? e.filename
|
||||
}
|
||||
.sort { |a, b| a.filename <=> b.filename }
|
||||
.[page_num - 1]
|
||||
page.open do |io|
|
||||
slice = Bytes.new page.uncompressed_size
|
||||
bytes_read = io.read_fully? slice
|
||||
unless bytes_read
|
||||
return nil
|
||||
end
|
||||
return Image.new slice, MIME.from_filename(page.filename),\
|
||||
page.filename, bytes_read
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
class Title
|
||||
JSON.mapping dir: String, entries: Array(Entry), title: String,
|
||||
id: String, encoded_title: String
|
||||
|
||||
def initialize(dir : String, storage)
|
||||
@dir = dir
|
||||
@id = storage.get_id @dir, true
|
||||
@title = File.basename dir
|
||||
@encoded_title = URI.encode @title
|
||||
@entries = (Dir.entries dir)
|
||||
.select { |path| [".zip", ".cbz"].includes? File.extname path }
|
||||
.map { |path|
|
||||
Entry.new File.join(dir, path), @title, @id, storage
|
||||
}
|
||||
.select { |e| e.pages > 0 }
|
||||
.sort { |a, b| a.title <=> b.title }
|
||||
end
|
||||
def get_entry(eid)
|
||||
@entries.find { |e| e.id == eid }
|
||||
end
|
||||
# For backward compatibility with v0.1.0, we save entry titles
|
||||
# instead of IDs in info.json
|
||||
def save_progress(username, entry, page)
|
||||
info = TitleInfo.new @dir
|
||||
if info.progress[username]?.nil?
|
||||
info.progress[username] = {entry => page}
|
||||
info.save @dir
|
||||
return
|
||||
end
|
||||
info.progress[username][entry] = page
|
||||
info.save @dir
|
||||
end
|
||||
def load_progress(username, entry)
|
||||
info = TitleInfo.new @dir
|
||||
if info.progress[username]?.nil?
|
||||
return 0
|
||||
end
|
||||
if info.progress[username][entry]?.nil?
|
||||
return 0
|
||||
end
|
||||
info.progress[username][entry]
|
||||
end
|
||||
def load_percetage(username, entry)
|
||||
info = TitleInfo.new @dir
|
||||
page = load_progress username, entry
|
||||
entry_obj = @entries.find{|e| e.title == entry}
|
||||
return 0 if entry_obj.nil?
|
||||
page / entry_obj.pages
|
||||
end
|
||||
def load_percetage(username)
|
||||
read_pages = total_pages = 0
|
||||
@entries.each do |e|
|
||||
read_pages += load_progress username, e.title
|
||||
total_pages += e.pages
|
||||
end
|
||||
read_pages / total_pages
|
||||
end
|
||||
def next_entry(current_entry_obj)
|
||||
idx = @entries.index current_entry_obj
|
||||
return nil if idx.nil? || idx == @entries.size - 1
|
||||
@entries[idx + 1]
|
||||
end
|
||||
end
|
||||
|
||||
class TitleInfo
|
||||
# { user1: { entry1: 10, entry2: 0 } }
|
||||
include JSON::Serializable
|
||||
|
||||
@[JSON::Field(key: "comment")]
|
||||
property comment = "Generated by Mango. DO NOT EDIT!"
|
||||
|
||||
@[JSON::Field(key: "progress")]
|
||||
property progress : Hash(String, Hash(String, Int32))
|
||||
|
||||
def initialize(title_dir)
|
||||
info = nil
|
||||
|
||||
json_path = File.join title_dir, "info.json"
|
||||
if File.exists? json_path
|
||||
info = TitleInfo.from_json File.read json_path
|
||||
else
|
||||
info = TitleInfo.from_json "{\"progress\": {}}"
|
||||
end
|
||||
|
||||
@progress = info.progress.clone
|
||||
end
|
||||
def save(title_dir)
|
||||
json_path = File.join title_dir, "info.json"
|
||||
File.write json_path, self.to_pretty_json
|
||||
end
|
||||
end
|
||||
|
||||
class Library
|
||||
JSON.mapping dir: String, titles: Array(Title), scan_interval: Int32,
|
||||
logger: MLogger, storage: Storage
|
||||
|
||||
def initialize(@dir, @scan_interval, @logger, @storage)
|
||||
# explicitly initialize @titles to bypass the compiler check. it will
|
||||
# be filled with actual Titles in the `scan` call below
|
||||
@titles = [] of Title
|
||||
|
||||
return scan if @scan_interval < 1
|
||||
spawn do
|
||||
loop do
|
||||
start = Time.local
|
||||
scan
|
||||
ms = (Time.local - start).total_milliseconds
|
||||
@logger.info "Scanned #{@titles.size} titles in #{ms}ms"
|
||||
sleep @scan_interval * 60
|
||||
end
|
||||
end
|
||||
end
|
||||
def get_title(tid)
|
||||
@titles.find { |t| t.id == tid }
|
||||
end
|
||||
def scan
|
||||
unless Dir.exists? @dir
|
||||
@logger.info "The library directory #{@dir} does not exist. " \
|
||||
"Attempting to create it"
|
||||
Dir.mkdir_p @dir
|
||||
end
|
||||
@titles = (Dir.entries @dir)
|
||||
.select { |path| File.directory? File.join @dir, path }
|
||||
.map { |path| Title.new File.join(@dir, path), @storage }
|
||||
.select { |title| !title.entries.empty? }
|
||||
.sort { |a, b| a.title <=> b.title }
|
||||
@logger.debug "Scan completed"
|
||||
@logger.debug "Scanned library: \n#{self.to_pretty_json}"
|
||||
end
|
||||
end
|
||||
238 src/library/entry.cr Normal file
@@ -0,0 +1,238 @@
|
||||
require "image_size"
|
||||
|
||||
class Entry
|
||||
property zip_path : String, book : Title, title : String,
|
||||
size : String, pages : Int32, id : String, encoded_path : String,
|
||||
encoded_title : String, mtime : Time, err_msg : String?
|
||||
|
||||
def initialize(@zip_path, @book, storage)
|
||||
@encoded_path = URI.encode @zip_path
|
||||
@title = File.basename @zip_path, File.extname @zip_path
|
||||
@encoded_title = URI.encode @title
|
||||
@size = (File.size @zip_path).humanize_bytes
|
||||
id = storage.get_id @zip_path, false
|
||||
if id.nil?
|
||||
id = random_str
|
||||
storage.insert_id({
|
||||
path: @zip_path,
|
||||
id: id,
|
||||
is_title: false,
|
||||
})
|
||||
end
|
||||
@id = id
|
||||
@mtime = File.info(@zip_path).modification_time
|
||||
|
||||
unless File.readable? @zip_path
|
||||
@err_msg = "File #{@zip_path} is not readable."
|
||||
Logger.warn "#{@err_msg} Please make sure the " \
|
||||
"file permission is configured correctly."
|
||||
return
|
||||
end
|
||||
|
||||
archive_exception = validate_archive @zip_path
|
||||
unless archive_exception.nil?
|
||||
@err_msg = "Archive error: #{archive_exception}"
|
||||
Logger.warn "Unable to extract archive #{@zip_path}. " \
|
||||
"Ignoring it. #{@err_msg}"
|
||||
return
|
||||
end
|
||||
|
||||
file = ArchiveFile.new @zip_path
|
||||
@pages = file.entries.count do |e|
|
||||
SUPPORTED_IMG_TYPES.includes? \
|
||||
MIME.from_filename? e.filename
|
||||
end
|
||||
file.close
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
{% for str in ["zip_path", "title", "size", "id"] %}
|
||||
json.field {{str}}, @{{str.id}}
|
||||
{% end %}
|
||||
json.field "title_id", @book.id
|
||||
json.field "display_name", @book.display_name @title
|
||||
json.field "cover_url", cover_url
|
||||
json.field "pages" { json.number @pages }
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
end
|
||||
end
|
||||
|
||||
def display_name
|
||||
@book.display_name @title
|
||||
end
|
||||
|
||||
def encoded_display_name
|
||||
URI.encode display_name
|
||||
end
|
||||
|
||||
def cover_url
|
||||
return "#{Config.current.base_url}img/icon.png" if @err_msg
|
||||
url = "#{Config.current.base_url}api/cover/#{@book.id}/#{@id}"
|
||||
TitleInfo.new @book.dir do |info|
|
||||
info_url = info.entry_cover_url[@title]?
|
||||
unless info_url.nil? || info_url.empty?
|
||||
url = File.join Config.current.base_url, info_url
|
||||
end
|
||||
end
|
||||
url
|
||||
end
|
||||
|
||||
private def sorted_archive_entries
|
||||
ArchiveFile.open @zip_path do |file|
|
||||
entries = file.entries
|
||||
.select { |e|
|
||||
SUPPORTED_IMG_TYPES.includes? \
|
||||
MIME.from_filename? e.filename
|
||||
}
|
||||
.sort { |a, b|
|
||||
compare_numerically a.filename, b.filename
|
||||
}
|
||||
yield file, entries
|
||||
end
|
||||
end
|
||||
|
||||
def read_page(page_num)
|
||||
raise "Unreadble archive. #{@err_msg}" if @err_msg
|
||||
img = nil
|
||||
sorted_archive_entries do |file, entries|
|
||||
page = entries[page_num - 1]
|
||||
data = file.read_entry page
|
||||
if data
|
||||
img = Image.new data, MIME.from_filename(page.filename), page.filename,
|
||||
data.size
|
||||
end
|
||||
end
|
||||
img
|
||||
end
|
||||
|
||||
def page_dimensions
|
||||
sizes = [] of Hash(String, Int32)
|
||||
sorted_archive_entries do |file, entries|
|
||||
entries.each_with_index do |e, i|
|
||||
begin
|
||||
data = file.read_entry(e).not_nil!
|
||||
size = ImageSize.get data
|
||||
sizes << {
|
||||
"width" => size.width,
|
||||
"height" => size.height,
|
||||
}
|
||||
rescue e
|
||||
Logger.warn "Failed to read page #{i} of entry #{zip_path}. #{e}"
|
||||
sizes << {"width" => 1000_i32, "height" => 1000_i32}
|
||||
end
|
||||
end
|
||||
end
|
||||
sizes
|
||||
end
|
||||
|
||||
def next_entry(username)
|
||||
entries = @book.sorted_entries username
|
||||
idx = entries.index self
|
||||
return nil if idx.nil? || idx == entries.size - 1
|
||||
entries[idx + 1]
|
||||
end
|
||||
|
||||
def previous_entry
|
||||
idx = @book.entries.index self
|
||||
return nil if idx.nil? || idx == 0
|
||||
@book.entries[idx - 1]
|
||||
end
|
||||
|
||||
def date_added
|
||||
date_added = nil
|
||||
TitleInfo.new @book.dir do |info|
|
||||
info_da = info.date_added[@title]?
|
||||
if info_da.nil?
|
||||
date_added = info.date_added[@title] = ctime @zip_path
|
||||
info.save
|
||||
else
|
||||
date_added = info_da
|
||||
end
|
||||
end
|
||||
date_added.not_nil! # is it ok to set not_nil! here?
|
||||
end
|
||||
|
||||
# For backward compatibility with v0.1.0, we save entry titles
|
||||
# instead of IDs in info.json
|
||||
def save_progress(username, page)
|
||||
TitleInfo.new @book.dir do |info|
|
||||
if info.progress[username]?.nil?
|
||||
info.progress[username] = {@title => page}
|
||||
else
|
||||
info.progress[username][@title] = page
|
||||
end
|
||||
# save last_read timestamp
|
||||
if info.last_read[username]?.nil?
|
||||
info.last_read[username] = {@title => Time.utc}
|
||||
else
|
||||
info.last_read[username][@title] = Time.utc
|
||||
end
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
def load_progress(username)
|
||||
progress = 0
|
||||
TitleInfo.new @book.dir do |info|
|
||||
unless info.progress[username]?.nil? ||
|
||||
info.progress[username][@title]?.nil?
|
||||
progress = info.progress[username][@title]
|
||||
end
|
||||
end
|
||||
[progress, @pages].min
|
||||
end
|
||||
|
||||
def load_percentage(username)
|
||||
page = load_progress username
|
||||
page / @pages
|
||||
end
|
||||
|
||||
def load_last_read(username)
|
||||
last_read = nil
|
||||
TitleInfo.new @book.dir do |info|
|
||||
unless info.last_read[username]?.nil? ||
|
||||
info.last_read[username][@title]?.nil?
|
||||
last_read = info.last_read[username][@title]
|
||||
end
|
||||
end
|
||||
last_read
|
||||
end
|
||||
|
||||
def finished?(username)
|
||||
load_progress(username) == @pages
|
||||
end
|
||||
|
||||
def started?(username)
|
||||
load_progress(username) > 0
|
||||
end
|
||||
|
||||
def generate_thumbnail : Image?
|
||||
return if @err_msg
|
||||
|
||||
img = read_page(1).not_nil!
|
||||
begin
|
||||
size = ImageSize.get img.data
|
||||
if size.height > size.width
|
||||
thumbnail = ImageSize.resize img.data, width: 200
|
||||
else
|
||||
thumbnail = ImageSize.resize img.data, height: 300
|
||||
end
|
||||
img.data = thumbnail
|
||||
img.size = thumbnail.size
|
||||
unless img.mime == "image/webp"
|
||||
# image_size.cr resizes non-webp images to jpg
|
||||
img.mime = "image/jpeg"
|
||||
end
|
||||
Storage.default.save_thumbnail @id, img
|
||||
rescue e
|
||||
Logger.warn "Failed to generate thumbnail for file #{@zip_path}. #{e}"
|
||||
end
|
||||
|
||||
img
|
||||
end
|
||||
|
||||
def get_thumbnail : Image?
|
||||
Storage.default.get_thumbnail @id
|
||||
end
|
||||
end
|
||||
271 src/library/library.cr Normal file
@@ -0,0 +1,271 @@
|
||||
class Library
|
||||
property dir : String, title_ids : Array(String),
|
||||
title_hash : Hash(String, Title)
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
register_mime_types
|
||||
|
||||
@dir = Config.current.library_path
|
||||
# explicitly initialize @titles to bypass the compiler check. it will
|
||||
# be filled with actual Titles in the `scan` call below
|
||||
@title_ids = [] of String
|
||||
@title_hash = {} of String => Title
|
||||
|
||||
@entries_count = 0
|
||||
@thumbnails_count = 0
|
||||
|
||||
scan_interval = Config.current.scan_interval_minutes
|
||||
if scan_interval < 1
|
||||
scan
|
||||
else
|
||||
spawn do
|
||||
loop do
|
||||
start = Time.local
|
||||
scan
|
||||
ms = (Time.local - start).total_milliseconds
|
||||
Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
|
||||
sleep scan_interval.minutes
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
thumbnail_interval = Config.current.thumbnail_generation_interval_hours
|
||||
unless thumbnail_interval < 1
|
||||
spawn do
|
||||
loop do
|
||||
# Wait for scan to complete (in most cases)
|
||||
sleep 1.minutes
|
||||
generate_thumbnails
|
||||
sleep thumbnail_interval.hours
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
db_interval = Config.current.db_optimization_interval_hours
|
||||
unless db_interval < 1
|
||||
spawn do
|
||||
loop do
|
||||
Storage.default.optimize
|
||||
sleep db_interval.hours
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def titles
|
||||
@title_ids.map { |tid| self.get_title!(tid) }
|
||||
end
|
||||
|
||||
def sorted_titles(username, opt : SortOptions? = nil)
|
||||
if opt.nil?
|
||||
opt = SortOptions.from_info_json @dir, username
|
||||
else
|
||||
TitleInfo.new @dir do |info|
|
||||
info.sort_by[username] = opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
# This is a hack to bypass a compiler bug
|
||||
ary = titles
|
||||
|
||||
case opt.not_nil!.method
|
||||
when .time_modified?
|
||||
ary.sort! { |a, b| (a.mtime <=> b.mtime).or \
|
||||
compare_numerically a.title, b.title }
|
||||
when .progress?
|
||||
ary.sort! do |a, b|
|
||||
(a.load_percentage(username) <=> b.load_percentage(username)).or \
|
||||
compare_numerically a.title, b.title
|
||||
end
|
||||
else
|
||||
unless opt.method.auto?
|
||||
Logger.warn "Unknown sorting method #{opt.not_nil!.method}. Using " \
|
||||
"Auto instead"
|
||||
end
|
||||
ary.sort! { |a, b| compare_numerically a.title, b.title }
|
||||
end
|
||||
|
||||
ary.reverse! unless opt.not_nil!.ascend
|
||||
|
||||
ary
|
||||
end
|
||||
|
||||
def deep_titles
|
||||
titles + titles.map { |t| t.deep_titles }.flatten
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
json.field "dir", @dir
|
||||
json.field "titles" do
|
||||
json.raw self.titles.to_json
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_title(tid)
|
||||
@title_hash[tid]?
|
||||
end
|
||||
|
||||
def get_title!(tid)
|
||||
@title_hash[tid]
|
||||
end
|
||||
|
||||
def scan
|
||||
unless Dir.exists? @dir
|
||||
Logger.info "The library directory #{@dir} does not exist. " \
|
||||
"Attempting to create it"
|
||||
Dir.mkdir_p @dir
|
||||
end
|
||||
|
||||
storage = Storage.new auto_close: false
|
||||
|
||||
(Dir.entries @dir)
|
||||
.select { |fn| !fn.starts_with? "." }
|
||||
.map { |fn| File.join @dir, fn }
|
||||
.select { |path| File.directory? path }
|
||||
.map { |path| Title.new path, "", storage, self }
|
||||
.select { |title| !(title.entries.empty? && title.titles.empty?) }
|
||||
.sort { |a, b| a.title <=> b.title }
|
||||
.tap { |_| @title_ids.clear }
|
||||
.each do |title|
|
||||
@title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
end
|
||||
|
||||
storage.bulk_insert_ids
|
||||
storage.close
|
||||
|
||||
Logger.debug "Scan completed"
|
||||
end
|
||||
|
||||
def get_continue_reading_entries(username)
|
||||
cr_entries = deep_titles
|
||||
.map { |t| t.get_last_read_entry username }
|
||||
# Select elements with type `Entry` from the array and ignore all `Nil`s
|
||||
.select(Entry)[0...ENTRIES_IN_HOME_SECTIONS]
|
||||
.map { |e|
|
||||
# Get the last read time of the entry. If it hasn't been started, get
|
||||
# the last read time of the previous entry
|
||||
last_read = e.load_last_read username
|
||||
pe = e.previous_entry
|
||||
if last_read.nil? && pe
|
||||
last_read = pe.load_last_read username
|
||||
end
|
||||
{
|
||||
entry: e,
|
||||
percentage: e.load_percentage(username),
|
||||
last_read: last_read,
|
||||
}
|
||||
}
|
||||
|
||||
# Sort by last_read, most recent first (nils at the end)
|
||||
cr_entries.sort { |a, b|
|
||||
next 0 if a[:last_read].nil? && b[:last_read].nil?
|
||||
next 1 if a[:last_read].nil?
|
||||
next -1 if b[:last_read].nil?
|
||||
b[:last_read].not_nil! <=> a[:last_read].not_nil!
|
||||
}
|
||||
end
|
||||
|
||||
alias RA = NamedTuple(
|
||||
entry: Entry,
|
||||
percentage: Float64,
|
||||
grouped_count: Int32)
|
||||
|
||||
def get_recently_added_entries(username)
|
||||
recently_added = [] of RA
|
||||
last_date_added = nil
|
||||
|
||||
titles.map { |t| t.deep_entries_with_date_added }.flatten
|
||||
.select { |e| e[:date_added] > 1.month.ago }
|
||||
.sort { |a, b| b[:date_added] <=> a[:date_added] }
|
||||
.each do |e|
|
||||
break if recently_added.size > 12
|
||||
last = recently_added.last?
|
||||
if last && e[:entry].book.id == last[:entry].book.id &&
|
||||
(e[:date_added] - last_date_added.not_nil!).duration < 1.day
|
||||
# A NamedTuple is immutable, so we have to cast it to a Hash first
|
||||
last_hash = last.to_h
|
||||
count = last_hash[:grouped_count].as(Int32)
|
||||
last_hash[:grouped_count] = count + 1
|
||||
# Setting the percentage to a negative value will hide the
|
||||
# percentage badge on the card
|
||||
last_hash[:percentage] = -1.0
|
||||
recently_added[recently_added.size - 1] = RA.from last_hash
|
||||
else
|
||||
last_date_added = e[:date_added]
|
||||
recently_added << {
|
||||
entry: e[:entry],
|
||||
percentage: e[:entry].load_percentage(username),
|
||||
grouped_count: 1,
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
recently_added[0...ENTRIES_IN_HOME_SECTIONS]
|
||||
end
|
||||
|
||||
def get_start_reading_titles(username)
|
||||
# Here we are not using `deep_titles` as it may cause unexpected behaviors
|
||||
# For example, consider the following nested titles:
|
||||
# - One Punch Man
|
||||
# - Vol. 1
|
||||
# - Vol. 2
|
||||
# If we use `deep_titles`, the start reading section might include `Vol. 2`
|
||||
# when the user hasn't started `Vol. 1` yet
|
||||
titles
|
||||
.select { |t| t.load_percentage(username) == 0 }
|
||||
.sample(ENTRIES_IN_HOME_SECTIONS)
|
||||
.shuffle
|
||||
end
|
||||
|
||||
def thumbnail_generation_progress
|
||||
return 0 if @entries_count == 0
|
||||
@thumbnails_count / @entries_count
|
||||
end
|
||||
|
||||
def generate_thumbnails
|
||||
if @thumbnails_count > 0
|
||||
Logger.debug "Thumbnail generation in progress"
|
||||
return
|
||||
end
|
||||
|
||||
Logger.info "Starting thumbnail generation"
|
||||
entries = deep_titles.map(&.deep_entries).flatten.reject &.err_msg
|
||||
@entries_count = entries.size
|
||||
@thumbnails_count = 0
|
||||
|
||||
# Report generation progress regularly
|
||||
spawn do
|
||||
loop do
|
||||
unless @thumbnails_count == 0
|
||||
Logger.debug "Thumbnail generation progress: " \
|
||||
"#{(thumbnail_generation_progress * 100).round 1}%"
|
||||
end
|
||||
# Generation is completed. We reset the count to 0 to allow subsequent
|
||||
# calls to the function, and break from the loop to stop the progress
|
||||
# report fiber
|
||||
if thumbnail_generation_progress.to_i == 1
|
||||
@thumbnails_count = 0
|
||||
break
|
||||
end
|
||||
sleep 10.seconds
|
||||
end
|
||||
end
|
||||
|
||||
entries.each do |e|
|
||||
unless e.get_thumbnail
|
||||
e.generate_thumbnail
|
||||
# Sleep after each generation to minimize the impact on disk IO
|
||||
# and CPU
|
||||
sleep 0.5.seconds
|
||||
end
|
||||
@thumbnails_count += 1
|
||||
end
|
||||
Logger.info "Thumbnail generation finished"
|
||||
end
|
||||
end
|
||||
383 src/library/title.cr Normal file
@@ -0,0 +1,383 @@
|
||||
require "../archive"
|
||||
|
||||
class Title
|
||||
property dir : String, parent_id : String, title_ids : Array(String),
|
||||
entries : Array(Entry), title : String, id : String,
|
||||
encoded_title : String, mtime : Time,
|
||||
entry_display_name_cache : Hash(String, String)?
|
||||
|
||||
def initialize(@dir : String, @parent_id, storage,
|
||||
@library : Library)
|
||||
id = storage.get_id @dir, true
|
||||
if id.nil?
|
||||
id = random_str
|
||||
storage.insert_id({
|
||||
path: @dir,
|
||||
id: id,
|
||||
is_title: true,
|
||||
})
|
||||
end
|
||||
@id = id
|
||||
@title = File.basename dir
|
||||
@encoded_title = URI.encode @title
|
||||
@title_ids = [] of String
|
||||
@entries = [] of Entry
|
||||
@mtime = File.info(dir).modification_time
|
||||
|
||||
Dir.entries(dir).each do |fn|
|
||||
next if fn.starts_with? "."
|
||||
path = File.join dir, fn
|
||||
if File.directory? path
|
||||
title = Title.new path, @id, storage, @library
|
||||
next if title.entries.size == 0 && title.titles.size == 0
|
||||
@library.title_hash[title.id] = title
|
||||
@title_ids << title.id
|
||||
next
|
||||
end
|
||||
if [".zip", ".cbz", ".rar", ".cbr"].includes? File.extname path
|
||||
entry = Entry.new path, self, storage
|
||||
@entries << entry if entry.pages > 0 || entry.err_msg
|
||||
end
|
||||
end
|
||||
|
||||
mtimes = [@mtime]
|
||||
mtimes += @title_ids.map { |e| @library.title_hash[e].mtime }
|
||||
mtimes += @entries.map { |e| e.mtime }
|
||||
@mtime = mtimes.max
|
||||
|
||||
@title_ids.sort! do |a, b|
|
||||
compare_numerically @library.title_hash[a].title,
|
||||
@library.title_hash[b].title
|
||||
end
|
||||
sorter = ChapterSorter.new @entries.map { |e| e.title }
|
||||
@entries.sort! do |a, b|
|
||||
sorter.compare a.title, b.title
|
||||
end
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.object do
|
||||
{% for str in ["dir", "title", "id"] %}
|
||||
json.field {{str}}, @{{str.id}}
|
||||
{% end %}
|
||||
json.field "display_name", display_name
|
||||
json.field "cover_url", cover_url
|
||||
json.field "mtime" { json.number @mtime.to_unix }
|
||||
json.field "titles" do
|
||||
json.raw self.titles.to_json
|
||||
end
|
||||
json.field "entries" do
|
||||
json.raw @entries.to_json
|
||||
end
|
||||
json.field "parents" do
|
||||
json.array do
|
||||
self.parents.each do |title|
|
||||
json.object do
|
||||
json.field "title", title.title
|
||||
json.field "id", title.id
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def titles
|
||||
@title_ids.map { |tid| @library.get_title! tid }
|
||||
end
|
||||
|
||||
# Get all entries, including entries in nested titles
|
||||
def deep_entries
|
||||
return @entries if title_ids.empty?
|
||||
@entries + titles.map { |t| t.deep_entries }.flatten
|
||||
end
|
||||
|
||||
def deep_titles
|
||||
return [] of Title if titles.empty?
|
||||
titles + titles.map { |t| t.deep_titles }.flatten
|
||||
end
|
||||
|
||||
def parents
|
||||
ary = [] of Title
|
||||
tid = @parent_id
|
||||
while !tid.empty?
|
||||
title = @library.get_title! tid
|
||||
ary << title
|
||||
tid = title.parent_id
|
||||
end
|
||||
ary.reverse
|
||||
end
|
||||
|
||||
def size
|
||||
@entries.size + @title_ids.size
|
||||
end
|
||||
|
||||
def get_entry(eid)
|
||||
@entries.find { |e| e.id == eid }
|
||||
end
|
||||
|
||||
def display_name
|
||||
dn = @title
|
||||
TitleInfo.new @dir do |info|
|
||||
info_dn = info.display_name
|
||||
dn = info_dn unless info_dn.empty?
|
||||
end
|
||||
dn
|
||||
end
|
||||
|
||||
def encoded_display_name
|
||||
URI.encode display_name
|
||||
end
|
||||
|
||||
def display_name(entry_name)
|
||||
unless @entry_display_name_cache
|
||||
TitleInfo.new @dir do |info|
|
||||
@entry_display_name_cache = info.entry_display_name
|
||||
end
|
||||
end
|
||||
|
||||
dn = entry_name
|
||||
info_dn = @entry_display_name_cache.not_nil![entry_name]?
|
||||
unless info_dn.nil? || info_dn.empty?
|
||||
dn = info_dn
|
||||
end
|
||||
dn
|
||||
end
|
||||
|
||||
def set_display_name(dn)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.display_name = dn
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
def set_display_name(entry_name : String, dn)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.entry_display_name[entry_name] = dn
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
def cover_url
|
||||
url = "#{Config.current.base_url}img/icon.png"
|
||||
readable_entries = @entries.select &.err_msg.nil?
|
||||
if readable_entries.size > 0
|
||||
url = readable_entries[0].cover_url
|
||||
end
|
||||
TitleInfo.new @dir do |info|
|
||||
info_url = info.cover_url
|
||||
unless info_url.nil? || info_url.empty?
|
||||
url = File.join Config.current.base_url, info_url
|
||||
end
|
||||
end
|
||||
url
|
||||
end
|
||||
|
||||
def set_cover_url(url : String)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.cover_url = url
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
def set_cover_url(entry_name : String, url : String)
|
||||
TitleInfo.new @dir do |info|
|
||||
info.entry_cover_url[entry_name] = url
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
# Set the reading progress of all entries and nested libraries to 100%
|
||||
def read_all(username)
|
||||
@entries.each do |e|
|
||||
e.save_progress username, e.pages
|
||||
end
|
||||
titles.each do |t|
|
||||
t.read_all username
|
||||
end
|
||||
end
|
||||
|
||||
# Set the reading progress of all entries and nested libraries to 0%
|
||||
def unread_all(username)
|
||||
@entries.each do |e|
|
||||
e.save_progress username, 0
|
||||
end
|
||||
titles.each do |t|
|
||||
t.unread_all username
|
||||
end
|
||||
end
|
||||
|
||||
def deep_read_page_count(username) : Int32
|
||||
load_progress_for_all_entries(username).sum +
|
||||
titles.map { |t| t.deep_read_page_count username }.flatten.sum
|
||||
end
|
||||
|
||||
def deep_total_page_count : Int32
|
||||
entries.map { |e| e.pages }.sum +
|
||||
titles.map { |t| t.deep_total_page_count }.flatten.sum
|
||||
end
|
||||
|
||||
def load_percentage(username)
|
||||
deep_read_page_count(username) / deep_total_page_count
|
||||
end
|
||||
|
||||
def load_progress_for_all_entries(username, opt : SortOptions? = nil,
|
||||
unsorted = false)
|
||||
progress = {} of String => Int32
|
||||
TitleInfo.new @dir do |info|
|
||||
progress = info.progress[username]?
|
||||
end
|
||||
|
||||
if unsorted
|
||||
ary = @entries
|
||||
else
|
||||
ary = sorted_entries username, opt
|
||||
end
|
||||
|
||||
ary.map do |e|
|
||||
info_progress = 0
|
||||
if progress && progress.has_key? e.title
|
||||
info_progress = [progress[e.title], e.pages].min
|
||||
end
|
||||
info_progress
|
||||
end
|
||||
end
|
||||
|
||||
def load_percentage_for_all_entries(username, opt : SortOptions? = nil,
|
||||
unsorted = false)
|
||||
if unsorted
|
||||
ary = @entries
|
||||
else
|
||||
ary = sorted_entries username, opt
|
||||
end
|
||||
|
||||
progress = load_progress_for_all_entries username, opt, unsorted
|
||||
ary.map_with_index do |e, i|
|
||||
progress[i] / e.pages
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the sorted entries array
|
||||
#
|
||||
# When `opt` is nil, it uses the preferred sorting options in info.json, or
|
||||
# use the default (auto, ascending)
|
||||
# When `opt` is not nil, it saves the options to info.json
|
||||
def sorted_entries(username, opt : SortOptions? = nil)
|
||||
if opt.nil?
|
||||
opt = SortOptions.from_info_json @dir, username
|
||||
else
|
||||
TitleInfo.new @dir do |info|
|
||||
info.sort_by[username] = opt.to_tuple
|
||||
info.save
|
||||
end
|
||||
end
|
||||
|
||||
case opt.not_nil!.method
|
||||
when .title?
|
||||
ary = @entries.sort { |a, b| compare_numerically a.title, b.title }
|
||||
when .time_modified?
|
||||
ary = @entries.sort { |a, b| (a.mtime <=> b.mtime).or \
|
||||
compare_numerically a.title, b.title }
|
||||
when .time_added?
|
||||
ary = @entries.sort { |a, b| (a.date_added <=> b.date_added).or \
|
||||
compare_numerically a.title, b.title }
|
||||
when .progress?
|
||||
percentage_ary = load_percentage_for_all_entries username, opt, true
|
||||
ary = @entries.zip(percentage_ary)
|
||||
.sort { |a_tp, b_tp| (a_tp[1] <=> b_tp[1]).or \
|
||||
compare_numerically a_tp[0].title, b_tp[0].title }
|
||||
.map { |tp| tp[0] }
|
||||
else
|
||||
unless opt.method.auto?
|
||||
Logger.warn "Unknown sorting method #{opt.not_nil!.method}. Using " \
|
||||
"Auto instead"
|
||||
end
|
||||
sorter = ChapterSorter.new @entries.map { |e| e.title }
|
||||
ary = @entries.sort do |a, b|
|
||||
sorter.compare(a.title, b.title).or \
|
||||
compare_numerically a.title, b.title
|
||||
end
|
||||
end
|
||||
|
||||
ary.reverse! unless opt.not_nil!.ascend
|
||||
|
||||
ary
|
||||
end
|
||||
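The `sorted_entries` method above takes a username plus an optional SortOptions and, when an explicit option is passed, persists it back to info.json. A minimal usage sketch; the `title` object and username here are illustrative, not part of this commit:

    # Hypothetical: newest-first by date added for user "alice"
    opt = SortOptions.new "time_added", false
    entries = title.sorted_entries "alice", opt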
|
||||
# === helper methods ===
|
||||
|
||||
# Gets the last read entry in the title. If the entry has been completed,
|
||||
# returns the next entry. Returns nil when no entry has been read yet,
|
||||
# or when all entries are completed
|
||||
def get_last_read_entry(username) : Entry?
|
||||
progress = {} of String => Int32
|
||||
TitleInfo.new @dir do |info|
|
||||
progress = info.progress[username]?
|
||||
end
|
||||
return if progress.nil?
|
||||
|
||||
last_read_entry = nil
|
||||
|
||||
sorted_entries(username).reverse_each do |e|
|
||||
if progress.has_key?(e.title) && progress[e.title] > 0
|
||||
last_read_entry = e
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
if last_read_entry && last_read_entry.finished? username
|
||||
last_read_entry = last_read_entry.next_entry username
|
||||
end
|
||||
|
||||
last_read_entry
|
||||
end
|
||||
|
||||
# Equivalent to `@entries.map &.date_added`, but much more efficient
|
||||
def get_date_added_for_all_entries
|
||||
da = {} of String => Time
|
||||
TitleInfo.new @dir do |info|
|
||||
da = info.date_added
|
||||
end
|
||||
|
||||
@entries.each do |e|
|
||||
next if da.has_key? e.title
|
||||
da[e.title] = ctime e.zip_path
|
||||
end
|
||||
|
||||
TitleInfo.new @dir do |info|
|
||||
info.date_added = da
|
||||
info.save
|
||||
end
|
||||
|
||||
@entries.map { |e| da[e.title] }
|
||||
end
|
||||
|
||||
def deep_entries_with_date_added
|
||||
da_ary = get_date_added_for_all_entries
|
||||
zip = @entries.map_with_index do |e, i|
|
||||
{entry: e, date_added: da_ary[i]}
|
||||
end
|
||||
return zip if title_ids.empty?
|
||||
zip + titles.map { |t| t.deep_entries_with_date_added }.flatten
|
||||
end
|
||||
|
||||
def bulk_progress(action, ids : Array(String), username)
|
||||
selected_entries = ids
|
||||
.map { |id|
|
||||
@entries.find { |e| e.id == id }
|
||||
}
|
||||
.select(Entry)
|
||||
|
||||
TitleInfo.new @dir do |info|
|
||||
selected_entries.each do |e|
|
||||
page = action == "read" ? e.pages : 0
|
||||
if info.progress[username]?.nil?
|
||||
info.progress[username] = {e.title => page}
|
||||
else
|
||||
info.progress[username][e.title] = page
|
||||
end
|
||||
end
|
||||
info.save
|
||||
end
|
||||
end
|
||||
end
|
||||
112 src/library/types.cr Normal file
@@ -0,0 +1,112 @@
|
||||
SUPPORTED_IMG_TYPES = ["image/jpeg", "image/png", "image/webp"]
|
||||
|
||||
enum SortMethod
|
||||
Auto
|
||||
Title
|
||||
Progress
|
||||
TimeModified
|
||||
TimeAdded
|
||||
end
|
||||
|
||||
class SortOptions
|
||||
property method : SortMethod, ascend : Bool
|
||||
|
||||
def initialize(in_method : String? = nil, @ascend = true)
|
||||
@method = SortMethod::Auto
|
||||
SortMethod.each do |m, _|
|
||||
if in_method && m.to_s.underscore == in_method
|
||||
@method = m
|
||||
return
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def initialize(in_method : SortMethod? = nil, @ascend = true)
|
||||
if in_method
|
||||
@method = in_method
|
||||
else
|
||||
@method = SortMethod::Auto
|
||||
end
|
||||
end
|
||||
|
||||
def self.from_tuple(tp : Tuple(String, Bool))
|
||||
method, ascend = tp
|
||||
self.new method, ascend
|
||||
end
|
||||
|
||||
def self.from_info_json(dir, username)
|
||||
opt = SortOptions.new
|
||||
TitleInfo.new dir do |info|
|
||||
if info.sort_by.has_key? username
|
||||
opt = SortOptions.from_tuple info.sort_by[username]
|
||||
end
|
||||
end
|
||||
opt
|
||||
end
|
||||
|
||||
def to_tuple
|
||||
{@method.to_s.underscore, ascend}
|
||||
end
|
||||
end
|
||||
|
||||
struct Image
|
||||
property data : Bytes
|
||||
property mime : String
|
||||
property filename : String
|
||||
property size : Int32
|
||||
|
||||
def initialize(@data, @mime, @filename, @size)
|
||||
end
|
||||
|
||||
def self.from_db(res : DB::ResultSet)
|
||||
img = Image.allocate
|
||||
res.read String
|
||||
img.data = res.read Bytes
|
||||
img.filename = res.read String
|
||||
img.mime = res.read String
|
||||
img.size = res.read Int32
|
||||
img
|
||||
end
|
||||
end
|
||||
|
||||
class TitleInfo
|
||||
include JSON::Serializable
|
||||
|
||||
property comment = "Generated by Mango. DO NOT EDIT!"
|
||||
property progress = {} of String => Hash(String, Int32)
|
||||
property display_name = ""
|
||||
property entry_display_name = {} of String => String
|
||||
property cover_url = ""
|
||||
property entry_cover_url = {} of String => String
|
||||
property last_read = {} of String => Hash(String, Time)
|
||||
property date_added = {} of String => Time
|
||||
property sort_by = {} of String => Tuple(String, Bool)
|
||||
|
||||
@[JSON::Field(ignore: true)]
|
||||
property dir : String = ""
|
||||
|
||||
@@mutex_hash = {} of String => Mutex
|
||||
|
||||
def self.new(dir, &)
|
||||
if @@mutex_hash[dir]?
|
||||
mutex = @@mutex_hash[dir]
|
||||
else
|
||||
mutex = Mutex.new
|
||||
@@mutex_hash[dir] = mutex
|
||||
end
|
||||
mutex.synchronize do
|
||||
instance = TitleInfo.allocate
|
||||
json_path = File.join dir, "info.json"
|
||||
if File.exists? json_path
|
||||
instance = TitleInfo.from_json File.read json_path
|
||||
end
|
||||
instance.dir = dir
|
||||
yield instance
|
||||
end
|
||||
end
|
||||
|
||||
def save
|
||||
json_path = File.join @dir, "info.json"
|
||||
File.write json_path, self.to_pretty_json
|
||||
end
|
||||
end
|
||||
@@ -1,26 +0,0 @@
require "kemal"
require "./logger"

class LogHandler < Kemal::BaseLogHandler
  def initialize(@logger : MLogger)
  end

  def call(env)
    elapsed_time = Time.measure { call_next env }
    elapsed_text = elapsed_text elapsed_time
    msg = "#{env.response.status_code} #{env.request.method}" \
          " #{env.request.resource} #{elapsed_text}"
    @logger.debug(msg)
    env
  end

  def write(msg)
    @logger.debug(msg)
  end

  private def elapsed_text(elapsed)
    millis = elapsed.total_milliseconds
    return "#{millis.round(2)}ms" if millis >= 1
    "#{(millis * 1000).round(2)}µs"
  end
end
107 src/logger.cr
@@ -1,59 +1,70 @@
|
||||
require "./config"
|
||||
require "logger"
|
||||
require "log"
|
||||
require "colorize"
|
||||
|
||||
LEVELS = ["debug", "error", "fatal", "info", "warn"]
|
||||
COLORS = [:light_cyan, :light_red, :red, :light_yellow, :light_magenta]
|
||||
class Logger
|
||||
LEVELS = ["debug", "error", "fatal", "info", "warn"]
|
||||
SEVERITY_IDS = [0, 4, 5, 2, 3]
|
||||
COLORS = [:light_cyan, :light_red, :red, :light_yellow, :light_magenta]
|
||||
|
||||
class MLogger
|
||||
def initialize(config : Config)
|
||||
@logger = Logger.new STDOUT
|
||||
@@severity : Log::Severity = :info
|
||||
|
||||
@log_off = false
|
||||
log_level = config.log_level
|
||||
if log_level == "off"
|
||||
@log_off = true
|
||||
return
|
||||
end
|
||||
use_default
|
||||
|
||||
{% begin %}
|
||||
case log_level
|
||||
{% for lvl in LEVELS %}
|
||||
when {{lvl}}
|
||||
@logger.level = Logger::{{lvl.upcase.id}}
|
||||
{% end %}
|
||||
else
|
||||
raise "Unknown log level #{log_level}"
|
||||
end
|
||||
{% end %}
|
||||
def initialize
|
||||
level = Config.current.log_level
|
||||
{% begin %}
|
||||
case level.downcase
|
||||
when "off"
|
||||
@@severity = :none
|
||||
{% for lvl, i in LEVELS %}
|
||||
when {{lvl}}
|
||||
@@severity = Log::Severity.new SEVERITY_IDS[{{i}}]
|
||||
{% end %}
|
||||
else
|
||||
raise "Unknown log level #{level}"
|
||||
end
|
||||
{% end %}
|
||||
|
||||
@logger.formatter = Logger::Formatter.new do \
|
||||
|severity, datetime, progname, message, io|
|
||||
@log = Log.for("")
|
||||
@backend = Log::IOBackend.new
|
||||
|
||||
color = :default
|
||||
{% begin %}
|
||||
case severity.to_s().downcase
|
||||
{% for lvl, i in LEVELS %}
|
||||
when {{lvl}}
|
||||
color = COLORS[{{i}}]
|
||||
{% end %}
|
||||
end
|
||||
{% end %}
|
||||
format_proc = ->(entry : Log::Entry, io : IO) do
|
||||
color = :default
|
||||
{% begin %}
|
||||
case entry.severity.label.to_s().downcase
|
||||
{% for lvl, i in LEVELS %}
|
||||
when {{lvl}}, "#{{{lvl}}}ing"
|
||||
color = COLORS[{{i}}]
|
||||
{% end %}
|
||||
else
|
||||
end
|
||||
{% end %}
|
||||
|
||||
io << "[#{severity}]".ljust(8).colorize(color)
|
||||
io << datetime.to_s("%Y/%m/%d %H:%M:%S") << " | "
|
||||
io << message
|
||||
end
|
||||
end
|
||||
io << "[#{entry.severity.label}]".ljust(10).colorize(color)
|
||||
io << entry.timestamp.to_s("%Y/%m/%d %H:%M:%S") << " | "
|
||||
io << entry.message
|
||||
end
|
||||
|
||||
{% for lvl in LEVELS %}
|
||||
def {{lvl.id}}(msg)
|
||||
return if @log_off
|
||||
@logger.{{lvl.id}} msg
|
||||
end
|
||||
{% end %}
|
||||
@backend.formatter = Log::Formatter.new &format_proc
|
||||
Log.setup @@severity, @backend
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.string self
|
||||
end
|
||||
# Ignores @@severity and always logs msg
|
||||
def log(msg)
|
||||
@backend.write Log::Entry.new "", Log::Severity::None, msg,
|
||||
Log::Metadata.empty, nil
|
||||
end
|
||||
|
||||
def self.log(msg)
|
||||
default.log msg
|
||||
end
|
||||
|
||||
{% for lvl in LEVELS %}
|
||||
def {{lvl.id}}(msg)
|
||||
@log.{{lvl.id}} { msg }
|
||||
end
|
||||
def self.{{lvl.id}}(msg)
|
||||
default.not_nil!.{{lvl.id}} msg
|
||||
end
|
||||
{% end %}
|
||||
end
|
||||
|
||||
34 src/main_fiber.cr Normal file
@@ -0,0 +1,34 @@
# On ARM, connecting to the SQLite DB from a spawned fiber would crash
# https://github.com/crystal-lang/crystal-sqlite3/issues/30
# This is a temporary workaround that forces the relevant code to run in the
# main fiber

class MainFiber
  @@channel = Channel(-> Nil).new
  @@done = Channel(Bool).new
  @@main_fiber = Fiber.current

  def self.start_and_block
    loop do
      if proc = @@channel.receive
        begin
          proc.call
        ensure
          @@done.send true
        end
      end
      Fiber.yield
    end
  end

  def self.run(&block : -> Nil)
    if @@main_fiber == Fiber.current
      block.call
    else
      @@channel.send block
      until @@done.receive
        Fiber.yield
      end
    end
  end
end
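Both downloaders later in this diff wrap their SQLite access in `MainFiber.run` so that the query executes on the main fiber. A minimal sketch of that usage pattern, assuming the sqlite3 shard is required elsewhere and using a hypothetical database path:

    count = nil
    MainFiber.run do
      # Runs on the main fiber (directly, or forwarded via the channel),
      # avoiding the ARM crash described above
      DB.open "sqlite3://./queue.db" do |db| # hypothetical path
        count = db.scalar "select count(*) from queue"
      end
    end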
217 src/mangadex/api.cr Normal file
@@ -0,0 +1,217 @@
|
||||
require "json"
|
||||
require "csv"
|
||||
require "../rename"
|
||||
|
||||
macro string_properties(names)
|
||||
{% for name in names %}
|
||||
property {{name.id}} = ""
|
||||
{% end %}
|
||||
end
|
||||
|
||||
macro parse_strings_from_json(names)
|
||||
{% for name in names %}
|
||||
@{{name.id}} = obj[{{name}}].as_s
|
||||
{% end %}
|
||||
end
|
||||
|
||||
macro properties_to_hash(names)
|
||||
{
|
||||
{% for name in names %}
|
||||
"{{name.id}}" => @{{name.id}}.to_s,
|
||||
{% end %}
|
||||
}
|
||||
end
|
||||
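To make the three macros above concrete, a call with `["id", "title"]` expands roughly as follows (derived from the macro bodies, not text from the commit):

    # string_properties ["id", "title"]
    property id = ""
    property title = ""

    # parse_strings_from_json ["id", "title"]
    @id = obj["id"].as_s
    @title = obj["title"].as_s

    # properties_to_hash ["id", "title"]
    {
      "id"    => @id.to_s,
      "title" => @title.to_s,
    }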
|
||||
module MangaDex
|
||||
class Chapter
|
||||
string_properties ["lang_code", "title", "volume", "chapter"]
|
||||
property manga : Manga
|
||||
property time = Time.local
|
||||
property id : String
|
||||
property full_title = ""
|
||||
property language = ""
|
||||
property pages = [] of {String, String} # filename, url
|
||||
property groups = [] of {Int32, String} # group_id, group_name
|
||||
|
||||
def initialize(@id, json_obj : JSON::Any, @manga,
|
||||
lang : Hash(String, String))
|
||||
self.parse_json json_obj, lang
|
||||
end
|
||||
|
||||
def to_info_json
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for name in ["id", "title", "volume", "chapter",
|
||||
"language", "full_title"] %}
|
||||
json.field {{name}}, @{{name.id}}
|
||||
{% end %}
|
||||
json.field "time", @time.to_unix.to_s
|
||||
json.field "manga_title", @manga.title
|
||||
json.field "manga_id", @manga.id
|
||||
json.field "groups" do
|
||||
json.object do
|
||||
@groups.each do |gid, gname|
|
||||
json.field gname, gid
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_json(obj, lang)
|
||||
parse_strings_from_json ["lang_code", "title", "volume",
|
||||
"chapter"]
|
||||
language = lang[@lang_code]?
|
||||
@language = language if language
|
||||
@time = Time.unix obj["timestamp"].as_i
|
||||
suffixes = ["", "_2", "_3"]
|
||||
suffixes.each do |s|
|
||||
gid = obj["group_id#{s}"].as_i
|
||||
next if gid == 0
|
||||
gname = obj["group_name#{s}"].as_s
|
||||
@groups << {gid, gname}
|
||||
end
|
||||
|
||||
rename_rule = Rename::Rule.new \
|
||||
Config.current.mangadex["chapter_rename_rule"].to_s
|
||||
@full_title = rename rename_rule
|
||||
rescue e
|
||||
raise "failed to parse json: #{e}"
|
||||
end
|
||||
|
||||
def rename(rule : Rename::Rule)
|
||||
hash = properties_to_hash ["id", "title", "volume", "chapter",
|
||||
"lang_code", "language", "pages"]
|
||||
hash["groups"] = @groups.map { |g| g[1] }.join ","
|
||||
rule.render hash
|
||||
end
|
||||
end
|
||||
|
||||
class Manga
|
||||
string_properties ["cover_url", "description", "title", "author", "artist"]
|
||||
property chapters = [] of Chapter
|
||||
property id : String
|
||||
|
||||
def initialize(@id, json_obj : JSON::Any)
|
||||
self.parse_json json_obj
|
||||
end
|
||||
|
||||
def to_info_json(with_chapters = true)
|
||||
JSON.build do |json|
|
||||
json.object do
|
||||
{% for name in ["id", "title", "description", "author", "artist",
|
||||
"cover_url"] %}
|
||||
json.field {{name}}, @{{name.id}}
|
||||
{% end %}
|
||||
if with_chapters
|
||||
json.field "chapters" do
|
||||
json.array do
|
||||
@chapters.each do |c|
|
||||
json.raw c.to_info_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_json(obj)
|
||||
parse_strings_from_json ["cover_url", "description", "title", "author",
|
||||
"artist"]
|
||||
rescue e
|
||||
raise "failed to parse json: #{e}"
|
||||
end
|
||||
|
||||
def rename(rule : Rename::Rule)
|
||||
rule.render properties_to_hash ["id", "title", "author", "artist"]
|
||||
end
|
||||
end
|
||||
|
||||
class API
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
@base_url = Config.current.mangadex["api_url"].to_s ||
|
||||
"https://mangadex.org/api/"
|
||||
@lang = {} of String => String
|
||||
CSV.each_row {{read_file "src/assets/lang_codes.csv"}} do |row|
|
||||
@lang[row[1]] = row[0]
|
||||
end
|
||||
end
|
||||
|
||||
def get(url)
|
||||
headers = HTTP::Headers{
|
||||
"User-agent" => "Mangadex.cr",
|
||||
}
|
||||
res = HTTP::Client.get url, headers
|
||||
raise "Failed to get #{url}. [#{res.status_code}] " \
|
||||
"#{res.status_message}" if !res.success?
|
||||
JSON.parse res.body
|
||||
end
|
||||
|
||||
def get_manga(id)
|
||||
obj = self.get File.join @base_url, "manga/#{id}"
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
begin
|
||||
manga = Manga.new id, obj["manga"]
|
||||
obj["chapter"].as_h.map do |k, v|
|
||||
chapter = Chapter.new k, v, manga, @lang
|
||||
manga.chapters << chapter
|
||||
end
|
||||
manga
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
end
|
||||
|
||||
def get_chapter(chapter : Chapter)
|
||||
obj = self.get File.join @base_url, "chapter/#{chapter.id}"
|
||||
if obj["status"]? == "external"
|
||||
raise "This chapter is hosted on an external site " \
|
||||
"#{obj["external"]?}, and Mango does not support " \
|
||||
"external chapters."
|
||||
end
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
begin
|
||||
server = obj["server"].as_s
|
||||
hash = obj["hash"].as_s
|
||||
chapter.pages = obj["page_array"].as_a.map do |fn|
|
||||
{
|
||||
fn.as_s,
|
||||
"#{server}#{hash}/#{fn.as_s}",
|
||||
}
|
||||
end
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
end
|
||||
|
||||
def get_chapter(id : String)
|
||||
obj = self.get File.join @base_url, "chapter/#{id}"
|
||||
if obj["status"]? == "external"
|
||||
raise "This chapter is hosted on an external site " \
|
||||
"#{obj["external"]?}, and Mango does not support " \
|
||||
"external chapters."
|
||||
end
|
||||
if obj["status"]? != "OK"
|
||||
raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
|
||||
end
|
||||
manga_id = ""
|
||||
begin
|
||||
manga_id = obj["manga_id"].as_i.to_s
|
||||
rescue
|
||||
raise "Failed to parse JSON"
|
||||
end
|
||||
manga = self.get_manga manga_id
|
||||
chapter = manga.chapters.find { |c| c.id == id }.not_nil!
|
||||
self.get_chapter chapter
|
||||
chapter
|
||||
end
|
||||
end
|
||||
end
|
||||
167 src/mangadex/downloader.cr Normal file
@@ -0,0 +1,167 @@
|
||||
require "./api"
|
||||
require "compress/zip"
|
||||
|
||||
module MangaDex
|
||||
class PageJob
|
||||
property success = false
|
||||
property url : String
|
||||
property filename : String
|
||||
property writer : Compress::Zip::Writer
|
||||
property tries_remaning : Int32
|
||||
|
||||
def initialize(@url, @filename, @writer, @tries_remaning)
|
||||
end
|
||||
end
|
||||
|
||||
class Downloader < Queue::Downloader
|
||||
@wait_seconds : Int32 = Config.current.mangadex["download_wait_seconds"]
|
||||
.to_i32
|
||||
@retries : Int32 = Config.current.mangadex["download_retries"].to_i32
|
||||
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
@api = API.default
|
||||
super
|
||||
end
|
||||
|
||||
def pop : Queue::Job?
|
||||
job = nil
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@queue.path}" do |db|
|
||||
begin
|
||||
db.query_one "select * from queue where id not like '%-%' " \
|
||||
"and (status = 0 or status = 1) " \
|
||||
"order by time limit 1" do |res|
|
||||
job = Queue::Job.from_query_result res
|
||||
end
|
||||
rescue
|
||||
end
|
||||
end
|
||||
end
|
||||
job
|
||||
end
|
||||
|
||||
private def download(job : Queue::Job)
|
||||
@downloading = true
|
||||
@queue.set_status Queue::JobStatus::Downloading, job
|
||||
begin
|
||||
chapter = @api.get_chapter(job.id)
|
||||
rescue e
|
||||
Logger.error e
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
unless e.message.nil?
|
||||
@queue.add_message e.message.not_nil!, job
|
||||
end
|
||||
@downloading = false
|
||||
return
|
||||
end
|
||||
@queue.set_pages chapter.pages.size, job
|
||||
lib_dir = @library_path
|
||||
rename_rule = Rename::Rule.new \
|
||||
Config.current.mangadex["manga_rename_rule"].to_s
|
||||
manga_dir = File.join lib_dir, chapter.manga.rename rename_rule
|
||||
unless File.exists? manga_dir
|
||||
Dir.mkdir_p manga_dir
|
||||
end
|
||||
zip_path = File.join manga_dir, "#{job.title}.cbz.part"
|
||||
|
||||
# Find the number of digits needed to store the number of pages
|
||||
len = Math.log10(chapter.pages.size).to_i + 1
|
||||
|
||||
writer = Compress::Zip::Writer.new zip_path
|
||||
# Create a buffered channel. It works as a FIFO queue
|
||||
channel = Channel(PageJob).new chapter.pages.size
|
||||
spawn do
|
||||
chapter.pages.each_with_index do |tuple, i|
|
||||
fn, url = tuple
|
||||
ext = File.extname fn
|
||||
fn = "#{i.to_s.rjust len, '0'}#{ext}"
|
||||
page_job = PageJob.new url, fn, writer, @retries
|
||||
Logger.debug "Downloading #{url}"
|
||||
loop do
|
||||
sleep @wait_seconds.seconds
|
||||
download_page page_job
|
||||
break if page_job.success ||
|
||||
page_job.tries_remaning <= 0
|
||||
page_job.tries_remaning -= 1
|
||||
Logger.warn "Failed to download page #{url}. " \
|
||||
"Retrying... Remaining retries: " \
|
||||
"#{page_job.tries_remaning}"
|
||||
end
|
||||
|
||||
channel.send page_job
|
||||
break unless @queue.exists? job
|
||||
end
|
||||
end
|
||||
|
||||
spawn do
|
||||
page_jobs = [] of PageJob
|
||||
chapter.pages.size.times do
|
||||
page_job = channel.receive
|
||||
|
||||
break unless @queue.exists? job
|
||||
|
||||
Logger.debug "[#{page_job.success ? "success" : "failed"}] " \
|
||||
"#{page_job.url}"
|
||||
page_jobs << page_job
|
||||
if page_job.success
|
||||
@queue.add_success job
|
||||
else
|
||||
@queue.add_fail job
|
||||
msg = "Failed to download page #{page_job.url}"
|
||||
@queue.add_message msg, job
|
||||
Logger.error msg
|
||||
end
|
||||
end
|
||||
|
||||
unless @queue.exists? job
|
||||
Logger.debug "Download cancelled"
|
||||
@downloading = false
|
||||
next
|
||||
end
|
||||
|
||||
fail_count = page_jobs.count { |j| !j.success }
|
||||
Logger.debug "Download completed. " \
|
||||
"#{fail_count}/#{page_jobs.size} failed"
|
||||
writer.close
|
||||
filename = File.join File.dirname(zip_path), File.basename(zip_path,
|
||||
".part")
|
||||
File.rename zip_path, filename
|
||||
Logger.debug "cbz File created at #{filename}"
|
||||
|
||||
zip_exception = validate_archive filename
|
||||
if !zip_exception.nil?
|
||||
@queue.add_message "The downloaded archive is corrupted. " \
|
||||
"Error: #{zip_exception}", job
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
elsif fail_count > 0
|
||||
@queue.set_status Queue::JobStatus::MissingPages, job
|
||||
else
|
||||
@queue.set_status Queue::JobStatus::Completed, job
|
||||
end
|
||||
@downloading = false
|
||||
end
|
||||
end
|
||||
|
||||
private def download_page(job : PageJob)
|
||||
Logger.debug "downloading #{job.url}"
|
||||
headers = HTTP::Headers{
|
||||
"User-agent" => "Mangadex.cr",
|
||||
}
|
||||
begin
|
||||
HTTP::Client.get job.url, headers do |res|
|
||||
unless res.success?
|
||||
raise "Failed to download page #{job.url}. " \
|
||||
"[#{res.status_code}] #{res.status_message}"
|
||||
end
|
||||
job.writer.add job.filename, res.body_io
|
||||
end
|
||||
job.success = true
|
||||
rescue e
|
||||
Logger.error e
|
||||
job.success = false
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
145 src/mango.cr
@@ -1,34 +1,131 @@
|
||||
require "./config"
|
||||
require "./queue"
|
||||
require "./server"
|
||||
require "./context"
|
||||
require "./main_fiber"
|
||||
require "./mangadex/*"
|
||||
require "option_parser"
|
||||
require "clim"
|
||||
require "./plugin/*"
|
||||
|
||||
VERSION = "0.1.0"
|
||||
MANGO_VERSION = "0.17.1"
|
||||
|
||||
config_path = nil
|
||||
# From http://www.network-science.de/ascii/
|
||||
BANNER = %{
|
||||
|
||||
parser = OptionParser.parse do |parser|
|
||||
parser.banner = "Mango e-manga server/reader. Version #{VERSION}\n"
|
||||
_| _|
|
||||
_|_| _|_| _|_|_| _|_|_| _|_|_| _|_|
|
||||
_| _| _| _| _| _| _| _| _| _| _|
|
||||
_| _| _| _| _| _| _| _| _| _|
|
||||
_| _| _|_|_| _| _| _|_|_| _|_|
|
||||
_|
|
||||
_|_|
|
||||
|
||||
parser.on "-v", "--version", "Show version" do
|
||||
puts "Version #{VERSION}"
|
||||
exit
|
||||
end
|
||||
parser.on "-h", "--help", "Show help" do
|
||||
puts parser
|
||||
exit
|
||||
end
|
||||
parser.on "-c PATH", "--config=PATH", "Path to the config file. " \
|
||||
"Default is `~/.config/mango/config.yml`" do |path|
|
||||
config_path = path
|
||||
end
|
||||
|
||||
}
|
||||
|
||||
DESCRIPTION = "Mango - Manga Server and Web Reader. Version #{MANGO_VERSION}"
|
||||
|
||||
macro common_option
|
||||
option "-c PATH", "--config=PATH", type: String,
|
||||
desc: "Path to the config file"
|
||||
end
|
||||
|
||||
config = Config.load config_path
|
||||
logger = MLogger.new config
|
||||
storage = Storage.new config.db_path, logger
|
||||
library = Library.new config.library_path, config.scan_interval, logger, storage
|
||||
macro throw(msg)
|
||||
puts "ERROR: #{{{msg}}}"
|
||||
puts
|
||||
puts "Please see the `--help`."
|
||||
exit 1
|
||||
end
|
||||
|
||||
context = Context.new config, logger, library, storage
|
||||
class CLI < Clim
|
||||
main do
|
||||
desc DESCRIPTION
|
||||
usage "mango [sub_command] [options]"
|
||||
help short: "-h"
|
||||
version "Version #{MANGO_VERSION}", short: "-v"
|
||||
common_option
|
||||
run do |opts|
|
||||
puts BANNER
|
||||
puts DESCRIPTION
|
||||
puts
|
||||
|
||||
server = Server.new context
|
||||
server.start
|
||||
# empty ARGV so it won't be passed to Kemal
|
||||
ARGV.clear
|
||||
|
||||
Config.load(opts.config).set_current
|
||||
MangaDex::Downloader.default
|
||||
Plugin::Downloader.default
|
||||
|
||||
spawn do
|
||||
Server.new.start
|
||||
end
|
||||
|
||||
MainFiber.start_and_block
|
||||
end
|
||||
|
||||
sub "admin" do
|
||||
desc "Run admin tools"
|
||||
usage "mango admin [tool]"
|
||||
help short: "-h"
|
||||
run do |opts|
|
||||
puts opts.help_string
|
||||
end
|
||||
sub "user" do
|
||||
desc "User management tool"
|
||||
usage "mango admin user [arguments] [options]"
|
||||
help short: "-h"
|
||||
argument "action", type: String,
|
||||
desc: "Action to perform. Can be add/delete/update/list"
|
||||
argument "username", type: String,
|
||||
desc: "Username to update or delete"
|
||||
option "-u USERNAME", "--username=USERNAME", type: String,
|
||||
desc: "Username"
|
||||
option "-p PASSWORD", "--password=PASSWORD", type: String,
|
||||
desc: "Password"
|
||||
option "-a", "--admin", desc: "Admin flag", type: Bool, default: false
|
||||
common_option
|
||||
run do |opts, args|
|
||||
Config.load(opts.config).set_current
|
||||
storage = Storage.new nil, false
|
||||
|
||||
case args.action
|
||||
when "add"
|
||||
throw "Options `-u` and `-p` required." if opts.username.nil? ||
|
||||
opts.password.nil?
|
||||
storage.new_user opts.username.not_nil!,
|
||||
opts.password.not_nil!, opts.admin
|
||||
when "delete"
|
||||
throw "Argument `username` required." if args.username.nil?
|
||||
storage.delete_user args.username
|
||||
when "update"
|
||||
throw "Argument `username` required." if args.username.nil?
|
||||
username = opts.username || args.username
|
||||
password = opts.password || ""
|
||||
storage.update_user args.username, username.not_nil!,
|
||||
password.not_nil!, opts.admin
|
||||
when "list"
|
||||
users = storage.list_users
|
||||
name_length = users.map(&.[0].size).max? || 0
|
||||
l_cell_width = ["username".size, name_length].max
|
||||
r_cell_width = "admin access".size
|
||||
header = " #{"username".ljust l_cell_width} | admin access "
|
||||
puts "-" * header.size
|
||||
puts header
|
||||
puts "-" * header.size
|
||||
users.each do |name, admin|
|
||||
puts " #{name.ljust l_cell_width} | " \
|
||||
"#{admin.to_s.ljust r_cell_width} "
|
||||
end
|
||||
puts "-" * header.size
|
||||
when nil
|
||||
puts opts.help_string
|
||||
else
|
||||
throw "Unknown action \"#{args.action}\"."
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
CLI.start(ARGV)
|
||||
|
||||
141 src/plugin/downloader.cr Normal file
@@ -0,0 +1,141 @@
|
||||
class Plugin
|
||||
class Downloader < Queue::Downloader
|
||||
use_default
|
||||
|
||||
def initialize
|
||||
super
|
||||
end
|
||||
|
||||
def pop : Queue::Job?
|
||||
job = nil
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@queue.path}" do |db|
|
||||
begin
|
||||
db.query_one "select * from queue where id like '%-%' " \
|
||||
"and (status = 0 or status = 1) " \
|
||||
"order by time limit 1" do |res|
|
||||
job = Queue::Job.from_query_result res
|
||||
end
|
||||
rescue
|
||||
end
|
||||
end
|
||||
end
|
||||
job
|
||||
end
|
||||
|
||||
private def process_filename(str)
|
||||
return "_" if str == ".."
|
||||
str.gsub "/", "_"
|
||||
end
|
||||
|
||||
private def download(job : Queue::Job)
|
||||
@downloading = true
|
||||
@queue.set_status Queue::JobStatus::Downloading, job
|
||||
|
||||
begin
|
||||
unless job.plugin_id
|
||||
raise "Job does not have a plugin ID specificed"
|
||||
end
|
||||
|
||||
plugin = Plugin.new job.plugin_id.not_nil!
|
||||
info = plugin.select_chapter job.plugin_chapter_id.not_nil!
|
||||
|
||||
pages = info["pages"].as_i
|
||||
|
||||
manga_title = process_filename job.manga_title
|
||||
chapter_title = process_filename info["title"].as_s
|
||||
|
||||
@queue.set_pages pages, job
|
||||
lib_dir = @library_path
|
||||
manga_dir = File.join lib_dir, manga_title
|
||||
unless File.exists? manga_dir
|
||||
Dir.mkdir_p manga_dir
|
||||
end
|
||||
|
||||
zip_path = File.join manga_dir, "#{chapter_title}.cbz.part"
|
||||
writer = Compress::Zip::Writer.new zip_path
|
||||
rescue e
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
unless e.message.nil?
|
||||
@queue.add_message e.message.not_nil!, job
|
||||
end
|
||||
@downloading = false
|
||||
raise e
|
||||
end
|
||||
|
||||
fail_count = 0
|
||||
|
||||
while page = plugin.next_page
|
||||
break unless @queue.exists? job
|
||||
|
||||
fn = process_filename page["filename"].as_s
|
||||
url = page["url"].as_s
|
||||
headers = HTTP::Headers.new
|
||||
|
||||
if page["headers"]?
|
||||
page["headers"].as_h.each do |k, v|
|
||||
headers.add k, v.as_s
|
||||
end
|
||||
end
|
||||
|
||||
page_success = false
|
||||
tries = 4
|
||||
|
||||
loop do
|
||||
sleep plugin.info.wait_seconds.seconds
|
||||
Logger.debug "downloading #{url}"
|
||||
tries -= 1
|
||||
|
||||
begin
|
||||
HTTP::Client.get url, headers do |res|
|
||||
unless res.success?
|
||||
raise "Failed to download page #{url}. " \
|
||||
"[#{res.status_code}] #{res.status_message}"
|
||||
end
|
||||
writer.add fn, res.body_io
|
||||
end
|
||||
rescue e
|
||||
@queue.add_fail job
|
||||
fail_count += 1
|
||||
msg = "Failed to download page #{url}. Error: #{e}"
|
||||
@queue.add_message msg, job
|
||||
Logger.error msg
|
||||
Logger.debug "[failed] #{url}"
|
||||
else
|
||||
@queue.add_success job
|
||||
Logger.debug "[success] #{url}"
|
||||
page_success = true
|
||||
end
|
||||
|
||||
break if page_success || tries < 0
|
||||
end
|
||||
end
|
||||
|
||||
unless @queue.exists? job
|
||||
Logger.debug "Download cancelled"
|
||||
@downloading = false
|
||||
return
|
||||
end
|
||||
|
||||
Logger.debug "Download completed. #{fail_count}/#{pages} failed"
|
||||
writer.close
|
||||
filename = File.join File.dirname(zip_path), File.basename(zip_path,
|
||||
".part")
|
||||
File.rename zip_path, filename
|
||||
Logger.debug "cbz File created at #{filename}"
|
||||
|
||||
zip_exception = validate_archive filename
|
||||
if !zip_exception.nil?
|
||||
@queue.add_message "The downloaded archive is corrupted. " \
|
||||
"Error: #{zip_exception}", job
|
||||
@queue.set_status Queue::JobStatus::Error, job
|
||||
elsif fail_count > 0
|
||||
@queue.set_status Queue::JobStatus::MissingPages, job
|
||||
else
|
||||
@queue.set_status Queue::JobStatus::Completed, job
|
||||
end
|
||||
|
||||
@downloading = false
|
||||
end
|
||||
end
|
||||
end
|
||||
342
src/plugin/plugin.cr
Normal file
@@ -0,0 +1,342 @@
|
||||
require "duktape/runtime"
|
||||
require "myhtml"
|
||||
require "xml"
|
||||
|
||||
class Plugin
|
||||
class Error < ::Exception
|
||||
end
|
||||
|
||||
class MetadataError < Error
|
||||
end
|
||||
|
||||
class PluginException < Error
|
||||
end
|
||||
|
||||
class SyntaxError < Error
|
||||
end
|
||||
|
||||
struct Info
|
||||
{% for name in ["id", "title", "placeholder"] %}
|
||||
getter {{name.id}} = ""
|
||||
{% end %}
|
||||
getter wait_seconds : UInt64 = 0
|
||||
getter dir : String
|
||||
|
||||
def initialize(@dir)
|
||||
info_path = File.join @dir, "info.json"
|
||||
|
||||
unless File.exists? info_path
|
||||
raise MetadataError.new "File `info.json` not found in the " \
|
||||
"plugin directory #{dir}"
|
||||
end
|
||||
|
||||
@json = JSON.parse File.read info_path
|
||||
|
||||
begin
|
||||
{% for name in ["id", "title", "placeholder"] %}
|
||||
@{{name.id}} = @json[{{name}}].as_s
|
||||
{% end %}
|
||||
@wait_seconds = @json["wait_seconds"].as_i.to_u64
|
||||
|
||||
unless @id.alphanumeric_underscore?
|
||||
raise "Plugin ID can only contain alphanumeric characters and " \
|
||||
"underscores"
|
||||
end
|
||||
rescue e
|
||||
raise MetadataError.new "Failed to retrieve metadata from plugin " \
|
||||
"at #{@dir}. Error: #{e.message}"
|
||||
end
|
||||
end
|
||||
|
||||
def each(&block : String, JSON::Any -> _)
|
||||
@json.as_h.each &block
|
||||
end
|
||||
end
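
A minimal, hypothetical sketch of the `info.json` metadata the `Info` struct above expects; every value below is a placeholder, and the `id` may only contain alphanumeric characters and underscores:

# Sketch only: write an example info.json into a plugin directory.
# The directory name and all field values are made up for illustration.
File.write "plugins/example_plugin/info.json", {
  "id"           => "example_plugin",
  "title"        => "Example Plugin",
  "placeholder"  => "Search term or chapter URL",
  "wait_seconds" => 5,
}.to_pretty_json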
|
||||
|
||||
struct Storage
|
||||
@hash = {} of String => String
|
||||
|
||||
def initialize(@path : String)
|
||||
unless File.exists? @path
|
||||
save
|
||||
end
|
||||
|
||||
json = JSON.parse File.read @path
|
||||
json.as_h.each do |k, v|
|
||||
@hash[k] = v.as_s
|
||||
end
|
||||
end
|
||||
|
||||
def []?(key)
|
||||
@hash[key]?
|
||||
end
|
||||
|
||||
def []=(key, val : String)
|
||||
@hash[key] = val
|
||||
end
|
||||
|
||||
def save
|
||||
File.write @path, @hash.to_pretty_json
|
||||
end
|
||||
end
|
||||
|
||||
@@info_ary = [] of Info
|
||||
@info : Info?
|
||||
|
||||
getter js_path = ""
|
||||
getter storage_path = ""
|
||||
|
||||
def self.build_info_ary
|
||||
@@info_ary.clear
|
||||
dir = Config.current.plugin_path
|
||||
Dir.mkdir_p dir unless Dir.exists? dir
|
||||
|
||||
Dir.each_child dir do |f|
|
||||
path = File.join dir, f
|
||||
next unless File.directory? path
|
||||
|
||||
begin
|
||||
@@info_ary << Info.new path
|
||||
rescue e : MetadataError
|
||||
Logger.warn e
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def self.list
|
||||
self.build_info_ary
|
||||
@@info_ary.map do |m|
|
||||
{id: m.id, title: m.title}
|
||||
end
|
||||
end
|
||||
|
||||
def info
|
||||
@info.not_nil!
|
||||
end
|
||||
|
||||
def initialize(id : String)
|
||||
Plugin.build_info_ary
|
||||
|
||||
@info = @@info_ary.find { |i| i.id == id }
|
||||
if @info.nil?
|
||||
raise Error.new "Plugin with ID #{id} not found"
|
||||
end
|
||||
|
||||
@js_path = File.join info.dir, "index.js"
|
||||
@storage_path = File.join info.dir, "storage.json"
|
||||
|
||||
unless File.exists? @js_path
|
||||
raise Error.new "Plugin script not found at #{@js_path}"
|
||||
end
|
||||
|
||||
@rt = Duktape::Runtime.new do |sbx|
|
||||
sbx.push_global_object
|
||||
|
||||
sbx.push_pointer @storage_path.as(Void*)
|
||||
path = sbx.require_pointer(-1).as String
|
||||
sbx.pop
|
||||
sbx.push_string path
|
||||
sbx.put_prop_string -2, "storage_path"
|
||||
|
||||
def_helper_functions sbx
|
||||
end
|
||||
|
||||
eval File.read @js_path
|
||||
end
|
||||
|
||||
macro check_fields(ary)
|
||||
{% for field in ary %}
|
||||
unless json[{{field}}]?
|
||||
raise "Field `{{field.id}}` is missing from the function outputs"
|
||||
end
|
||||
{% end %}
|
||||
end
|
||||
|
||||
def list_chapters(query : String)
|
||||
json = eval_json "listChapters('#{query}')"
|
||||
begin
|
||||
check_fields ["title", "chapters"]
|
||||
|
||||
ary = json["chapters"].as_a
|
||||
ary.each do |obj|
|
||||
id = obj["id"]?
|
||||
raise "Field `id` missing from `listChapters` outputs" if id.nil?
|
||||
|
||||
unless id.to_s.alphanumeric_underscore?
|
||||
raise "The `id` field can only contain alphanumeric characters " \
|
||||
"and underscores"
|
||||
end
|
||||
|
||||
title = obj["title"]?
|
||||
raise "Field `title` missing from `listChapters` outputs" if title.nil?
|
||||
end
|
||||
rescue e
|
||||
raise Error.new e.message
|
||||
end
|
||||
json
|
||||
end
|
||||
|
||||
def select_chapter(id : String)
|
||||
json = eval_json "selectChapter('#{id}')"
|
||||
begin
|
||||
check_fields ["title", "pages"]
|
||||
|
||||
if json["title"].to_s.empty?
|
||||
raise "The `title` field of the chapter can not be empty"
|
||||
end
|
||||
rescue e
|
||||
raise Error.new e.message
|
||||
end
|
||||
json
|
||||
end
|
||||
|
||||
def next_page
|
||||
json = eval_json "nextPage()"
|
||||
return if json.size == 0
|
||||
begin
|
||||
check_fields ["filename", "url"]
|
||||
rescue e
|
||||
raise Error.new e.message
|
||||
end
|
||||
json
|
||||
end
|
||||
|
||||
private def eval(str)
|
||||
@rt.eval str
|
||||
rescue e : Duktape::SyntaxError
|
||||
raise SyntaxError.new e.message
|
||||
rescue e : Duktape::Error
|
||||
raise Error.new e.message
|
||||
end
|
||||
|
||||
private def eval_json(str)
|
||||
JSON.parse eval(str).as String
|
||||
end
|
||||
|
||||
private def def_helper_functions(sbx)
|
||||
sbx.push_object
|
||||
|
||||
sbx.push_proc LibDUK::VARARGS do |ptr|
|
||||
env = Duktape::Sandbox.new ptr
|
||||
url = env.require_string 0
|
||||
|
||||
headers = HTTP::Headers.new
|
||||
|
||||
if env.get_top == 2
|
||||
env.enum 1, LibDUK::Enum::OwnPropertiesOnly
|
||||
while env.next -1, true
|
||||
key = env.require_string -2
|
||||
val = env.require_string -1
|
||||
headers.add key, val
|
||||
env.pop_2
|
||||
end
|
||||
end
|
||||
|
||||
res = HTTP::Client.get url, headers
|
||||
|
||||
env.push_object
|
||||
|
||||
env.push_int res.status_code
|
||||
env.put_prop_string -2, "status_code"
|
||||
|
||||
env.push_string res.body
|
||||
env.put_prop_string -2, "body"
|
||||
|
||||
env.push_object
|
||||
res.headers.each do |k, v|
|
||||
if v.size == 1
|
||||
env.push_string v[0]
|
||||
else
|
||||
env.push_string v.join ","
|
||||
end
|
||||
env.put_prop_string -2, k
|
||||
end
|
||||
env.put_prop_string -2, "headers"
|
||||
|
||||
env.call_success
|
||||
end
|
||||
sbx.put_prop_string -2, "get"
|
||||
|
||||
sbx.push_proc 2 do |ptr|
|
||||
env = Duktape::Sandbox.new ptr
|
||||
html = env.require_string 0
|
||||
selector = env.require_string 1
|
||||
|
||||
myhtml = Myhtml::Parser.new html
|
||||
ary = myhtml.css(selector).map(&.to_html).to_a
|
||||
|
||||
ary_idx = env.push_array
|
||||
ary.each_with_index do |str, i|
|
||||
env.push_string str
|
||||
env.put_prop_index ary_idx, i.to_u32
|
||||
end
|
||||
|
||||
env.call_success
|
||||
end
|
||||
sbx.put_prop_string -2, "css"
|
||||
|
||||
sbx.push_proc 1 do |ptr|
|
||||
env = Duktape::Sandbox.new ptr
|
||||
html = env.require_string 0
|
||||
|
||||
str = XML.parse(html).inner_text
|
||||
|
||||
env.push_string str
|
||||
env.call_success
|
||||
end
|
||||
sbx.put_prop_string -2, "text"
|
||||
|
||||
sbx.push_proc 2 do |ptr|
|
||||
env = Duktape::Sandbox.new ptr
|
||||
html = env.require_string 0
|
||||
name = env.require_string 1
|
||||
|
||||
begin
|
||||
attr = XML.parse(html).first_element_child.not_nil![name]
|
||||
env.push_string attr
|
||||
rescue
|
||||
env.push_undefined
|
||||
end
|
||||
|
||||
env.call_success
|
||||
end
|
||||
sbx.put_prop_string -2, "attribute"
|
||||
|
||||
sbx.push_proc 1 do |ptr|
|
||||
env = Duktape::Sandbox.new ptr
|
||||
msg = env.require_string 0
|
||||
env.call_success
|
||||
|
||||
raise PluginException.new msg
|
||||
end
|
||||
sbx.put_prop_string -2, "raise"
|
||||
|
||||
sbx.push_proc LibDUK::VARARGS do |ptr|
|
||||
env = Duktape::Sandbox.new ptr
|
||||
key = env.require_string 0
|
||||
|
||||
env.get_global_string "storage_path"
|
||||
storage_path = env.require_string -1
|
||||
env.pop
|
||||
storage = Storage.new storage_path
|
||||
|
||||
if env.get_top == 2
|
||||
val = env.require_string 1
|
||||
storage[key] = val
|
||||
storage.save
|
||||
else
|
||||
val = storage[key]?
|
||||
if val
|
||||
env.push_string val
|
||||
else
|
||||
env.push_undefined
|
||||
end
|
||||
end
|
||||
|
||||
env.call_success
|
||||
end
|
||||
sbx.put_prop_string -2, "storage"
|
||||
|
||||
sbx.put_prop_string -2, "mango"
|
||||
end
|
||||
end
|
||||
318
src/queue.cr
Normal file
@@ -0,0 +1,318 @@
|
||||
require "sqlite3"
|
||||
require "./util/*"
|
||||
|
||||
class Queue
|
||||
abstract class Downloader
|
||||
property stopped = false
|
||||
@library_path : String = Config.current.library_path
|
||||
@downloading = false
|
||||
|
||||
def initialize
|
||||
@queue = Queue.default
|
||||
@queue << self
|
||||
|
||||
spawn do
|
||||
loop do
|
||||
sleep 1.second
|
||||
next if @stopped || @downloading
|
||||
begin
|
||||
job = pop
|
||||
next if job.nil?
|
||||
download job
|
||||
rescue e
|
||||
Logger.error e
|
||||
@downloading = false
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
abstract def pop : Job?
|
||||
private abstract def download(job : Job)
|
||||
end
|
||||
|
||||
enum JobStatus
|
||||
Pending # 0
|
||||
Downloading # 1
|
||||
Error # 2
|
||||
Completed # 3
|
||||
MissingPages # 4
|
||||
end
|
||||
|
||||
struct Job
|
||||
property id : String
|
||||
property manga_id : String
|
||||
property title : String
|
||||
property manga_title : String
|
||||
property status : JobStatus
|
||||
property status_message : String = ""
|
||||
property pages : Int32 = 0
|
||||
property success_count : Int32 = 0
|
||||
property fail_count : Int32 = 0
|
||||
property time : Time
|
||||
property plugin_id : String?
|
||||
property plugin_chapter_id : String?
|
||||
|
||||
def parse_query_result(res : DB::ResultSet)
|
||||
@id = res.read String
|
||||
@manga_id = res.read String
|
||||
@title = res.read String
|
||||
@manga_title = res.read String
|
||||
status = res.read Int32
|
||||
@status_message = res.read String
|
||||
@pages = res.read Int32
|
||||
@success_count = res.read Int32
|
||||
@fail_count = res.read Int32
|
||||
time = res.read Int64
|
||||
@status = JobStatus.new status
|
||||
@time = Time.unix_ms time
|
||||
|
||||
ary = @id.split("-")
|
||||
if ary.size == 2
|
||||
@plugin_id = ary[0]
|
||||
@plugin_chapter_id = ary[1]
|
||||
end
|
||||
end
|
||||
|
||||
# Raises if the result set does not contain the correct set of columns
|
||||
def self.from_query_result(res : DB::ResultSet)
|
||||
job = Job.allocate
|
||||
job.parse_query_result res
|
||||
job
|
||||
end
|
||||
|
||||
def initialize(@id, @manga_id, @title, @manga_title, @status, @time,
|
||||
@plugin_id = nil)
|
||||
end
|
||||
|
||||
def to_json(json)
|
||||
json.object do
|
||||
{% for name in ["id", "manga_id", "title", "manga_title",
|
||||
"status_message"] %}
|
||||
json.field {{name}}, @{{name.id}}
|
||||
{% end %}
|
||||
{% for name in ["pages", "success_count", "fail_count"] %}
|
||||
json.field {{name}} do
|
||||
json.number @{{name.id}}
|
||||
end
|
||||
{% end %}
|
||||
json.field "status", @status.to_s
|
||||
json.field "time" do
|
||||
json.number @time.to_unix_ms
|
||||
end
|
||||
json.field "plugin_id", @plugin_id if @plugin_id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
getter path : String
|
||||
@downloaders = [] of Downloader
|
||||
@paused = false
|
||||
|
||||
use_default
|
||||
|
||||
def initialize(db_path : String? = nil)
|
||||
@path = db_path || Config.current.mangadex["download_queue_db_path"].to_s
|
||||
dir = File.dirname @path
|
||||
unless Dir.exists? dir
|
||||
Logger.info "The queue DB directory #{dir} does not exist. " \
|
||||
"Attepmting to create it"
|
||||
Dir.mkdir_p dir
|
||||
end
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
db.exec "create table if not exists queue " \
|
||||
"(id text, manga_id text, title text, manga_title " \
|
||||
"text, status integer, status_message text, " \
|
||||
"pages integer, success_count integer, " \
|
||||
"fail_count integer, time integer)"
|
||||
db.exec "create unique index if not exists id_idx " \
|
||||
"on queue (id)"
|
||||
db.exec "create index if not exists manga_id_idx " \
|
||||
"on queue (manga_id)"
|
||||
db.exec "create index if not exists status_idx " \
|
||||
"on queue (status)"
|
||||
rescue e
|
||||
Logger.error "Error when checking tables in DB: #{e}"
|
||||
raise e
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Push an array of jobs into the queue, and return the number of jobs
|
||||
# inserted. Any job that already exists in the queue will be ignored.
|
||||
def push(jobs : Array(Job))
|
||||
start_count = self.count
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
jobs.each do |job|
|
||||
db.exec "insert or ignore into queue values " \
|
||||
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
job.id, job.manga_id, job.title, job.manga_title,
|
||||
job.status.to_i, job.status_message, job.pages,
|
||||
job.success_count, job.fail_count, job.time.to_unix_ms
|
||||
end
|
||||
end
|
||||
end
|
||||
self.count - start_count
|
||||
end
|
||||
|
||||
def reset(id : String)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "update queue set status = 0, status_message = '', " \
|
||||
"pages = 0, success_count = 0, fail_count = 0 " \
|
||||
"where id = (?)", id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def reset(job : Job)
|
||||
self.reset job.id
|
||||
end
|
||||
|
||||
# Reset all failed tasks (missing pages and error)
|
||||
def reset
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "update queue set status = 0, status_message = '', " \
|
||||
"pages = 0, success_count = 0, fail_count = 0 " \
|
||||
"where status = 2 or status = 4"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def delete(id : String)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "delete from queue where id = (?)", id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def delete(job : Job)
|
||||
self.delete job.id
|
||||
end
|
||||
|
||||
def exists?(id : String)
|
||||
res = false
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
res = db.query_one "select count(*) from queue where id = (?)", id,
|
||||
as: Bool
|
||||
end
|
||||
end
|
||||
res
|
||||
end
|
||||
|
||||
def exists?(job : Job)
|
||||
self.exists? job.id
|
||||
end
|
||||
|
||||
def delete_status(status : JobStatus)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "delete from queue where status = (?)", status.to_i
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def count_status(status : JobStatus)
|
||||
num = 0
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
num = db.query_one "select count(*) from queue where " \
|
||||
"status = (?)", status.to_i, as: Int32
|
||||
end
|
||||
end
|
||||
num
|
||||
end
|
||||
|
||||
def count
|
||||
num = 0
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
num = db.query_one "select count(*) from queue", as: Int32
|
||||
end
|
||||
end
|
||||
num
|
||||
end
|
||||
|
||||
def set_status(status : JobStatus, job : Job)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "update queue set status = (?) where id = (?)",
|
||||
status.to_i, job.id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_all
|
||||
jobs = [] of Job
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
jobs = db.query_all "select * from queue order by time" do |rs|
|
||||
Job.from_query_result rs
|
||||
end
|
||||
end
|
||||
end
|
||||
jobs
|
||||
end
|
||||
|
||||
def add_success(job : Job)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "update queue set success_count = success_count + 1 " \
|
||||
"where id = (?)", job.id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def add_fail(job : Job)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "update queue set fail_count = fail_count + 1 " \
|
||||
"where id = (?)", job.id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def set_pages(pages : Int32, job : Job)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "update queue set pages = (?), success_count = 0, " \
|
||||
"fail_count = 0 where id = (?)", pages, job.id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def add_message(msg : String, job : Job)
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "update queue set status_message = " \
|
||||
"status_message || (?) || (?) where id = (?)",
|
||||
"\n", msg, job.id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def <<(downloader : Downloader)
|
||||
@downloaders << downloader
|
||||
end
|
||||
|
||||
def pause
|
||||
@downloaders.each { |d| d.stopped = true }
|
||||
@paused = true
|
||||
end
|
||||
|
||||
def resume
|
||||
@downloaders.each { |d| d.stopped = false }
|
||||
@paused = false
|
||||
end
|
||||
|
||||
def paused?
|
||||
@paused
|
||||
end
|
||||
end
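
A minimal usage sketch of the queue API above. `Queue.default` comes from the `use_default` macro, and the job ID follows the `<plugin id>-<chapter id>` convention the plugin downloader looks for; all names below are placeholders. Because duplicate IDs are ignored, the returned count can be smaller than the number of jobs pushed.

# Hypothetical example: enqueue a single plugin chapter download.
job = Queue::Job.new "example_plugin-ch_1", # id ("<plugin id>-<chapter id>")
  "",                                       # manga_id (unused for plugin jobs)
  "Chapter 1",                              # title
  "Some Manga",                             # manga_title
  Queue::JobStatus::Pending,
  Time.utc
inserted = Queue.default.push [job]
Logger.info "#{inserted} new job(s) added to the download queue"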
|
||||
147
src/rename.cr
Normal file
@@ -0,0 +1,147 @@
|
||||
module Rename
|
||||
alias VHash = Hash(String, String)
|
||||
|
||||
abstract class Base(T)
|
||||
@ary = [] of T
|
||||
|
||||
def push(var)
|
||||
@ary.push var
|
||||
end
|
||||
|
||||
abstract def render(hash : VHash)
|
||||
end
|
||||
|
||||
class Variable < Base(String)
|
||||
property id : String
|
||||
|
||||
def initialize(@id)
|
||||
end
|
||||
|
||||
def render(hash : VHash)
|
||||
hash[@id]? || ""
|
||||
end
|
||||
end
|
||||
|
||||
class Pattern < Base(Variable)
|
||||
def render(hash : VHash)
|
||||
@ary.each do |v|
|
||||
if hash.has_key? v.id
|
||||
return v.render hash
|
||||
end
|
||||
end
|
||||
""
|
||||
end
|
||||
end
|
||||
|
||||
class Group < Base(Pattern | String)
|
||||
def render(hash : VHash)
|
||||
return "" if @ary.select(&.is_a? Pattern)
|
||||
.any? &.as(Pattern).render(hash).empty?
|
||||
@ary.map do |e|
|
||||
if e.is_a? Pattern
|
||||
e.render hash
|
||||
else
|
||||
e
|
||||
end
|
||||
end.join
|
||||
end
|
||||
end
|
||||
|
||||
class Rule < Base(Group | String | Pattern)
|
||||
ESCAPE = ['/']
|
||||
|
||||
def initialize(str : String)
|
||||
parse! str
|
||||
rescue e
|
||||
raise "Failed to parse rename rule #{str}. Error: #{e}"
|
||||
end
|
||||
|
||||
private def parse!(str : String)
|
||||
chars = [] of Char
|
||||
pattern : Pattern? = nil
|
||||
group : Group? = nil
|
||||
|
||||
str.each_char_with_index do |char, i|
|
||||
if ['[', ']', '{', '}', '|'].includes?(char) && !chars.empty?
|
||||
string = chars.join
|
||||
if !pattern.nil?
|
||||
pattern.push Variable.new string.strip
|
||||
elsif !group.nil?
|
||||
group.push string
|
||||
else
|
||||
@ary.push string
|
||||
end
|
||||
chars = [] of Char
|
||||
end
|
||||
|
||||
case char
|
||||
when '['
|
||||
if !group.nil? || !pattern.nil?
|
||||
raise "nested groups are not allowed"
|
||||
end
|
||||
group = Group.new
|
||||
when ']'
|
||||
if group.nil?
|
||||
raise "unmatched ] at position #{i}"
|
||||
end
|
||||
if !pattern.nil?
|
||||
raise "patterns (`{}`) should be closed before closing the " \
|
||||
"group (`[]`)"
|
||||
end
|
||||
@ary.push group
|
||||
group = nil
|
||||
when '{'
|
||||
if !pattern.nil?
|
||||
raise "nested patterns are not allowed"
|
||||
end
|
||||
pattern = Pattern.new
|
||||
when '}'
|
||||
if pattern.nil?
|
||||
raise "unmatched } at position #{i}"
|
||||
end
|
||||
if !group.nil?
|
||||
group.push pattern
|
||||
else
|
||||
@ary.push pattern
|
||||
end
|
||||
pattern = nil
|
||||
when '|'
|
||||
if pattern.nil?
|
||||
chars.push char
|
||||
end
|
||||
else
|
||||
if ESCAPE.includes? char
|
||||
raise "the character #{char} at position #{i} is not allowed"
|
||||
end
|
||||
chars.push char
|
||||
end
|
||||
end
|
||||
|
||||
unless chars.empty?
|
||||
@ary.push chars.join
|
||||
end
|
||||
if !pattern.nil?
|
||||
raise "unclosed pattern {"
|
||||
end
|
||||
if !group.nil?
|
||||
raise "unclosed group ["
|
||||
end
|
||||
end
|
||||
|
||||
def render(hash : VHash)
|
||||
str = @ary.map do |e|
|
||||
if e.is_a? String
|
||||
e
|
||||
else
|
||||
e.render hash
|
||||
end
|
||||
end.join.strip
|
||||
post_process str
|
||||
end
|
||||
|
||||
private def post_process(str)
|
||||
return "_" if str == ".."
|
||||
str.gsub "/", "_"
|
||||
end
|
||||
end
|
||||
end
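
A short, hypothetical usage sketch of the rename rule syntax implemented above: variables in `{}` are substituted from the hash, alternatives inside a pattern are separated by `|`, and a group in `[]` renders as an empty string when any pattern inside it is empty. The rule string and hash values are placeholders.

rule = Rename::Rule.new "{title} [- Vol.{volume}]"
rule.render({"title" => "Some Manga", "volume" => "3"}) # => "Some Manga - Vol.3"
rule.render({"title" => "Some Manga"})                  # => "Some Manga"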
|
||||
@@ -1,103 +1,72 @@
|
||||
require "./router"
|
||||
|
||||
class AdminRouter < Router
|
||||
def setup
|
||||
get "/admin" do |env|
|
||||
layout "admin"
|
||||
end
|
||||
def initialize
|
||||
get "/admin" do |env|
|
||||
layout "admin"
|
||||
end
|
||||
|
||||
get "/admin/user" do |env|
|
||||
users = @context.storage.list_users
|
||||
username = get_username env
|
||||
layout "user"
|
||||
end
|
||||
get "/admin/user" do |env|
|
||||
users = @context.storage.list_users
|
||||
username = get_username env
|
||||
layout "user"
|
||||
end
|
||||
|
||||
get "/admin/user/edit" do |env|
|
||||
username = env.params.query["username"]?
|
||||
admin = env.params.query["admin"]?
|
||||
if admin
|
||||
admin = admin == "true"
|
||||
end
|
||||
error = env.params.query["error"]?
|
||||
current_user = get_username env
|
||||
new_user = username.nil? && admin.nil?
|
||||
layout "user-edit"
|
||||
end
|
||||
get "/admin/user/edit" do |env|
|
||||
username = env.params.query["username"]?
|
||||
admin = env.params.query["admin"]?
|
||||
if admin
|
||||
admin = admin == "true"
|
||||
end
|
||||
error = env.params.query["error"]?
|
||||
current_user = get_username env
|
||||
new_user = username.nil? && admin.nil?
|
||||
layout "user-edit"
|
||||
end
|
||||
|
||||
post "/admin/user/edit" do |env|
|
||||
# creating new user
|
||||
begin
|
||||
username = env.params.body["username"]
|
||||
password = env.params.body["password"]
|
||||
# if `admin` is unchecked, the body hash
|
||||
# would not contain `admin`
|
||||
admin = !env.params.body["admin"]?.nil?
|
||||
post "/admin/user/edit" do |env|
|
||||
# creating new user
|
||||
username = env.params.body["username"]
|
||||
password = env.params.body["password"]
|
||||
# if `admin` is unchecked, the body hash
|
||||
# would not contain `admin`
|
||||
admin = !env.params.body["admin"]?.nil?
|
||||
|
||||
if username.size < 3
|
||||
raise "Username should contain at least 3 characters"
|
||||
end
|
||||
if (username =~ /^[A-Za-z0-9_]+$/).nil?
|
||||
raise "Username should contain alphanumeric characters "\
|
||||
"and underscores only"
|
||||
end
|
||||
if password.size < 6
|
||||
raise "Password should contain at least 6 characters"
|
||||
end
|
||||
if (password =~ /^[[:ascii:]]+$/).nil?
|
||||
raise "password should contain ASCII characters only"
|
||||
end
|
||||
@context.storage.new_user username, password, admin
|
||||
|
||||
@context.storage.new_user username, password, admin
|
||||
redirect env, "/admin/user"
|
||||
rescue e
|
||||
@context.error e
|
||||
redirect_url = URI.new \
|
||||
path: "/admin/user/edit",
|
||||
query: hash_to_query({"error" => e.message})
|
||||
redirect env, redirect_url.to_s
|
||||
end
|
||||
|
||||
env.redirect "/admin/user"
|
||||
rescue e
|
||||
@context.error e
|
||||
redirect_url = URI.new \
|
||||
path: "/admin/user/edit",\
|
||||
query: hash_to_query({"error" => e.message})
|
||||
env.redirect redirect_url.to_s
|
||||
end
|
||||
end
|
||||
post "/admin/user/edit/:original_username" do |env|
|
||||
# editing existing user
|
||||
username = env.params.body["username"]
|
||||
password = env.params.body["password"]
|
||||
# if `admin` is unchecked, the body hash would not contain `admin`
|
||||
admin = !env.params.body["admin"]?.nil?
|
||||
original_username = env.params.url["original_username"]
|
||||
|
||||
post "/admin/user/edit/:original_username" do |env|
|
||||
# editing existing user
|
||||
begin
|
||||
username = env.params.body["username"]
|
||||
password = env.params.body["password"]
|
||||
# if `admin` is unchecked, the body
|
||||
# hash would not contain `admin`
|
||||
admin = !env.params.body["admin"]?.nil?
|
||||
original_username = env.params.url["original_username"]
|
||||
@context.storage.update_user \
|
||||
original_username, username, password, admin
|
||||
|
||||
if username.size < 3
|
||||
raise "Username should contain at least 3 characters"
|
||||
end
|
||||
if (username =~ /^[A-Za-z0-9_]+$/).nil?
|
||||
raise "Username should contain alphanumeric characters "\
|
||||
"and underscores only"
|
||||
end
|
||||
redirect env, "/admin/user"
|
||||
rescue e
|
||||
@context.error e
|
||||
redirect_url = URI.new \
|
||||
path: "/admin/user/edit",
|
||||
query: hash_to_query({"username" => original_username, \
|
||||
"admin" => admin, "error" => e.message})
|
||||
redirect env, redirect_url.to_s
|
||||
end
|
||||
|
||||
if password.size != 0
|
||||
if password.size < 6
|
||||
raise "Password should contain at least 6 characters"
|
||||
end
|
||||
if (password =~ /^[[:ascii:]]+$/).nil?
|
||||
raise "password should contain ASCII characters only"
|
||||
end
|
||||
end
|
||||
|
||||
@context.storage.update_user \
|
||||
original_username, username, password, admin
|
||||
|
||||
env.redirect "/admin/user"
|
||||
rescue e
|
||||
@context.error e
|
||||
redirect_url = URI.new \
|
||||
path: "/admin/user/edit",\
|
||||
query: hash_to_query({"username" => original_username, \
|
||||
"admin" => admin, "error" => e.message})
|
||||
env.redirect redirect_url.to_s
|
||||
end
|
||||
end
|
||||
end
|
||||
get "/admin/downloads" do |env|
|
||||
mangadex_base_url = Config.current.mangadex["base_url"]
|
||||
layout "download-manager"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,92 +1,699 @@
|
||||
require "./router"
|
||||
require "../mangadex/*"
|
||||
require "../upload"
|
||||
require "koa"
|
||||
|
||||
class APIRouter < Router
|
||||
def setup
|
||||
get "/api/page/:tid/:eid/:page" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
page = env.params.url["page"].to_i
|
||||
@@api_json : String?
|
||||
|
||||
title = @context.library.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if \
|
||||
entry.nil?
|
||||
img = entry.read_page page
|
||||
raise "Failed to load page #{page} of " \
|
||||
"`#{title.title}/#{entry.title}`" if img.nil?
|
||||
API_VERSION = "0.1.0"
|
||||
|
||||
send_img env, img
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
e.message
|
||||
end
|
||||
end
|
||||
macro s(fields)
|
||||
{
|
||||
{% for field in fields %}
|
||||
{{field}} => "string",
|
||||
{% end %}
|
||||
}
|
||||
end
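# Note (added for clarity): the `s` macro above expands a list of field names
# into a string-typed schema hash at compile time, e.g. `s %w(id title)`
# becomes `{"id" => "string", "title" => "string"}`, which the schemas below
# merge with their explicitly typed fields.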
|
||||
|
||||
get "/api/book/:title" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
title = @context.library.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
def initialize
|
||||
Koa.init "Mango API", version: API_VERSION, desc: <<-MD
|
||||
# A Word of Caution
|
||||
|
||||
send_json env, title.to_json
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
e.message
|
||||
end
|
||||
end
|
||||
This API was designed for internal use only, and the design doesn't comply with the resource conventions of a RESTful API. Because of this, most of the API endpoints listed here may be updated or removed in future versions of Mango, so use them at your own risk!
|
||||
|
||||
get "/api/book" do |env|
|
||||
send_json env, @context.library.to_json
|
||||
end
|
||||
# Authentication
|
||||
|
||||
post "/api/admin/scan" do |env|
|
||||
start = Time.utc
|
||||
@context.library.scan
|
||||
ms = (Time.utc - start).total_milliseconds
|
||||
send_json env, {
|
||||
"milliseconds" => ms,
|
||||
"titles" => @context.library.titles.size
|
||||
}.to_json
|
||||
end
|
||||
All endpoints require authentication. After logging in, your session ID is stored in a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
|
||||
|
||||
post "/api/admin/user/delete/:username" do |env|
|
||||
begin
|
||||
username = env.params.url["username"]
|
||||
@context.storage.delete_user username
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message
|
||||
}.to_json
|
||||
else
|
||||
send_json env, {"success" => true}.to_json
|
||||
end
|
||||
end
|
||||
# Terminologies
|
||||
|
||||
post "/api/progress/:title/:entry/:page" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
title = (@context.library.get_title env.params.url["title"])
|
||||
.not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
page = env.params.url["page"].to_i
|
||||
- Entry: An entry is a `cbz`/`cbr` file in your library. Depending on how you organize your manga collection, an entry can contain a chapter, a volume or even an entire manga.
|
||||
- Title: A title contains a list of entries and optionally some sub-titles. For example, you can have a title to store a manga, and it contains a list of sub-titles representing the volumes in the manga. Each sub-title would then contain a list of entries representing the chapters in the volume.
|
||||
- Library: The library is a collection of top-level titles, and it does not contain entries (though the titles do). A Mango instance can only have one library.
|
||||
MD
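
A hypothetical client-side sketch of the cookie authentication described above, using Crystal's standard HTTP client; the host, port and credentials are placeholders, and the cookie name follows the `mango-sessid-<port>` convention stated in the description.

require "http/client"

client = HTTP::Client.new "localhost", 9000
# Log in through the regular form endpoint; the response sets the session cookie.
login = client.post "/login", form: {"username" => "admin", "password" => "secret"}
session = login.cookies["mango-sessid-9000"]?
headers = HTTP::Headers{"Cookie" => "#{session.try &.name}=#{session.try &.value}"}
# Reuse the cookie to call an authenticated API endpoint.
puts client.get("/api/library", headers: headers).body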
|
||||
|
||||
raise "incorrect page value" if page < 0 || page > entry.pages
|
||||
title.save_progress username, entry.title, page
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message
|
||||
}.to_json
|
||||
else
|
||||
send_json env, {"success" => true}.to_json
|
||||
end
|
||||
end
|
||||
end
|
||||
Koa.cookie_auth "cookie", "mango-sessid-#{Config.current.port}"
|
||||
Koa.global_tag "admin", desc: <<-MD
|
||||
These admin endpoints are only accessible to users with admin access. A non-admin user will get HTTP 403 when calling them.
|
||||
MD
|
||||
|
||||
Koa.binary "binary", desc: "A binary file"
|
||||
Koa.array "entryAry", "$entry", desc: "An array of entries"
|
||||
Koa.array "titleAry", "$title", desc: "An array of titles"
|
||||
Koa.array "strAry", "string", desc: "An array of strings"
|
||||
|
||||
entry_schema = {
|
||||
"pages" => "integer",
|
||||
"mtime" => "integer",
|
||||
}.merge s %w(zip_path title size id title_id display_name cover_url)
|
||||
Koa.object "entry", entry_schema, desc: "An entry in a book"
|
||||
|
||||
title_schema = {
|
||||
"mtime" => "integer",
|
||||
"entries" => "$entryAry",
|
||||
"titles" => "$titleAry",
|
||||
"parents" => "$strAry",
|
||||
}.merge s %w(dir title id display_name cover_url)
|
||||
Koa.object "title", title_schema,
|
||||
desc: "A manga title (a collection of entries and sub-titles)"
|
||||
|
||||
Koa.object "library", {
|
||||
"dir" => "string",
|
||||
"titles" => "$titleAry",
|
||||
}, desc: "A library containing a list of top-level titles"
|
||||
|
||||
Koa.object "scanResult", {
|
||||
"milliseconds" => "integer",
|
||||
"titles" => "integer",
|
||||
}
|
||||
|
||||
Koa.object "progressResult", {
|
||||
"progress" => "number",
|
||||
}
|
||||
|
||||
Koa.object "result", {
|
||||
"success" => "boolean",
|
||||
"error" => "string?",
|
||||
}
|
||||
|
||||
mc_schema = {
|
||||
"groups" => "object",
|
||||
}.merge s %w(id title volume chapter language full_title time manga_title manga_id)
|
||||
Koa.object "mangadexChapter", mc_schema, desc: "A MangaDex chapter"
|
||||
|
||||
Koa.array "chapterAry", "$mangadexChapter"
|
||||
|
||||
mm_schema = {
|
||||
"chapers" => "$chapterAry",
|
||||
}.merge s %w(id title description author artist cover_url)
|
||||
Koa.object "mangadexManga", mm_schema, desc: "A MangaDex manga"
|
||||
|
||||
Koa.object "chaptersObj", {
|
||||
"chapters" => "$chapterAry",
|
||||
}
|
||||
|
||||
Koa.object "successFailCount", {
|
||||
"success" => "integer",
|
||||
"fail" => "integer",
|
||||
}
|
||||
|
||||
job_schema = {
|
||||
"pages" => "integer",
|
||||
"success_count" => "integer",
|
||||
"fail_count" => "integer",
|
||||
"time" => "integer",
|
||||
}.merge s %w(id manga_id title manga_title status_message status)
|
||||
Koa.object "job", job_schema, desc: "A download job in the queue"
|
||||
|
||||
Koa.array "jobAry", "$job"
|
||||
|
||||
Koa.object "jobs", {
|
||||
"success" => "boolean",
|
||||
"paused" => "boolean",
|
||||
"jobs" => "$jobAry",
|
||||
}
|
||||
|
||||
Koa.object "binaryUpload", {
|
||||
"file" => "$binary",
|
||||
}
|
||||
|
||||
Koa.object "pluginListBody", {
|
||||
"plugin" => "string",
|
||||
"query" => "string",
|
||||
}
|
||||
|
||||
Koa.object "pluginChapter", {
|
||||
"id" => "string",
|
||||
"title" => "string",
|
||||
}
|
||||
|
||||
Koa.array "pluginChapterAry", "$pluginChapter"
|
||||
|
||||
Koa.object "pluginList", {
|
||||
"success" => "boolean",
|
||||
"chapters" => "$pluginChapterAry?",
|
||||
"title" => "string?",
|
||||
"error" => "string?",
|
||||
}
|
||||
|
||||
Koa.object "pluginDownload", {
|
||||
"plugin" => "string",
|
||||
"title" => "string",
|
||||
"chapters" => "$pluginChapterAry",
|
||||
}
|
||||
|
||||
Koa.object "dimension", {
|
||||
"width" => "integer",
|
||||
"height" => "integer",
|
||||
}
|
||||
|
||||
Koa.array "dimensionAry", "$dimension"
|
||||
|
||||
Koa.object "dimensionResult", {
|
||||
"success" => "boolean",
|
||||
"dimensions" => "$dimensionAry?",
|
||||
"error" => "string?",
|
||||
}
|
||||
|
||||
Koa.object "ids", {
|
||||
"ids" => "$strAry",
|
||||
}
|
||||
|
||||
Koa.describe "Returns a page in a manga entry"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.path "eid", desc: "Entry ID"
|
||||
Koa.path "page", type: "integer", desc: "The page number to return (starts from 1)"
|
||||
Koa.response 200, ref: "$binary", media_type: "image/*"
|
||||
Koa.response 500, "Page not found or not readable"
|
||||
get "/api/page/:tid/:eid/:page" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
page = env.params.url["page"].to_i
|
||||
|
||||
title = @context.library.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||
img = entry.read_page page
|
||||
raise "Failed to load page #{page} of " \
|
||||
"`#{title.title}/#{entry.title}`" if img.nil?
|
||||
|
||||
send_img env, img
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the cover image of a manga entry"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.path "eid", desc: "Entry ID"
|
||||
Koa.response 200, ref: "$binary", media_type: "image/*"
|
||||
Koa.response 500, "Page not found or not readable"
|
||||
get "/api/cover/:tid/:eid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
|
||||
title = @context.library.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||
|
||||
img = entry.get_thumbnail || entry.read_page 1
|
||||
raise "Failed to get cover of `#{title.title}/#{entry.title}`" \
|
||||
if img.nil?
|
||||
|
||||
send_img env, img
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the book with title `tid`"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.response 200, ref: "$title"
|
||||
Koa.response 404, "Title not found"
|
||||
get "/api/book/:tid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
title = @context.library.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
|
||||
send_json env, title.to_json
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 404
|
||||
e.message
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the entire library with all titles and entries"
|
||||
Koa.response 200, ref: "$library"
|
||||
get "/api/library" do |env|
|
||||
send_json env, @context.library.to_json
|
||||
end
|
||||
|
||||
Koa.describe "Triggers a library scan"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$scanResult"
|
||||
post "/api/admin/scan" do |env|
|
||||
start = Time.utc
|
||||
@context.library.scan
|
||||
ms = (Time.utc - start).total_milliseconds
|
||||
send_json env, {
|
||||
"milliseconds" => ms,
|
||||
"titles" => @context.library.titles.size,
|
||||
}.to_json
|
||||
end
|
||||
|
||||
Koa.describe "Returns the thumbnail generation progress between 0 and 1"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$progressResult"
|
||||
get "/api/admin/thumbnail_progress" do |env|
|
||||
send_json env, {
|
||||
"progress" => Library.default.thumbnail_generation_progress,
|
||||
}.to_json
|
||||
end
|
||||
|
||||
Koa.describe "Triggers a thumbnail generation"
|
||||
Koa.tag "admin"
|
||||
post "/api/admin/generate_thumbnails" do |env|
|
||||
spawn do
|
||||
Library.default.generate_thumbnails
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Deletes a user with `username`"
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$result"
|
||||
delete "/api/admin/user/delete/:username" do |env|
|
||||
begin
|
||||
username = env.params.url["username"]
|
||||
@context.storage.delete_user username
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
else
|
||||
send_json env, {"success" => true}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Updates the reading progress of an entry or the whole title for the current user", <<-MD
|
||||
When `eid` is provided, sets the reading progress of the entry to `page`.
|
||||
|
||||
When `eid` is omitted, updates the progress of the entire title. Specifically:
|
||||
|
||||
- if `page` is 0, marks the entire title as unread
|
||||
- otherwise, marks the entire title as read
|
||||
MD
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.query "eid", desc: "Entry ID", required: false
|
||||
Koa.path "page", desc: "The new page number indicating the progress"
|
||||
Koa.response 200, ref: "$result"
|
||||
put "/api/progress/:tid/:page" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
title = (@context.library.get_title env.params.url["tid"]).not_nil!
|
||||
page = env.params.url["page"].to_i
|
||||
entry_id = env.params.query["eid"]?
|
||||
|
||||
if !entry_id.nil?
|
||||
entry = title.get_entry(entry_id).not_nil!
|
||||
raise "incorrect page value" if page < 0 || page > entry.pages
|
||||
entry.save_progress username, page
|
||||
elsif page == 0
|
||||
title.unread_all username
|
||||
else
|
||||
title.read_all username
|
||||
end
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
else
|
||||
send_json env, {"success" => true}.to_json
|
||||
end
|
||||
end
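
Hypothetical calls against the progress endpoint described above; the title ID, entry ID, host and port are placeholders, and the authenticated session cookie from the earlier sketch is omitted for brevity.

client = HTTP::Client.new "localhost", 9000
# Set the progress of one entry to page 12.
client.put "/api/progress/some_title_id/12?eid=some_entry_id"
# Mark the entire title as unread (page 0, no `eid`).
client.put "/api/progress/some_title_id/0"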
|
||||
|
||||
Koa.describe "Updates the reading progress of multiple entries in a title"
|
||||
Koa.path "action", desc: "The action to perform. Can be either `read` or `unread`"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.body ref: "$ids", desc: "An array of entry IDs"
|
||||
Koa.response 200, ref: "$result"
|
||||
put "/api/bulk_progress/:action/:tid" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
title = (@context.library.get_title env.params.url["tid"]).not_nil!
|
||||
action = env.params.url["action"]
|
||||
ids = env.params.json["ids"].as(Array).map &.as_s
|
||||
|
||||
unless action.in? ["read", "unread"]
|
||||
raise "Unknow action #{action}"
|
||||
end
|
||||
title.bulk_progress action, ids, username
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
else
|
||||
send_json env, {"success" => true}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Sets the display name of a title or an entry", <<-MD
|
||||
When `eid` is provided, apply the display name to the entry. Otherwise, apply the display name to the title identified by `tid`.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.path "tid", desc: "Title ID"
|
||||
Koa.query "eid", desc: "Entry ID", required: false
|
||||
Koa.path "name", desc: "The new display name"
|
||||
Koa.response 200, ref: "$result"
|
||||
put "/api/admin/display_name/:tid/:name" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["tid"])
|
||||
.not_nil!
|
||||
name = env.params.url["name"]
|
||||
entry = env.params.query["eid"]?
|
||||
if entry.nil?
|
||||
title.set_display_name name
|
||||
else
|
||||
eobj = title.get_entry entry
|
||||
title.set_display_name eobj.not_nil!.title, name
|
||||
end
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
else
|
||||
send_json env, {"success" => true}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns a MangaDex manga identified by `id`", <<-MD
|
||||
On error, returns a JSON that contains the error message in the `error` field.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.path "id", desc: "A MangaDex manga ID"
|
||||
Koa.response 200, ref: "$mangadexManga"
|
||||
get "/api/admin/mangadex/manga/:id" do |env|
|
||||
begin
|
||||
id = env.params.url["id"]
|
||||
api = MangaDex::API.default
|
||||
manga = api.get_manga id
|
||||
send_json env, manga.to_info_json
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {"error" => e.message}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Adds a list of MangaDex chapters to the download queue", <<-MD
|
||||
On error, returns a JSON that contains the error message in the `error` field.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.body ref: "$chaptersObj"
|
||||
Koa.response 200, ref: "$successFailCount"
|
||||
post "/api/admin/mangadex/download" do |env|
|
||||
begin
|
||||
chapters = env.params.json["chapters"].as(Array).map { |c| c.as_h }
|
||||
jobs = chapters.map { |chapter|
|
||||
Queue::Job.new(
|
||||
chapter["id"].as_s,
|
||||
chapter["manga_id"].as_s,
|
||||
chapter["full_title"].as_s,
|
||||
chapter["manga_title"].as_s,
|
||||
Queue::JobStatus::Pending,
|
||||
Time.unix chapter["time"].as_s.to_i
|
||||
)
|
||||
}
|
||||
inserted_count = @context.queue.push jobs
|
||||
send_json env, {
|
||||
"success": inserted_count,
|
||||
"fail": jobs.size - inserted_count,
|
||||
}.to_json
|
||||
rescue e
|
||||
@context.error e
|
||||
send_json env, {"error" => e.message}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
ws "/api/admin/mangadex/queue" do |socket, env|
|
||||
interval_raw = env.params.query["interval"]?
|
||||
interval = (interval_raw.to_i? if interval_raw) || 5
|
||||
loop do
|
||||
socket.send({
|
||||
"jobs" => @context.queue.get_all,
|
||||
"paused" => @context.queue.paused?,
|
||||
}.to_json)
|
||||
sleep interval.seconds
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the current download queue", <<-MD
|
||||
On error, returns a JSON that contains the error message in the `error` field.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.response 200, ref: "$jobs"
|
||||
get "/api/admin/mangadex/queue" do |env|
|
||||
begin
|
||||
jobs = @context.queue.get_all
|
||||
send_json env, {
|
||||
"jobs" => jobs,
|
||||
"paused" => @context.queue.paused?,
|
||||
"success" => true,
|
||||
}.to_json
|
||||
rescue e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Perform an action on a download job or all jobs in the queue", <<-MD
|
||||
The `action` parameter can be `delete`, `retry`, `pause` or `resume`.
|
||||
|
||||
When `action` is `pause` or `resume`, pauses or resumes the download queue, respectively.
|
||||
|
||||
When `action` is set to `delete`, the behavior depends on `id`. If `id` is provided, deletes the specific job identified by the ID. Otherwise, deletes all **completed** jobs in the queue.
|
||||
|
||||
When `action` is set to `retry`, the behavior depends on `id`. If `id` is provided, restarts the job identified by the ID. Otherwise, retries all jobs in the `Error` or `MissingPages` status in the queue.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.path "action", desc: "The action to perform. It should be one of the followins: `delete`, `retry`, `pause` and `resume`."
|
||||
Koa.query "id", required: false, desc: "A job ID"
|
||||
Koa.response 200, ref: "$result"
|
||||
post "/api/admin/mangadex/queue/:action" do |env|
|
||||
begin
|
||||
action = env.params.url["action"]
|
||||
id = env.params.query["id"]?
|
||||
case action
|
||||
when "delete"
|
||||
if id.nil?
|
||||
@context.queue.delete_status Queue::JobStatus::Completed
|
||||
else
|
||||
@context.queue.delete id
|
||||
end
|
||||
when "retry"
|
||||
if id.nil?
|
||||
@context.queue.reset
|
||||
else
|
||||
@context.queue.reset id
|
||||
end
|
||||
when "pause"
|
||||
@context.queue.pause
|
||||
when "resume"
|
||||
@context.queue.resume
|
||||
else
|
||||
raise "Unknown queue action #{action}"
|
||||
end
|
||||
|
||||
send_json env, {"success" => true}.to_json
|
||||
rescue e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
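
Hypothetical calls illustrating the queue actions described above; the job ID, host and port are placeholders, and the session cookie is omitted.

client = HTTP::Client.new "localhost", 9000
client.post "/api/admin/mangadex/queue/pause"            # pause the whole queue
client.post "/api/admin/mangadex/queue/retry?id=some_id" # restart a single job
client.post "/api/admin/mangadex/queue/delete"           # remove all completed jobs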
|
||||
|
||||
Koa.describe "Uploads a file to the server", <<-MD
|
||||
Currently the only supported value for the `target` parameter is `cover`.
|
||||
|
||||
### Cover
|
||||
|
||||
Uploads a cover image for a title or an entry.
|
||||
|
||||
Query parameters:
|
||||
- `tid`: A title ID
|
||||
- `eid`: (Optional) An entry ID
|
||||
|
||||
When `eid` is omitted, the new cover image is applied to the title. Otherwise, it is applied to the specified entry.
|
||||
MD
|
||||
Koa.tag "admin"
|
||||
Koa.body type: "multipart/form-data", ref: "$binaryUpload"
|
||||
Koa.response 200, ref: "$result"
|
||||
post "/api/admin/upload/:target" do |env|
|
||||
begin
|
||||
target = env.params.url["target"]
|
||||
|
||||
HTTP::FormData.parse env.request do |part|
|
||||
next if part.name != "file"
|
||||
|
||||
filename = part.filename
|
||||
if filename.nil?
|
||||
raise "No file uploaded"
|
||||
end
|
||||
|
||||
case target
|
||||
when "cover"
|
||||
title_id = env.params.query["tid"]
|
||||
entry_id = env.params.query["eid"]?
|
||||
title = @context.library.get_title(title_id).not_nil!
|
||||
|
||||
unless SUPPORTED_IMG_TYPES.includes? \
|
||||
MIME.from_filename? filename
|
||||
raise "The uploaded image must be either JPEG or PNG"
|
||||
end
|
||||
|
||||
ext = File.extname filename
|
||||
upload = Upload.new Config.current.upload_path
|
||||
url = upload.path_to_url upload.save "img", ext, part.body
|
||||
|
||||
if url.nil?
|
||||
raise "Failed to generate a public URL for the uploaded file"
|
||||
end
|
||||
|
||||
if entry_id.nil?
|
||||
title.set_cover_url url
|
||||
else
|
||||
entry_name = title.get_entry(entry_id).not_nil!.title
|
||||
title.set_cover_url entry_name, url
|
||||
end
|
||||
else
|
||||
raise "Unkown upload target #{target}"
|
||||
end
|
||||
|
||||
send_json env, {"success" => true}.to_json
|
||||
env.response.close
|
||||
end
|
||||
|
||||
raise "No part with name `file` found"
|
||||
rescue e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
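
A hypothetical sketch of uploading a cover image to the endpoint described above, built with Crystal's standard multipart helpers; the file name, title ID, host and port are placeholders, and the session cookie is omitted.

require "http"

io = IO::Memory.new
headers = HTTP::Headers.new
HTTP::FormData.build(io) do |builder|
  headers["Content-Type"] = builder.content_type
  # The uploaded part must be named "file" and be a JPEG or PNG image.
  builder.file "file", File.open("cover.jpg"),
    HTTP::FormData::FileMetadata.new(filename: "cover.jpg")
end
client = HTTP::Client.new "localhost", 9000
client.post "/api/admin/upload/cover?tid=some_title_id", headers: headers, body: io.to_s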
|
||||
|
||||
Koa.describe "Lists the chapters in a title from a plugin"
|
||||
Koa.tag "admin"
|
||||
Koa.body ref: "$pluginListBody"
|
||||
Koa.response 200, ref: "$pluginList"
|
||||
get "/api/admin/plugin/list" do |env|
|
||||
begin
|
||||
query = env.params.query["query"].as String
|
||||
plugin = Plugin.new env.params.query["plugin"].as String
|
||||
|
||||
json = plugin.list_chapters query
|
||||
chapters = json["chapters"]
|
||||
title = json["title"]
|
||||
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"chapters" => chapters,
|
||||
"title" => title,
|
||||
}.to_json
|
||||
rescue e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Adds a list of chapters from a plugin to the download queue"
|
||||
Koa.tag "admin"
|
||||
Koa.body ref: "$pluginDownload"
|
||||
Koa.response 200, ref: "$successFailCount"
|
||||
post "/api/admin/plugin/download" do |env|
|
||||
begin
|
||||
plugin = Plugin.new env.params.json["plugin"].as String
|
||||
chapters = env.params.json["chapters"].as Array(JSON::Any)
|
||||
manga_title = env.params.json["title"].as String
|
||||
|
||||
jobs = chapters.map { |ch|
|
||||
Queue::Job.new(
|
||||
"#{plugin.info.id}-#{ch["id"]}",
|
||||
"", # manga_id
|
||||
ch["title"].as_s,
|
||||
manga_title,
|
||||
Queue::JobStatus::Pending,
|
||||
Time.utc
|
||||
)
|
||||
}
|
||||
inserted_count = @context.queue.push jobs
|
||||
send_json env, {
|
||||
"success": inserted_count,
|
||||
"fail": jobs.size - inserted_count,
|
||||
}.to_json
|
||||
rescue e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Returns the image dimensions of all pages in an entry"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.path "eid", desc: "An entry ID"
|
||||
Koa.response 200, ref: "$dimensionResult"
|
||||
get "/api/dimensions/:tid/:eid" do |env|
|
||||
begin
|
||||
tid = env.params.url["tid"]
|
||||
eid = env.params.url["eid"]
|
||||
|
||||
title = @context.library.get_title tid
|
||||
raise "Title ID `#{tid}` not found" if title.nil?
|
||||
entry = title.get_entry eid
|
||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||
|
||||
sizes = entry.page_dimensions
|
||||
send_json env, {
|
||||
"success" => true,
|
||||
"dimensions" => sizes,
|
||||
}.to_json
|
||||
rescue e
|
||||
send_json env, {
|
||||
"success" => false,
|
||||
"error" => e.message,
|
||||
}.to_json
|
||||
end
|
||||
end
|
||||
|
||||
Koa.describe "Downloads an entry"
|
||||
Koa.path "tid", desc: "A title ID"
|
||||
Koa.path "eid", desc: "An entry ID"
|
||||
Koa.response 200, ref: "$binary"
|
||||
Koa.response 404, "Entry not found"
|
||||
get "/api/download/:tid/:eid" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["tid"]).not_nil!
|
||||
entry = (title.get_entry env.params.url["eid"]).not_nil!
|
||||
|
||||
send_attachment env, entry.zip_path
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
|
||||
doc = Koa.generate
|
||||
@@api_json = doc.to_json if doc
|
||||
|
||||
get "/openapi.json" do |env|
|
||||
if @@api_json
|
||||
send_json env, @@api_json
|
||||
else
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,56 +1,121 @@
|
||||
require "./router"
|
||||
|
||||
class MainRouter < Router
|
||||
def setup
|
||||
get "/login" do |env|
|
||||
render "src/views/login.ecr"
|
||||
end
|
||||
def initialize
|
||||
get "/login" do |env|
|
||||
base_url = Config.current.base_url
|
||||
render "src/views/login.html.ecr"
|
||||
end
|
||||
|
||||
get "/logout" do |env|
|
||||
begin
|
||||
cookie = env.request.cookies
|
||||
.find { |c| c.name == "token" }.not_nil!
|
||||
@context.storage.logout cookie.value
|
||||
rescue e
|
||||
@context.error "Error when attempting to log out: #{e}"
|
||||
ensure
|
||||
env.redirect "/login"
|
||||
end
|
||||
end
|
||||
get "/logout" do |env|
|
||||
begin
|
||||
env.session.delete_string "token"
|
||||
rescue e
|
||||
@context.error "Error when attempting to log out: #{e}"
|
||||
ensure
|
||||
redirect env, "/login"
|
||||
end
|
||||
end
|
||||
|
||||
post "/login" do |env|
|
||||
begin
|
||||
username = env.params.body["username"]
|
||||
password = env.params.body["password"]
|
||||
token = @context.storage.verify_user(username, password)
|
||||
.not_nil!
|
||||
post "/login" do |env|
|
||||
begin
|
||||
username = env.params.body["username"]
|
||||
password = env.params.body["password"]
|
||||
token = @context.storage.verify_user(username, password).not_nil!
|
||||
|
||||
cookie = HTTP::Cookie.new "token", token
|
||||
env.response.cookies << cookie
|
||||
env.redirect "/"
|
||||
rescue
|
||||
env.redirect "/login"
|
||||
end
|
||||
end
|
||||
get "/" do |env|
|
||||
titles = @context.library.titles
|
||||
username = get_username env
|
||||
percentage = titles.map &.load_percetage username
|
||||
layout "index"
|
||||
end
|
||||
env.session.string "token", token
|
||||
|
||||
get "/book/:title" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["title"])
|
||||
.not_nil!
|
||||
username = get_username env
|
||||
percentage = title.entries.map { |e|
|
||||
title.load_percetage username, e.title }
|
||||
layout "title"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
end
|
||||
callback = env.session.string? "callback"
|
||||
if callback
|
||||
env.session.delete_string "callback"
|
||||
redirect env, callback
|
||||
else
|
||||
redirect env, "/"
|
||||
end
|
||||
rescue
|
||||
redirect env, "/login"
|
||||
end
|
||||
end
|
||||
|
||||
get "/library" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.from_info_json @context.library.dir, username
|
||||
get_sort_opt
|
||||
|
||||
titles = @context.library.sorted_titles username, sort_opt
|
||||
percentage = titles.map &.load_percentage username
|
||||
|
||||
layout "library"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
|
||||
get "/book/:title" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["title"]).not_nil!
|
||||
username = get_username env
|
||||
|
||||
sort_opt = SortOptions.from_info_json title.dir, username
|
||||
get_sort_opt
|
||||
|
||||
entries = title.sorted_entries username, sort_opt
|
||||
|
||||
percentage = title.load_percentage_for_all_entries username, sort_opt
|
||||
title_percentage = title.titles.map &.load_percentage username
|
||||
layout "title"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
|
||||
get "/download" do |env|
|
||||
mangadex_base_url = Config.current.mangadex["base_url"]
|
||||
layout "download"
|
||||
end
|
||||
|
||||
get "/download/plugins" do |env|
|
||||
begin
|
||||
id = env.params.query["plugin"]?
|
||||
plugins = Plugin.list
|
||||
plugin = nil
|
||||
|
||||
if id
|
||||
plugin = Plugin.new id
|
||||
elsif !plugins.empty?
|
||||
plugin = Plugin.new plugins[0][:id]
|
||||
end
|
||||
|
||||
layout "plugin-download"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
|
||||
get "/" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
continue_reading = @context
|
||||
.library.get_continue_reading_entries username
|
||||
recently_added = @context.library.get_recently_added_entries username
|
||||
start_reading = @context.library.get_start_reading_titles username
|
||||
titles = @context.library.titles
|
||||
new_user = !titles.any? { |t| t.load_percentage(username) > 0 }
|
||||
empty_library = titles.size == 0
|
||||
layout "home"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 500
|
||||
end
|
||||
end
|
||||
|
||||
get "/api" do |env|
|
||||
render "src/views/api.html.ecr"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
20 src/routes/opds.cr (new file)
@@ -0,0 +1,20 @@
require "./router"

class OPDSRouter < Router
  def initialize
    get "/opds" do |env|
      titles = @context.library.titles
      render_xml "src/views/opds/index.xml.ecr"
    end

    get "/opds/book/:title_id" do |env|
      begin
        title = @context.library.get_title(env.params.url["title_id"]).not_nil!
        render_xml "src/views/opds/title.xml.ecr"
      rescue e
        @context.error e
        env.response.status_code = 404
      end
    end
  end
end
@@ -1,58 +1,53 @@
|
||||
require "./router"
|
||||
|
||||
class ReaderRouter < Router
|
||||
def setup
|
||||
get "/reader/:title/:entry" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["title"])
|
||||
.not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
def initialize
|
||||
get "/reader/:title/:entry" do |env|
|
||||
begin
|
||||
username = get_username env
|
||||
|
||||
# load progress
|
||||
username = get_username env
|
||||
page = title.load_progress username, entry.title
|
||||
# we go back 2 * `IMGS_PER_PAGE` pages. the infinite scroll
|
||||
# library perloads a few pages in advance, and the user
|
||||
# might not have actually read them
|
||||
page = [page - 2 * IMGS_PER_PAGE, 1].max
|
||||
title = (@context.library.get_title env.params.url["title"]).not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
|
||||
env.redirect "/reader/#{title.id}/#{entry.id}/#{page}"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
next layout "reader-error" if entry.err_msg
|
||||
|
||||
get "/reader/:title/:entry/:page" do |env|
|
||||
begin
|
||||
title = (@context.library.get_title env.params.url["title"])
|
||||
.not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
page = env.params.url["page"].to_i
|
||||
raise "" if page > entry.pages || page <= 0
|
||||
# load progress
|
||||
page = [1, entry.load_progress username].max
|
||||
|
||||
# save progress
|
||||
username = get_username env
|
||||
title.save_progress username, entry.title, page
|
||||
# start from page 1 if the user has finished reading the entry
|
||||
page = 1 if entry.finished? username
|
||||
|
||||
pages = (page...[entry.pages + 1, page + IMGS_PER_PAGE].min)
|
||||
urls = pages.map { |idx|
|
||||
"/api/page/#{title.id}/#{entry.id}/#{idx}" }
|
||||
reader_urls = pages.map { |idx|
|
||||
"/reader/#{title.id}/#{entry.id}/#{idx}" }
|
||||
next_page = page + IMGS_PER_PAGE
|
||||
next_url = next_page > entry.pages ? nil :
|
||||
"/reader/#{title.id}/#{entry.id}/#{next_page}"
|
||||
exit_url = "/book/#{title.id}"
|
||||
next_entry = title.next_entry entry
|
||||
next_entry_url = next_entry.nil? ? nil : \
|
||||
"/reader/#{title.id}/#{next_entry.id}"
|
||||
redirect env, "/reader/#{title.id}/#{entry.id}/#{page}"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
|
||||
render "src/views/reader.ecr"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
end
|
||||
get "/reader/:title/:entry/:page" do |env|
|
||||
begin
|
||||
base_url = Config.current.base_url
|
||||
|
||||
username = get_username env
|
||||
|
||||
title = (@context.library.get_title env.params.url["title"]).not_nil!
|
||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||
page = env.params.url["page"].to_i
|
||||
raise "" if page > entry.pages || page <= 0
|
||||
|
||||
exit_url = "#{base_url}book/#{title.id}"
|
||||
|
||||
next_entry_url = nil
|
||||
next_entry = entry.next_entry username
|
||||
unless next_entry.nil?
|
||||
next_entry_url = "#{base_url}reader/#{title.id}/#{next_entry.id}"
|
||||
end
|
||||
|
||||
render "src/views/reader.html.ecr"
|
||||
rescue e
|
||||
@context.error e
|
||||
env.response.status_code = 404
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
require "../context"
|
||||
|
||||
class Router
|
||||
def initialize(@context : Context)
|
||||
end
|
||||
@context : Context = Context.default
|
||||
end
|
||||
|
||||
108 src/server.cr
@@ -1,41 +1,81 @@
|
||||
require "kemal"
|
||||
require "./context"
|
||||
require "./auth_handler"
|
||||
require "./static_handler"
|
||||
require "./log_handler"
|
||||
require "./util"
|
||||
require "kemal-session"
|
||||
require "./library/*"
|
||||
require "./handlers/*"
|
||||
require "./util/*"
|
||||
require "./routes/*"
|
||||
|
||||
class Server
|
||||
def initialize(@context : Context)
|
||||
class Context
|
||||
property library : Library
|
||||
property storage : Storage
|
||||
property queue : Queue
|
||||
|
||||
error 403 do |env|
|
||||
message = "You are not authorized to visit #{env.request.path}"
|
||||
layout "message"
|
||||
end
|
||||
use_default
|
||||
|
||||
MainRouter.new(@context).setup
|
||||
AdminRouter.new(@context).setup
|
||||
ReaderRouter.new(@context).setup
|
||||
APIRouter.new(@context).setup
|
||||
def initialize
|
||||
@storage = Storage.default
|
||||
@library = Library.default
|
||||
@queue = Queue.default
|
||||
end
|
||||
|
||||
Kemal.config.logging = false
|
||||
add_handler LogHandler.new @context.logger
|
||||
add_handler AuthHandler.new @context.storage
|
||||
{% if flag?(:release) %}
|
||||
# when building for release, embed the static files in binary
|
||||
@context.debug "We are in release mode. Using embeded static files."
|
||||
serve_static false
|
||||
add_handler StaticHandler.new
|
||||
{% end %}
|
||||
end
|
||||
|
||||
def start
|
||||
@context.debug "Starting Kemal server"
|
||||
{% if flag?(:release) %}
|
||||
Kemal.config.env = "production"
|
||||
{% end %}
|
||||
Kemal.config.port = @context.config.port
|
||||
Kemal.run
|
||||
end
|
||||
{% for lvl in Logger::LEVELS %}
|
||||
def {{lvl.id}}(msg)
|
||||
Logger.{{lvl.id}} msg
|
||||
end
|
||||
{% end %}
|
||||
end
|
||||
|
||||
class Server
|
||||
@context : Context = Context.default
|
||||
|
||||
def initialize
|
||||
error 403 do |env|
|
||||
message = "HTTP 403: You are not authorized to visit #{env.request.path}"
|
||||
layout "message"
|
||||
end
|
||||
error 404 do |env|
|
||||
message = "HTTP 404: Mango cannot find the page #{env.request.path}"
|
||||
layout "message"
|
||||
end
|
||||
|
||||
{% if flag?(:release) %}
|
||||
error 500 do |env|
|
||||
message = "HTTP 500: Internal server error. Please try again later."
|
||||
layout "message"
|
||||
end
|
||||
{% end %}
|
||||
|
||||
MainRouter.new
|
||||
AdminRouter.new
|
||||
ReaderRouter.new
|
||||
APIRouter.new
|
||||
OPDSRouter.new
|
||||
|
||||
Kemal.config.logging = false
|
||||
add_handler LogHandler.new
|
||||
add_handler AuthHandler.new @context.storage
|
||||
add_handler UploadHandler.new Config.current.upload_path
|
||||
{% if flag?(:release) %}
|
||||
# when building for release, embed the static files in binary
|
||||
@context.debug "We are in release mode. Using embedded static files."
|
||||
serve_static false
|
||||
add_handler StaticHandler.new
|
||||
{% end %}
|
||||
|
||||
Kemal::Session.config do |c|
|
||||
c.timeout = 365.days
|
||||
c.secret = Config.current.session_secret
|
||||
c.cookie_name = "mango-sessid-#{Config.current.port}"
|
||||
c.path = Config.current.base_url
|
||||
end
|
||||
end
|
||||
|
||||
def start
|
||||
@context.debug "Starting Kemal server"
|
||||
{% if flag?(:release) %}
|
||||
Kemal.config.env = "production"
|
||||
{% end %}
|
||||
Kemal.config.port = Config.current.port
|
||||
Kemal.run
|
||||
end
|
||||
end
|
||||
|
||||
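A minimal bootstrapping sketch for the Server class above; this is illustrative only, and the entry-point file it would live in is an assumption, with Config assumed to be loaded elsewhere beforehand.

# Hypothetical entry point (not part of this diff)
require "./server"

server = Server.new # registers routers, handlers, and the session config
server.start        # switches Kemal to production in release builds and runs it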
@@ -1,31 +0,0 @@
|
||||
require "baked_file_system"
|
||||
require "kemal"
|
||||
require "gzip"
|
||||
require "./util"
|
||||
|
||||
class FS
|
||||
extend BakedFileSystem
|
||||
{% if read_file? "#{__DIR__}/../dist/favicon.ico" %}
|
||||
{% puts "baking ../dist" %}
|
||||
bake_folder "../dist"
|
||||
{% else %}
|
||||
{% puts "baking ../public" %}
|
||||
bake_folder "../public"
|
||||
{% end %}
|
||||
end
|
||||
|
||||
class StaticHandler < Kemal::Handler
|
||||
@dirs = ["/css", "/js", "/img", "/favicon.ico"]
|
||||
|
||||
def call(env)
|
||||
if request_path_startswith env, @dirs
|
||||
file = FS.get? env.request.path
|
||||
return call_next env if file.nil?
|
||||
|
||||
slice = Bytes.new file.size
|
||||
file.read slice
|
||||
return send_file env, slice, file.mime_type
|
||||
end
|
||||
call_next env
|
||||
end
|
||||
end
|
||||
440 src/storage.cr
@@ -2,176 +2,310 @@ require "sqlite3"
|
||||
require "crypto/bcrypt"
|
||||
require "uuid"
|
||||
require "base64"
|
||||
require "./util/*"
|
||||
|
||||
def hash_password(pw)
|
||||
Crypto::Bcrypt::Password.create(pw).to_s
|
||||
Crypto::Bcrypt::Password.create(pw).to_s
|
||||
end
|
||||
|
||||
def verify_password(hash, pw)
|
||||
(Crypto::Bcrypt::Password.new hash).verify pw
|
||||
end
|
||||
|
||||
def random_str
|
||||
UUID.random.to_s.gsub "-", ""
|
||||
(Crypto::Bcrypt::Password.new hash).verify pw
|
||||
end
|
||||
|
||||
class Storage
|
||||
def initialize(@path : String, @logger : MLogger)
|
||||
dir = File.dirname path
|
||||
unless Dir.exists? dir
|
||||
@logger.info "The DB directory #{dir} does not exist. " \
|
||||
"Attepmting to create it"
|
||||
Dir.mkdir_p dir
|
||||
end
|
||||
DB.open "sqlite3://#{path}" do |db|
|
||||
begin
|
||||
# We create the `ids` table first. even if the uses has an
|
||||
# early version installed and has the `user` table only,
|
||||
# we will still be able to create `ids`
|
||||
db.exec "create table ids" \
|
||||
"(path text, id text, is_title integer)"
|
||||
db.exec "create unique index path_idx on ids (path)"
|
||||
db.exec "create unique index id_idx on ids (id)"
|
||||
@path : String
|
||||
@db : DB::Database?
|
||||
@insert_ids = [] of IDTuple
|
||||
|
||||
db.exec "create table users" \
|
||||
"(username text, password text, token text, admin integer)"
|
||||
rescue e
|
||||
unless e.message.not_nil!.ends_with? "already exists"
|
||||
@logger.fatal "Error when checking tables in DB: #{e}"
|
||||
raise e
|
||||
end
|
||||
else
|
||||
@logger.debug "Creating DB file at #{@path}"
|
||||
db.exec "create unique index username_idx on users (username)"
|
||||
db.exec "create unique index token_idx on users (token)"
|
||||
random_pw = random_str
|
||||
hash = hash_password random_pw
|
||||
db.exec "insert into users values (?, ?, ?, ?)",
|
||||
"admin", hash, nil, 1
|
||||
puts "Initial user created. You can log in with " \
|
||||
"#{{"username" => "admin", "password" => random_pw}}"
|
||||
end
|
||||
end
|
||||
end
|
||||
alias IDTuple = NamedTuple(path: String,
|
||||
id: String,
|
||||
is_title: Bool)
|
||||
|
||||
def verify_user(username, password)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
hash, token = db.query_one "select password, token from "\
|
||||
"users where username = (?)", \
|
||||
username, as: {String, String?}
|
||||
unless verify_password hash, password
|
||||
@logger.debug "Password does not match the hash"
|
||||
return nil
|
||||
end
|
||||
@logger.debug "User #{username} verified"
|
||||
return token if token
|
||||
token = random_str
|
||||
@logger.debug "Updating token for #{username}"
|
||||
db.exec "update users set token = (?) where username = (?)",
|
||||
token, username
|
||||
return token
|
||||
rescue e
|
||||
@logger.error "Error when verifying user #{username}: #{e}"
|
||||
return nil
|
||||
end
|
||||
end
|
||||
end
|
||||
use_default
|
||||
|
||||
def verify_token(token)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
username = db.query_one "select username from users where " \
|
||||
"token = (?)", token, as: String
|
||||
return username
|
||||
rescue e
|
||||
@logger.debug "Unable to verify token"
|
||||
return nil
|
||||
end
|
||||
end
|
||||
end
|
||||
def initialize(db_path : String? = nil, init_user = true, *,
|
||||
@auto_close = true)
|
||||
@path = db_path || Config.current.db_path
|
||||
dir = File.dirname @path
|
||||
unless Dir.exists? dir
|
||||
Logger.info "The DB directory #{dir} does not exist. " \
|
||||
"Attepmting to create it"
|
||||
Dir.mkdir_p dir
|
||||
end
|
||||
MainFiber.run do
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
db.exec "create table thumbnails " \
|
||||
"(id text, data blob, filename text, " \
|
||||
"mime text, size integer)"
|
||||
db.exec "create unique index tn_index on thumbnails (id)"
|
||||
|
||||
def verify_admin(token)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
return db.query_one "select admin from users where " \
|
||||
"token = (?)", token, as: Bool
|
||||
rescue e
|
||||
@logger.debug "Unable to verify user as admin"
|
||||
return false
|
||||
end
|
||||
end
|
||||
end
|
||||
db.exec "create table ids" \
|
||||
"(path text, id text, is_title integer)"
|
||||
db.exec "create unique index path_idx on ids (path)"
|
||||
db.exec "create unique index id_idx on ids (id)"
|
||||
|
||||
def list_users
|
||||
results = Array(Tuple(String, Bool)).new
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.query "select username, admin from users" do |rs|
|
||||
rs.each do
|
||||
results << {rs.read(String), rs.read(Bool)}
|
||||
end
|
||||
end
|
||||
end
|
||||
results
|
||||
end
|
||||
db.exec "create table users" \
|
||||
"(username text, password text, token text, admin integer)"
|
||||
rescue e
|
||||
unless e.message.not_nil!.ends_with? "already exists"
|
||||
Logger.fatal "Error when checking tables in DB: #{e}"
|
||||
raise e
|
||||
end
|
||||
|
||||
def new_user(username, password, admin)
|
||||
admin = (admin ? 1 : 0)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
hash = hash_password password
|
||||
db.exec "insert into users values (?, ?, ?, ?)",
|
||||
username, hash, nil, admin
|
||||
end
|
||||
end
|
||||
# If the DB is initialized through CLI but no user is added, we need
|
||||
# to create the admin user when first starting the app
|
||||
user_count = db.query_one "select count(*) from users", as: Int32
|
||||
init_admin if init_user && user_count == 0
|
||||
else
|
||||
Logger.debug "Creating DB file at #{@path}"
|
||||
db.exec "create unique index username_idx on users (username)"
|
||||
db.exec "create unique index token_idx on users (token)"
|
||||
|
||||
def update_user(original_username, username, password, admin)
|
||||
admin = (admin ? 1 : 0)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
if password.size == 0
|
||||
db.exec "update users set username = (?), admin = (?) "\
|
||||
"where username = (?)",\
|
||||
username, admin, original_username
|
||||
else
|
||||
hash = hash_password password
|
||||
db.exec "update users set username = (?), admin = (?),"\
|
||||
"password = (?) where username = (?)",\
|
||||
username, admin, hash, original_username
|
||||
end
|
||||
end
|
||||
end
|
||||
init_admin if init_user
|
||||
end
|
||||
end
|
||||
unless @auto_close
|
||||
@db = DB.open "sqlite3://#{@path}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def delete_user(username)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
db.exec "delete from users where username = (?)", username
|
||||
end
|
||||
end
|
||||
macro init_admin
|
||||
random_pw = random_str
|
||||
hash = hash_password random_pw
|
||||
db.exec "insert into users values (?, ?, ?, ?)",
|
||||
"admin", hash, nil, 1
|
||||
Logger.log "Initial user created. You can log in with " \
|
||||
"#{{"username" => "admin", "password" => random_pw}}"
|
||||
end
|
||||
|
||||
def logout(token)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
db.exec "update users set token = (?) where token = (?)", \
|
||||
nil, token
|
||||
rescue
|
||||
end
|
||||
end
|
||||
end
|
||||
private def get_db(&block : DB::Database ->)
|
||||
if @db.nil?
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
yield db
|
||||
end
|
||||
else
|
||||
yield @db.not_nil!
|
||||
end
|
||||
end
|
||||
|
||||
def get_id(path, is_title)
|
||||
DB.open "sqlite3://#{@path}" do |db|
|
||||
begin
|
||||
id = db.query_one "select id from ids where path = (?)",
|
||||
path, as: {String}
|
||||
return id
|
||||
rescue
|
||||
id = random_str
|
||||
db.exec "insert into ids values (?, ?, ?)", path, id,
|
||||
is_title ? 1 : 0
|
||||
return id
|
||||
end
|
||||
end
|
||||
end
|
||||
def verify_user(username, password)
|
||||
out_token = nil
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
begin
|
||||
hash, token = db.query_one "select password, token from " \
|
||||
"users where username = (?)",
|
||||
username, as: {String, String?}
|
||||
unless verify_password hash, password
|
||||
Logger.debug "Password does not match the hash"
|
||||
next
|
||||
end
|
||||
Logger.debug "User #{username} verified"
|
||||
if token
|
||||
out_token = token
|
||||
next
|
||||
end
|
||||
token = random_str
|
||||
Logger.debug "Updating token for #{username}"
|
||||
db.exec "update users set token = (?) where username = (?)",
|
||||
token, username
|
||||
out_token = token
|
||||
rescue e
|
||||
Logger.error "Error when verifying user #{username}: #{e}"
|
||||
end
|
||||
end
|
||||
end
|
||||
out_token
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.string self
|
||||
end
|
||||
def verify_token(token)
|
||||
username = nil
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
begin
|
||||
username = db.query_one "select username from users where " \
|
||||
"token = (?)", token, as: String
|
||||
rescue e
|
||||
Logger.debug "Unable to verify token"
|
||||
end
|
||||
end
|
||||
end
|
||||
username
|
||||
end
|
||||
|
||||
def verify_admin(token)
|
||||
is_admin = false
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
begin
|
||||
is_admin = db.query_one "select admin from users where " \
|
||||
"token = (?)", token, as: Bool
|
||||
rescue e
|
||||
Logger.debug "Unable to verify user as admin"
|
||||
end
|
||||
end
|
||||
end
|
||||
is_admin
|
||||
end
|
||||
|
||||
def list_users
|
||||
results = Array(Tuple(String, Bool)).new
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query "select username, admin from users" do |rs|
|
||||
rs.each do
|
||||
results << {rs.read(String), rs.read(Bool)}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
results
|
||||
end
|
||||
|
||||
def new_user(username, password, admin)
|
||||
validate_username username
|
||||
validate_password password
|
||||
admin = (admin ? 1 : 0)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
hash = hash_password password
|
||||
db.exec "insert into users values (?, ?, ?, ?)",
|
||||
username, hash, nil, admin
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def update_user(original_username, username, password, admin)
|
||||
admin = (admin ? 1 : 0)
|
||||
validate_username username
|
||||
validate_password password unless password.empty?
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
if password.empty?
|
||||
db.exec "update users set username = (?), admin = (?) " \
|
||||
"where username = (?)",
|
||||
username, admin, original_username
|
||||
else
|
||||
hash = hash_password password
|
||||
db.exec "update users set username = (?), admin = (?)," \
|
||||
"password = (?) where username = (?)",
|
||||
username, admin, hash, original_username
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def delete_user(username)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.exec "delete from users where username = (?)", username
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def logout(token)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
begin
|
||||
db.exec "update users set token = (?) where token = (?)", nil, token
|
||||
rescue
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_id(path, is_title)
|
||||
id = nil
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
id = db.query_one? "select id from ids where path = (?)", path,
|
||||
as: {String}
|
||||
end
|
||||
end
|
||||
id
|
||||
end
|
||||
|
||||
def insert_id(tp : IDTuple)
|
||||
@insert_ids << tp
|
||||
end
|
||||
|
||||
def bulk_insert_ids
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.transaction do |tx|
|
||||
@insert_ids.each do |tp|
|
||||
tx.connection.exec "insert into ids values (?, ?, ?)", tp[:path],
|
||||
tp[:id], tp[:is_title] ? 1 : 0
|
||||
end
|
||||
end
|
||||
end
|
||||
@insert_ids.clear
|
||||
end
|
||||
end
|
||||
|
||||
def save_thumbnail(id : String, img : Image)
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.exec "insert into thumbnails values (?, ?, ?, ?, ?)", id, img.data,
|
||||
img.filename, img.mime, img.size
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_thumbnail(id : String) : Image?
|
||||
img = nil
|
||||
MainFiber.run do
|
||||
get_db do |db|
|
||||
db.query_one? "select * from thumbnails where id = (?)", id do |res|
|
||||
img = Image.from_db res
|
||||
end
|
||||
end
|
||||
end
|
||||
img
|
||||
end
|
||||
|
||||
def optimize
|
||||
MainFiber.run do
|
||||
Logger.info "Starting DB optimization"
|
||||
get_db do |db|
|
||||
trash_ids = [] of String
|
||||
db.query "select path, id from ids" do |rs|
|
||||
rs.each do
|
||||
path = rs.read String
|
||||
trash_ids << rs.read String unless File.exists? path
|
||||
end
|
||||
end
|
||||
|
||||
# Delete dangling IDs
|
||||
db.exec "delete from ids where id in " \
|
||||
"(#{trash_ids.map { |i| "'#{i}'" }.join ","})"
|
||||
Logger.debug "#{trash_ids.size} dangling IDs deleted" \
|
||||
if trash_ids.size > 0
|
||||
|
||||
# Delete dangling thumbnails
|
||||
trash_thumbnails_count = db.query_one "select count(*) from " \
|
||||
"thumbnails where id not in " \
|
||||
"(select id from ids)", as: Int32
|
||||
if trash_thumbnails_count > 0
|
||||
db.exec "delete from thumbnails where id not in (select id from ids)"
|
||||
Logger.info "#{trash_thumbnails_count} dangling thumbnails deleted"
|
||||
end
|
||||
end
|
||||
Logger.info "DB optimization finished"
|
||||
end
|
||||
end
|
||||
|
||||
def close
|
||||
MainFiber.run do
|
||||
unless @db.nil?
|
||||
@db.not_nil!.close
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def to_json(json : JSON::Builder)
|
||||
json.string self
|
||||
end
|
||||
end
|
||||
|
||||
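A hedged sketch of the authentication flow through the Storage API shown above; the credentials are placeholders.

storage = Storage.default

# `verify_user` returns the user's token (creating one if missing) or nil
if token = storage.verify_user "admin", "hunter42" # placeholder credentials
  username = storage.verify_token token # => "admin" when the token is valid
  is_admin = storage.verify_admin token # => true for an admin account
end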
60 src/upload.cr (new file)
@@ -0,0 +1,60 @@
|
||||
require "./util/*"
|
||||
|
||||
class Upload
|
||||
def initialize(@dir : String)
|
||||
unless Dir.exists? @dir
|
||||
Logger.info "The uploads directory #{@dir} does not exist. " \
|
||||
"Attempting to create it"
|
||||
Dir.mkdir_p @dir
|
||||
end
|
||||
end
|
||||
|
||||
# Writes IO to a file with random filename in the uploads directory and
|
||||
# returns the full path of created file
|
||||
# e.g., save("image", ".png", <io>)
|
||||
# ==> "~/mango/uploads/image/<random string>.png"
|
||||
def save(sub_dir : String, ext : String, io : IO)
|
||||
full_dir = File.join @dir, sub_dir
|
||||
filename = random_str + ext
|
||||
file_path = File.join full_dir, filename
|
||||
|
||||
unless Dir.exists? full_dir
|
||||
Logger.debug "creating directory #{full_dir}"
|
||||
Dir.mkdir_p full_dir
|
||||
end
|
||||
|
||||
File.open file_path, "w" do |f|
|
||||
IO.copy io, f
|
||||
end
|
||||
|
||||
file_path
|
||||
end
|
||||
|
||||
# Converts path to a file in the uploads directory to the URL path for
|
||||
# accessing the file.
|
||||
def path_to_url(path : String)
|
||||
dir_mathed = false
|
||||
ary = [] of String
|
||||
# We fill it with parts until it equals @dir
|
||||
dir_ary = [] of String
|
||||
|
||||
Path.new(path).each_part do |part|
|
||||
if dir_mathed
|
||||
ary << part
|
||||
else
|
||||
dir_ary << part
|
||||
if File.same? @dir, File.join dir_ary
|
||||
dir_mathed = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if ary.empty?
|
||||
Logger.warn "File #{path} is not in the upload directory #{@dir}"
|
||||
return
|
||||
end
|
||||
|
||||
ary.unshift UPLOAD_URL_PREFIX
|
||||
File.join(ary).to_s
|
||||
end
|
||||
end
|
||||
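An illustrative sketch of the Upload helper above; the directory and payload are hypothetical.

upload = Upload.new "/tmp/mango/uploads" # hypothetical uploads directory

# Save an in-memory payload, then turn the saved path into a servable URL
path = upload.save "image", ".png", IO::Memory.new("fake image bytes")
url = upload.path_to_url path # => "/uploads/image/<random string>.png"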
34 src/util.cr
@@ -1,34 +0,0 @@
|
||||
IMGS_PER_PAGE = 5
|
||||
|
||||
macro layout(name)
|
||||
render "src/views/#{{{name}}}.ecr", "src/views/layout.ecr"
|
||||
end
|
||||
|
||||
macro send_img(env, img)
|
||||
send_file {{env}}, {{img}}.data, {{img}}.mime
|
||||
end
|
||||
|
||||
macro get_username(env)
|
||||
# if the request gets here, its has gone through the auth handler, and
|
||||
# we can be sure that a valid token exists, so we can use not_nil! here
|
||||
cookie = {{env}}.request.cookies.find { |c| c.name == "token" }.not_nil!
|
||||
(@context.storage.verify_token cookie.value).not_nil!
|
||||
end
|
||||
|
||||
macro send_json(env, json)
|
||||
{{env}}.response.content_type = "application/json"
|
||||
{{json}}
|
||||
end
|
||||
|
||||
def hash_to_query(hash)
|
||||
hash.map { |k, v| "#{k}=#{v}" }.join("&")
|
||||
end
|
||||
|
||||
def request_path_startswith(env, ary)
|
||||
ary.each do |prefix|
|
||||
if env.request.path.starts_with? prefix
|
||||
return true
|
||||
end
|
||||
end
|
||||
return false
|
||||
end
|
||||
112 src/util/chapter_sort.cr (new file)
@@ -0,0 +1,112 @@
|
||||
# Helper class used to sort chapters in a folder.
# It respects keywords like "Vol." and "Ch." in the filenames.
# This sorting logic was initially implemented in JS and ran in the frontend;
# see https://github.com/hkalexling/Mango/blob/
# 07100121ef15260b5a8e8da0e5948c993df574c5/public/js/sort-items.js#L15-L87
|
||||
|
||||
require "big"
|
||||
|
||||
private class Item
|
||||
getter numbers : Hash(String, BigDecimal)
|
||||
|
||||
def initialize(@numbers)
|
||||
end
|
||||
|
||||
# Compare with another Item using keys
|
||||
def <=>(other : Item, keys : Array(String))
|
||||
keys.each do |key|
|
||||
if !@numbers.has_key?(key) && !other.numbers.has_key?(key)
|
||||
next
|
||||
elsif !@numbers.has_key? key
|
||||
return 1
|
||||
elsif !other.numbers.has_key? key
|
||||
return -1
|
||||
elsif @numbers[key] == other.numbers[key]
|
||||
next
|
||||
else
|
||||
return @numbers[key] <=> other.numbers[key]
|
||||
end
|
||||
end
|
||||
|
||||
0
|
||||
end
|
||||
end
|
||||
|
||||
private class KeyRange
|
||||
getter min : BigDecimal, max : BigDecimal, count : Int32
|
||||
|
||||
def initialize(value : BigDecimal)
|
||||
@min = @max = value
|
||||
@count = 1
|
||||
end
|
||||
|
||||
def update(value : BigDecimal)
|
||||
@min = value if value < @min
|
||||
@max = value if value > @max
|
||||
@count += 1
|
||||
end
|
||||
|
||||
def range
|
||||
@max - @min
|
||||
end
|
||||
end
|
||||
|
||||
class ChapterSorter
|
||||
@sorted_keys = [] of String
|
||||
|
||||
def initialize(str_ary : Array(String))
|
||||
keys = {} of String => KeyRange
|
||||
|
||||
str_ary.each do |str|
|
||||
scan str do |k, v|
|
||||
if keys.has_key? k
|
||||
keys[k].update v
|
||||
else
|
||||
keys[k] = KeyRange.new v
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Get the array of keys string and sort them
|
||||
@sorted_keys = keys.keys
|
||||
# Only use keys that are present in over half of the strings
|
||||
.select do |key|
|
||||
keys[key].count >= str_ary.size / 2
|
||||
end
|
||||
.sort do |a_key, b_key|
|
||||
a = keys[a_key]
|
||||
b = keys[b_key]
|
||||
# Sort keys by the number of times they appear
|
||||
count_compare = b.count <=> a.count
|
||||
if count_compare == 0
|
||||
# Then sort by value range
|
||||
b.range <=> a.range
|
||||
else
|
||||
count_compare
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def compare(a : String, b : String)
|
||||
item_a = str_to_item a
|
||||
item_b = str_to_item b
|
||||
item_a.<=>(item_b, @sorted_keys)
|
||||
end
|
||||
|
||||
private def scan(str, &)
|
||||
str.scan /([^0-9\n\r\ ]*)[ ]*([0-9]*\.*[0-9]+)/ do |match|
|
||||
key = match[1]
|
||||
num = match[2].to_big_d
|
||||
|
||||
yield key, num
|
||||
end
|
||||
end
|
||||
|
||||
private def str_to_item(str)
|
||||
numbers = {} of String => BigDecimal
|
||||
scan str do |k, v|
|
||||
numbers[k] = v
|
||||
end
|
||||
Item.new numbers
|
||||
end
|
||||
end
|
||||
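A small usage sketch of ChapterSorter with hypothetical chapter file names.

chapters = ["Ch. 10", "Ch. 2", "Ch. 1"] # hypothetical file names
sorter = ChapterSorter.new chapters
sorted = chapters.sort { |a, b| sorter.compare a, b }
# => ["Ch. 1", "Ch. 2", "Ch. 10"] (numeric order on the shared "Ch." key)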
42 src/util/numeric_sort.cr (new file)
@@ -0,0 +1,42 @@
|
||||
# Properly sort alphanumeric strings
|
||||
# Used to sort the images files inside the archives
|
||||
# https://github.com/hkalexling/Mango/issues/12
|
||||
|
||||
require "big"
|
||||
|
||||
def is_numeric(str)
|
||||
/^\d+/.match(str) != nil
|
||||
end
|
||||
|
||||
def split_by_alphanumeric(str)
|
||||
arr = [] of String
|
||||
str.scan(/([^\d\n\r]*)(\d*)([^\d\n\r]*)/) do |match|
|
||||
arr += match.captures.select { |s| s != "" }
|
||||
end
|
||||
arr
|
||||
end
|
||||
|
||||
def compare_numerically(c, d)
|
||||
is_c_bigger = c.size <=> d.size
|
||||
if c.size > d.size
|
||||
d += [nil] * (c.size - d.size)
|
||||
elsif c.size < d.size
|
||||
c += [nil] * (d.size - c.size)
|
||||
end
|
||||
c.zip(d) do |a, b|
|
||||
return -1 if a.nil?
|
||||
return 1 if b.nil?
|
||||
if is_numeric(a) && is_numeric(b)
|
||||
compare = a.to_big_i <=> b.to_big_i
|
||||
return compare if compare != 0
|
||||
else
|
||||
compare = a <=> b
|
||||
return compare if compare != 0
|
||||
end
|
||||
end
|
||||
is_c_bigger
|
||||
end
|
||||
|
||||
def compare_numerically(a : String, b : String)
|
||||
compare_numerically split_by_alphanumeric(a), split_by_alphanumeric(b)
|
||||
end
|
||||
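A quick sketch of compare_numerically on image file names; a plain lexicographic sort would put "page10" before "page2".

pages = ["page10.jpg", "page2.jpg", "page1.jpg"] # hypothetical file names
pages.sort { |a, b| compare_numerically a, b }
# => ["page1.jpg", "page2.jpg", "page10.jpg"]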
43 src/util/proxy.cr (new file)
@@ -0,0 +1,43 @@
|
||||
require "http_proxy"
|
||||
|
||||
# Monkey-patch `HTTP::Client` to make it respect the `*_PROXY`
|
||||
# environment variables
|
||||
module HTTP
|
||||
class Client
|
||||
private def self.exec(uri : URI, tls : TLSContext = nil)
|
||||
Logger.debug "Setting proxy"
|
||||
previous_def uri, tls do |client, path|
|
||||
client.set_proxy get_proxy uri
|
||||
yield client, path
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private def get_proxy(uri : URI) : HTTP::Proxy::Client?
|
||||
no_proxy = ENV["no_proxy"]? || ENV["NO_PROXY"]?
|
||||
return if no_proxy &&
|
||||
no_proxy.split(",").any? &.== uri.hostname
|
||||
|
||||
case uri.scheme
|
||||
when "http"
|
||||
env_to_proxy "http_proxy"
|
||||
when "https"
|
||||
env_to_proxy "https_proxy"
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
private def env_to_proxy(key : String) : HTTP::Proxy::Client?
|
||||
val = ENV[key.downcase]? || ENV[key.upcase]?
|
||||
return if val.nil?
|
||||
|
||||
begin
|
||||
uri = URI.parse val
|
||||
HTTP::Proxy::Client.new uri.hostname.not_nil!, uri.port.not_nil!,
|
||||
username: uri.user, password: uri.password
|
||||
rescue
|
||||
nil
|
||||
end
|
||||
end
|
||||
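Since this is a monkey-patch on HTTP::Client, no call-site changes are needed; a hedged illustration with a hypothetical proxy address.

# With e.g. `https_proxy=http://127.0.0.1:3128 no_proxy=localhost` set in the
# environment, outgoing requests are routed through the proxy automatically
# (unless the host is listed in no_proxy):
HTTP::Client.get "https://mangadex.org"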
69 src/util/util.cr (new file)
@@ -0,0 +1,69 @@
|
||||
IMGS_PER_PAGE = 5
|
||||
ENTRIES_IN_HOME_SECTIONS = 8
|
||||
UPLOAD_URL_PREFIX = "/uploads"
|
||||
STATIC_DIRS = ["/css", "/js", "/img", "/favicon.ico"]
|
||||
|
||||
def random_str
|
||||
UUID.random.to_s.gsub "-", ""
|
||||
end
|
||||
|
||||
# Works in all Unix systems. Follows https://github.com/crystal-lang/crystal/
|
||||
# blob/master/src/crystal/system/unix/file_info.cr#L42-L48
|
||||
def ctime(file_path : String) : Time
|
||||
res = LibC.stat(file_path, out stat)
|
||||
raise "Unable to get ctime of file #{file_path}" if res != 0
|
||||
|
||||
{% if flag?(:darwin) %}
|
||||
Time.new stat.st_ctimespec, Time::Location::UTC
|
||||
{% else %}
|
||||
Time.new stat.st_ctim, Time::Location::UTC
|
||||
{% end %}
|
||||
end
|
||||
|
||||
def register_mime_types
|
||||
{
|
||||
".zip" => "application/zip",
|
||||
".rar" => "application/x-rar-compressed",
|
||||
".cbz" => "application/vnd.comicbook+zip",
|
||||
".cbr" => "application/vnd.comicbook-rar",
|
||||
}.each do |k, v|
|
||||
MIME.register k, v
|
||||
end
|
||||
end
|
||||
|
||||
struct Int
|
||||
def or(other : Int)
|
||||
if self == 0
|
||||
other
|
||||
else
|
||||
self
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
struct Nil
|
||||
def or(other : Int)
|
||||
other
|
||||
end
|
||||
end
|
||||
|
||||
macro use_default
|
||||
def self.default : self
|
||||
unless @@default
|
||||
@@default = new
|
||||
end
|
||||
@@default.not_nil!
|
||||
end
|
||||
end
|
||||
|
||||
class String
|
||||
def alphanumeric_underscore?
|
||||
self.chars.all? { |c| c.alphanumeric? || c == '_' }
|
||||
end
|
||||
end
|
||||
|
||||
def env_is_true?(key : String) : Bool
|
||||
val = ENV[key.upcase]? || ENV[key.downcase]?
|
||||
return false unless val
|
||||
val.downcase.in? "1", "true"
|
||||
end
|
||||
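A hedged sketch of the `use_default` singleton macro applied to a hypothetical class.

class MyCache # hypothetical class, not part of Mango
  use_default

  def initialize
    @store = {} of String => String
  end
end

MyCache.default # builds the instance on the first call, then reuses it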
31 src/util/validation.cr (new file)
@@ -0,0 +1,31 @@
|
||||
def validate_username(username)
|
||||
if username.size < 3
|
||||
raise "Username should contain at least 3 characters"
|
||||
end
|
||||
if (username =~ /^[a-zA-Z_][a-zA-Z0-9_\-]*$/).nil?
|
||||
raise "Username can only contain alphanumeric characters, " \
|
||||
"underscores, and hyphens"
|
||||
end
|
||||
end
|
||||
|
||||
def validate_password(password)
|
||||
if password.size < 6
|
||||
raise "Password should contain at least 6 characters"
|
||||
end
|
||||
if (password =~ /^[[:ascii:]]+$/).nil?
|
||||
raise "password should contain ASCII characters only"
|
||||
end
|
||||
end
|
||||
|
||||
def validate_archive(path : String) : Exception?
|
||||
file = nil
|
||||
begin
|
||||
file = ArchiveFile.new path
|
||||
file.check
|
||||
file.close
|
||||
return
|
||||
rescue e
|
||||
file.close unless file.nil?
|
||||
e
|
||||
end
|
||||
end
|
||||
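Both validators raise on invalid input, so callers are expected to rescue and surface the message; a small sketch with placeholder values.

begin
  validate_username "mango_reader" # passes
  validate_password "abc"          # raises: at least 6 characters required
rescue e
  Logger.error e # assumes the Logger used elsewhere in this diff
end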
100 src/util/web.cr (new file)
@@ -0,0 +1,100 @@
|
||||
# Web related helper functions/macros
|
||||
|
||||
macro layout(name)
|
||||
base_url = Config.current.base_url
|
||||
begin
|
||||
is_admin = false
|
||||
if token = env.session.string? "token"
|
||||
is_admin = @context.storage.verify_admin token
|
||||
end
|
||||
page = {{name}}
|
||||
render "src/views/#{{{name}}}.html.ecr", "src/views/layout.html.ecr"
|
||||
rescue e
|
||||
message = e.to_s
|
||||
@context.error message
|
||||
render "src/views/message.html.ecr", "src/views/layout.html.ecr"
|
||||
end
|
||||
end
|
||||
|
||||
macro send_img(env, img)
|
||||
send_file {{env}}, {{img}}.data, {{img}}.mime
|
||||
end
|
||||
|
||||
macro get_username(env)
|
||||
# if the request gets here, it has gone through the auth handler, and
|
||||
# we can be sure that a valid token exists, so we can use not_nil! here
|
||||
token = env.session.string "token"
|
||||
(@context.storage.verify_token token).not_nil!
|
||||
end
|
||||
|
||||
def send_json(env, json)
|
||||
env.response.content_type = "application/json"
|
||||
env.response.print json
|
||||
end
|
||||
|
||||
def send_attachment(env, path)
|
||||
send_file env, path, filename: File.basename(path), disposition: "attachment"
|
||||
end
|
||||
|
||||
def redirect(env, path)
|
||||
base = Config.current.base_url
|
||||
env.redirect File.join base, path
|
||||
end
|
||||
|
||||
def hash_to_query(hash)
|
||||
hash.map { |k, v| "#{k}=#{v}" }.join("&")
|
||||
end
|
||||
|
||||
def request_path_startswith(env, ary)
|
||||
ary.each do |prefix|
|
||||
if env.request.path.starts_with? prefix
|
||||
return true
|
||||
end
|
||||
end
|
||||
false
|
||||
end
|
||||
|
||||
def requesting_static_file(env)
|
||||
request_path_startswith env, STATIC_DIRS
|
||||
end
|
||||
|
||||
macro render_xml(path)
|
||||
base_url = Config.current.base_url
|
||||
send_file env, ECR.render({{path}}).to_slice, "application/xml"
|
||||
end
|
||||
|
||||
macro render_component(filename)
|
||||
render "src/views/components/#{{{filename}}}.html.ecr"
|
||||
end
|
||||
|
||||
macro get_sort_opt
|
||||
sort_method = env.params.query["sort"]?
|
||||
|
||||
if sort_method
|
||||
is_ascending = true
|
||||
|
||||
ascend = env.params.query["ascend"]?
|
||||
if ascend && ascend.to_i? == 0
|
||||
is_ascending = false
|
||||
end
|
||||
|
||||
sort_opt = SortOptions.new sort_method, is_ascending
|
||||
end
|
||||
end
|
||||
|
||||
module HTTP
|
||||
class Client
|
||||
private def self.exec(uri : URI, tls : TLSContext = nil)
|
||||
previous_def uri, tls do |client, path|
|
||||
if client.tls? && env_is_true? "DISABLE_SSL_VERIFICATION"
|
||||
Logger.debug "Disabling SSL verification"
|
||||
client.tls.verify_mode = OpenSSL::SSL::VerifyMode::NONE
|
||||
end
|
||||
Logger.debug "Setting read timeout"
|
||||
client.read_timeout = Config.current.download_timeout_seconds.seconds
|
||||
Logger.debug "Requesting #{uri}"
|
||||
yield client, path
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
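A hedged sketch of the redirect helper; the base URL value is hypothetical.

# With `base_url` configured as "/mango/" (hypothetical), this handler sends
# the client to "/mango/library" rather than the bare "/library":
get "/old-library" do |env|
  redirect env, "/library"
end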
@@ -1,17 +0,0 @@
|
||||
<ul class="uk-list uk-list-large uk-list-divider">
|
||||
<li data-url="/admin/user">User Managerment</li>
|
||||
<li onclick="if(!scanning){scan()}">
|
||||
<span id="scan">Scan Library Files</span>
|
||||
<span id="scan-status" class="uk-align-right">
|
||||
<div uk-spinner hidden></div>
|
||||
<span hidden></span>
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<hr class="uk-divider-icon">
|
||||
<a class="uk-button uk-button-danger" href="/logout">Log Out</a>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script src="/js/admin.js"></script>
|
||||
<% end %>
|
||||
32 src/views/admin.html.ecr (new file)
@@ -0,0 +1,32 @@
|
||||
<ul class="uk-list uk-list-large uk-list-divider" id="root" x-data="{progress : 1.0, generating : false, scanTitles: 0, scanMs: -1, scanning : false}">
|
||||
<li @click="location.href = '<%= base_url %>admin/user'">User Managerment</li>
|
||||
<li :class="{'nopointer' : scanning}" @click="scan()">
|
||||
<span :style="`${scanning ? 'color:grey' : ''}`">Scan Library Files</span>
|
||||
<div class="uk-align-right">
|
||||
<div uk-spinner x-show="scanning"></div>
|
||||
<span x-show="!scanning && scanMs > 0" x-text="`Scan ${scanTitles} titles in ${scanMs}ms`"></span>
|
||||
</div>
|
||||
</li>
|
||||
<li :class="{'nopointer' : generating}" @click="generateThumbnails()">
|
||||
<span :style="`${generating ? 'color:grey' : ''}`">Generate Thumbnails</span>
|
||||
<div class="uk-align-right">
|
||||
<span x-show="generating && progress > 0" x-text="`${(progress * 100).toFixed(2)}%`"></span>
|
||||
</div>
|
||||
</li>
|
||||
<li class="nopointer">
|
||||
<span>Theme</span>
|
||||
<select id="theme-select" class="uk-select uk-align-right uk-width-1-3@m uk-width-1-2">
|
||||
<option>Dark</option>
|
||||
<option>Light</option>
|
||||
<option>System</option>
|
||||
</select>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<hr class="uk-divider-icon">
|
||||
<p class="uk-text-meta">Version: v<%= MANGO_VERSION %></p>
|
||||
<a class="uk-button uk-button-danger" href="<%= base_url %>logout">Log Out</a>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script src="<%= base_url %>js/admin.js"></script>
|
||||
<% end %>
|
||||
14 src/views/api.html.ecr (new file)
@@ -0,0 +1,14 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="X-UA-Compatible" content="IE=edge">
|
||||
<title>Mango API Documentation</title>
|
||||
<meta name="description" content="Mango - Manga Server and Web Reader">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
</head>
|
||||
<body>
|
||||
<redoc spec-url="/openapi.json"></redoc>
|
||||
<script src="https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
87 src/views/components/card.html.ecr (new file)
@@ -0,0 +1,87 @@
|
||||
<% if item.is_a? NamedTuple(entry: Entry, percentage: Float64, grouped_count: Int32) %>
|
||||
<% grouped_count = item[:grouped_count] %>
|
||||
<% if grouped_count == 1 %>
|
||||
<% item = item[:entry] %>
|
||||
<% else %>
|
||||
<% item = item[:entry].book %>
|
||||
<% end %>
|
||||
<% else %>
|
||||
<% grouped_count = 1 %>
|
||||
<% end %>
|
||||
|
||||
<div class="item"
|
||||
<% if item.is_a? Entry %>
|
||||
id="<%= item.id %>"
|
||||
<% end %>>
|
||||
|
||||
<div class="acard
|
||||
<% if item.is_a? Entry && item.err_msg.nil? %>
|
||||
<%= "is_entry" %>
|
||||
<% end %>
|
||||
"
|
||||
<% if item.is_a? Entry %>
|
||||
<% if item.err_msg %>
|
||||
onclick="location='<%= base_url %>reader/<%= item.book.id %>/<%= item.id %>'"
|
||||
<% else %>
|
||||
data-encoded-path="<%= item.encoded_path %>"
|
||||
data-pages="<%= item.pages %>"
|
||||
data-progress="<%= (progress * 100).round(1) %>"
|
||||
data-encoded-book-title="<%= item.book.encoded_display_name %>"
|
||||
data-encoded-title="<%= item.encoded_display_name %>"
|
||||
data-book-id="<%= item.book.id %>"
|
||||
data-id="<%= item.id %>"
|
||||
<% end %>
|
||||
<% else %>
|
||||
onclick="location='<%= base_url %>book/<%= item.id %>'"
|
||||
<% end %>>
|
||||
|
||||
<div class="uk-card uk-card-default" x-data="{selected: false, hover: false, disabled: true, selecting: false}" :class="{selected: selected}" @count.window="selecting = $event.detail.count > 0"
|
||||
<% if page == "title" && item.is_a?(Entry) && item.err_msg.nil? %>
|
||||
x-init="disabled = false"
|
||||
<% end %>>
|
||||
<div class="uk-card-media-top uk-inline" @mouseenter="hover = true" @mouseleave="hover = false">
|
||||
<img data-src="<%= item.cover_url %>" width="100%" height="100%" alt="" uk-img
|
||||
<% if item.is_a? Entry && item.err_msg %>
|
||||
class="grayscale"
|
||||
<% end %>>
|
||||
<div class="uk-overlay-primary uk-position-cover" x-show="!disabled && (selected || hover)">
|
||||
<div class="uk-height-1-1 uk-width-1-1" x-show="selecting" @click.stop="selected = !selected; $dispatch(selected ? 'add' : 'remove')"></div>
|
||||
<div class="uk-position-center">
|
||||
<span class="fas fa-check-circle fa-3x" @click.stop="selected = !selected; $dispatch(selected ? 'add' : 'remove')" :style="`color:${selected && 'orange'};`"></span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-card-body">
|
||||
<% unless progress < 0 || progress > 100 || progress.nan? %>
|
||||
<div class="uk-card-badge label"><%= (progress * 100).round(1) %>%</div>
|
||||
<% end %>
|
||||
|
||||
<h3 class="uk-card-title break-word
|
||||
<% if page == "home" && item.is_a? Entry %>
|
||||
<%= "uk-margin-remove-bottom" %>
|
||||
<% end %>
|
||||
" data-title="<%= HTML.escape(item.display_name) %>"><%= HTML.escape(item.display_name) %>
|
||||
</h3>
|
||||
<% if page == "home" && item.is_a? Entry %>
|
||||
<a class="uk-card-title break-word uk-margin-remove-top uk-text-meta uk-display-inline-block no-modal" data-title="<%= HTML.escape(item.book.display_name) %>" href="<%= base_url %>book/<%= item.book.id %>"><%= HTML.escape(item.book.display_name) %></a>
|
||||
<% end %>
|
||||
<% if item.is_a? Entry %>
|
||||
<% if item.err_msg %>
|
||||
<p class="uk-text-meta uk-margin-remove-bottom">Error <span uk-icon="info"></span></p>
|
||||
<div uk-dropdown><%= item.err_msg %></div>
|
||||
<% else %>
|
||||
<p class="uk-text-meta"><%= item.pages %> pages</p>
|
||||
<% end %>
|
||||
<% end %>
|
||||
<% if item.is_a? Title %>
|
||||
<% if grouped_count == 1 %>
|
||||
<p class="uk-text-meta"><%= item.size %> entries</p>
|
||||
<% else %>
|
||||
<p class="uk-text-meta"><%= grouped_count %> new entries</p>
|
||||
<% end %>
|
||||
<% end %>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
3 src/views/components/dots-scripts.html.ecr (new file)
@@ -0,0 +1,3 @@
<script src="https://cdnjs.cloudflare.com/ajax/libs/jQuery.dotdotdot/4.0.11/dotdotdot.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/protonet-jquery.inview/1.1.2/jquery.inview.min.js"></script>
<script src="<%= base_url %>js/dots.js"></script>
32 src/views/components/entry-modal.html.ecr (new file)
@@ -0,0 +1,32 @@
|
||||
<div id="modal" class="uk-flex-top" uk-modal>
|
||||
<div class="uk-modal-dialog uk-margin-auto-vertical">
|
||||
<button class="uk-modal-close-default" type="button" uk-close></button>
|
||||
<div class="uk-modal-header">
|
||||
<div>
|
||||
<h3 class="uk-modal-title break-word uk-margin-remove-top" id="modal-entry-title"><span></span>
|
||||
|
||||
<% unless page == "home" %>
|
||||
<% if is_admin %>
|
||||
<a id="modal-edit-btn" class="uk-icon-button" uk-icon="icon:pencil"></a>
|
||||
<% end %>
|
||||
<% end %>
|
||||
<a id="modal-download-btn" class="uk-icon-button" uk-icon="icon:download"></a>
|
||||
</h3>
|
||||
</div>
|
||||
<p class="uk-text-meta uk-margin-remove-bottom break-word" id="path-text"></p>
|
||||
<p class="uk-text-meta uk-margin-remove-top" id="pages-text"></p>
|
||||
</div>
|
||||
<div class="uk-modal-body">
|
||||
<p>Read</p>
|
||||
<p uk-margin>
|
||||
<a id="beginning-btn" class="uk-button uk-button-default">From beginning</a>
|
||||
<a id="continue-btn" class="uk-button uk-button-primary"></a>
|
||||
</p>
|
||||
<p>Progress</p>
|
||||
<p uk-margin>
|
||||
<button id="read-btn" class="uk-button uk-button-default">Mark as read (100%)</button>
|
||||
<button id="unread-btn" class="uk-button uk-button-default">Mark as unread (0%)</button>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
18 src/views/components/head.html.ecr (new file)
@@ -0,0 +1,18 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="X-UA-Compatible" content="IE=edge">
|
||||
<title>Mango</title>
|
||||
<meta name="description" content="Mango - Manga Server and Web Reader">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="stylesheet" href="<%= base_url %>css/uikit.css" />
|
||||
<link rel="stylesheet" href="<%= base_url %>css/mango.css" />
|
||||
<link rel="icon" href="<%= base_url %>favicon.ico">
|
||||
|
||||
<script src="https://polyfill.io/v3/polyfill.min.js?features=matchMedia%2Cdefault&flags=gated"></script>
|
||||
<script defer src="<%= base_url %>js/fontawesome.min.js"></script>
|
||||
<script defer src="<%= base_url %>js/solid.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
|
||||
<script type="module" src="https://cdn.jsdelivr.net/gh/alpinejs/alpine@v2.5.0/dist/alpine.min.js"></script>
|
||||
<script nomodule src="https://cdn.jsdelivr.net/gh/alpinejs/alpine@v2.5.0/dist/alpine-ie11.min.js" defer></script>
|
||||
<script src="<%= base_url %>js/common.js"></script>
|
||||
</head>
|
||||
14 src/views/components/sort-form.html.ecr (new file)
@@ -0,0 +1,14 @@
|
||||
<div class="uk-form-horizontal">
|
||||
<select class="uk-select" id="sort-select">
|
||||
<% hash.each do |k, v| %>
|
||||
<option id="<%= k %>-up"
|
||||
<% if sort_opt && k == sort_opt.method.to_s.underscore && sort_opt.ascend %>
|
||||
<%= "selected" %>
|
||||
<% end %>>â–˛ <%= v %></option>
|
||||
<option id="<%= k %>-down"
|
||||
<% if sort_opt && k == sort_opt.method.to_s.underscore && !sort_opt.ascend %>
|
||||
<%= "selected" %>
|
||||
<% end %>>â–Ľ <%= v %></option>
|
||||
<% end %>
|
||||
</select>
|
||||
</div>
|
||||
69 src/views/download-manager.html.ecr (new file)
@@ -0,0 +1,69 @@
|
||||
<div id="root" x-data="{jobs: [], paused: undefined, loading: false, toggling: false}" x-init="load()">
|
||||
<div class="uk-margin">
|
||||
<button class="uk-button uk-button-default" @click="jobAction('delete')">Delete Completed Tasks</button>
|
||||
<button class="uk-button uk-button-default" @click="jobAction('retry')">Retry Failed Tasks</button>
|
||||
<button class="uk-button uk-button-default" @click="load()" :disabled="loading">Refresh Queue</button>
|
||||
<button class="uk-button uk-button-default" x-show="paused !== undefined" x-text="paused ? 'Resume Download' : 'Pause Download'" @click="toggle()" :disabled="toggling"></button>
|
||||
</div>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Chapter</th>
|
||||
<th>Manga</th>
|
||||
<th>Progress</th>
|
||||
<th>Time</th>
|
||||
<th>Status</th>
|
||||
<th>Plugin</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<template x-for="job in jobs" :key="job">
|
||||
<tr :id="`chapter-${job.id}`">
|
||||
|
||||
<template x-if="job.plugin_id">
|
||||
<td x-text="job.title"></td>
|
||||
</template>
|
||||
<template x-if="!job.plugin_id">
|
||||
<td><a :href="`${'<%= mangadex_base_url %>'.replace(/\/$/, '')}/chapter/${job.id}`" x-text="job.title"></td>
|
||||
</template>
|
||||
|
||||
<template x-if="job.plugin_id">
|
||||
<td x-text="job.manga_title"></td>
|
||||
</template>
|
||||
<template x-if="!job.plugin_id">
|
||||
<td><a :href="`${'<%= mangadex_base_url %>'.replace(/\/$/, '')}/manga/${job.manga_id}`" x-text="job.manga_title"></td>
|
||||
</template>
|
||||
|
||||
<td x-text="`${job.success_count}/${job.pages}`"></td>
|
||||
<td x-text="`${moment(job.time).fromNow()}`"></td>
|
||||
|
||||
<td>
|
||||
<span :class="statusClass(job.status)" x-text="job.status"></span>
|
||||
<template x-if="job.status_message.length > 0">
|
||||
<div class="uk-inline">
|
||||
<span uk-icon="info"></span>
|
||||
<div uk-dropdown x-text="job.status_message"></div>
|
||||
</div>
|
||||
</template>
|
||||
</td>
|
||||
|
||||
<td x-text="`${job.plugin_id || ''}`"></td>
|
||||
|
||||
<td>
|
||||
<a :onclick="`jobAction('delete', '${job.id}')`" uk-icon="trash"></a>
|
||||
<template x-if="job.status_message.length > 0">
|
||||
<a :onclick="`jobAction('retry', '${job.id}')`" uk-icon="refresh"></a>
|
||||
</template>
|
||||
</td>
|
||||
</tr>
|
||||
</template>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/download-manager.js"></script>
|
||||
<% end %>
|
||||
83 src/views/download.html.ecr (new file)
@@ -0,0 +1,83 @@
|
||||
<h2 class=uk-title>Download from MangaDex</h2>
|
||||
<div class="uk-grid-small" uk-grid>
|
||||
<div class="uk-width-3-4">
|
||||
<input id="search-input" class="uk-input" type="text" placeholder="MangaDex manga ID or URL">
|
||||
</div>
|
||||
<div class="uk-width-1-4">
|
||||
<div id="spinner" uk-spinner class="uk-align-center" hidden></div>
|
||||
<button id="search-btn" class="uk-button uk-button-default" onclick="search()">Search</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class"uk-grid-small" uk-grid hidden id="manga-details">
|
||||
<div class="uk-width-1-4@s">
|
||||
<img id="cover">
|
||||
</div>
|
||||
<div class="uk-width-1-4@s">
|
||||
<p id="title"></p>
|
||||
<p id="artist"></p>
|
||||
<p id="author"></p>
|
||||
</div>
|
||||
<div id="filter-form" class="uk-form-stacked uk-width-1-2@s" hidden>
|
||||
<p class="uk-text-lead uk-margin-remove-bottom">Filter Chapters</p>
|
||||
<p class="uk-text-meta uk-margin-remove-top" id="count-text"></p>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="lang-select">Language</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" id="lang-select">
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="group-select">Group</label>
|
||||
<div class="uk-form-controls">
|
||||
<select class="uk-select filter-field" id="group-select">
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="volume-range">Volume</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" id="volume-range" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty.">
|
||||
</div>
|
||||
</div>
|
||||
<div class="uk-margin">
|
||||
<label class="uk-form-label" for="chapter-range">Chapter</label>
|
||||
<div class="uk-form-controls">
|
||||
<input class="uk-input filter-field" type="text" id="chapter-range" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty.">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="selection-controls" class="uk-margin" hidden>
|
||||
<div class="uk-margin">
|
||||
<button class="uk-button uk-button-default" onclick="selectAll()">Select All</button>
|
||||
<button class="uk-button uk-button-default" onclick="unselect()">Clear Selections</button>
|
||||
<button class="uk-button uk-button-primary" id="download-btn" onclick="download()">Download Selected</button>
|
||||
<div id="download-spinner" uk-spinner class="uk-margin-left" hidden></div>
|
||||
</div>
|
||||
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
|
||||
</div>
|
||||
<p id="filter-notification" hidden></p>
|
||||
<table class="uk-table uk-table-striped uk-overflow-auto" hidden>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>Title</th>
|
||||
<th>Language</th>
|
||||
<th>Group</th>
|
||||
<th>Volume</th>
|
||||
<th>Chapter</th>
|
||||
<th>Timestamp</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
|
||||
<% content_for "script" do %>
|
||||
<script>
|
||||
var baseURL = "<%= mangadex_base_url %>".replace(/\/$/, "");
|
||||
</script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.24.0/moment.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
|
||||
<script src="<%= base_url %>js/alert.js"></script>
|
||||
<script src="<%= base_url %>js/download.js"></script>
|
||||
<% end %>
|
||||
83 src/views/home.html.ecr (new file)
@@ -0,0 +1,83 @@
<%- if new_user && empty_library -%>

<div class="uk-container uk-text-center">
<i class="fas fa-plus" style="font-size: 80px;"></i>
<h2>Add your first manga</h2>
<p style="margin-bottom: 40px;">We can't find any files yet. Add some to your library and they'll appear here.</p>
<dl class="uk-description-list">
<dt style="font-weight: 500;">Current library path</dt>
<dd><code><%= Config.current.library_path %></code></dd>
<dt style="font-weight: 500;">Want to change your library path?</dt>
<dd>Update <code>config.yml</code> located at: <code><%= Config.current.path %></code></dd>
<dt style="font-weight: 500;">Can't see your files yet?</dt>
<dd>
You must wait <%= Config.current.scan_interval_minutes %> minutes for the library scan to complete
<% if is_admin %>
, or manually re-scan from <a href="<%= base_url %>admin">Admin</a>
<% end %>.
</dd>
</dl>
</div>

<%- elsif new_user && empty_library == false -%>

<div class="uk-container uk-text-center">
<i class="fas fa-book-open" style="font-size: 80px;"></i>
<h2>Read your first manga</h2>
<p>Once you start reading, Mango will remember where you left off
and show your entries here.</p>
<a href="<%= base_url %>library" class="uk-button uk-button-default">View library</a>
</div>

<%- elsif new_user == false && empty_library == false -%>

<%- if continue_reading.empty? && recently_added.empty? -%>
<div class="uk-container uk-text-center">
<img src="<%= base_url %>img/banner.png" style="max-width: 400px; padding: 0 20px;">
<p>A self-hosted manga server and reader</p>
<a href="<%= base_url %>library" class="uk-button uk-button-default">View library</a>
</div>
<%- end -%>

<%- unless continue_reading.empty? -%>
<h2 class="uk-title home-headings">Continue Reading</h2>
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
<%- continue_reading.each do |cr| -%>
<% item = cr[:entry] %>
<% progress = cr[:percentage] %>
<%= render_component "card" %>
<%- end -%>
</div>
<%- end -%>

<%- unless start_reading.empty? -%>
<h2 class="uk-title home-headings">Start Reading</h2>
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
<%- start_reading.each do |t| -%>
<% item = t %>
<% progress = 0.0 %>
<%= render_component "card" %>
<%- end -%>
</div>
<%- end -%>

<%- unless recently_added.empty? -%>
<h2 class="uk-title home-headings">Recently Added</h2>
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
<%- recently_added.each do |ra| -%>
<% item = ra %>
<% progress = ra[:percentage] %>
<%= render_component "card" %>
<%- end -%>
</div>
<%- end -%>

<%= render_component "entry-modal" %>

<%- end -%>

<% content_for "script" do %>
<%= render_component "dots-scripts" %>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/title.js"></script>
<% end %>
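For context on the template above: continue_reading and recently_added are read with cr[:entry] and cr[:percentage] style lookups before each "card" is rendered, so every element behaves like a named tuple pairing an entry with a reading percentage. The Crystal fragment below is a hypothetical stand-in showing only the shape the view expects; Entry here is a placeholder, not Mango's actual class.

# Hypothetical illustration of the data shape home.html.ecr iterates over.
record Entry, title : String # stand-in for Mango's real entry type

continue_reading = [
  {entry: Entry.new("Vol. 1 Ch. 1"), percentage: 42.5},
  {entry: Entry.new("Vol. 1 Ch. 2"), percentage: 0.0},
]

continue_reading.each do |cr|
  item = cr[:entry]          # what the template assigns before rendering "card"
  progress = cr[:percentage]
  puts "#{item.title}: #{progress}%"
end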
@@ -1,30 +0,0 @@
<h2 class=uk-title>Library</h2>
<p class="uk-text-meta"><%= titles.size %> titles found</p>
<div class="uk-margin">
<form class="uk-search uk-search-default">
<span uk-search-icon></span>
<input class="uk-search-input" type="search" placeholder="Search">
</form>
</div>
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
<%- titles.each_with_index do |t, i| -%>
<div class="item">
<a class="acard" href="/book/<%= t.id %>">
<div class="uk-card uk-card-default">
<div class="uk-card-media-top">
<img src="<%= t.entries[0].cover_url %>" alt="">
</div>
<div class="uk-card-body">
<div class="uk-card-badge uk-label"><%= (percentage[i] * 100).round(1) %>%</div>
<h3 class="uk-card-title"><%= t.title %></h3>
<p><%= t.entries.size %> entries</p>
</div>
</div>
</a>
</div>
<%- end -%>
</div>

<% content_for "script" do %>
<script src="/js/search.js"></script>
<% end %>
@@ -1,60 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="X-UA-Compatible" content="IE=edge">
<title>Mango</title>
<meta name="description" content="Mango Manga Server">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/uikit@3.3.1/dist/css/uikit.min.css" />
<link rel="stylesheet" href="/css/mango.css" />
</head>

<body>
<div class="uk-offcanvas-content">
<div class="uk-navbar-container uk-navbar-transparent" uk-navbar="uk-navbar">
<div id="mobile-nav" uk-offcanvas="overlay: true">
<div class="uk-offcanvas-bar uk-flex uk-flex-column">
<ul class="uk-nav uk-nav-primary uk-nav-center uk-margin-auto-vertical">
<li><a href="/">Home</a></li>
<li><a href="/admin">Admin</a></li>
<hr uk-divider>
<li><a href="/logout">Logout</a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="uk-position-top">
<div class="uk-navbar-container uk-navbar-transparent" uk-navbar="uk-navbar">
<div class="uk-navbar-left uk-hidden@s">
<div class="uk-navbar-toggle" uk-navbar-toggle-icon="uk-navbar-toggle-icon" uk-toggle="target: #mobile-nav"></div>
</div>
<div class="uk-navbar-left uk-visible@s">
<a class="uk-navbar-item uk-logo" href="/"><img src="/img/icon.png"></a>
<ul class="uk-navbar-nav">
<li><a href="/">Home</a></li>
<li><a href="/admin">Admin</a></li>
</ul>
</div>
<div class="uk-navbar-right uk-visible@s">
<ul class="uk-navbar-nav">
<li><a href="/logout">Logout</a></li>
</ul>
</div>
</div>
</div>
<div class="uk-section uk-section-default uk-section-small">
</div>
<div class="uk-section uk-section-default uk-section-small">
<div class="uk-container uk-container-small">
<%= content %>
</div>
</div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/uikit@3.3.1/dist/js/uikit.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/uikit@3.3.1/dist/js/uikit-icons.min.js"></script>

<%= yield_content "script" %>
</body>
</html>
89 src/views/layout.html.ecr Normal file
@@ -0,0 +1,89 @@
<!DOCTYPE html>
<html>

<%= render_component "head" %>

<body>
<div class="uk-offcanvas-content">
<div class="uk-navbar-container uk-navbar-transparent" uk-navbar="uk-navbar">
<div id="mobile-nav" uk-offcanvas="overlay: true">
<div class="uk-offcanvas-bar uk-flex uk-flex-column">
<ul class="uk-nav-parent-icon uk-nav-primary uk-nav-center uk-margin-auto-vertical" uk-nav>
<li><a href="<%= base_url %>">Home</a></li>
<li><a href="<%= base_url %>library">Library</a></li>
<% if is_admin %>
<li><a href="<%= base_url %>admin">Admin</a></li>
<li class="uk-parent">
<a href="#">Download</a>
<ul class="uk-nav-sub">
<li><a href="<%= base_url %>download">MangaDex</a></li>
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
</ul>
</li>
<% end %>
<hr uk-divider>
<li><a onclick="toggleTheme()"><i class="fas fa-adjust"></i></a></li>
<li><a href="<%= base_url %>logout">Logout</a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="uk-position-top">
<div class="uk-navbar-container uk-navbar-transparent" uk-navbar="uk-navbar">
<div class="uk-navbar-left uk-hidden@s">
<div class="uk-navbar-toggle" uk-navbar-toggle-icon="uk-navbar-toggle-icon" uk-toggle="target: #mobile-nav"></div>
</div>
<div class="uk-navbar-left uk-visible@s">
<a class="uk-navbar-item uk-logo" href="<%= base_url %>"><img src="<%= base_url %>img/icon.png"></a>
<ul class="uk-navbar-nav">
<li><a href="<%= base_url %>">Home</a></li>
<li><a href="<%= base_url %>library">Library</a></li>
<% if is_admin %>
<li><a href="<%= base_url %>admin">Admin</a></li>
<li>
<a href="#">Download</a>
<div class="uk-navbar-dropdown">
<ul class="uk-nav uk-navbar-dropdown-nav">
<li class="uk-nav-header">Source</li>
<li><a href="<%= base_url %>download">MangaDex</a></li>
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
<li class="uk-nav-divider"></li>
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
</ul>
</div>
</li>
<% end %>
</ul>
</div>
<div class="uk-navbar-right uk-visible@s">
<ul class="uk-navbar-nav">
<li><a onclick="toggleTheme()"><i class="fas fa-adjust"></i></a></li>
<li><a href="<%= base_url %>logout">Logout</a></li>
</ul>
</div>
</div>
</div>
<div class="uk-section uk-section-small">
</div>
<div class="uk-section uk-section-small" id="main-section">
<div class="uk-container uk-container-small">
<div id="alert"></div>
<%= content %>
<div class="uk-visible@m" id="totop-wrapper" x-data="{}" x-show="$('body').height() > 1.5 * $(window).height()">
<a href="#" uk-totop uk-scroll></a>
</div>
</div>
</div>
<script>
setTheme();
const base_url = "<%= base_url %>";
</script>
<script src="<%= base_url %>js/uikit.min.js"></script>
<script src="<%= base_url %>js/uikit-icons.min.js"></script>

<%= yield_content "script" %>
</body>

</html>
30 src/views/library.html.ecr Normal file
@@ -0,0 +1,30 @@
<h2 class=uk-title>Library</h2>
<p class="uk-text-meta"><%= titles.size %> titles found</p>
<div class="uk-grid-small" uk-grid>
<div class="uk-margin-bottom uk-width-3-4@s">
<form class="uk-search uk-search-default">
<span uk-search-icon></span>
<input class="uk-search-input" type="search" placeholder="Search">
</form>
</div>
<div class="uk-margin-bottom uk-width-1-4@s">
<% hash = {
"auto" => "Auto",
"time_modified" => "Date Modified",
"progress" => "Progress"
} %>
<%= render_component "sort-form" %>
</div>
</div>
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
<% titles.each_with_index do |item, i| %>
<% progress = percentage[i] %>
<%= render_component "card" %>
<% end %>
</div>

<% content_for "script" do %>
<%= render_component "dots-scripts" %>
<script src="<%= base_url %>js/search.js"></script>
<script src="<%= base_url %>js/sort-items.js"></script>
<% end %>
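The hash set just before render_component "sort-form" above maps sort keys to the labels shown in the sort dropdown; the actual sorting presumably runs through the sort-form component and sort-items.js. The Crystal sketch below is only a hypothetical illustration of how a selected key could drive sorting; the record and method names are stand-ins, not Mango's API.

# Hypothetical sketch of sorting titles by one of the keys from the hash above.
record LibraryTitle, title : String, mtime : Time, progress : Float64

def sort_titles(titles : Array(LibraryTitle), method : String)
  case method
  when "time_modified" then titles.sort_by &.mtime
  when "progress"      then titles.sort_by &.progress
  else                      titles.sort_by &.title # "auto" falls back to name in this sketch
  end
end

titles = [
  LibraryTitle.new("B", Time.utc(2020, 1, 2), 0.5),
  LibraryTitle.new("A", Time.utc(2020, 1, 1), 1.0),
]
puts sort_titles(titles, "progress").map(&.title) # prints ["B", "A"] (ascending progress)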
Some files were not shown because too many files have changed in this diff.