Compare commits


1 Commit

Author      SHA1        Message                         Date
Leeingnyo   5a17ca07d8  Use yaml-static in Dockerfile   2021-08-18 19:42:32 +09:00
39 changed files with 1541 additions and 1088 deletions


@@ -104,15 +104,6 @@
"contributions": [
"infra"
]
},
{
"login": "lincolnthedev",
"name": "i use arch btw",
"avatar_url": "https://avatars.githubusercontent.com/u/41193328?v=4",
"profile": "https://lncn.dev",
"contributions": [
"infra"
]
}
],
"contributorsPerLine": 7,


@@ -1,9 +1,2 @@
node_modules
lib
Dockerfile
Dockerfile.arm32v7
Dockerfile.arm64v8
README.md
.all-contributorsrc
env.example
.github/


@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
container:
image: crystallang/crystal:1.0.0-alpine
image: crystallang/crystal:0.36.1-alpine
steps:
- uses: actions/checkout@v2


@@ -1,4 +1,4 @@
FROM crystallang/crystal:1.0.0-alpine AS builder
FROM crystallang/crystal:0.36.1-alpine AS builder
WORKDIR /Mango


@@ -2,10 +2,10 @@ FROM arm32v7/ubuntu:18.04
RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 1.0.0 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.8 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v1.0.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.5.0 && make && cd ..
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.36.1 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.2.0 && make && cd ..
COPY mango-arm32v7.o .


@@ -2,10 +2,10 @@ FROM arm64v8/ubuntu:18.04
RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev libjpeg-turbo8-dev libpng-dev libtiff-dev
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 1.0.0 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.8 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v1.0.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.5.0 && make && cd ..
RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.36.1 && make deps && cd ..
RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && git checkout v1.5.0 && make && cd ..
RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && git checkout v0.20.0 && make && cd ..
RUN git clone https://github.com/hkalexling/image_size.cr && cd image_size.cr && git checkout v0.2.0 && make && cd ..
COPY mango-arm64v8.o .


@@ -2,7 +2,7 @@
# Mango
[![Patreon](https://img.shields.io/badge/support-patreon-brightgreen?link=https://www.patreon.com/hkalexling)](https://www.patreon.com/hkalexling) ![Build](https://github.com/hkalexling/Mango/workflows/Build/badge.svg) [![Gitter](https://badges.gitter.im/mango-cr/mango.svg)](https://gitter.im/mango-cr/mango?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![Discord](https://img.shields.io/discord/855633663425118228?label=discord)](http://discord.com/invite/ezKtacCp9Q)
[![Patreon](https://img.shields.io/badge/support-patreon-brightgreen?link=https://www.patreon.com/hkalexling)](https://www.patreon.com/hkalexling) ![Build](https://github.com/hkalexling/Mango/workflows/Build/badge.svg) [![Gitter](https://badges.gitter.im/mango-cr/mango.svg)](https://gitter.im/mango-cr/mango?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
Mango is a self-hosted manga server and reader. Its features include
@@ -13,6 +13,7 @@ Mango is a self-hosted manga server and reader. Its features include
- Supports nested folders in library
- Automatically stores reading progress
- Thumbnail generation
- Built-in [MangaDex](https://mangadex.org/) downloader
- Supports [plugins](https://github.com/hkalexling/mango-plugins) to download from third-party sites
- The web reader is responsive and works well on mobile, so there is no need for a mobile app
- All the static files are embedded in the binary, so the deployment process is easy and painless
@@ -51,7 +52,7 @@ The official docker images are available on [Dockerhub](https://hub.docker.com/r
### CLI
```
Mango - Manga Server and Web Reader. Version 0.24.0
Mango - Manga Server and Web Reader. Version 0.22.0
Usage:
@@ -86,10 +87,7 @@ log_level: info
upload_path: ~/mango/uploads
plugin_path: ~/mango/plugins
download_timeout_seconds: 30
library_cache_path: ~/mango/library.yml.gz
cache_enabled: false
cache_size_mbs: 50
cache_log_enabled: true
page_margin: 30
disable_login: false
default_username: ""
auth_proxy_header_name: ""
@@ -101,12 +99,12 @@ mangadex:
download_queue_db_path: ~/mango/queue.db
chapter_rename_rule: '[Vol.{volume} ][Ch.{chapter} ]{title|id}'
manga_rename_rule: '{title}'
subscription_update_interval_hours: 24
```
- `scan_interval_minutes`, `thumbnail_generation_interval_hours` and `db_optimization_interval_hours` can be any non-negative integer. Setting them to `0` disables the periodic tasks
- `log_level` can be `debug`, `info`, `warn`, `error`, `fatal` or `off`. Setting it to `off` disables the logging
- You can disable authentication by setting `disable_login` to true. Note that `default_username` must be set to an existing username for this to work.
- By setting `cache_enabled` to `true`, you can enable an experimental feature where Mango caches library metadata to improve page load time. You can further fine-tune the feature with `cache_size_mbs` and `cache_log_enabled`.
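
The "setting an interval to `0` disables the periodic task" convention above can be illustrated with a minimal, self-contained Crystal sketch (not Mango's actual implementation):

```crystal
# A zero or negative interval runs the task once and never schedules it again.
def start_periodic_task(interval_minutes : Int32, &task : ->)
  if interval_minutes < 1
    task.call # run once; no periodic re-run
  else
    spawn do
      loop do
        task.call
        sleep interval_minutes.minutes
      end
    end
  end
end

start_periodic_task(0) { puts "scanning library" } # runs exactly once
```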
### Library Structure
@@ -178,7 +176,6 @@ Please check the [development guideline](https://github.com/hkalexling/Mango/wik
<td align="center"><a href="https://github.com/Leeingnyo"><img src="https://avatars0.githubusercontent.com/u/6760150?v=4?s=100" width="100px;" alt=""/><br /><sub><b>이인용</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=Leeingnyo" title="Code">💻</a></td>
<td align="center"><a href="http://h45h74x.eu.org"><img src="https://avatars1.githubusercontent.com/u/27204033?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Simon</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=h45h74x" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/davidkna"><img src="https://avatars.githubusercontent.com/u/835177?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Knaack</b></sub></a><br /><a href="#infra-davidkna" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
<td align="center"><a href="https://lncn.dev"><img src="https://avatars.githubusercontent.com/u/41193328?v=4?s=100" width="100px;" alt=""/><br /><sub><b>i use arch btw</b></sub></a><br /><a href="#infra-lincolnthedev" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
</tr>
</table>


@@ -0,0 +1,31 @@
class CreateSubscription < MG::Base
def up : String
# We allow multiple subscriptions for the same manga.
# This can be useful, for example, when you want to download from multiple
# groups.
<<-SQL
CREATE TABLE subscription (
id INTEGER PRIMARY KEY,
manga_id INTEGER NOT NULL,
language TEXT,
group_id INTEGER,
min_volume INTEGER,
max_volume INTEGER,
min_chapter INTEGER,
max_chapter INTEGER,
last_checked INTEGER NOT NULL,
created_at INTEGER NOT NULL,
username TEXT NOT NULL,
FOREIGN KEY (username) REFERENCES users (username)
ON UPDATE CASCADE
ON DELETE CASCADE
);
SQL
end
def down : String
<<-SQL
DROP TABLE subscription;
SQL
end
end
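
As the comment in the migration above notes, the schema deliberately allows several subscriptions for the same manga. Here is a hedged sketch of what that looks like through crystal-db (the same API used elsewhere in this diff); the database path, IDs, and the `admin` user are illustrative assumptions:

```crystal
# Two subscriptions for the same manga, e.g. one per scanlation group.
require "db"
require "sqlite3"

DB.open "sqlite3://./queue.db" do |db|
  now = Time.utc.to_unix
  [5, 9].each do |group_id|
    db.exec "INSERT INTO subscription " \
            "(manga_id, group_id, language, last_checked, created_at, username) " \
            "VALUES (?, ?, ?, ?, ?, ?)",
      123, group_id, "gb", now, now, "admin"
  end
end
```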

public/js/download.js (new file, 379 lines)

@@ -0,0 +1,379 @@
const downloadComponent = () => {
return {
chaptersLimit: 1000,
loading: false,
addingToDownload: false,
searchAvailable: false,
searchInput: '',
data: {},
chapters: [],
mangaAry: undefined, // undefined: not searching; []: searched but no result
candidateManga: {},
langChoice: 'All',
groupChoice: 'All',
chapterRange: '',
volumeRange: '',
get languages() {
const set = new Set();
if (this.data.chapters) {
this.data.chapters.forEach(chp => {
set.add(chp.language);
});
}
const ary = [...set].sort();
ary.unshift('All');
return ary;
},
get groups() {
const set = new Set();
if (this.data.chapters) {
this.data.chapters.forEach(chp => {
Object.keys(chp.groups).forEach(g => {
set.add(g);
});
});
}
const ary = [...set].sort();
ary.unshift('All');
return ary;
},
init() {
const tableObserver = new MutationObserver(() => {
console.log('table mutated');
$("#selectable").selectable({
filter: 'tr'
});
});
tableObserver.observe($('table').get(0), {
childList: true,
subtree: true
});
$.getJSON(`${base_url}api/admin/mangadex/expires`)
.done((data) => {
if (data.error) {
alert('danger', 'Failed to check MangaDex integration status. Error: ' + data.error);
return;
}
if (data.expires && data.expires > Math.floor(Date.now() / 1000))
this.searchAvailable = true;
})
.fail((jqXHR, status) => {
alert('danger', `Failed to check MangaDex integration status. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
},
filtersUpdated() {
if (!this.data.chapters)
this.chapters = [];
const filters = {
chapter: this.parseRange(this.chapterRange),
volume: this.parseRange(this.volumeRange),
lang: this.langChoice,
group: this.groupChoice
};
console.log('filters:', filters);
let _chapters = this.data.chapters.slice();
Object.entries(filters).forEach(([k, v]) => {
if (v === 'All') return;
if (k === 'group') {
_chapters = _chapters.filter(c => {
const unescaped_groups = Object.entries(c.groups).map(([g, id]) => this.unescapeHTML(g));
return unescaped_groups.indexOf(v) >= 0;
});
return;
}
if (k === 'lang') {
_chapters = _chapters.filter(c => c.language === v);
return;
}
const lb = parseFloat(v[0]);
const ub = parseFloat(v[1]);
if (isNaN(lb) && isNaN(ub)) return;
_chapters = _chapters.filter(c => {
const val = parseFloat(c[k]);
if (isNaN(val)) return false;
if (isNaN(lb))
return val <= ub;
else if (isNaN(ub))
return val >= lb;
else
return val >= lb && val <= ub;
});
});
console.log('filtered chapters:', _chapters);
this.chapters = _chapters;
},
search() {
if (this.loading || this.searchInput === '') return;
this.data = {};
this.mangaAry = undefined;
var int_id = -1;
try {
const path = new URL(this.searchInput).pathname;
const match = /\/(?:title|manga)\/([0-9]+)/.exec(path);
int_id = parseInt(match[1]);
} catch (e) {
int_id = parseInt(this.searchInput);
}
if (!isNaN(int_id) && int_id > 0) {
// The input is a positive integer. We treat it as an ID.
this.loading = true;
$.getJSON(`${base_url}api/admin/mangadex/manga/${int_id}`)
.done((data) => {
if (data.error) {
alert('danger', 'Failed to get manga info. Error: ' + data.error);
return;
}
this.data = data;
this.chapters = data.chapters;
this.mangaAry = undefined;
})
.fail((jqXHR, status) => {
alert('danger', `Failed to get manga info. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
.always(() => {
this.loading = false;
});
} else {
if (!this.searchAvailable) {
alert('danger', 'Please make sure you are using a valid manga ID or manga URL from MangaDex. If you are trying to search MangaDex with a search term, please log in to MangaDex first by going to "Admin -> Connect to MangaDex".');
return;
}
// Search as a search term
this.loading = true;
$.getJSON(`${base_url}api/admin/mangadex/search?${$.param({
query: this.searchInput
})}`)
.done((data) => {
if (data.error) {
alert('danger', `Failed to search MangaDex. Error: ${data.error}`);
return;
}
this.mangaAry = data.manga;
this.data = {};
})
.fail((jqXHR, status) => {
alert('danger', `Failed to search MangaDex. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
.always(() => {
this.loading = false;
});
}
},
parseRange(str) {
const regex = /^[\t ]*(?:(?:(<|<=|>|>=)[\t ]*([0-9]+))|(?:([0-9]+))|(?:([0-9]+)[\t ]*-[\t ]*([0-9]+))|(?:[\t ]*))[\t ]*$/m;
const matches = str.match(regex);
var num;
if (!matches) {
return [null, null];
} else if (typeof matches[1] !== 'undefined' && typeof matches[2] !== 'undefined') {
// e.g., <= 30
num = parseInt(matches[2]);
if (isNaN(num)) {
return [null, null];
}
switch (matches[1]) {
case '<':
return [null, num - 1];
case '<=':
return [null, num];
case '>':
return [num + 1, null];
case '>=':
return [num, null];
}
} else if (typeof matches[3] !== 'undefined') {
// a single number
num = parseInt(matches[3]);
if (isNaN(num)) {
return [null, null];
}
return [num, num];
} else if (typeof matches[4] !== 'undefined' && typeof matches[5] !== 'undefined') {
// e.g., 10 - 23
num = parseInt(matches[4]);
const n2 = parseInt(matches[5]);
if (isNaN(num) || isNaN(n2) || num > n2) {
return [null, null];
}
return [num, n2];
} else {
// empty or space only
return [null, null];
}
},
unescapeHTML(str) {
var elt = document.createElement("span");
elt.innerHTML = str;
return elt.innerText;
},
selectAll() {
$('tbody > tr').each((i, e) => {
$(e).addClass('ui-selected');
});
},
clearSelection() {
$('tbody > tr').each((i, e) => {
$(e).removeClass('ui-selected');
});
},
download() {
const selected = $('tbody > tr.ui-selected');
if (selected.length === 0) return;
UIkit.modal.confirm(`Download ${selected.length} selected chapters?`).then(() => {
const ids = selected.map((i, e) => {
return parseInt($(e).find('td').first().text());
}).get();
const chapters = this.chapters.filter(c => ids.indexOf(c.id) >= 0);
console.log(ids);
this.addingToDownload = true;
$.ajax({
type: 'POST',
url: `${base_url}api/admin/mangadex/download`,
data: JSON.stringify({
chapters: chapters
}),
contentType: "application/json",
dataType: 'json'
})
.done(data => {
console.log(data);
if (data.error) {
alert('danger', `Failed to add chapters to the download queue. Error: ${data.error}`);
return;
}
const successCount = parseInt(data.success);
const failCount = parseInt(data.fail);
alert('success', `${successCount} of ${successCount + failCount} chapters added to the download queue. You can view and manage your download queue on the <a href="${base_url}admin/downloads">download manager page</a>.`);
})
.fail((jqXHR, status) => {
alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
.always(() => {
this.addingToDownload = false;
});
});
},
chooseManga(manga) {
this.candidateManga = manga;
UIkit.modal($('#modal').get(0)).show();
},
confirmManga(id) {
UIkit.modal($('#modal').get(0)).hide();
this.searchInput = id;
this.search();
},
subscribe(langConfirmed = false, groupConfirmed = false) {
const filters = {
manga: this.data.id,
language: this.langChoice === 'All' ? null : this.langChoice,
group: this.groupChoice === 'All' ? null : this.groupChoice,
volume: this.volumeRange === '' ? null : this.volumeRange,
chapter: this.chapterRange === '' ? null : this.chapterRange
};
// Get group ID
if (filters.group) {
this.data.chapters.forEach(chp => {
const gid = chp.groups[filters.group];
if (gid) {
filters.groupId = gid;
return;
}
});
}
// Parse range values
if (filters.volume) {
[filters.volumeMin, filters.volumeMax] = this.parseRange(filters.volume);
}
if (filters.chapter) {
[filters.chapterMin, filters.chapterMax] = this.parseRange(filters.chapter);
}
if (!filters.language && !langConfirmed) {
UIkit.modal.confirm('You didn\'t specify a language in the filtering rules. This might cause Mango to download chapters that are not in your preferred language. Are you sure you want to continue?', {
labels: {
ok: 'Yes',
cancel: 'Cancel'
}
}).then(() => {
this.subscribe(true, groupConfirmed);
});
return;
}
if (!filters.group && !groupConfirmed) {
UIkit.modal.confirm('You didn\'t specify a group in the filtering rules. This might cause Mango to download multiple versions of the same chapter. Are you sure you want to continue?', {
labels: {
ok: 'Yes',
cancel: 'Cancel'
}
}).then(() => {
this.subscribe(langConfirmed, true);
});
return;
}
const mangaURL = `${mangadex_base_url}/manga/${filters.manga}`;
console.log(filters);
UIkit.modal.confirm(`All <strong>FUTURE</strong> chapters matching the following filters will be downloaded:<br>
<ul>
<li>Manga ID: ${filters.manga}</li>
<li>Language: ${filters.language || 'all'}</li>
<li>Group: ${filters.group || 'all'}</li>
<li>Volume: ${filters.volume || 'all'}</li>
<li>Chapter: ${filters.chapter || 'all'}</li>
</ul>
<strong>IMPORTANT:</strong> Please make sure you are following the manga on MangaDex, otherwise Mango won't be able to receive any updates. To follow it, visit <a href="${mangaURL}">${mangaURL}</a> and click "Follow".
`, {
labels: {
ok: 'Confirm',
cancel: 'Cancel'
}
}).then(() => {
$.ajax({
type: 'POST',
url: `${base_url}api/admin/mangadex/subscriptions`,
data: JSON.stringify({
subscription: filters
}),
contentType: "application/json",
dataType: 'json'
})
.done(data => {
console.log(data);
if (data.error) {
alert('danger', `Failed to subscribe. Error: ${data.error}`);
return;
}
alert('success', `You've successfully subscribed to this manga! You can view and manage your subscriptions on the <a href="${base_url}download/subscription">subscription manager page</a>.`);
})
.fail((jqXHR, status) => {
alert('danger', `Failed to subscribe. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
});
});
}
};
};

public/js/mangadex.js (new file, 61 lines)

@@ -0,0 +1,61 @@
const component = () => {
return {
username: '',
password: '',
expires: undefined,
loading: true,
loggingIn: false,
init() {
this.loading = true;
$.ajax({
type: 'GET',
url: `${base_url}api/admin/mangadex/expires`,
contentType: "application/json",
})
.done(data => {
console.log(data);
if (data.error) {
alert('danger', `Failed to retrieve MangaDex token status. Error: ${data.error}`);
return;
}
this.expires = data.expires;
this.loading = false;
})
.fail((jqXHR, status) => {
alert('danger', `Failed to retrieve MangaDex token status. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
});
},
login() {
if (!(this.username && this.password)) return;
this.loggingIn = true;
$.ajax({
type: 'POST',
url: `${base_url}api/admin/mangadex/login`,
contentType: "application/json",
dataType: 'json',
data: JSON.stringify({
username: this.username,
password: this.password
})
})
.done(data => {
console.log(data);
if (data.error) {
alert('danger', `Failed to log in. Error: ${data.error}`);
return;
}
this.expires = data.expires;
})
.fail((jqXHR, status) => {
alert('danger', `Failed to log in. Error: [${jqXHR.status}] ${jqXHR.statusText}`);
})
.always(() => {
this.loggingIn = false;
});
},
get expired() {
return this.expires && moment().diff(moment.unix(this.expires)) > 0;
}
};
};


@@ -6,13 +6,11 @@ const readerComponent = () => {
alertClass: 'uk-alert-primary',
items: [],
curItem: {},
enableFlipAnimation: true,
flipAnimation: null,
longPages: false,
lastSavedPage: page,
selectedIndex: 0, // 0: not selected; 1: the first page
margin: 30,
preloadLookahead: 3,
/**
* Initialize the component by fetching the page dimensions
@@ -54,16 +52,6 @@ const readerComponent = () => {
if (savedMargin) {
this.margin = savedMargin;
}
// Preload Images
this.preloadLookahead = +(localStorage.getItem('preloadLookahead') ?? 3);
const limit = Math.min(page + this.preloadLookahead, this.items.length + 1);
for (let idx = page + 1; idx <= limit; idx++) {
this.preloadImage(this.items[idx - 1].url);
}
const savedFlipAnimation = localStorage.getItem('enableFlipAnimation');
this.enableFlipAnimation = savedFlipAnimation === null || savedFlipAnimation === 'true';
})
.catch(e => {
const errMsg = `Failed to get the page dimensions. ${e}`;
@@ -72,12 +60,6 @@ const readerComponent = () => {
this.msg = errMsg;
})
},
/**
* Preload an image, which is expected to be cached
*/
preloadImage(url) {
(new Image()).src = url;
},
/**
* Handles the `change` event for the page selector
*/
@@ -129,18 +111,12 @@ const readerComponent = () => {
if (newIdx <= 0 || newIdx > this.items.length) return;
if (newIdx + this.preloadLookahead < this.items.length + 1) {
this.preloadImage(this.items[newIdx + this.preloadLookahead - 1].url);
}
this.toPage(newIdx);
if (this.enableFlipAnimation) {
if (isNext)
this.flipAnimation = 'right';
else
this.flipAnimation = 'left';
}
if (isNext)
this.flipAnimation = 'right';
else
this.flipAnimation = 'left';
setTimeout(() => {
this.flipAnimation = null;
@@ -311,14 +287,6 @@ const readerComponent = () => {
marginChanged() {
localStorage.setItem('margin', this.margin);
this.toPage(this.selectedIndex);
},
preloadLookaheadChanged() {
localStorage.setItem('preloadLookahead', this.preloadLookahead);
},
enableFlipAnimationChanged() {
localStorage.setItem('enableFlipAnimation', this.enableFlipAnimation);
},
}
};
}


@@ -2,31 +2,31 @@ version: 2.0
shards:
ameba:
git: https://github.com/crystal-ameba/ameba.git
version: 0.14.3
version: 0.14.0
archive:
git: https://github.com/hkalexling/archive.cr.git
version: 0.5.0
version: 0.4.0
baked_file_system:
git: https://github.com/schovi/baked_file_system.git
version: 0.10.0
version: 0.9.8+git.commit.fb3091b546797fbec3c25dc0e1e2cff60bb9033b
clim:
git: https://github.com/at-grandpa/clim.git
version: 0.17.1
version: 0.12.0
db:
git: https://github.com/crystal-lang/crystal-db.git
version: 0.10.1
version: 0.9.0
duktape:
git: https://github.com/jessedoyle/duktape.cr.git
version: 1.0.0
version: 0.20.0
exception_page:
git: https://github.com/crystal-loot/exception_page.git
version: 0.1.5
version: 0.1.4
http_proxy:
git: https://github.com/mamantoha/http_proxy.git
@@ -34,45 +34,49 @@ shards:
image_size:
git: https://github.com/hkalexling/image_size.cr.git
version: 0.5.0
version: 0.4.0
kemal:
git: https://github.com/kemalcr/kemal.git
version: 1.0.0
version: 0.27.0
kemal-session:
git: https://github.com/kemalcr/kemal-session.git
version: 1.0.0
version: 0.13.0
kilt:
git: https://github.com/jeromegn/kilt.git
version: 0.4.1
version: 0.4.0
koa:
git: https://github.com/hkalexling/koa.git
version: 0.8.0
version: 0.7.0
mangadex:
git: https://github.com/hkalexling/mangadex.git
version: 0.11.0+git.commit.f5b0d64fbb138879fb9228b6e9ff34ec97c3e824
mg:
git: https://github.com/hkalexling/mg.git
version: 0.5.0+git.commit.697e46e27cde8c3969346e228e372db2455a6264
version: 0.3.0+git.commit.a19417abf03eece80039f89569926cff1ce3a1a3
myhtml:
git: https://github.com/kostya/myhtml.git
version: 1.5.8
version: 1.5.1
open_api:
git: https://github.com/hkalexling/open_api.cr.git
version: 1.2.1+git.commit.1d3c55dd5534c6b0af18964d031858a08515553a
git: https://github.com/jreinert/open_api.cr.git
version: 1.2.1+git.commit.95e4df2ca10b1fe88b8b35c62a18b06a10267b6c
radix:
git: https://github.com/luislavena/radix.git
version: 0.4.1
version: 0.3.9
sqlite3:
git: https://github.com/crystal-lang/crystal-sqlite3.git
version: 0.18.0
version: 0.16.0
tallboy:
git: https://github.com/epoch/tallboy.git
version: 0.9.3+git.commit.9be1510bb0391c95e92f1b288f3afb429a73caa6
version: 0.9.3


@@ -1,5 +1,5 @@
name: mango
version: 0.24.0
version: 0.22.0
authors:
- Alex Ling <hkalexling@gmail.com>
@@ -8,7 +8,7 @@ targets:
mango:
main: src/mango.cr
crystal: 1.0.0
crystal: 0.36.1
license: MIT
@@ -21,6 +21,7 @@ dependencies:
github: crystal-lang/crystal-sqlite3
baked_file_system:
github: schovi/baked_file_system
version: 0.9.8+git.commit.fb3091b546797fbec3c25dc0e1e2cff60bb9033b
archive:
github: hkalexling/archive.cr
ameba:
@@ -29,6 +30,7 @@ dependencies:
github: at-grandpa/clim
duktape:
github: jessedoyle/duktape.cr
version: ~> 0.20.0
myhtml:
github: kostya/myhtml
http_proxy:
@@ -39,6 +41,7 @@ dependencies:
github: hkalexling/koa
tallboy:
github: epoch/tallboy
branch: master
mg:
github: hkalexling/mg
mangadex:
github: hkalexling/mangadex


@@ -61,13 +61,3 @@ describe "chapter_sort" do
end.should eq ary
end
end
describe "sanitize_filename" do
it "returns a random string for empty sanitized string" do
sanitize_filename("..").should_not eq sanitize_filename("..")
end
it "sanitizes correctly" do
sanitize_filename(".. \n\v.\rマンゴー/|*()<[1/2] 3.14 hello world ")
.should eq "マンゴー_()[1_2] 3.14 hello world"
end
end


@@ -11,8 +11,6 @@ class Config
property session_secret : String = "mango-session-secret"
property library_path : String = File.expand_path "~/mango/library",
home: true
property library_cache_path = File.expand_path "~/mango/library.yml.gz",
home: true
property db_path : String = File.expand_path "~/mango/mango.db", home: true
property scan_interval_minutes : Int32 = 5
property thumbnail_generation_interval_hours : Int32 = 24
@@ -22,9 +20,6 @@ class Config
property plugin_path : String = File.expand_path "~/mango/plugins",
home: true
property download_timeout_seconds : Int32 = 30
property cache_enabled = false
property cache_size_mbs = 50
property cache_log_enabled = true
property disable_login = false
property default_username = ""
property auth_proxy_header_name = ""
@@ -38,8 +33,10 @@ class Config
"download_retries" => 4,
"download_queue_db_path" => File.expand_path("~/mango/queue.db",
home: true),
"chapter_rename_rule" => "[Vol.{volume} ][Ch.{chapter} ]{title|id}",
"manga_rename_rule" => "{title}",
"chapter_rename_rule" => "[Vol.{volume} ]" \
"[Ch.{chapter} ]{title|id}",
"manga_rename_rule" => "{title}",
"subscription_update_interval_hours" => 24,
}
@@singlet : Config?


@@ -54,9 +54,8 @@ class AuthHandler < Kemal::Handler
end
def call(env)
# Skip all authentication if requesting /login, /logout, /api/login,
# or a static file
if request_path_startswith(env, ["/login", "/logout", "/api/login"]) ||
# Skip all authentication if requesting /login, /logout, or a static file
if request_path_startswith(env, ["/login", "/logout"]) ||
requesting_static_file env
return call_next(env)
end


@@ -1,188 +0,0 @@
require "digest"
require "./entry"
require "./types"
# Base class for an entry in the LRU cache.
# There are two ways to use it:
# 1. Use it as it is by instantiating with the appropriate `SaveT` and
# `ReturnT`. Note that in this case, `SaveT` and `ReturnT` must be the
# same type. That is, the input value will be stored as it is without
# any transformation.
# 2. You can also subclass it and provide custom implementations for
# `to_save_t` and `to_return_t`. This allows you to transform and store
# the input value to a different type. See `SortedEntriesCacheEntry` as
# an example.
private class CacheEntry(SaveT, ReturnT)
getter key : String, atime : Time
@value : SaveT
def initialize(@key : String, value : ReturnT)
@atime = @ctime = Time.utc
@value = self.class.to_save_t value
end
def value
@atime = Time.utc
self.class.to_return_t @value
end
def self.to_save_t(value : ReturnT)
value
end
def self.to_return_t(value : SaveT)
value
end
def instance_size
instance_sizeof(CacheEntry(SaveT, ReturnT)) + # sizeof itself
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
@value.instance_size
end
end
class SortedEntriesCacheEntry < CacheEntry(Array(String), Array(Entry))
def self.to_save_t(value : Array(Entry))
value.map &.id
end
def self.to_return_t(value : Array(String))
ids_to_entries value
end
private def self.ids_to_entries(ids : Array(String))
e_map = Library.default.deep_entries.to_h { |entry| {entry.id, entry} }
entries = [] of Entry
begin
ids.each do |id|
entries << e_map[id]
end
return entries if ids.size == entries.size
rescue
end
end
def instance_size
instance_sizeof(SortedEntriesCacheEntry) + # sizeof itself
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
@value.size * (instance_sizeof(String) + sizeof(String)) +
@value.sum(&.bytesize) # elements in Array(String)
end
def self.gen_key(book_id : String, username : String,
entries : Array(Entry), opt : SortOptions?)
entries_sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
user_context = opt && opt.method == SortMethod::Progress ? username : ""
sig = Digest::SHA1.hexdigest (book_id + entries_sig + user_context +
(opt ? opt.to_tuple.to_s : "nil"))
"#{sig}:sorted_entries"
end
end
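The comment at the top of this file describes two ways to use `CacheEntry`; here is a minimal, self-contained sketch of the second (transforming) pattern, analogous to `SortedEntriesCacheEntry` but independent of Mango's `Entry`/`Library` types:

```crystal
# Illustration only: store an Array(Int32) as a compact String and convert
# it back on read, refreshing the access time like CacheEntry does.
class IntListEntry
  getter key : String, atime : Time
  @value : String

  def initialize(@key : String, value : Array(Int32))
    @atime = Time.utc
    @value = self.class.to_save_t value
  end

  def value : Array(Int32)
    @atime = Time.utc
    self.class.to_return_t @value
  end

  def self.to_save_t(value : Array(Int32)) : String
    value.join ","
  end

  def self.to_return_t(value : String) : Array(Int32)
    value.split(",").map &.to_i
  end
end

entry = IntListEntry.new "progress", [3, 1, 4]
p entry.value # => [3, 1, 4]
```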
class String
def instance_size
instance_sizeof(String) + bytesize
end
end
struct Tuple(*T)
def instance_size
sizeof(T) + # total size of non-reference types
self.sum do |e|
next 0 unless e.is_a? Reference
if e.responds_to? :instance_size
e.instance_size
else
instance_sizeof(typeof(e))
end
end
end
end
alias CacheableType = Array(Entry) | String | Tuple(String, Int32)
alias CacheEntryType = SortedEntriesCacheEntry |
CacheEntry(String, String) |
CacheEntry(Tuple(String, Int32), Tuple(String, Int32))
def generate_cache_entry(key : String, value : CacheableType)
if value.is_a? Array(Entry)
SortedEntriesCacheEntry.new key, value
else
CacheEntry(typeof(value), typeof(value)).new key, value
end
end
# LRU Cache
class LRUCache
@@limit : Int128 = Int128.new 0
@@should_log = true
# key => entry
@@cache = {} of String => CacheEntryType
def self.enabled
Config.current.cache_enabled
end
def self.init
cache_size = Config.current.cache_size_mbs
@@limit = Int128.new cache_size * 1024 * 1024 if enabled
@@should_log = Config.current.cache_log_enabled
end
def self.get(key : String)
return unless enabled
entry = @@cache[key]?
if @@should_log
Logger.debug "LRUCache #{entry.nil? ? "miss" : "hit"} #{key}"
end
return entry.value unless entry.nil?
end
def self.set(cache_entry : CacheEntryType)
return unless enabled
key = cache_entry.key
@@cache[key] = cache_entry
Logger.debug "LRUCache cached #{key}" if @@should_log
remove_least_recent_access
end
def self.invalidate(key : String)
return unless enabled
@@cache.delete key
end
def self.print
return unless @@should_log
sum = @@cache.sum { |_, entry| entry.instance_size }
Logger.debug "---- LRU Cache ----"
Logger.debug "Size: #{sum} Bytes"
Logger.debug "List:"
@@cache.each do |k, v|
Logger.debug "#{k} | #{v.atime} | #{v.instance_size}"
end
Logger.debug "-------------------"
end
private def self.is_cache_full
sum = @@cache.sum { |_, entry| entry.instance_size }
sum > @@limit
end
private def self.remove_least_recent_access
if @@should_log && is_cache_full
Logger.debug "Removing entries from LRUCache"
end
while is_cache_full && @@cache.size > 0
min_tuple = @@cache.min_by { |_, entry| entry.atime }
min_key = min_tuple[0]
min_entry = min_tuple[1]
Logger.debug " \
Target: #{min_key}, \
Last Access Time: #{min_entry.atime}" if @@should_log
invalidate min_key
end
end
end


@@ -1,9 +1,6 @@
require "image_size"
require "yaml"
class Entry
include YAML::Serializable
getter zip_path : String, book : Title, title : String,
size : String, pages : Int32, id : String, encoded_path : String,
encoded_title : String, mtime : Time, err_msg : String?
@@ -49,20 +46,16 @@ class Entry
file.close
end
def build_json(*, slim = false)
JSON.build do |json|
json.object do
{% for str in ["zip_path", "title", "size", "id"] %}
def to_json(json : JSON::Builder)
json.object do
{% for str in ["zip_path", "title", "size", "id"] %}
json.field {{str}}, @{{str.id}}
{% end %}
json.field "title_id", @book.id
json.field "pages" { json.number @pages }
unless slim
json.field "display_name", @book.display_name @title
json.field "cover_url", cover_url
json.field "mtime" { json.number @mtime.to_unix }
end
end
json.field "title_id", @book.id
json.field "display_name", @book.display_name @title
json.field "cover_url", cover_url
json.field "pages" { json.number @pages }
json.field "mtime" { json.number @mtime.to_unix }
end
end
@@ -76,17 +69,9 @@ class Entry
def cover_url
return "#{Config.current.base_url}img/icon.png" if @err_msg
unless @book.entry_cover_url_cache
TitleInfo.new @book.dir do |info|
@book.entry_cover_url_cache = info.entry_cover_url
end
end
entry_cover_url = @book.entry_cover_url_cache
url = "#{Config.current.base_url}api/cover/#{@book.id}/#{@id}"
if entry_cover_url
info_url = entry_cover_url[@title]?
TitleInfo.new @book.dir do |info|
info_url = info.entry_cover_url[@title]?
unless info_url.nil? || info_url.empty?
url = File.join Config.current.base_url, info_url
end
@@ -173,16 +158,6 @@ class Entry
# For backward compatibility with v0.1.0, we save entry titles
# instead of IDs in info.json
def save_progress(username, page)
LRUCache.invalidate "#{@book.id}:#{username}:progress_sum"
@book.parents.each do |parent|
LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
end
[false, true].each do |ascend|
sorted_entries_cache_key = SortedEntriesCacheEntry.gen_key @book.id,
username, @book.entries, SortOptions.new(SortMethod::Progress, ascend)
LRUCache.invalidate sorted_entries_cache_key
end
TitleInfo.new @book.dir do |info|
if info.progress[username]?.nil?
info.progress[username] = {@title => page}


@@ -1,38 +1,12 @@
class Library
include YAML::Serializable
getter dir : String, title_ids : Array(String),
title_hash : Hash(String, Title)
use_default
def save_instance
path = Config.current.library_cache_path
Logger.debug "Caching library to #{path}"
writer = Compress::Gzip::Writer.new path,
Compress::Gzip::BEST_COMPRESSION
writer.write self.to_yaml.to_slice
writer.close
end
def self.load_instance
path = Config.current.library_cache_path
return unless File.exists? path
Logger.debug "Loading cached library from #{path}"
begin
Compress::Gzip::Reader.open path do |content|
@@default = Library.from_yaml content
end
Library.default.register_jobs
rescue e
Logger.error e
end
end
def initialize
register_mime_types
@dir = Config.current.library_path
# explicitly initialize @titles to bypass the compiler check. it will
# be filled with actual Titles in the `scan` call below
@@ -42,12 +16,6 @@ class Library
@entries_count = 0
@thumbnails_count = 0
register_jobs
end
protected def register_jobs
register_mime_types
scan_interval = Config.current.scan_interval_minutes
if scan_interval < 1
scan
@@ -57,7 +25,7 @@ class Library
start = Time.local
scan
ms = (Time.local - start).total_milliseconds
Logger.debug "Library initialized in #{ms}ms"
Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
sleep scan_interval.minutes
end
end
@@ -74,6 +42,25 @@ class Library
end
end
end
subscription_interval = Config.current
.mangadex["subscription_update_interval_hours"].as Int32
unless subscription_interval < 1
spawn do
loop do
subscriptions = Storage.default.subscriptions
Logger.info "Checking MangaDex for updates on " \
"#{subscriptions.size} subscriptions"
added_count = 0
subscriptions.each do |sub|
added_count += sub.check_for_updates
end
Logger.info "Subscription update completed. Added #{added_count} " \
"chapters to the download queue"
sleep subscription_interval.hours
end
end
end
end
def titles
@@ -83,6 +70,11 @@ class Library
def sorted_titles(username, opt : SortOptions? = nil)
if opt.nil?
opt = SortOptions.from_info_json @dir, username
else
TitleInfo.new @dir do |info|
info.sort_by[username] = opt.to_tuple
info.save
end
end
# Helper function from src/util/util.cr
@@ -93,21 +85,11 @@ class Library
titles + titles.flat_map &.deep_titles
end
def deep_entries
titles.flat_map &.deep_entries
end
def build_json(*, slim = false, depth = -1)
JSON.build do |json|
json.object do
json.field "dir", @dir
json.field "titles" do
json.array do
self.titles.each do |title|
json.raw title.build_json(slim: slim, depth: depth)
end
end
end
def to_json(json : JSON::Builder)
json.object do
json.field "dir", @dir
json.field "titles" do
json.raw self.titles.to_json
end
end
end
@@ -121,7 +103,6 @@ class Library
end
def scan
start = Time.local
unless Dir.exists? @dir
Logger.info "The library directory #{@dir} does not exist. " \
"Attempting to create it"
@@ -130,36 +111,14 @@ class Library
storage = Storage.new auto_close: false
examine_context : ExamineContext = {
cached_contents_signature: {} of String => String,
deleted_title_ids: [] of String,
deleted_entry_ids: [] of String,
}
@title_ids.select! do |title_id|
title = @title_hash[title_id]
existence = title.examine examine_context
unless existence
examine_context["deleted_title_ids"].concat [title_id] +
title.deep_titles.map &.id
examine_context["deleted_entry_ids"].concat title.deep_entries.map &.id
end
existence
end
remained_title_dirs = @title_ids.map { |id| title_hash[id].dir }
examine_context["deleted_title_ids"].each do |title_id|
@title_hash.delete title_id
end
cache = examine_context["cached_contents_signature"]
(Dir.entries @dir)
.select { |fn| !fn.starts_with? "." }
.map { |fn| File.join @dir, fn }
.select { |path| !(remained_title_dirs.includes? path) }
.select { |path| File.directory? path }
.map { |path| Title.new path, "", cache }
.map { |path| Title.new path, "" }
.select { |title| !(title.entries.empty? && title.titles.empty?) }
.sort! { |a, b| a.title <=> b.title }
.tap { |_| @title_ids.clear }
.each do |title|
@title_hash[title.id] = title
@title_ids << title.id
@@ -168,15 +127,8 @@ class Library
storage.bulk_insert_ids
storage.close
ms = (Time.local - start).total_milliseconds
Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
Storage.default.mark_unavailable examine_context["deleted_entry_ids"],
examine_context["deleted_title_ids"]
spawn do
save_instance
end
Logger.debug "Scan completed"
Storage.default.mark_unavailable
end
def get_continue_reading_entries(username)


@@ -1,25 +1,13 @@
require "digest"
require "../archive"
class Title
include YAML::Serializable
getter dir : String, parent_id : String, title_ids : Array(String),
entries : Array(Entry), title : String, id : String,
encoded_title : String, mtime : Time, signature : UInt64,
entry_cover_url_cache : Hash(String, String)?
setter entry_cover_url_cache : Hash(String, String)?
encoded_title : String, mtime : Time, signature : UInt64
@[YAML::Field(ignore: true)]
@entry_display_name_cache : Hash(String, String)?
@[YAML::Field(ignore: true)]
@entry_cover_url_cache : Hash(String, String)?
@[YAML::Field(ignore: true)]
@cached_display_name : String?
@[YAML::Field(ignore: true)]
@cached_cover_url : String?
def initialize(@dir : String, @parent_id, cache = {} of String => String)
def initialize(@dir : String, @parent_id)
storage = Storage.default
@signature = Dir.signature dir
id = storage.get_title_id dir, signature
@@ -32,7 +20,6 @@ class Title
})
end
@id = id
@contents_signature = Dir.contents_signature dir, cache
@title = File.basename dir
@encoded_title = URI.encode @title
@title_ids = [] of String
@@ -43,7 +30,7 @@ class Title
next if fn.starts_with? "."
path = File.join dir, fn
if File.directory? path
title = Title.new path, @id, cache
title = Title.new path, @id
next if title.entries.size == 0 && title.titles.size == 0
Library.default.title_hash[title.id] = title
@title_ids << title.id
@@ -70,151 +57,27 @@ class Title
end
end
# Utility method used in library rescanning.
# - When the title does not exist on the file system anymore, return false
# and let it be deleted from the library instance
# - When the title exists, but its contents signature is now different from
# the cache, it means some of its content (nested titles or entries)
# has been added, deleted, or renamed. In this case we update its
# contents signature and instance variables
# - When the title exists and its contents signature is still the same, we
# return true so it can be reused without rescanning
def examine(context : ExamineContext) : Bool
return false unless Dir.exists? @dir
contents_signature = Dir.contents_signature @dir,
context["cached_contents_signature"]
return true if @contents_signature == contents_signature
@contents_signature = contents_signature
@signature = Dir.signature @dir
storage = Storage.default
id = storage.get_title_id dir, signature
if id.nil?
id = random_str
storage.insert_title_id({
path: dir,
id: id,
signature: signature.to_s,
})
end
@id = id
@mtime = File.info(@dir).modification_time
previous_titles_size = @title_ids.size
@title_ids.select! do |title_id|
title = Library.default.get_title! title_id
existence = title.examine context
unless existence
context["deleted_title_ids"].concat [title_id] +
title.deep_titles.map &.id
context["deleted_entry_ids"].concat title.deep_entries.map &.id
end
existence
end
remained_title_dirs = @title_ids.map do |title_id|
title = Library.default.get_title! title_id
title.dir
end
previous_entries_size = @entries.size
@entries.select! do |entry|
existence = File.exists? entry.zip_path
Fiber.yield
context["deleted_entry_ids"] << entry.id unless existence
existence
end
remained_entry_zip_paths = @entries.map &.zip_path
is_titles_added = false
is_entries_added = false
Dir.entries(dir).each do |fn|
next if fn.starts_with? "."
path = File.join dir, fn
if File.directory? path
next if remained_title_dirs.includes? path
title = Title.new path, @id, context["cached_contents_signature"]
next if title.entries.size == 0 && title.titles.size == 0
Library.default.title_hash[title.id] = title
@title_ids << title.id
is_titles_added = true
next
end
if is_supported_file path
next if remained_entry_zip_paths.includes? path
entry = Entry.new path, self
if entry.pages > 0 || entry.err_msg
@entries << entry
is_entries_added = true
end
end
end
mtimes = [@mtime]
mtimes += @title_ids.map { |e| Library.default.title_hash[e].mtime }
mtimes += @entries.map &.mtime
@mtime = mtimes.max
if is_titles_added || previous_titles_size != @title_ids.size
@title_ids.sort! do |a, b|
compare_numerically Library.default.title_hash[a].title,
Library.default.title_hash[b].title
end
end
if is_entries_added || previous_entries_size != @entries.size
sorter = ChapterSorter.new @entries.map &.title
@entries.sort! do |a, b|
sorter.compare a.title, b.title
end
end
true
end
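A hedged, self-contained sketch of the rescanning contract described in the comment above `examine` (a stand-in signature replaces `Dir.contents_signature`; not Mango's code):

```crystal
# Returns false when the directory is gone; true otherwise, refreshing the
# cached signature only when the contents actually changed.
def examine_sketch(dir : String, cached_signature : String) : {Bool, String}
  return {false, cached_signature} unless Dir.exists? dir
  current = Dir.children(dir).sort.join ","  # stand-in for a contents signature
  return {true, cached_signature} if current == cached_signature
  # Changed: the real method would rescan nested titles/entries and update state here.
  {true, current}
end
```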
alias SortContext = NamedTuple(username: String, opt: SortOptions)
def build_json(*, slim = false, depth = -1,
sort_context : SortContext? = nil)
JSON.build do |json|
json.object do
{% for str in ["dir", "title", "id"] %}
def to_json(json : JSON::Builder)
json.object do
{% for str in ["dir", "title", "id"] %}
json.field {{str}}, @{{str.id}}
{% end %}
json.field "signature" { json.number @signature }
unless slim
json.field "display_name", display_name
json.field "cover_url", cover_url
json.field "mtime" { json.number @mtime.to_unix }
end
unless depth == 0
json.field "titles" do
json.array do
self.titles.each do |title|
json.raw title.build_json(slim: slim,
depth: depth > 0 ? depth - 1 : depth)
end
end
end
json.field "entries" do
json.array do
_entries = if sort_context
sorted_entries sort_context[:username],
sort_context[:opt]
else
@entries
end
_entries.each do |entry|
json.raw entry.build_json(slim: slim)
end
end
end
end
json.field "parents" do
json.array do
self.parents.each do |title|
json.object do
json.field "title", title.title
json.field "id", title.id
end
json.field "signature" { json.number @signature }
json.field "display_name", display_name
json.field "cover_url", cover_url
json.field "mtime" { json.number @mtime.to_unix }
json.field "titles" do
json.raw self.titles.to_json
end
json.field "entries" do
json.raw @entries.to_json
end
json.field "parents" do
json.array do
self.parents.each do |title|
json.object do
json.field "title", title.title
json.field "id", title.id
end
end
end
@@ -279,15 +142,11 @@ class Title
end
def display_name
cached_display_name = @cached_display_name
return cached_display_name unless cached_display_name.nil?
dn = @title
TitleInfo.new @dir do |info|
info_dn = info.display_name
dn = info_dn unless info_dn.empty?
end
@cached_display_name = dn
dn
end
@@ -311,7 +170,6 @@ class Title
end
def set_display_name(dn)
@cached_display_name = dn
TitleInfo.new @dir do |info|
info.display_name = dn
info.save
@@ -321,15 +179,11 @@ class Title
def set_display_name(entry_name : String, dn)
TitleInfo.new @dir do |info|
info.entry_display_name[entry_name] = dn
@entry_display_name_cache = info.entry_display_name
info.save
end
end
def cover_url
cached_cover_url = @cached_cover_url
return cached_cover_url unless cached_cover_url.nil?
url = "#{Config.current.base_url}img/icon.png"
readable_entries = @entries.select &.err_msg.nil?
if readable_entries.size > 0
@@ -341,12 +195,10 @@ class Title
url = File.join Config.current.base_url, info_url
end
end
@cached_cover_url = url
url
end
def set_cover_url(url : String)
@cached_cover_url = url
TitleInfo.new @dir do |info|
info.cover_url = url
info.save
@@ -356,7 +208,6 @@ class Title
def set_cover_url(entry_name : String, url : String)
TitleInfo.new @dir do |info|
info.entry_cover_url[entry_name] = url
@entry_cover_url_cache = info.entry_cover_url
info.save
end
end
@@ -376,15 +227,8 @@ class Title
end
def deep_read_page_count(username) : Int32
key = "#{@id}:#{username}:progress_sum"
sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
cached_sum = LRUCache.get key
return cached_sum[1] if cached_sum.is_a? Tuple(String, Int32) &&
cached_sum[0] == sig
sum = load_progress_for_all_entries(username, nil, true).sum +
titles.flat_map(&.deep_read_page_count username).sum
LRUCache.set generate_cache_entry key, {sig, sum}
sum
load_progress_for_all_entries(username).sum +
titles.flat_map(&.deep_read_page_count username).sum
end
def deep_total_page_count : Int32
@@ -438,12 +282,13 @@ class Title
# use the default (auto, ascending)
# When `opt` is not nil, it saves the options to info.json
def sorted_entries(username, opt : SortOptions? = nil)
cache_key = SortedEntriesCacheEntry.gen_key @id, username, @entries, opt
cached_entries = LRUCache.get cache_key
return cached_entries if cached_entries.is_a? Array(Entry)
if opt.nil?
opt = SortOptions.from_info_json @dir, username
else
TitleInfo.new @dir do |info|
info.sort_by[username] = opt.to_tuple
info.save
end
end
case opt.not_nil!.method
@@ -475,7 +320,6 @@ class Title
ary.reverse! unless opt.not_nil!.ascend
LRUCache.set generate_cache_entry cache_key, ary
ary
end
@@ -537,17 +381,6 @@ class Title
end
def bulk_progress(action, ids : Array(String), username)
LRUCache.invalidate "#{@id}:#{username}:progress_sum"
parents.each do |parent|
LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
end
[false, true].each do |ascend|
sorted_entries_cache_key =
SortedEntriesCacheEntry.gen_key @id, username, @entries,
SortOptions.new(SortMethod::Progress, ascend)
LRUCache.invalidate sorted_entries_cache_key
end
selected_entries = ids
.map { |id|
@entries.find &.id.==(id)


@@ -1,12 +1,4 @@
SUPPORTED_IMG_TYPES = %w(
image/jpeg
image/png
image/webp
image/apng
image/avif
image/gif
image/svg+xml
)
SUPPORTED_IMG_TYPES = ["image/jpeg", "image/png", "image/webp"]
enum SortMethod
Auto
@@ -96,18 +88,6 @@ class TitleInfo
@@mutex_hash = {} of String => Mutex
def self.new(dir, &)
key = "#{dir}:info.json"
info = LRUCache.get key
if info.is_a? String
begin
instance = TitleInfo.from_json info
instance.dir = dir
yield instance
return
rescue
end
end
if @@mutex_hash[dir]?
mutex = @@mutex_hash[dir]
else
@@ -121,7 +101,6 @@ class TitleInfo
instance = TitleInfo.from_json File.read json_path
end
instance.dir = dir
LRUCache.set generate_cache_entry key, instance.to_json
yield instance
end
end
@@ -129,12 +108,5 @@ class TitleInfo
def save
json_path = File.join @dir, "info.json"
File.write json_path, self.to_pretty_json
key = "#{@dir}:info.json"
LRUCache.set generate_cache_entry key, self.to_json
end
end
alias ExamineContext = NamedTuple(
cached_contents_signature: Hash(String, String),
deleted_title_ids: Array(String),
deleted_entry_ids: Array(String))


@@ -34,11 +34,7 @@ class Logger
end
@backend.formatter = Log::Formatter.new &format_proc
Log.setup do |c|
c.bind "*", @@severity, @backend
c.bind "db.*", :error, @backend
end
Log.setup @@severity, @backend
end
def self.get_severity(level = "") : Log::Severity

src/mangadex/downloader.cr (new file, 172 lines)

@@ -0,0 +1,172 @@
require "mangadex"
require "compress/zip"
require "../rename"
require "./ext"
module MangaDex
class PageJob
property success = false
property url : String
property filename : String
property writer : Compress::Zip::Writer
property tries_remaning : Int32
def initialize(@url, @filename, @writer, @tries_remaning)
end
end
class Downloader < Queue::Downloader
@wait_seconds : Int32 = Config.current.mangadex["download_wait_seconds"]
.to_i32
@retries : Int32 = Config.current.mangadex["download_retries"].to_i32
use_default
def initialize
@client = Client.from_config
super
end
def pop : Queue::Job?
job = nil
MainFiber.run do
DB.open "sqlite3://#{@queue.path}" do |db|
begin
db.query_one "select * from queue where id not like '%-%' " \
"and (status = 0 or status = 1) " \
"order by time limit 1" do |res|
job = Queue::Job.from_query_result res
end
rescue
end
end
end
job
end
private def download(job : Queue::Job)
@downloading = true
@queue.set_status Queue::JobStatus::Downloading, job
begin
chapter = @client.chapter job.id
# We must put the `.pages` call in a rescue block to handle external
# chapters.
pages = chapter.pages
rescue e
Logger.error e
@queue.set_status Queue::JobStatus::Error, job
unless e.message.nil?
@queue.add_message e.message.not_nil!, job
end
@downloading = false
return
end
@queue.set_pages pages.size, job
lib_dir = @library_path
rename_rule = Rename::Rule.new \
Config.current.mangadex["manga_rename_rule"].to_s
manga_dir = File.join lib_dir, chapter.manga.rename rename_rule
unless File.exists? manga_dir
Dir.mkdir_p manga_dir
end
zip_path = File.join manga_dir, "#{job.title}.cbz.part"
# Find the number of digits needed to store the number of pages
len = Math.log10(pages.size).to_i + 1
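# e.g. 120 pages: log10(120).to_i == 2, so len == 3 and page filenames run 000..119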
writer = Compress::Zip::Writer.new zip_path
# Create a buffered channel. It works as a FIFO queue
channel = Channel(PageJob).new pages.size
spawn do
pages.each_with_index do |url, i|
fn = Path.new(URI.parse(url).path).basename
ext = File.extname fn
fn = "#{i.to_s.rjust len, '0'}#{ext}"
page_job = PageJob.new url, fn, writer, @retries
Logger.debug "Downloading #{url}"
loop do
sleep @wait_seconds.seconds
download_page page_job
break if page_job.success ||
page_job.tries_remaning <= 0
page_job.tries_remaning -= 1
Logger.warn "Failed to download page #{url}. " \
"Retrying... Remaining retries: " \
"#{page_job.tries_remaning}"
end
channel.send page_job
break unless @queue.exists? job
end
end
spawn do
page_jobs = [] of PageJob
pages.size.times do
page_job = channel.receive
break unless @queue.exists? job
Logger.debug "[#{page_job.success ? "success" : "failed"}] " \
"#{page_job.url}"
page_jobs << page_job
if page_job.success
@queue.add_success job
else
@queue.add_fail job
msg = "Failed to download page #{page_job.url}"
@queue.add_message msg, job
Logger.error msg
end
end
unless @queue.exists? job
Logger.debug "Download cancelled"
@downloading = false
next
end
fail_count = page_jobs.count { |j| !j.success }
Logger.debug "Download completed. " \
"#{fail_count}/#{page_jobs.size} failed"
writer.close
filename = File.join File.dirname(zip_path), File.basename(zip_path,
".part")
File.rename zip_path, filename
Logger.debug "cbz File created at #{filename}"
zip_exception = validate_archive filename
if !zip_exception.nil?
@queue.add_message "The downloaded archive is corrupted. " \
"Error: #{zip_exception}", job
@queue.set_status Queue::JobStatus::Error, job
elsif fail_count > 0
@queue.set_status Queue::JobStatus::MissingPages, job
else
@queue.set_status Queue::JobStatus::Completed, job
end
@downloading = false
end
end
private def download_page(job : PageJob)
Logger.debug "downloading #{job.url}"
headers = HTTP::Headers{
"User-agent" => "Mangadex.cr",
}
begin
HTTP::Client.get job.url, headers do |res|
unless res.success?
raise "Failed to download page #{job.url}. " \
"[#{res.status_code}] #{res.status_message}"
end
job.writer.add job.filename, res.body_io
end
job.success = true
rescue e
Logger.error e
job.success = false
end
end
end
end

src/mangadex/ext.cr (new file, 94 lines)

@@ -0,0 +1,94 @@
private macro properties_to_hash(names)
{
{% for name in names %}
"{{name.id}}" => {{name.id}}.to_s,
{% end %}
}
end
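# For example, `properties_to_hash %w(id title)` expands to
# {"id" => id.to_s, "title" => title.to_s}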
# Monkey-patch the structures in the `mangadex` shard to suit our needs
module MangaDex
struct Client
@@group_cache = {} of String => Group
def self.from_config : Client
self.new base_url: Config.current.mangadex["base_url"].to_s,
api_url: Config.current.mangadex["api_url"].to_s
end
end
struct Manga
def rename(rule : Rename::Rule)
rule.render properties_to_hash %w(id title author artist)
end
def to_info_json
hash = JSON.parse(to_json).as_h
_chapters = chapters.map do |c|
JSON.parse c.to_info_json
end
hash["chapters"] = JSON::Any.new _chapters
hash.to_json
end
end
struct Chapter
def rename(rule : Rename::Rule)
hash = properties_to_hash %w(id title volume chapter lang_code language)
hash["groups"] = groups.join(",", &.name)
rule.render hash
end
def full_title
rule = Rename::Rule.new \
Config.current.mangadex["chapter_rename_rule"].to_s
rename rule
end
def to_info_json
hash = JSON.parse(to_json).as_h
hash["language"] = JSON::Any.new language
_groups = {} of String => JSON::Any
groups.each do |g|
_groups[g.name] = JSON::Any.new g.id
end
hash["groups"] = JSON::Any.new _groups
hash["full_title"] = JSON::Any.new full_title
hash.to_json
end
# We don't need to rename the manga title here. It will be renamed in
# src/mangadex/downloader.cr
def to_job : Queue::Job
Queue::Job.new(
id.to_s,
manga_id.to_s,
full_title,
manga_title,
Queue::JobStatus::Pending,
Time.unix timestamp
)
end
end
struct User
def updates_after(time : Time, &block : Chapter ->)
page = 1
stopped = false
until stopped
chapters = followed_updates(page: page).chapters
return if chapters.empty?
chapters.each do |c|
if time > Time.unix c.timestamp
stopped = true
break
end
yield c
end
page += 1
# Let's not DDoS MangaDex :)
sleep 5.seconds
end
end
end
end


@@ -2,12 +2,13 @@ require "./config"
require "./queue"
require "./server"
require "./main_fiber"
require "./mangadex/*"
require "./plugin/*"
require "option_parser"
require "clim"
require "tallboy"
MANGO_VERSION = "0.24.0"
MANGO_VERSION = "0.22.0"
# From http://www.network-science.de/ascii/
BANNER = %{
@@ -55,11 +56,10 @@ class CLI < Clim
Config.load(opts.config).set_current
# Initialize main components
LRUCache.init
Storage.default
Queue.default
Library.load_instance
Library.default
MangaDex::Downloader.default
Plugin::Downloader.default
spawn do


@@ -23,6 +23,11 @@ class Plugin
job
end
private def process_filename(str)
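# ".." would escape the target directory and "/" would create nested paths, so both are neutralized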
return "_" if str == ".."
str.gsub "/", "_"
end
private def download(job : Queue::Job)
@downloading = true
@queue.set_status Queue::JobStatus::Downloading, job
@@ -37,8 +42,8 @@ class Plugin
pages = info["pages"].as_i
manga_title = sanitize_filename job.manga_title
chapter_title = sanitize_filename info["title"].as_s
manga_title = process_filename job.manga_title
chapter_title = process_filename info["title"].as_s
@queue.set_pages pages, job
lib_dir = @library_path
@@ -63,7 +68,7 @@ class Plugin
while page = plugin.next_page
break unless @queue.exists? job
fn = sanitize_filename page["filename"].as_s
fn = process_filename page["filename"].as_s
url = page["url"].as_s
headers = HTTP::Headers.new


@@ -73,5 +73,9 @@ struct AdminRouter
get "/admin/missing" do |env|
layout "missing-items"
end
get "/admin/mangadex" do |env|
layout "mangadex"
end
end
end


@@ -1,6 +1,6 @@
require "../mangadex/*"
require "../upload"
require "koa"
require "digest"
struct APIRouter
@@api_json : String?
@@ -23,7 +23,7 @@ struct APIRouter
# Authentication
All endpoints except `/api/login` require authentication. After logging in, your session ID would be stored as a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate the API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
All endpoints require authentication. After logging in, your session ID would be stored as a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate the API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
# Terminologies
@@ -56,28 +56,18 @@ struct APIRouter
"error" => String?,
}
Koa.describe "Authenticates a user", <<-MD
After successful login, the cookie `mango-sessid-#{Config.current.port}` will contain a valid session ID that can be used for subsequent requests
MD
Koa.body schema: {
"username" => String,
"password" => String,
}
Koa.tag "users"
post "/api/login" do |env|
begin
username = env.params.json["username"].as String
password = env.params.json["password"].as String
token = Storage.default.verify_user(username, password).not_nil!
Koa.schema("mdChapter", {
"id" => Int64,
"group" => {} of String => String,
}.merge(s %w(title volume chapter language full_title time
manga_title manga_id)),
desc: "A MangaDex chapter")
env.session.string "token", token
"Authenticated"
rescue e
Logger.error e
env.response.status_code = 403
e.message
end
end
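# Client-side sketch (an assumption, not from the source) of how the session
# cookie gates later API calls; the host, port, and credentials are
# placeholders.
#
# require "http/client"
# require "json"
# client = HTTP::Client.new "localhost", 9000
# res = client.post "/api/login",
#   headers: HTTP::Headers{"Content-Type" => "application/json"},
#   body: {"username" => "admin", "password" => "secret"}.to_json
# cookie = res.cookies["mango-sessid-9000"]? # send this cookie with later /api/* requests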
Koa.schema "mdManga", {
"id" => Int64,
"chapters" => ["mdChapter"],
}.merge(s %w(title description author artist cover_url)),
desc: "A MangaDex manga"
Koa.describe "Returns a page in a manga entry"
Koa.path "tid", desc: "Title ID"
@@ -85,14 +75,12 @@ struct APIRouter
Koa.path "page", schema: Int32, desc: "The page number to return (starts from 1)"
Koa.response 200, schema: Bytes, media_type: "image/*"
Koa.response 500, "Page not found or not readable"
Koa.response 304, "Page not modified (only available when `If-None-Match` is set)"
Koa.tag "reader"
get "/api/page/:tid/:eid/:page" do |env|
begin
tid = env.params.url["tid"]
eid = env.params.url["eid"]
page = env.params.url["page"].to_i
prev_e_tag = env.request.headers["If-None-Match"]?
title = Library.default.get_title tid
raise "Title ID `#{tid}` not found" if title.nil?
@@ -102,15 +90,7 @@ struct APIRouter
raise "Failed to load page #{page} of " \
"`#{title.title}/#{entry.title}`" if img.nil?
e_tag = Digest::SHA1.hexdigest img.data
if prev_e_tag == e_tag
env.response.status_code = 304
""
else
env.response.headers["ETag"] = e_tag
env.response.headers["Cache-Control"] = "public, max-age=86400"
send_img env, img
end
send_img env, img
rescue e
Logger.error e
env.response.status_code = 500
@@ -122,14 +102,12 @@ struct APIRouter
Koa.path "tid", desc: "Title ID"
Koa.path "eid", desc: "Entry ID"
Koa.response 200, schema: Bytes, media_type: "image/*"
Koa.response 304, "Page not modified (only available when `If-None-Match` is set)"
Koa.response 500, "Page not found or not readable"
Koa.tag "library"
get "/api/cover/:tid/:eid" do |env|
begin
tid = env.params.url["tid"]
eid = env.params.url["eid"]
prev_e_tag = env.request.headers["If-None-Match"]?
title = Library.default.get_title tid
raise "Title ID `#{tid}` not found" if title.nil?
@@ -140,14 +118,7 @@ struct APIRouter
raise "Failed to get cover of `#{title.title}/#{entry.title}`" \
if img.nil?
e_tag = Digest::SHA1.hexdigest img.data
if prev_e_tag == e_tag
env.response.status_code = 304
""
else
env.response.headers["ETag"] = e_tag
send_img env, img
end
send_img env, img
rescue e
Logger.error e
env.response.status_code = 500
@@ -155,39 +126,18 @@ struct APIRouter
end
end
Koa.describe "Returns the book with title `tid`", <<-MD
- Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
- Supply the `depth` query parameter to control the depth of nested titles to return.
- When `depth` is 1, returns the requested title and sub-titles/entries one level in it
- When `depth` is 0, returns the requested title without its sub-titles/entries
- When `depth` is N, returns the requested title and sub-titles/entries N levels in it
- When `depth` is negative, returns the requested title and all sub-titles/entries in it
MD
Koa.describe "Returns the book with title `tid`"
Koa.path "tid", desc: "Title ID"
Koa.query "slim"
Koa.query "depth"
Koa.query "sort", desc: "Sorting option for entries. Can be one of 'auto', 'title', 'progress', 'time_added' and 'time_modified'"
Koa.query "ascend", desc: "Sorting direction for entries. Set to 0 for the descending order. Doesn't work without specifying 'sort'"
Koa.response 200, schema: "title"
Koa.response 404, "Title not found"
Koa.tag "library"
get "/api/book/:tid" do |env|
begin
username = get_username env
sort_opt = SortOptions.new
get_sort_opt
tid = env.params.url["tid"]
title = Library.default.get_title tid
raise "Title ID `#{tid}` not found" if title.nil?
slim = !env.params.query["slim"]?.nil?
depth = env.params.query["depth"]?.try(&.to_i?) || -1
send_json env, title.build_json(slim: slim, depth: depth,
sort_context: {username: username,
opt: sort_opt})
send_json env, title.to_json
rescue e
Logger.error e
env.response.status_code = 404
@@ -195,26 +145,14 @@ struct APIRouter
end
end
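# Example request shape (placeholders, not from the source), combining the
# query parameters documented above:
#
# GET /api/book/<tid>?slim&depth=1&sort=title&ascend=0
#   => the requested title and one level of sub-titles/entries, sorted by
#      title in descending order, with "display_name", "cover_url", and
#      "mtime" stripped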
Koa.describe "Returns the entire library with all titles and entries", <<-MD
- Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
- Supply the `depth` query parameter to control the depth of nested titles to return.
- When `depth` is 1, returns the top-level titles and sub-titles/entries one level in them
- When `depth` is 0, returns the top-level titles without their sub-titles/entries
- When `depth` is N, returns the top-level titles and sub-titles/entries N levels in them
- When `depth` is negative, returns the entire library
MD
Koa.query "slim"
Koa.query "depth"
Koa.describe "Returns the entire library with all titles and entries"
Koa.response 200, schema: {
"dir" => String,
"titles" => ["title"],
}
Koa.tag "library"
get "/api/library" do |env|
slim = !env.params.query["slim"]?.nil?
depth = env.params.query["depth"]?.try(&.to_i?) || -1
send_json env, Library.default.build_json(slim: slim, depth: depth)
send_json env, Library.default.to_json
end
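# Example request shape (placeholders, not from the source), based on the
# query parameters documented above:
#
# GET /api/library?slim&depth=0
#   => the top-level titles only, without their sub-titles/entries and
#      without "display_name", "cover_url", or "mtime"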
Koa.describe "Triggers a library scan"
@@ -371,6 +309,58 @@ struct APIRouter
end
end
Koa.describe "Returns a MangaDex manga identified by `id`", <<-MD
On error, returns a JSON that contains the error message in the `error` field.
MD
Koa.tags ["admin", "mangadex"]
Koa.path "id", desc: "A MangaDex manga ID"
Koa.response 200, schema: "mdManga"
get "/api/admin/mangadex/manga/:id" do |env|
begin
id = env.params.url["id"]
manga = MangaDex::Client.from_config.manga id
send_json env, manga.to_info_json
rescue e
Logger.error e
send_json env, {"error" => e.message}.to_json
end
end
Koa.describe "Adds a list of MangaDex chapters to the download queue", <<-MD
On error, returns a JSON that contains the error message in the `error` field.
MD
Koa.tags ["admin", "mangadex", "downloader"]
Koa.body schema: {
"chapters" => ["mdChapter"],
}
Koa.response 200, schema: {
"success" => Int32,
"fail" => Int32,
}
post "/api/admin/mangadex/download" do |env|
begin
chapters = env.params.json["chapters"].as(Array).map &.as_h
jobs = chapters.map { |chapter|
Queue::Job.new(
chapter["id"].as_i64.to_s,
chapter["mangaId"].as_i64.to_s,
chapter["full_title"].as_s,
chapter["mangaTitle"].as_s,
Queue::JobStatus::Pending,
Time.unix chapter["timestamp"].as_i64
)
}
inserted_count = Queue.default.push jobs
send_json env, {
"success": inserted_count,
"fail": jobs.size - inserted_count,
}.to_json
rescue e
Logger.error e
send_json env, {"error" => e.message}.to_json
end
end
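# Example request body (values are made up), matching the fields read by the
# handler above:
#
# {
#   "chapters": [
#     {
#       "id": 123456,
#       "mangaId": 7890,
#       "full_title": "Vol.1 Ch.1 - Title",
#       "mangaTitle": "Some Manga",
#       "timestamp": 1600000000
#     }
#   ]
# }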
ws "/api/admin/mangadex/queue" do |socket, env|
interval_raw = env.params.query["interval"]?
interval = (interval_raw.to_i? if interval_raw) || 5
@@ -627,32 +617,21 @@ struct APIRouter
"height" => Int32,
}],
}
Koa.response 304, "Not modified (only available when `If-None-Match` is set)"
get "/api/dimensions/:tid/:eid" do |env|
begin
tid = env.params.url["tid"]
eid = env.params.url["eid"]
prev_e_tag = env.request.headers["If-None-Match"]?
title = Library.default.get_title tid
raise "Title ID `#{tid}` not found" if title.nil?
entry = title.get_entry eid
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
file_hash = Digest::SHA1.hexdigest (entry.zip_path + entry.mtime.to_s)
e_tag = "W/#{file_hash}"
if e_tag == prev_e_tag
env.response.status_code = 304
""
else
sizes = entry.page_dimensions
env.response.headers["ETag"] = e_tag
env.response.headers["Cache-Control"] = "public, max-age=86400"
send_json env, {
"success" => true,
"dimensions" => sizes,
}.to_json
end
sizes = entry.page_dimensions
send_json env, {
"success" => true,
"dimensions" => sizes,
}.to_json
rescue e
Logger.error e
send_json env, {
@@ -911,6 +890,239 @@ struct APIRouter
end
end
Koa.describe "Logs the current user into their MangaDex account", <<-MD
If successful, returns the expiration date (as a unix timestamp) of the newly created token.
MD
Koa.body schema: {
"username" => String,
"password" => String,
}
Koa.response 200, schema: {
"success" => Bool,
"error" => String?,
"expires" => Int64?,
}
Koa.tags ["admin", "mangadex", "users"]
post "/api/admin/mangadex/login" do |env|
begin
username = env.params.json["username"].as String
password = env.params.json["password"].as String
mango_username = get_username env
client = MangaDex::Client.from_config
client.auth username, password
Storage.default.save_md_token mango_username, client.token.not_nil!,
client.token_expires
send_json env, {
"success" => true,
"error" => nil,
"expires" => client.token_expires.to_unix,
}.to_json
rescue e
Logger.error e
send_json env, {
"success" => false,
"error" => e.message,
}.to_json
end
end
Koa.describe "Returns the expiration date (as a unix timestamp) of the mangadex token if it exists"
Koa.response 200, schema: {
"success" => Bool,
"error" => String?,
"expires" => Int64?,
}
Koa.tags ["admin", "mangadex", "users"]
get "/api/admin/mangadex/expires" do |env|
begin
username = get_username env
_, expires = Storage.default.get_md_token username
send_json env, {
"success" => true,
"error" => nil,
"expires" => expires.try &.to_unix,
}.to_json
rescue e
Logger.error e
send_json env, {
"success" => false,
"error" => e.message,
}.to_json
end
end
Koa.describe "Searches MangaDex for manga matching `query`", <<-MD
Returns an empty list if the current user hasn't logged in to MangaDex.
MD
Koa.query "query"
Koa.response 200, schema: {
"success" => Bool,
"error" => String?,
"manga?" => [{
"id" => Int64,
"title" => String,
"description" => String,
"mainCover" => String,
}],
}
Koa.tags ["admin", "mangadex"]
get "/api/admin/mangadex/search" do |env|
begin
query = env.params.query["query"]
send_json env, {
"success" => true,
"error" => nil,
"manga" => get_client(env).partial_search query,
}.to_json
rescue e
Logger.error e
send_json env, {
"success" => false,
"error" => e.message,
}.to_json
end
end
Koa.describe "Lists all MangaDex subscriptions"
Koa.response 200, schema: {
"success" => Bool,
"error" => String?,
"subscriptions?" => [{
"id" => Int64,
"username" => String,
"manga_id" => Int64,
"language" => String?,
"group_id" => Int64?,
"min_volume" => Int64?,
"max_volume" => Int64?,
"min_chapter" => Int64?,
"max_chapter" => Int64?,
"last_checked" => Int64,
"created_at" => Int64,
}],
}
Koa.tags ["admin", "mangadex", "subscriptions"]
get "/api/admin/mangadex/subscriptions" do |env|
begin
send_json env, {
"success" => true,
"error" => nil,
"subscriptions" => Storage.default.subscriptions,
}.to_json
rescue e
Logger.error e
send_json env, {
"success" => false,
"error" => e.message,
}.to_json
end
end
Koa.describe "Creates a new MangaDex subscription"
Koa.body schema: {
"subscription" => {
"manga" => Int64,
"language" => String?,
"groupId" => Int64?,
"volumeMin" => Int64?,
"volumeMax" => Int64?,
"chapterMin" => Int64?,
"chapterMax" => Int64?,
},
}
Koa.response 200, schema: {
"success" => Bool,
"error" => String?,
}
Koa.tags ["admin", "mangadex", "subscriptions"]
post "/api/admin/mangadex/subscriptions" do |env|
begin
json = env.params.json["subscription"].as Hash(String, JSON::Any)
sub = Subscription.new json["manga"].as_i64, get_username env
sub.language = json["language"]?.try &.as_s?
sub.group_id = json["groupId"]?.try &.as_i64?
sub.min_volume = json["volumeMin"]?.try &.as_i64?
sub.max_volume = json["volumeMax"]?.try &.as_i64?
sub.min_chapter = json["chapterMin"]?.try &.as_i64?
sub.max_chapter = json["chapterMax"]?.try &.as_i64?
Storage.default.save_subscription sub
send_json env, {
"success" => true,
"error" => nil,
}.to_json
rescue e
Logger.error e
send_json env, {
"success" => false,
"error" => e.message,
}.to_json
end
end
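# Example request body (values are made up); null filters mean "no
# restriction", matching the nilable fields in the handler above:
#
# {
#   "subscription": {
#     "manga": 7890,
#     "language": "gb",
#     "groupId": null,
#     "volumeMin": null,
#     "volumeMax": null,
#     "chapterMin": 10,
#     "chapterMax": null
#   }
# }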
Koa.describe "Deletes a MangaDex subscription identified by `id`", <<-MD
Does nothing if the subscription was not created by the current user.
MD
Koa.response 200, schema: {
"success" => Bool,
"error" => String?,
}
Koa.tags ["admin", "mangadex", "subscriptions"]
delete "/api/admin/mangadex/subscriptions/:id" do |env|
begin
id = env.params.url["id"].to_i64
Storage.default.delete_subscription id, get_username env
send_json env, {
"success" => true,
"error" => nil,
}.to_json
rescue e
Logger.error e
send_json env, {
"success" => false,
"error" => e.message,
}.to_json
end
end
Koa.describe "Triggers an update for a MangaDex subscription identified by `id`", <<-MD
Does nothing if the subscription was not created by the current user.
MD
Koa.response 200, schema: {
"success" => Bool,
"error" => String?,
}
Koa.tags ["admin", "mangadex", "subscriptions"]
post "/api/admin/mangadex/subscriptions/check/:id" do |env|
begin
id = env.params.url["id"].to_i64
username = get_username env
sub = Storage.default.get_subscription id, username
unless sub
raise "Subscription with id #{id} not found under user #{username}"
end
spawn do
sub.check_for_updates
end
send_json env, {
"success" => true,
"error" => nil,
}.to_json
rescue e
Logger.error e
send_json env, {
"success" => false,
"error" => e.message,
}.to_json
end
end
doc = Koa.generate
@@api_json = doc.to_json if doc

View File

@@ -30,8 +30,7 @@ struct MainRouter
else
redirect env, "/"
end
rescue e
Logger.error e
rescue
redirect env, "/login"
end
end
@@ -41,7 +40,7 @@ struct MainRouter
username = get_username env
sort_opt = SortOptions.from_info_json Library.default.dir, username
get_and_save_sort_opt Library.default.dir
get_sort_opt
titles = Library.default.sorted_titles username, sort_opt
percentage = titles.map &.load_percentage username
@@ -59,12 +58,12 @@ struct MainRouter
username = get_username env
sort_opt = SortOptions.from_info_json title.dir, username
get_and_save_sort_opt title.dir
get_sort_opt
entries = title.sorted_entries username, sort_opt
percentage = title.load_percentage_for_all_entries username, sort_opt
title_percentage = title.titles.map &.load_percentage username
layout "title"
rescue e
Logger.error e
@@ -72,6 +71,11 @@ struct MainRouter
end
end
get "/download" do |env|
mangadex_base_url = Config.current.mangadex["base_url"]
layout "download"
end
get "/download/plugins" do |env|
begin
id = env.params.query["plugin"]?
@@ -91,6 +95,12 @@ struct MainRouter
end
end
get "/download/subscription" do |env|
mangadex_base_url = Config.current.mangadex["base_url"]
username = get_username env
layout "subscription"
end
get "/" do |env|
begin
username = get_username env

View File

@@ -5,6 +5,7 @@ require "base64"
require "./util/*"
require "mg"
require "../migration/*"
require "./subscription"
def hash_password(pw)
Crypto::Bcrypt::Password.create(pw).to_s
@@ -14,6 +15,9 @@ def verify_password(hash, pw)
(Crypto::Bcrypt::Password.new hash).verify pw
end
SUB_ATTR = %w(manga_id language group_id min_volume max_volume min_chapter
max_chapter username)
class Storage
@@insert_entry_ids = [] of IDTuple
@@insert_title_ids = [] of IDTuple
@@ -428,21 +432,12 @@ class Storage
end
end
# Mark titles and entries that no longer exist on the file system as
# unavailable. By supplying `ids_candidates` and `titles_candidates`, it
# only checks the existence of the candidate titles/entries to speed up
# the process.
def mark_unavailable(ids_candidates : Array(String)?,
titles_candidates : Array(String)?)
def mark_unavailable
MainFiber.run do
get_db do |db|
# Detect dangling entry IDs
trash_ids = [] of String
query = "select path, id from ids where unavailable = 0"
unless ids_candidates.nil?
query += " and id in (#{ids_candidates.join "," { |i| "'#{i}'" }})"
end
db.query query do |rs|
db.query "select path, id from ids where unavailable = 0" do |rs|
rs.each do
path = rs.read String
fullpath = Path.new(path).expand(Config.current.library_path).to_s
@@ -458,11 +453,7 @@ class Storage
# Detect dangling title IDs
trash_titles = [] of String
query = "select path, id from titles where unavailable = 0"
unless titles_candidates.nil?
query += " and id in (#{titles_candidates.join "," { |i| "'#{i}'" }})"
end
db.query query do |rs|
db.query "select path, id from titles where unavailable = 0" do |rs|
rs.each do
path = rs.read String
fullpath = Path.new(path).expand(Config.current.library_path).to_s
@@ -558,6 +549,70 @@ class Storage
{token, expires}
end
def save_subscription(sub : Subscription)
MainFiber.run do
get_db do |db|
{% begin %}
db.exec "insert into subscription (#{SUB_ATTR.join ","}, " \
"last_checked, created_at) values " \
"(#{Array.new(SUB_ATTR.size + 2, "?").join ","})",
{% for type in SUB_ATTR %}
sub.{{type.id}},
{% end %}
sub.last_checked.to_unix, sub.created_at.to_unix
{% end %}
end
end
end
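# For reference, the macro above expands to a single statement of the form
# below (derived from SUB_ATTR plus the two timestamp columns):
#
# db.exec "insert into subscription (manga_id,language,group_id,min_volume," \
#   "max_volume,min_chapter,max_chapter,username, last_checked, created_at) " \
#   "values (?,?,?,?,?,?,?,?,?,?)",
#   sub.manga_id, sub.language, sub.group_id, sub.min_volume, sub.max_volume,
#   sub.min_chapter, sub.max_chapter, sub.username,
#   sub.last_checked.to_unix, sub.created_at.to_unix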
def subscriptions : Array(Subscription)
subs = [] of Subscription
MainFiber.run do
get_db do |db|
db.query "select * from subscription" do |rs|
subs += Subscription.from_rs rs
end
end
end
subs
end
def delete_subscription(id : Int64, username : String)
MainFiber.run do
get_db do |db|
db.exec "delete from subscription where id = (?) and username = (?)",
id, username
end
end
end
def get_subscription(id : Int64, username : String) : Subscription?
sub = nil
MainFiber.run do
get_db do |db|
db.query "select * from subscription where id = (?) and " \
"username = (?) limit 1", id, username do |rs|
sub = Subscription.from_rs(rs).first?
end
end
end
sub
end
def update_subscription_last_checked(id : Int64? = nil)
MainFiber.run do
get_db do |db|
if id
db.exec "update subscription set last_checked = (?) where id = (?)",
Time.utc.to_unix, id
else
db.exec "update subscription set last_checked = (?)",
Time.utc.to_unix
end
end
end
end
def close
MainFiber.run do
unless @db.nil?

View File

@@ -48,32 +48,4 @@ class Dir
end
Digest::CRC32.checksum(signatures.sort.join).to_u64
end
# Returns the contents signature of the directory at dirname, used to decide
# whether a rescan is needed.
# Rescan conditions:
# - A file is added, moved, removed, or renamed (including files in nested
# directories)
def self.contents_signature(dirname, cache = {} of String => String) : String
return cache[dirname] if cache[dirname]?
Fiber.yield
signatures = [] of String
self.open dirname do |dir|
dir.entries.sort.each do |fn|
next if fn.starts_with? "."
path = File.join dirname, fn
if File.directory? path
signatures << Dir.contents_signature path, cache
else
# Only add its signature value to `signatures` when it is a
# supported file
signatures << fn if is_supported_file fn
end
Fiber.yield
end
end
hash = Digest::SHA1.hexdigest(signatures.join)
cache[dirname] = hash
hash
end
end
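# Hedged usage sketch (the path and stored value are placeholders): compare
# the signature against a previously stored one to decide whether the
# directory needs rescanning, reusing the cache across nested calls.
#
# cache = {} of String => String
# sig = Dir.contents_signature "/path/to/library", cache
# rescan_needed = sig != previously_stored_signature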

View File

@@ -35,11 +35,6 @@ def register_mime_types
# FontAwesome fonts
".woff" => "font/woff",
".woff2" => "font/woff2",
# Supported image formats. JPG, PNG, GIF, WebP, and SVG are already
# defined by Crystal in `MIME.DEFAULT_TYPES`
".apng" => "image/apng",
".avif" => "image/avif",
}.each do |k, v|
MIME.register k, v
end
@@ -125,22 +120,3 @@ class String
match / s.size
end
end
# Does the following:
# - turns space-like characters into normal whitespaces ( )
# - strips and collapses spaces
# - removes ASCII control characters
# - replaces slashes (/) with underscores (_)
# - removes leading dots (.)
# - removes the following special characters: \:*?"<>|
#
# If the sanitized string is empty, returns a random string instead.
def sanitize_filename(str : String) : String
sanitized = str
.gsub(/\s+/, " ")
.strip
.gsub(/\//, "_")
.gsub(/^[\.\s]+/, "")
.gsub(/[\177\000-\031\\:\*\?\"<>\|]/, "")
sanitized.size > 0 ? sanitized : random_str
end

View File

@@ -107,24 +107,23 @@ macro get_sort_opt
end
end
macro get_and_save_sort_opt(dir)
sort_method = env.params.query["sort"]?
# Returns an authorized client
def get_client(username : String) : MangaDex::Client
token, expires = Storage.default.get_md_token username
if sort_method
is_ascending = true
ascend = env.params.query["ascend"]?
if ascend && ascend.to_i? == 0
is_ascending = false
end
sort_opt = SortOptions.new sort_method, is_ascending
TitleInfo.new {{dir}} do |info|
info.sort_by[username] = sort_opt.to_tuple
info.save
end
unless expires && token
raise "No token found for user #{username}"
end
client = MangaDex::Client.from_config
client.token = token
client.token_expires = expires
client
end
def get_client(env) : MangaDex::Client
get_client get_username env
end
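# Hedged usage sketch: inside a route handler, `get_client(env)` resolves the
# logged-in user and raises if no MangaDex token was stored for them via
# `save_md_token`, so callers should be ready to rescue.
#
# client = get_client env
# manga = client.manga "12345" # "12345" is a placeholder ID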
module HTTP

View File

@@ -33,6 +33,7 @@
<option>System</option>
</select>
</li>
<li><a class="uk-link-reset" href="<%= base_url %>admin/mangadex">Connect to MangaDex</a></li>
</ul>
<hr class="uk-divider-icon">

View File

@@ -5,61 +5,63 @@
<button class="uk-button uk-button-default" @click="load()" :disabled="loading">Refresh Queue</button>
<button class="uk-button uk-button-default" x-show="paused !== undefined" x-text="paused ? 'Resume Download' : 'Pause Download'" @click="toggle()" :disabled="toggling"></button>
</div>
<table class="uk-table uk-table-striped uk-overflow-auto">
<thead>
<tr>
<th>Chapter</th>
<th>Manga</th>
<th>Progress</th>
<th>Time</th>
<th>Status</th>
<th>Plugin</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
<template x-for="job in jobs" :key="job">
<tr :id="`chapter-${job.id}`">
<template x-if="job.plugin_id">
<td x-text="job.title"></td>
</template>
<template x-if="!job.plugin_id">
<td><a :href="`<%= mangadex_base_url %>/chapter/${job.id}`" x-text="job.title"></td>
</template>
<template x-if="job.plugin_id">
<td x-text="job.manga_title"></td>
</template>
<template x-if="!job.plugin_id">
<td><a :href="`<%= mangadex_base_url %>/manga/${job.manga_id}`" x-text="job.manga_title"></td>
</template>
<td x-text="`${job.success_count}/${job.pages}`"></td>
<td x-text="`${moment(job.time).fromNow()}`"></td>
<td>
<span :class="statusClass(job.status)" x-text="job.status"></span>
<template x-if="job.status_message.length > 0">
<div class="uk-inline">
<span uk-icon="info"></span>
<div uk-dropdown x-text="job.status_message" style="white-space: pre-line;"></div>
</div>
</template>
</td>
<td x-text="`${job.plugin_id || ''}`"></td>
<td>
<a @click="jobAction('delete', $event)" uk-icon="trash" uk-tooltip="Delete"></a>
<template x-if="job.status_message.length > 0">
<a @click="jobAction('retry', $event)" uk-icon="refresh" uk-tooltip="Retry"></a>
</template>
</td>
<div class="uk-overflow-auto">
<table class="uk-table uk-table-striped">
<thead>
<tr>
<th>Chapter</th>
<th>Manga</th>
<th>Progress</th>
<th>Time</th>
<th>Status</th>
<th>Plugin</th>
<th>Actions</th>
</tr>
</template>
</tbody>
</table>
</div>
</thead>
<tbody>
<template x-for="job in jobs" :key="job">
<tr :id="`chapter-${job.id}`">
<template x-if="job.plugin_id">
<td x-text="job.title"></td>
</template>
<template x-if="!job.plugin_id">
<td><a :href="`<%= mangadex_base_url %>/chapter/${job.id}`" x-text="job.title"></td>
</template>
<template x-if="job.plugin_id">
<td x-text="job.manga_title"></td>
</template>
<template x-if="!job.plugin_id">
<td><a :href="`<%= mangadex_base_url %>/manga/${job.manga_id}`" x-text="job.manga_title"></td>
</template>
<td x-text="`${job.success_count}/${job.pages}`"></td>
<td x-text="`${moment(job.time).fromNow()}`"></td>
<td>
<span :class="statusClass(job.status)" x-text="job.status"></span>
<template x-if="job.status_message.length > 0">
<div class="uk-inline">
<span uk-icon="info"></span>
<div uk-dropdown x-text="job.status_message" style="white-space: pre-line;"></div>
</div>
</template>
</td>
<td x-text="`${job.plugin_id || ''}`"></td>
<td>
<a @click="jobAction('delete', $event)" uk-icon="trash" uk-tooltip="Delete"></a>
<template x-if="job.status_message.length > 0">
<a @click="jobAction('retry', $event)" uk-icon="refresh" uk-tooltip="Retry"></a>
</template>
</td>
</tr>
</template>
</tbody>
</table>
</div>
</div>
<% content_for "script" do %>

View File

@@ -1,162 +1,170 @@
<h2 class=uk-title>Download from MangaDex</h2>
<div x-data="downloadComponent()" x-init="init()">
<div class="uk-grid-small" uk-grid style="margin-bottom:40px;">
<div class="uk-width-expand">
<input class="uk-input" type="text" :placeholder="searchAvailable ? 'Search MangaDex or enter a manga ID/URL' : 'MangaDex manga ID or URL'" x-model="searchInput" @keydown.enter.debounce="search()">
</div>
<div class="uk-width-auto">
<div uk-spinner class="uk-align-center" x-show="loading" x-cloak></div>
<button class="uk-button uk-button-default" x-show="!loading" @click="search()">Search</button>
</div>
</div>
<template x-if="mangaAry">
<div>
<p x-show="mangaAry.length === 0">No matching manga found.</p>
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
<template x-for="manga in mangaAry" :key="manga.id">
<div class="item" :data-id="manga.id" @click="chooseManga(manga)">
<div class="uk-card uk-card-default">
<div class="uk-card-media-top uk-inline">
<img uk-img :data-src="manga.mainCover">
</div>
<div class="uk-card-body">
<h3 class="uk-card-title break-word uk-margin-remove-bottom free-height" x-text="manga.title"></h3>
<p class="uk-text-meta" x-text="`ID: ${manga.id}`"></p>
</div>
</div>
</div>
</template>
</div>
</div>
</template>
<div x-show="data && data.chapters" x-cloak>
<div class"uk-grid-small" uk-grid>
<div class="uk-width-1-4@s">
<img :src="data.mainCover">
</div>
<div class="uk-width-1-4@s">
<p>Title: <a :href="`<%= mangadex_base_url %>/manga/${data.id}`" x-text="data.title"></a></p>
<p x-text="`Artist: ${data.artist}`"></p>
<p x-text="`Author: ${data.author}`"></p>
</div>
<div class="uk-form-stacked uk-width-1-2@s" id="filters">
<p class="uk-text-lead uk-margin-remove-bottom">Filter Chapters</p>
<p class="uk-text-meta uk-margin-remove-top" x-text="`${chapters.length} chapters found`"></p>
<div class="uk-margin">
<label class="uk-form-label">Language</label>
<div class="uk-form-controls">
<select class="uk-select filter-field" x-model="langChoice" @change="filtersUpdated()">
<template x-for="lang in languages" :key="lang">
<option x-text="lang"></option>
</template>
</select>
</div>
<div class="uk-grid-small" uk-grid style="margin-bottom:40px;">
<div class="uk-width-expand">
<input class="uk-input" type="text" :placeholder="searchAvailable ? 'Search MangaDex or enter a manga ID/URL' : 'MangaDex manga ID or URL'" x-model="searchInput" @keydown.enter.debounce="search()">
</div>
<div class="uk-margin">
<label class="uk-form-label">Group</label>
<div class="uk-form-controls">
<select class="uk-select filter-field" x-model="groupChoice" @change="filtersUpdated()">
<template x-for="group in groups" :key="group">
<option x-text="group"></option>
</template>
</select>
</div>
<div class="uk-width-auto">
<div uk-spinner class="uk-align-center" x-show="loading" x-cloak></div>
<button class="uk-button uk-button-default" x-show="!loading" @click="search()">Search</button>
</div>
<div class="uk-margin">
<label class="uk-form-label">Volume</label>
<div class="uk-form-controls">
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="volumeRange" @keydown.enter="filtersUpdated()">
</div>
</div>
<div class="uk-margin">
<label class="uk-form-label">Chapter</label>
<div class="uk-form-controls">
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="chapterRange" @keydown.enter="filtersUpdated()">
</div>
</div>
</div>
</div>
<div class="uk-margin">
<div class="uk-margin">
<button class="uk-button uk-button-default" @click="selectAll()">Select All</button>
<button class="uk-button uk-button-default" @click="clearSelection()">Clear Selections</button>
<button class="uk-button uk-button-primary" @click="download()" x-show="!addingToDownload">Download Selected</button>
<div uk-spinner class="uk-margin-left" x-show="addingToDownload"></div>
</div>
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
</div>
<p x-text="`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters. Please use the filter options above to narrow down your search.`" x-show="chapters.length > chaptersLimit"></p>
<table class="uk-table uk-table-striped uk-overflow-auto" x-show="chapters.length <= chaptersLimit">
<thead>
<tr>
<th>ID</th>
<th>Title</th>
<th>Language</th>
<th>Group</th>
<th>Volume</th>
<th>Chapter</th>
<th>Timestamp</th>
</tr>
</thead>
<template x-if="mangaAry">
<div>
<p x-show="mangaAry.length === 0">No matching manga found.</p>
<template x-if="chapters.length <= chaptersLimit">
<tbody id="selectable">
<template x-for="chp in chapters" :key="chp">
<tr class="ui-widget-content">
<td><a :href="`<%= mangadex_base_url %>/chapter/${chp.id}`" x-text="chp.id"></a></td>
<td x-text="chp.title"></td>
<td x-text="chp.language"></td>
<td>
<template x-for="grp in Object.entries(chp.groups)">
<div>
<a :href="`<%= mangadex_base_url %>/group/${grp[1]}`" x-text="grp[0]"></a>
</div>
<div class="uk-child-width-1-4@m uk-child-width-1-2" uk-grid>
<template x-for="manga in mangaAry" :key="manga.id">
<div class="item" :data-id="manga.id" @click="chooseManga(manga)">
<div class="uk-card uk-card-default">
<div class="uk-card-media-top uk-inline">
<img uk-img :data-src="manga.mainCover">
</div>
<div class="uk-card-body">
<h3 class="uk-card-title break-word uk-margin-remove-bottom free-height" x-text="manga.title"></h3>
<p class="uk-text-meta" x-text="`ID: ${manga.id}`"></p>
</div>
</div>
</div>
</template>
</td>
<td x-text="chp.volume"></td>
<td x-text="chp.chapter"></td>
<td x-text="`${moment.unix(chp.timestamp).fromNow()}`"></td>
</tr>
</template>
</tbody>
</template>
</table>
</div>
<div id="modal" class="uk-flex-top" uk-modal="container: false">
<div class="uk-modal-dialog uk-margin-auto-vertical">
<button class="uk-modal-close-default" type="button" uk-close></button>
<div class="uk-modal-header">
<h3 class="uk-modal-title break-word" x-text="candidateManga.title"></h3>
</div>
<div class="uk-modal-body">
<div class="uk-grid">
<div class="uk-width-1-3@s">
<img uk-img data-width data-height :src="candidateManga.mainCover" style="width:100%;margin-bottom:10px;">
<a :href="`<%= mangadex_base_url %>/manga/${candidateManga.id}`" x-text="`ID: ${candidateManga.id}`" class="uk-link-muted"></a>
</div>
<div class="uk-width-2-3@s" uk-overflow-auto>
<p x-text="candidateManga.description"></p>
</div>
</div>
</div>
</template>
<div x-show="data && data.chapters" x-cloak>
<div class"uk-grid-small" uk-grid>
<div class="uk-width-1-4@s">
<img :src="data.mainCover">
</div>
<div class="uk-width-1-4@s">
<p>Title: <a :href="`<%= mangadex_base_url %>/manga/${data.id}`" x-text="data.title"></a></p>
<p x-text="`Artist: ${data.artist}`"></p>
<p x-text="`Author: ${data.author}`"></p>
</div>
<div class="uk-form-stacked uk-width-1-2@s" id="filters">
<p class="uk-text-lead uk-margin-remove-bottom">
<span>Filter Chapters</span>
<button class="uk-icon-button uk-align-right" uk-icon="rss" uk-tooltip="Subscribe" x-show="searchAvailable" @click="subscribe()"></button>
</p>
<p class="uk-text-meta uk-margin-remove-top" x-text="`${chapters.length} chapters found`"></p>
<div class="uk-margin">
<label class="uk-form-label">Language</label>
<div class="uk-form-controls">
<select class="uk-select filter-field" x-model="langChoice" @change="filtersUpdated()">
<template x-for="lang in languages" :key="lang">
<option x-text="lang"></option>
</template>
</select>
</div>
</div>
<div class="uk-margin">
<label class="uk-form-label">Group</label>
<div class="uk-form-controls">
<select class="uk-select filter-field" x-model="groupChoice" @change="filtersUpdated()">
<template x-for="group in groups" :key="group">
<option x-text="group"></option>
</template>
</select>
</div>
</div>
<div class="uk-margin">
<label class="uk-form-label">Volume</label>
<div class="uk-form-controls">
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="volumeRange" @keydown.enter="filtersUpdated()">
</div>
</div>
<div class="uk-margin">
<label class="uk-form-label">Chapter</label>
<div class="uk-form-controls">
<input class="uk-input filter-field" type="text" placeholder="e.g., 127, 10-14, >30, <=212, or leave it empty." x-model="chapterRange" @keydown.enter="filtersUpdated()">
</div>
</div>
</div>
</div>
<div class="uk-margin">
<div class="uk-margin">
<button class="uk-button uk-button-default" @click="selectAll()">Select All</button>
<button class="uk-button uk-button-default" @click="clearSelection()">Clear Selections</button>
<button class="uk-button uk-button-primary" @click="download()" x-show="!addingToDownload">Download Selected</button>
<div uk-spinner class="uk-margin-left" x-show="addingToDownload"></div>
</div>
<p class="uk-text-meta">Click on a table row to select the chapter. Drag your mouse over multiple rows to select them all. Hold Ctrl to make multiple non-adjacent selections.</p>
</div>
<p x-text="`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters. Please use the filter options above to narrow down your search.`" x-show="chapters.length > chaptersLimit"></p>
<div class="uk-overflow-auto">
<table class="uk-table uk-table-striped" x-show="chapters.length <= chaptersLimit">
<thead>
<tr>
<th>ID</th>
<th>Title</th>
<th>Language</th>
<th>Group</th>
<th>Volume</th>
<th>Chapter</th>
<th>Timestamp</th>
</tr>
</thead>
<template x-if="chapters.length <= chaptersLimit">
<tbody id="selectable">
<template x-for="chp in chapters" :key="chp">
<tr class="ui-widget-content">
<td><a :href="`<%= mangadex_base_url %>/chapter/${chp.id}`" x-text="chp.id"></a></td>
<td x-text="chp.title"></td>
<td x-text="chp.language"></td>
<td>
<template x-for="grp in Object.entries(chp.groups)">
<div>
<a :href="`<%= mangadex_base_url %>/group/${grp[1]}`" x-text="grp[0]"></a>
</div>
</template>
</td>
<td x-text="chp.volume"></td>
<td x-text="chp.chapter"></td>
<td x-text="`${moment.unix(chp.timestamp).fromNow()}`"></td>
</tr>
</template>
</tbody>
</template>
</table>
</div>
</div>
<div id="modal" class="uk-flex-top" uk-modal="container: false">
<div class="uk-modal-dialog uk-margin-auto-vertical">
<button class="uk-modal-close-default" type="button" uk-close></button>
<div class="uk-modal-header">
<h3 class="uk-modal-title break-word" x-text="candidateManga.title"></h3>
</div>
<div class="uk-modal-body">
<div class="uk-grid">
<div class="uk-width-1-3@s">
<img uk-img data-width data-height :src="candidateManga.mainCover" style="width:100%;margin-bottom:10px;">
<a :href="`<%= mangadex_base_url %>/manga/${candidateManga.id}`" x-text="`ID: ${candidateManga.id}`" class="uk-link-muted"></a>
</div>
<div class="uk-width-2-3@s">
<p x-text="candidateManga.description"></p>
</div>
</div>
</div>
<div class="uk-modal-footer">
<button class="uk-button uk-button-primary" type="button" @click="confirmManga(candidateManga.id)">Choose</button>
</div>
</div>
</div>
<div class="uk-modal-footer">
<button class="uk-button uk-button-primary" type="button" @click="confirmManga(candidateManga.id)">Choose</button>
</div>
</div>
</div>
</div>
<% content_for "script" do %>
<%= render_component "moment" %>
<%= render_component "jquery-ui" %>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/download.js"></script>
<%= render_component "moment" %>
<%= render_component "jquery-ui" %>
<script>
const mangadex_base_url = "<%= mangadex_base_url %>";
</script>
<script src="<%= base_url %>js/alert.js"></script>
<script src="<%= base_url %>js/download.js"></script>
<% end %>

View File

@@ -17,8 +17,10 @@
<li class="uk-parent">
<a href="#">Download</a>
<ul class="uk-nav-sub">
<li><a href="<%= base_url %>download">MangaDex</a></li>
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
<li><a href="<%= base_url %>download/subscription">Subscription Manager</a></li>
</ul>
</li>
<% end %>
@@ -48,9 +50,11 @@
<div class="uk-navbar-dropdown">
<ul class="uk-nav uk-navbar-dropdown-nav">
<li class="uk-nav-header">Source</li>
<li><a href="<%= base_url %>download">MangaDex</a></li>
<li><a href="<%= base_url %>download/plugins">Plugins</a></li>
<li class="uk-nav-divider"></li>
<li><a href="<%= base_url %>admin/downloads">Download Manager</a></li>
<li><a href="<%= base_url %>download/subscription">Subscription Manager</a></li>
</ul>
</div>
</li>

View File

@@ -3,34 +3,36 @@
<div x-show="!empty">
<p>The following items were present in your library, but now we can't find them anymore. If you deleted them mistakenly, try to recover the files or folders, put them back to where they were, and rescan the library. Otherwise, you can safely delete them and the associated metadata using the buttons below to free up database space.</p>
<button class="uk-button uk-button-danger" @click="rmAll()">Delete All</button>
<table class="uk-table uk-table-striped uk-overflow-auto">
<thead>
<tr>
<th>Type</th>
<th>Relative Path</th>
<th>ID</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
<template x-for="title in titles" :key="title">
<tr :id="`title-${title.id}`">
<td>Title</td>
<td x-text="title.path"></td>
<td x-text="title.id"></td>
<td><a @click="rm($event)" uk-icon="trash"></a></td>
<div class="uk-overflow-auto">
<table class="uk-table uk-table-striped">
<thead>
<tr>
<th>Type</th>
<th>Relative Path</th>
<th>ID</th>
<th>Actions</th>
</tr>
</template>
<template x-for="entry in entries" :key="entry">
<tr :id="`entry-${entry.id}`">
<td>Entry</td>
<td x-text="entry.path"></td>
<td x-text="entry.id"></td>
<td><a @click="rm($event)" uk-icon="trash"></a></td>
</tr>
</template>
</tbody>
</table>
</thead>
<tbody>
<template x-for="title in titles" :key="title">
<tr :id="`title-${title.id}`">
<td>Title</td>
<td x-text="title.path"></td>
<td x-text="title.id"></td>
<td><a @click="rm($event)" uk-icon="trash"></a></td>
</tr>
</template>
<template x-for="entry in entries" :key="entry">
<tr :id="`entry-${entry.id}`">
<td>Entry</td>
<td x-text="entry.path"></td>
<td x-text="entry.id"></td>
<td><a @click="rm($event)" uk-icon="trash"></a></td>
</tr>
</template>
</tbody>
</table>
</div>
</div>
</div>

View File

@@ -21,7 +21,7 @@
<div
:class="{'uk-container': true, 'uk-container-small': mode === 'continuous', 'uk-container-expand': mode !== 'continuous'}">
<div x-show="!loading && mode === 'continuous'" x-cloak>
<template x-if="!loading && mode === 'continuous'" x-for="item in items">
<template x-for="item in items">
<img
uk-img
:class="{'uk-align-center': true, 'spine': item.width < 50}"
@@ -50,9 +50,6 @@
width:${mode === 'width' ? '100vw' : 'auto'};
height:${mode === 'height' ? '100vh' : 'auto'};
margin-bottom:0;
max-width:100%;
max-height:100%;
object-fit: contain;
`" />
<div style="position:absolute;z-index:1; top:0;left:0; width:30%;height:100%;" @click="flipPage(false)"></div>
@@ -101,19 +98,6 @@
</div>
</div>
<div class="uk-margin uk-form-horizontal" x-show="mode !== 'continuous'">
<label class="uk-form-label" for="enable-flip-animation">Enable Flip Animation</label>
<div class="uk-form-controls">
<input id="enable-flip-animation" class="uk-checkbox" type="checkbox" x-model="enableFlipAnimation" @change="enableFlipAnimationChanged()">
</div>
</div>
<div class="uk-margin uk-form-horizontal" x-show="mode !== 'continuous'">
<label class="uk-form-label" for="preload-lookahead" x-text="`Preload Image: ${preloadLookahead} page(s)`"></label>
<div class="uk-form-controls">
<input id="preload-lookahead" class="uk-range" type="range" min="0" max="5" step="1" x-model.number="preloadLookahead" @change="preloadLookaheadChanged()">
</div>
</div>
<hr class="uk-divider-icon">
<div class="uk-margin">