commit ed544b52a8
root, 2022-10-18 07:04:30 +02:00

37 changed files with 272 additions and 174 deletions

.gitattributes (new file)

@@ -0,0 +1,5 @@
+# Auto detect text files and perform LF normalization
+* text=auto
+
+# Shell scripts require LF
+*.sh text eol=lf


@@ -1,5 +1,8 @@
 name: Build Docker containers
-on: [push]
+on:
+  push:
+    branches:
+      - master
 jobs:
   build-client:
     name: Build and push client/ Docker container


@@ -1,28 +1,29 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
+    rev: v4.0.1
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
       - id: check-yaml
       - id: mixed-line-ending
   - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.1.9
+    rev: v1.1.10
     hooks:
       - id: remove-tabs
   - repo: https://github.com/psf/black
-    rev: 20.8b1
+    rev: '22.3.0'
     hooks:
       - id: black
         files: 'server/'
         types: [python]
-        language_version: python3.8
+        language_version: python3.9
-  - repo: https://github.com/timothycrosley/isort
-    rev: '5.4.2'
+  - repo: https://github.com/PyCQA/isort
+    rev: '5.10.1'
     hooks:
       - id: isort
         files: 'server/'
@@ -31,8 +32,8 @@ repos:
         additional_dependencies:
           - toml
-  - repo: https://github.com/prettier/prettier
-    rev: '2.1.1'
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: v2.5.0
     hooks:
       - id: prettier
         files: client/js/
@@ -40,7 +41,7 @@ repos:
         args: ['--config', 'client/.prettierrc.yml']
   - repo: https://github.com/pre-commit/mirrors-eslint
-    rev: v7.8.0
+    rev: v8.3.0
     hooks:
       - id: eslint
         files: client/js/
@@ -48,8 +49,8 @@ repos:
         additional_dependencies:
           - eslint-config-prettier
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: '3.8.3'
+  - repo: https://gitlab.com/PyCQA/flake8
+    rev: '4.0.1'
     hooks:
       - id: flake8
         files: server/szurubooru/


@@ -3,7 +3,7 @@
 Szurubooru is an image board engine inspired by services such as Danbooru,
 Gelbooru and Moebooru dedicated for small and medium communities. Its name [has
 its roots in Polish language and has onomatopeic meaning of scraping or
-scrubbing](http://sjp.pwn.pl/sjp/;2527372). It is pronounced as *shoorubooru*.
+scrubbing](https://sjp.pwn.pl/sjp/;2527372). It is pronounced as *shoorubooru*.

 ## Features


@@ -2,7 +2,6 @@ FROM node:lts as builder
 WORKDIR /opt/app

 COPY package.json package-lock.json ./
-RUN npm install -g npm
 RUN npm install

 COPY . ./


@@ -14,9 +14,11 @@ $cancel-button-color = tomato
     &.inactive input[type=submit],
     &.inactive .skip-duplicates
     &.inactive .always-upload-similar
+    &.inactive .pause-remain-on-error
     &.uploading input[type=submit],
     &.uploading .skip-duplicates,
     &.uploading .always-upload-similar
+    &.uploading .pause-remain-on-error
     &:not(.uploading) .cancel
         display: none
@@ -44,6 +46,9 @@ $cancel-button-color = tomato
     .always-upload-similar
         margin-left: 1em

+    .pause-remain-on-error
+        margin-left: 1em
+
     form>.messages
         margin-top: 1em


@@ -7,7 +7,7 @@
         <span class='skip-duplicates'>
             <%= ctx.makeCheckbox({
-                text: 'Skip duplicates',
+                text: 'Skip duplicate',
                 name: 'skip-duplicates',
                 checked: false,
             }) %>
@@ -15,12 +15,20 @@
         <span class='always-upload-similar'>
             <%= ctx.makeCheckbox({
-                text: 'Always upload similar',
+                text: 'Force upload similar',
                 name: 'always-upload-similar',
                 checked: false,
             }) %>
         </span>
+        <span class='pause-remain-on-error'>
+            <%= ctx.makeCheckbox({
+                text: 'Pause on error',
+                name: 'pause-remain-on-error',
+                checked: true,
+            }) %>
+        </span>
         <input type='button' value='Cancel' class='cancel'/>
     </div>


@@ -90,21 +90,30 @@ class PostUploadController {
                             uploadable
                         );
                     }
+                    if (e.detail.pauseRemainOnError) {
+                        return Promise.reject();
+                    }
                 })
             ),
             Promise.resolve()
         )
             .then(() => {
                 if (anyFailures) {
-                    this._view.showError(genericErrorMessage);
-                    this._view.enableForm();
-                } else {
+                    return Promise.reject();
+                }
+            })
+            .then(
+                () => {
                     this._view.clearMessages();
                     misc.disableExitConfirmation();
                     const ctx = router.show(uri.formatClientLink("posts"));
                     ctx.controller.showSuccess("Posts uploaded.");
+                },
+                (error) => {
+                    this._view.showError(genericErrorMessage);
+                    this._view.enableForm();
                 }
-            });
+            );
     }

     _uploadSinglePost(uploadable, skipDuplicates, alwaysUploadSimilar) {


@@ -31,9 +31,8 @@ class UserController {
             userTokenPromise = UserToken.get(userName).then(
                 (userTokens) => {
                     return userTokens.map((token) => {
-                        token.isCurrentAuthToken = api.isCurrentAuthToken(
-                            token
-                        );
+                        token.isCurrentAuthToken =
+                            api.isCurrentAuthToken(token);
                         return token;
                     });
                 },


@@ -45,9 +45,8 @@ class ExpanderControl {
     // eslint-disable-next-line accessor-pairs
     set title(newTitle) {
         if (this._expanderNode) {
-            this._expanderNode.querySelector(
-                "header span"
-            ).textContent = newTitle;
+            this._expanderNode.querySelector("header span").textContent =
+                newTitle;
         }
     }


@@ -203,9 +203,8 @@ class PostEditSidebarControl extends events.EventTarget {
         );

         if (this._formNode) {
-            const inputNodes = this._formNode.querySelectorAll(
-                "input, textarea"
-            );
+            const inputNodes =
+                this._formNode.querySelectorAll("input, textarea");
             for (let node of inputNodes) {
                 node.addEventListener("change", (e) =>
                     this.dispatchEvent(new CustomEvent("change"))


@@ -727,9 +727,8 @@ class PostNotesOverlayControl extends events.EventTarget {
     }

     _showNoteText(note) {
-        this._textNode.querySelector(
-            ".wrapper"
-        ).innerHTML = misc.formatMarkdown(note.text);
+        this._textNode.querySelector(".wrapper").innerHTML =
+            misc.formatMarkdown(note.text);
         this._textNode.style.display = "block";
         const bodyRect = document.body.getBoundingClientRect();
         const noteRect = this._textNode.getBoundingClientRect();


@@ -65,17 +65,6 @@ class TagPermalinkFixWrapper extends BaseMarkdownWrapper {
 // post, user and tags permalinks
 class EntityPermalinkWrapper extends BaseMarkdownWrapper {
     preprocess(text) {
-        // URL-based permalinks
-        text = text.replace(new RegExp("\\b/post/(\\d+)/?\\b", "g"), "@$1");
-        text = text.replace(
-            new RegExp("\\b/tag/([a-zA-Z0-9_-]+?)/?", "g"),
-            "#$1"
-        );
-        text = text.replace(
-            new RegExp("\\b/user/([a-zA-Z0-9_-]+?)/?", "g"),
-            "+$1"
-        );
         text = text.replace(
             /(^|^\(|(?:[^\]])\(|[\s<>\[\]\)])([+#@][a-zA-Z0-9_-]+)/g,
             "$1[$2]($2)"
@@ -136,12 +125,8 @@ function createRenderer() {
     const renderer = new marked.Renderer();
     renderer.image = (href, title, alt) => {
-        let [
-            _,
-            url,
-            width,
-            height,
-        ] = /^(.+?)(?:\s=\s*(\d*)\s*x\s*(\d*)\s*)?$/.exec(href);
+        let [_, url, width, height] =
+            /^(.+?)(?:\s=\s*(\d*)\s*x\s*(\d*)\s*)?$/.exec(href);
         let res = '<img src="' + sanitize(url) + '" alt="' + sanitize(alt);
         if (width) {
             res += '" width="' + width;
@@ -174,7 +159,7 @@ function formatMarkdown(text) {
     for (let wrapper of wrappers) {
         text = wrapper.preprocess(text);
     }
-    text = marked(text, options);
+    text = marked.parse(text, options);
     wrappers.reverse();
     for (let wrapper of wrappers) {
         text = wrapper.postprocess(text);
@@ -200,7 +185,7 @@ function formatInlineMarkdown(text) {
     for (let wrapper of wrappers) {
         text = wrapper.preprocess(text);
     }
-    text = marked.inlineLexer(text, [], options);
+    text = marked.parseInline(text, options);
     wrappers.reverse();
     for (let wrapper of wrappers) {
         text = wrapper.postprocess(text);


@@ -25,9 +25,8 @@ class PostMainView {
         views.replaceContent(this._hostNode, sourceNode);
         views.syncScrollPosition();

-        const topNavigationNode = document.body.querySelector(
-            "#top-navigation"
-        );
+        const topNavigationNode =
+            document.body.querySelector("#top-navigation");

         this._postContentControl = new PostContentControl(
             postContainerNode,


@@ -285,7 +285,7 @@ class PostUploadView extends events.EventTarget {
             for (let uploadable of this._uploadables) {
                 this._updateUploadableFromDom(uploadable);
             }
-            this._submitButtonNode.value = "Resume upload";
+            this._submitButtonNode.value = "Resume";
             this._emit("submit");
         }
@@ -360,8 +360,10 @@ class PostUploadView extends events.EventTarget {
                 detail: {
                     uploadables: this._uploadables,
                     skipDuplicates: this._skipDuplicatesCheckboxNode.checked,
-                    alwaysUploadSimilar: this._alwaysUploadSimilarCheckboxNode
-                        .checked,
+                    alwaysUploadSimilar:
+                        this._alwaysUploadSimilarCheckboxNode.checked,
+                    pauseRemainOnError:
+                        this._pauseRemainOnErrorCheckboxNode.checked,
                 },
             })
         );
@@ -431,6 +433,12 @@ class PostUploadView extends events.EventTarget {
         );
     }

+    get _pauseRemainOnErrorCheckboxNode() {
+        return this._hostNode.querySelector(
+            "form [name=pause-remain-on-error]"
+        );
+    }
+
     get _submitButtonNode() {
         return this._hostNode.querySelector("form [type=submit]");
     }


@@ -253,9 +253,8 @@ class PostsHeaderView extends events.EventTarget {
         e.target.classList.toggle("disabled");
         const safety = e.target.getAttribute("data-safety");
         let browsingSettings = settings.get();
-        browsingSettings.listPosts[safety] = !browsingSettings.listPosts[
-            safety
-        ];
+        browsingSettings.listPosts[safety] =
+            !browsingSettings.listPosts[safety];
         settings.save(browsingSettings, true);
         this.dispatchEvent(
             new CustomEvent("navigate", {


@@ -72,9 +72,8 @@ class UserTokenView extends events.EventTarget {
     _evtDelete(e) {
         e.preventDefault();
-        const userToken = this._tokens[
-            parseInt(e.target.getAttribute("data-token-id"))
-        ];
+        const userToken =
+            this._tokens[parseInt(e.target.getAttribute("data-token-id"))];
         this.dispatchEvent(
             new CustomEvent("delete", {
                 detail: {
@@ -110,9 +109,8 @@ class UserTokenView extends events.EventTarget {
     _evtChangeNoteClick(e) {
         e.preventDefault();
-        const userToken = this._tokens[
-            parseInt(e.target.getAttribute("data-token-id"))
-        ];
+        const userToken =
+            this._tokens[parseInt(e.target.getAttribute("data-token-id"))];
         const text = window.prompt(
             "Please enter the new name:",
             userToken.note !== null ? userToken.note : undefined


@@ -10,7 +10,7 @@
         "font-awesome": "^4.7.0",
         "ios-inner-height": "^1.0.3",
         "js-cookie": "^2.2.0",
-        "marked": "^0.7.0",
+        "marked": "^4.0.10",
         "mousetrap": "^1.6.2",
         "nprogress": "^0.2.0",
         "superagent": "^3.8.3"
@@ -1506,9 +1506,9 @@
       "dev": true
     },
     "node_modules/cached-path-relative": {
-      "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.0.2.tgz",
-      "integrity": "sha512-5r2GqsoEb4qMTTN9J+WzXfjov+hjxT+j3u5K+kIVNIwAd99DLCJE9pBIMP1qVeybV6JiijL385Oz0DcYxfbOIg==",
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.1.0.tgz",
+      "integrity": "sha512-WF0LihfemtesFcJgO7xfOoOcnWzY/QHR4qeDqV44jPU3HTI54+LnfXK3SA27AVVGCdZFgjjFFaqUA9Jx7dMJZA==",
      "dev": true
     },
     "node_modules/call-bind": {
@@ -2997,14 +2997,14 @@
       "dev": true
     },
     "node_modules/marked": {
-      "version": "0.7.0",
-      "resolved": "https://registry.npmjs.org/marked/-/marked-0.7.0.tgz",
-      "integrity": "sha512-c+yYdCZJQrsRjTPhUx7VKkApw9bwDkNbHUKo1ovgcfDjb2kc8rLuRbIFyXL5WOEUwzSSKo3IXpph2K6DqB/KZg==",
+      "version": "4.0.10",
+      "resolved": "https://registry.npmjs.org/marked/-/marked-4.0.10.tgz",
+      "integrity": "sha512-+QvuFj0nGgO970fySghXGmuw+Fd0gD2x3+MqCWLIPf5oxdv1Ka6b2q+z9RP01P/IaKPMEramy+7cNy/Lw8c3hw==",
       "bin": {
-        "marked": "bin/marked"
+        "marked": "bin/marked.js"
       },
       "engines": {
-        "node": ">=0.10.0"
+        "node": ">= 12"
       }
     },
     "node_modules/md5.js": {
@@ -3108,9 +3108,9 @@
       }
     },
     "node_modules/minimist": {
-      "version": "1.2.5",
-      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
-      "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
+      "version": "1.2.6",
+      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
+      "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
       "dev": true
     },
     "node_modules/mkdirp": {
@@ -3385,9 +3385,9 @@
       }
     },
     "node_modules/path-parse": {
-      "version": "1.0.6",
-      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
-      "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
+      "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+      "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
       "dev": true
     },
     "node_modules/path-platform": {
@@ -3983,12 +3983,6 @@
         "minimist": "^1.1.0"
       }
     },
-    "node_modules/subarg/node_modules/minimist": {
-      "version": "1.2.5",
-      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
-      "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
-      "dev": true
-    },
     "node_modules/superagent": {
       "version": "3.8.3",
       "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz",
@@ -6053,9 +6047,9 @@
       "dev": true
     },
     "cached-path-relative": {
-      "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.0.2.tgz",
-      "integrity": "sha512-5r2GqsoEb4qMTTN9J+WzXfjov+hjxT+j3u5K+kIVNIwAd99DLCJE9pBIMP1qVeybV6JiijL385Oz0DcYxfbOIg==",
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.1.0.tgz",
+      "integrity": "sha512-WF0LihfemtesFcJgO7xfOoOcnWzY/QHR4qeDqV44jPU3HTI54+LnfXK3SA27AVVGCdZFgjjFFaqUA9Jx7dMJZA==",
       "dev": true
     },
     "call-bind": {
@@ -7280,9 +7274,9 @@
       "dev": true
     },
     "marked": {
-      "version": "0.7.0",
-      "resolved": "https://registry.npmjs.org/marked/-/marked-0.7.0.tgz",
-      "integrity": "sha512-c+yYdCZJQrsRjTPhUx7VKkApw9bwDkNbHUKo1ovgcfDjb2kc8rLuRbIFyXL5WOEUwzSSKo3IXpph2K6DqB/KZg=="
+      "version": "4.0.10",
+      "resolved": "https://registry.npmjs.org/marked/-/marked-4.0.10.tgz",
+      "integrity": "sha512-+QvuFj0nGgO970fySghXGmuw+Fd0gD2x3+MqCWLIPf5oxdv1Ka6b2q+z9RP01P/IaKPMEramy+7cNy/Lw8c3hw=="
     },
     "md5.js": {
       "version": "1.3.4",
@@ -7364,9 +7358,9 @@
       }
     },
     "minimist": {
-      "version": "1.2.5",
-      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
-      "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
+      "version": "1.2.6",
+      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
+      "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
       "dev": true
     },
     "mkdirp": {
@@ -7607,9 +7601,9 @@
       "dev": true
     },
     "path-parse": {
-      "version": "1.0.6",
-      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
-      "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
+      "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+      "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
       "dev": true
     },
     "path-platform": {
@@ -8116,14 +8110,6 @@
       "dev": true,
       "requires": {
         "minimist": "^1.1.0"
-      },
-      "dependencies": {
-        "minimist": {
-          "version": "1.2.5",
-          "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
-          "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
-          "dev": true
-        }
       }
     },
     "superagent": {


@@ -10,7 +10,7 @@
     "font-awesome": "^4.7.0",
     "ios-inner-height": "^1.0.3",
     "js-cookie": "^2.2.0",
-    "marked": "^0.7.0",
+    "marked": "^4.0.10",
     "mousetrap": "^1.6.2",
     "nprogress": "^0.2.0",
     "superagent": "^3.8.3"


@@ -37,6 +37,7 @@
     - [Creating post](#creating-post)
     - [Updating post](#updating-post)
     - [Getting post](#getting-post)
+    - [Getting around post](#getting-around-post)
     - [Deleting post](#deleting-post)
     - [Merging posts](#merging-posts)
    - [Rating post](#rating-post)
@@ -951,6 +952,29 @@ data.
 Retrieves information about an existing post.

+## Getting around post
+- **Request**
+
+    `GET /post/<id>/around`
+
+- **Output**
+
+    ```json5
+    {
+        "prev": <post-resource>,
+        "next": <post-resource>
+    }
+    ```
+
+- **Errors**
+
+    - the post does not exist
+    - privileges are too low
+
+- **Description**
+
+    Retrieves information about posts that are before or after an existing post.
+
 ## Deleting post
 - **Request**
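Editorial note, not part of the commit: for illustration, a minimal way to exercise the new endpoint from a shell. The base URL and post ID are placeholders, and authentication (basic auth or a user token) is omitted because it depends on your deployment.

```console
# Hypothetical example: fetch the posts adjacent to post 123.
# Replace the base URL with wherever your szurubooru API is reachable
# and add your usual authentication if the privilege requires it.
user@host:szuru$ curl "http://localhost:8080/api/post/123/around"
```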


@@ -34,33 +34,79 @@ and Docker Compose (version 1.6.0 or greater) already installed.
 Read the comments to guide you. Note that `.env` should be in the root
 directory of this repository.

-### Running the Application
-
-Download containers:
-```console
-user@host:szuru$ docker-compose pull
-```
-
-For first run, it is recommended to start the database separately:
-```console
-user@host:szuru$ docker-compose up -d sql
-```
-
-To start all containers:
-```console
-user@host:szuru$ docker-compose up -d
-```
-
-To view/monitor the application logs:
-```console
-user@host:szuru$ docker-compose logs -f
-# (CTRL+C to exit)
-```
-
-To stop all containers:
-```console
-user@host:szuru$ docker-compose down
-```
+4. Pull the containers:
+
+   This pulls the latest containers from docker.io:
+   ```console
+   user@host:szuru$ docker-compose pull
+   ```
+
+   If you have modified the application's source and would like to manually
+   build it, follow the instructions in [**Building**](#Building) instead,
+   then read here once you're done.
+
+5. Run it!
+
+   For first run, it is recommended to start the database separately:
+   ```console
+   user@host:szuru$ docker-compose up -d sql
+   ```
+
+   To start all containers:
+   ```console
+   user@host:szuru$ docker-compose up -d
+   ```
+
+   To view/monitor the application logs:
+   ```console
+   user@host:szuru$ docker-compose logs -f
+   # (CTRL+C to exit)
+   ```
+
+### Building
+
+1. Edit `docker-compose.yml` to tell Docker to build instead of pull containers:
+
+   ```diff yaml
+   ...
+   server:
+   -  image: szurubooru/server:latest
+   +  build: server
+   ...
+   client:
+   -  image: szurubooru/client:latest
+   +  build: client
+   ...
+   ```
+
+   You can choose to build either one from source.
+
+2. Build the containers:
+
+   ```console
+   user@host:szuru$ docker-compose build
+   ```
+
+   That will attempt to build both containers, but you can specify `client`
+   or `server` to make it build only one.
+
+   If `docker-compose build` spits out:
+   ```
+   ERROR: Service 'server' failed to build: failed to parse platform : "" is an invalid component of "": platform specifier component must match "^[A-Za-z0-9_-]+$": invalid argument
+   ```
+
+   ...you will need to export Docker BuildKit flags:
+   ```console
+   user@host:szuru$ export DOCKER_BUILDKIT=1; export COMPOSE_DOCKER_CLI_BUILD=1
+   ```
+
+   ...and run `docker-compose build` again.
+
+   *Note: If your changes are not taking effect in your builds, consider building
+   with `--no-cache`.*

 ### Additional Features
### Additional Features ### Additional Features


@@ -10,6 +10,12 @@ BUILD_INFO=latest
 # otherwise the port specified here will be publicly accessible
 PORT=8080

+# How many waitress threads to start
+# 4 is the default amount of threads. If you experience performance
+# degradation with a large number of posts, increasing this may
+# improve performance, since waitress is most likely clogging up with Tasks.
+THREADS=4
+
 # URL base to run szurubooru under
 # See "Additional Features" section in INSTALL.md
 BASE_URL=/
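Editorial aside, not part of the diff: the `THREADS` value is passed through `docker-compose.yml` into the server container and consumed by `docker-start.sh` further down in this commit. A hedged way to confirm it took effect (assumes the compose service is named `server`; the port in the log line depends on your configuration):

```console
# Restart the stack, then look for the startup line added by this commit.
user@host:szuru$ docker-compose up -d
user@host:szuru$ docker-compose logs server | grep "Running on"
# Expected output, approximately:
# Starting szurubooru API on port 6666 - Running on 4 threads
```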


@@ -22,14 +22,15 @@ services:
       #POSTGRES_DB: defaults to same as POSTGRES_USER
       #POSTGRES_PORT: 5432
       #LOG_SQL: 0 (1 for verbose SQL logs)
+      THREADS:
     volumes:
       - "${MOUNT_DATA}:/data"
       - "./server/config.yaml:/opt/app/config.yaml"

   client:
     #image: szurubooru/client:latest
-    image: szuruclient:latest
-    #build: ./client
+    #image: szuruclient:latest
+    build: ./client
     restart: unless-stopped
     depends_on:
       - server


@@ -7,8 +7,13 @@ WORKDIR /opt/app
 RUN apk --no-cache add \
     python3 \
     python3-dev \
-    ffmpeg \
     py3-pip \
+    build-base \
+    libheif \
+    libheif-dev \
+    libavif \
+    libavif-dev \
+    ffmpeg \
     # from requirements.txt:
     py3-yaml \
     py3-psycopg2 \
@@ -19,18 +24,13 @@ RUN apk --no-cache add \
     py3-pynacl \
     py3-tz \
     py3-pyrfc3339 \
-    build-base \
-    && apk --no-cache add \
-    libheif \
-    libavif \
-    libheif-dev \
-    libavif-dev \
     && pip3 install --no-cache-dir --disable-pip-version-check \
-    alembic \
+    "alembic>=0.8.5" \
     "coloredlogs==5.0" \
+    "pyheif==0.6.1" \
+    "heif-image-plugin>=0.3.2" \
     youtube_dl \
-    pillow-avif-plugin \
-    pyheif-pillow-opener \
+    "pillow-avif-plugin>=1.1.0" \
     && apk --no-cache del py3-pip

 COPY ./ /opt/app/
@@ -83,6 +83,9 @@ ARG PORT=6666
 ENV PORT=${PORT}
 EXPOSE ${PORT}

+ARG THREADS=4
+ENV THREADS=${THREADS}
+
 VOLUME ["/data/"]

 ARG DOCKER_REPO


@@ -4,5 +4,5 @@ cd /opt/app
 alembic upgrade head

-echo "Starting szurubooru API on port ${PORT}"
-exec waitress-serve-3 --port ${PORT} szurubooru.facade:app
+echo "Starting szurubooru API on port ${PORT} - Running on ${THREADS} threads"
+exec waitress-serve-3 --port ${PORT} --threads ${THREADS} szurubooru.facade:app


@@ -1,14 +1,15 @@
 alembic>=0.8.5
-pyyaml>=3.11
-psycopg2-binary>=2.6.1
-SQLAlchemy>=1.0.12, <1.4
-coloredlogs==5.0
 certifi>=2017.11.5
+coloredlogs==5.0
+heif-image-plugin==0.3.2
 numpy>=1.8.2
-pillow>=4.3.0
-pynacl>=1.2.1
-pytz>=2018.3
-pyRFC3339>=1.0
 pillow-avif-plugin>=1.1.0
-pyheif-pillow-opener>=0.1.0
+pillow>=4.3.0
+psycopg2-binary>=2.6.1
+pyheif==0.6.1
+pynacl>=1.2.1
+pyRFC3339>=1.0
+pytz>=2018.3
+pyyaml>=3.11
+SQLAlchemy>=1.0.12, <1.4
 youtube_dl


@@ -91,6 +91,15 @@ def reset_filenames() -> None:
         rename_in_dir("posts/custom-thumbnails/")


+def regenerate_thumbnails() -> None:
+    for post in db.session.query(model.Post).all():
+        print("Generating tumbnail for post %d ..." % post.post_id, end="\r")
+        try:
+            postfuncs.generate_post_thumbnail(post)
+        except Exception:
+            pass
+
+
 def main() -> None:
     parser_top = ArgumentParser(
         description="Collection of CLI commands for an administrator to use",
@@ -114,6 +123,12 @@ def main() -> None:
         help="reset and rename the content and thumbnail "
         "filenames in case of a lost/changed secret key",
     )
+    parser.add_argument(
+        "--regenerate-thumbnails",
+        action="store_true",
+        help="regenerate the thumbnails for posts if the "
+        "thumbnail files are missing",
+    )

     command = parser_top.parse_args()
     try:
@@ -123,6 +138,8 @@ def main() -> None:
             check_audio()
         elif command.reset_filenames:
             reset_filenames()
+        elif command.regenerate_thumbnails:
+            regenerate_thumbnails()
     except errors.BaseError as e:
         print(e, file=stderr)
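For illustration only, a hedged sketch of invoking the new flag. The admin script's name and whether it is run through `docker-compose exec` are assumptions; neither is shown in this diff.

```console
# Assumption: the admin CLI above is exposed as ./szuru-admin inside the server container.
user@host:szuru$ docker-compose exec server ./szuru-admin --regenerate-thumbnails
```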


@@ -33,7 +33,7 @@ def _docker_config() -> Dict:
         "show_sql": int(os.getenv("LOG_SQL", 0)),
         "data_url": os.getenv("DATA_URL", "data/"),
         "data_dir": "/data/",
-        "database": "postgres://%(user)s:%(pass)s@%(host)s:%(port)d/%(db)s"
+        "database": "postgresql://%(user)s:%(pass)s@%(host)s:%(port)d/%(db)s"
         % {
             "user": os.getenv("POSTGRES_USER"),
             "pass": os.getenv("POSTGRES_PASSWORD"),


@@ -135,7 +135,7 @@ _live_migrations = (
 def create_app() -> Callable[[Any, Any], Any]:
-    """ Create a WSGI compatible App object. """
+    """Create a WSGI compatible App object."""
     validate_config()
     coloredlogs.install(fmt="[%(asctime)-15s] %(name)s %(message)s")
     if config.config["debug"]:


@@ -25,7 +25,7 @@ RANK_MAP = OrderedDict(
 def get_password_hash(salt: str, password: str) -> Tuple[str, int]:
-    """ Retrieve argon2id password hash. """
+    """Retrieve argon2id password hash."""
     return (
         pwhash.argon2id.str(
             (config.config["secret"] + salt + password).encode("utf8")
@@ -37,7 +37,7 @@ def get_password_hash(salt: str, password: str) -> Tuple[str, int]:
 def get_sha256_legacy_password_hash(
     salt: str, password: str
 ) -> Tuple[str, int]:
-    """ Retrieve old-style sha256 password hash. """
+    """Retrieve old-style sha256 password hash."""
     digest = hashlib.sha256()
     digest.update(config.config["secret"].encode("utf8"))
     digest.update(salt.encode("utf8"))
@@ -46,7 +46,7 @@ def get_sha256_legacy_password_hash(
 def get_sha1_legacy_password_hash(salt: str, password: str) -> Tuple[str, int]:
-    """ Retrieve old-style sha1 password hash. """
+    """Retrieve old-style sha1 password hash."""
     digest = hashlib.sha1()
     digest.update(b"1A2/$_4xVa")
     digest.update(salt.encode("utf8"))
@@ -125,7 +125,7 @@ def verify_privilege(user: model.User, privilege_name: str) -> None:
 def generate_authentication_token(user: model.User) -> str:
-    """ Generate nonguessable challenge (e.g. links in password reminder). """
+    """Generate nonguessable challenge (e.g. links in password reminder)."""
     assert user
     digest = hashlib.md5()
     digest.update(config.config["secret"].encode("utf8"))


@@ -4,16 +4,13 @@ from datetime import datetime
 from io import BytesIO
 from typing import Any, Callable, List, Optional, Set, Tuple

+import HeifImagePlugin
 import numpy as np
 import pillow_avif
-import pyheif
 from PIL import Image
-from pyheif_pillow_opener import register_heif_opener

 from szurubooru import config, errors

-register_heif_opener()
-
 logger = logging.getLogger(__name__)

 # Math based on paper from H. Chi Wong, Marshall Bern and David Goldberg


@@ -7,6 +7,8 @@ import subprocess
 from io import BytesIO
 from typing import List

+import HeifImagePlugin
+import pillow_avif
 from PIL import Image as PILImage

 from szurubooru import errors
@@ -277,10 +279,10 @@ class Image:
         proc = subprocess.Popen(
             cli,
             stdout=subprocess.PIPE,
-            stdin=subprocess.PIPE,
+            stdin=subprocess.DEVNULL,
             stderr=subprocess.PIPE,
         )
-        out, err = proc.communicate(input=self.content)
+        out, err = proc.communicate()
         if proc.returncode != 0:
             logger.warning(
                 "Failed to execute ffmpeg command (cli=%r, err=%r)",


@@ -36,7 +36,7 @@ def get_mime_type(content: bytes) -> str:
     if content[0:4] == b"\x1A\x45\xDF\xA3":
         return "video/webm"

-    if content[4:12] in (b"ftypisom", b"ftypiso5", b"ftypmp42", b"ftypM4V "):
+    if content[4:12] in (b"ftypisom", b"ftypiso5", b"ftypiso6", b"ftypmp42", b"ftypM4V "):
         return "video/mp4"

     return "application/octet-stream"
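Editorial note, not part of the commit: the added `b"ftypiso6"` entry matches MP4 files whose ISO BMFF major brand is `iso6`; the `ftyp` box and its brand occupy byte offsets 4-11 of the file, which is exactly the slice the function checks. A quick, hedged way to inspect which brand a local file carries (hypothetical filename, output varies):

```console
# Dump the first 12 bytes; the ASCII column shows "ftyp" followed by the brand.
user@host:szuru$ xxd -l 12 some-video.mp4
# 00000000: 0000 0018 6674 7970 6973 6f36            ....ftypiso6
```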


@@ -39,7 +39,7 @@ def download(url: str, use_video_downloader: bool = False) -> bytes:
     length_tally = 0
     try:
         with urllib.request.urlopen(request) as handle:
-            while (chunk := handle.read(_dl_chunk_size)) :
+            while chunk := handle.read(_dl_chunk_size):
                 length_tally += len(chunk)
                 if length_tally > config.config["max_dl_filesize"]:
                     raise DownloadTooLargeError(


@@ -83,12 +83,12 @@ def flip(source: Dict[Any, Any]) -> Dict[Any, Any]:
 def is_valid_email(email: Optional[str]) -> bool:
-    """ Return whether given email address is valid or empty. """
+    """Return whether given email address is valid or empty."""
     return not email or re.match(r"^[^@]*@[^@]*\.[^@]*$", email) is not None


 class dotdict(dict):
-    """ dot.notation access to dictionary attributes. """
+    """dot.notation access to dictionary attributes."""

     def __getattr__(self, attr: str) -> Any:
         return self.get(attr)
@@ -98,7 +98,7 @@ class dotdict(dict):
 def parse_time_range(value: str) -> Tuple[datetime, datetime]:
-    """ Return tuple containing min/max time for given text representation. """
+    """Return tuple containing min/max time for given text representation."""
     one_day = timedelta(days=1)
     one_second = timedelta(seconds=1)
     almost_one_day = one_day - one_second


@@ -7,7 +7,7 @@ from szurubooru.rest.errors import HttpBadRequest
 def _authenticate_basic_auth(username: str, password: str) -> model.User:
-    """ Try to authenticate user. Throw AuthError for invalid users. """
+    """Try to authenticate user. Throw AuthError for invalid users."""
     user = users.get_user_by_name(username)
     if not auth.is_valid_password(user, password):
         raise errors.AuthError("Invalid password.")
@@ -17,7 +17,7 @@ def _authenticate_basic_auth(username: str, password: str) -> model.User:
 def _authenticate_token(
     username: str, token: str
 ) -> Tuple[model.User, model.UserToken]:
-    """ Try to authenticate user. Throw AuthError for invalid users. """
+    """Try to authenticate user. Throw AuthError for invalid users."""
     user = users.get_user_by_name(username)
     user_token = user_tokens.get_by_user_and_token(user, token)
     if not auth.is_valid_token(user_token):
@@ -72,7 +72,7 @@ def _get_user(ctx: rest.Context, bump_login: bool) -> Optional[model.User]:
 def process_request(ctx: rest.Context) -> None:
-    """ Bind the user to request. Update last login time if needed. """
+    """Bind the user to request. Update last login time if needed."""
     bump_login = ctx.get_param_as_bool("bump-login", default=False)
     auth_user = _get_user(ctx, bump_login)
     if auth_user:


@@ -11,7 +11,7 @@ from szurubooru.rest import context, errors, middleware, routes
 def _json_serializer(obj: Any) -> str:
-    """ JSON serializer for objects not serializable by default JSON code """
+    """JSON serializer for objects not serializable by default JSON code"""
     if isinstance(obj, datetime):
         serial = obj.isoformat("T") + "Z"
         return serial