Merge branch 'Develop'
# Conflicts: # MANIFEST.in # README.md # cps/helper.py # cps/static/js/archive/archive.js # cps/translations/nl/LC_MESSAGES/messages.mo # cps/translations/nl/LC_MESSAGES/messages.po # cps/ub.py # cps/updater.py # cps/web.py # cps/worker.py # optional-requirements.txt
2
.gitattributes
vendored
@ -1,4 +1,4 @@
|
||||
updater.py ident export-subst
|
||||
constants.py ident export-subst
|
||||
/test export-ignore
|
||||
cps/static/css/libs/* linguist-vendored
|
||||
cps/static/js/libs/* linguist-vendored
|
||||
|
2
.gitignore
vendored
@ -8,6 +8,8 @@ __pycache__/
|
||||
.Python
|
||||
env/
|
||||
eggs/
|
||||
dist/
|
||||
build/
|
||||
.eggs/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
|
@ -1,3 +1 @@
|
||||
include cps/static/*
|
||||
include cps/templates/*
|
||||
include cps/translations/*
|
||||
graft src/calibreweb
|
||||
|
@ -4,7 +4,7 @@ Calibre-Web is a web app providing a clean interface for browsing, reading and d
|
||||
|
||||
*This software is a fork of [library](https://github.com/mutschler/calibreserver) and licensed under the GPL v3 License.*
|
||||
|
||||
![Main screen](../../wiki/images/main_screen.png)
|
||||
![Main screen](https://github.com/janeczku/calibre-web/wiki/images/main_screen.png)
|
||||
|
||||
## Features
|
||||
|
||||
@ -82,4 +82,4 @@ Pre-built Docker images are available in these Docker Hub repositories:
|
||||
|
||||
# Wiki
|
||||
|
||||
For further informations, How To's and FAQ please check the [Wiki](../../wiki)
|
||||
For further information, How To's and FAQ please check the [Wiki](https://github.com/janeczku/calibre-web/wiki)
|
71
cps.py
@ -1,21 +1,68 @@
|
||||
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2012-2019 OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import absolute_import, division, print_function, unicode_literals
import sys
import os

# Resolve the application base directory.  On Python 2, __file__ is a byte
# string, so decode it first to survive non-ASCII installation paths.
# (The merge had left two overlapping copies of this path setup; they are
# collapsed into one here.)
if sys.version_info < (3, 0):
    base_path = os.path.dirname(os.path.abspath(__file__.decode('utf-8')))
else:
    base_path = os.path.dirname(os.path.abspath(__file__))

# Insert local directories into path so the bundled 'cps' package and the
# vendored third-party libraries take precedence over system-wide installs.
sys.path.append(base_path)
sys.path.append(os.path.join(base_path, 'cps'))
sys.path.append(os.path.join(base_path, 'vendor'))


from cps import create_app
from cps import web_server
from cps.opds import opds
from cps.web import web
from cps.jinjia import jinjia
from cps.about import about
from cps.shelf import shelf
from cps.admin import admi
from cps.gdrive import gdrive
from cps.editbooks import editbook
# OAuth support is optional (extra dependencies); degrade gracefully when
# they are not installed.
try:
    from cps.oauth_bb import oauth
    oauth_available = True
except ImportError:
    oauth_available = False
||||
|
||||
|
||||
def main():
    """Create the Flask application, attach all blueprints and run the
    embedded web server.

    Exits the process with status 0 on a clean shutdown and 1 when the
    server reports a failure.
    """
    app = create_app()
    app.register_blueprint(web)
    app.register_blueprint(opds)
    app.register_blueprint(jinjia)
    app.register_blueprint(about)
    app.register_blueprint(shelf)
    app.register_blueprint(admi)
    app.register_blueprint(gdrive)
    app.register_blueprint(editbook)
    if oauth_available:
        app.register_blueprint(oauth)
    success = web_server.start()
    sys.exit(0 if success else 1)


# The pre-merge entry point (cps.server.Server.startServer()) was left behind
# by the merge alongside main(); only the new entry point is kept.
if __name__ == '__main__':
    main()
147
cps/__init__.py
@ -1,2 +1,149 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import os
|
||||
import mimetypes
|
||||
|
||||
from babel import Locale as LC
|
||||
from babel import negotiate_locale
|
||||
from babel.core import UnknownLocaleError
|
||||
from flask import Flask, request, g
|
||||
from flask_login import LoginManager
|
||||
from flask_babel import Babel
|
||||
from flask_principal import Principal
|
||||
|
||||
from . import logger, cache_buster, cli, config_sql, ub
|
||||
from .reverseproxy import ReverseProxied
|
||||
|
||||
|
||||
# Register MIME types that the stock mimetypes database may not know about,
# so downloads get the correct Content-Type headers.
mimetypes.init()
for _mime, _ext in (
        ('application/xhtml+xml', '.xhtml'),
        ('application/epub+zip', '.epub'),
        ('application/fb2+zip', '.fb2'),
        ('application/x-mobipocket-ebook', '.mobi'),
        ('application/x-mobipocket-ebook', '.prc'),
        ('application/vnd.amazon.ebook', '.azw'),
        ('application/x-cbr', '.cbr'),
        ('application/x-cbz', '.cbz'),
        ('application/x-cbt', '.cbt'),
        ('image/vnd.djvu', '.djvu'),
        ('application/mpeg', '.mpeg'),
        ('application/mpeg', '.mp3'),
        ('application/mp4', '.m4a'),
        ('application/mp4', '.m4b'),
        ('application/ogg', '.ogg'),
        ('application/ogg', '.oga'),
):
    mimetypes.add_type(_mime, _ext)

app = Flask(__name__)

lm = LoginManager()
lm.login_view = 'web.login'
lm.anonymous_user = ub.Anonymous


# The settings database must be initialised before the configuration can be
# loaded, and the configuration must exist before db/services are imported
# below -- hence the late, mid-module imports.
ub.init_db(cli.settingspath)
config = config_sql.load_configuration(ub.session)
from . import db, services

searched_ids = {}

from .worker import WorkerThread
global_WorkerThread = WorkerThread()

from .server import WebServer
web_server = WebServer()

babel = Babel()
_BABEL_TRANSLATIONS = set()

log = logger.create()
|
||||
def create_app():
    """Finish initialising the module-level Flask ``app`` and return it.

    Wires up the reverse-proxy shim, cache busting, the login manager,
    Babel, the web server, the calibre database and the optional external
    services, then starts the background worker thread.
    """
    app.wsgi_app = ReverseProxied(app.wsgi_app)
    # For python2 convert path to unicode
    if sys.version_info < (3, 0):
        app.static_folder = app.static_folder.decode('utf-8')
        app.root_path = app.root_path.decode('utf-8')
        app.instance_path = app.instance_path .decode('utf-8')

    cache_buster.init_cache_busting(app)

    log.info('Starting Calibre Web...')
    Principal(app)
    lm.init_app(app)
    # NOTE(review): hard-coded fallback secret key -- SECRET_KEY should be
    # set in the environment for any non-private deployment; confirm.
    app.secret_key = os.getenv('SECRET_KEY', 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT')

    web_server.init_app(app, config)
    db.setup_db(config)

    babel.init_app(app)
    # Remember the installed translation names (plus built-in English) for
    # locale negotiation in negociate_locale().
    _BABEL_TRANSLATIONS.update(str(item) for item in babel.list_translations())
    _BABEL_TRANSLATIONS.add('en')

    if services.ldap:
        services.ldap.init_app(app, config)
    if services.goodreads:
        services.goodreads.connect(config.config_goodreads_api_key, config.config_goodreads_api_secret, config.config_use_goodreads)

    global_WorkerThread.start()
    return app
|
||||
@babel.localeselector
def negociate_locale():
    """Pick the display locale for the current request.

    A logged-in, non-guest account wins with its stored locale; otherwise
    the Accept-Language header is negotiated against the installed
    translations, falling back to English.
    """
    current = getattr(g, 'user', None)
    if current is not None and hasattr(current, "locale") and current.nickname != 'Guest':
        # the guest account deliberately bypasses per-user language settings
        return current.locale

    requested = set()
    for lang in (request.accept_languages.values() if request.accept_languages else ()):
        try:
            requested.add(str(LC.parse(lang.replace('-', '_'))))
        except (UnknownLocaleError, ValueError) as err:
            log.warning('Could not parse locale "%s": %s', lang, err)

    return negotiate_locale(requested or ['en'], _BABEL_TRANSLATIONS)
|
||||
|
||||
def get_locale():
    """Return the locale stashed on the request object.

    ``request._locale`` is presumably attached elsewhere during request
    handling -- not visible in this module; verify against callers.
    """
    return request._locale
|
||||
|
||||
@babel.timezoneselector
def get_timezone():
    """Return the logged-in user's timezone, or None when no user is bound."""
    current = getattr(g, 'user', None)
    return current.timezone if current is not None else None
|
||||
# Imported late, presumably to avoid a circular import with the modules
# initialised above -- not verifiable from this file alone.
from .updater import Updater

updater_thread = Updater()


# Only the Flask application object is declared public package API.
__all__ = ['app']
|
||||
|
78
cps/about.py
Normal file
@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import requests
|
||||
|
||||
from flask import Blueprint
|
||||
from flask import __version__ as flaskVersion
|
||||
from flask_babel import gettext as _
|
||||
from flask_principal import __version__ as flask_principalVersion
|
||||
from flask_login import login_required
|
||||
try:
|
||||
from flask_login import __version__ as flask_loginVersion
|
||||
except ImportError:
|
||||
from flask_login.__about__ import __version__ as flask_loginVersion
|
||||
from werkzeug import __version__ as werkzeugVersion
|
||||
|
||||
from babel import __version__ as babelVersion
|
||||
from jinja2 import __version__ as jinja2Version
|
||||
from pytz import __version__ as pytzVersion
|
||||
from sqlalchemy import __version__ as sqlalchemyVersion
|
||||
|
||||
from . import db, converter, uploader
|
||||
from .isoLanguages import __version__ as iso639Version
|
||||
from .server import VERSION as serverVersion
|
||||
from .web import render_title_template
|
||||
|
||||
|
||||
about = Blueprint('about', __name__)


@about.route("/stats")
@login_required
def stats():
    """Render the statistics page: library object counts plus the versions
    of every relevant runtime component."""
    book_count = db.session.query(db.Books).count()
    author_count = db.session.query(db.Authors).count()
    category_count = db.session.query(db.Tags).count()
    series_count = db.session.query(db.Series).count()

    versions = uploader.get_versions()
    versions.update({
        'Babel': 'v' + babelVersion,
        'Sqlalchemy': 'v' + sqlalchemyVersion,
        'Werkzeug': 'v' + werkzeugVersion,
        'Jinja2': 'v' + jinja2Version,
        'Flask': 'v' + flaskVersion,
        'Flask Login': 'v' + flask_loginVersion,
        'Flask Principal': 'v' + flask_principalVersion,
        'Iso 639': 'v' + iso639Version,
        'pytz': 'v' + pytzVersion,
        'Requests': 'v' + requests.__version__,
        'pySqlite': 'v' + db.session.bind.dialect.dbapi.version,
        'Sqlite': 'v' + db.session.bind.dialect.dbapi.sqlite_version,
    })
    versions.update(converter.versioncheck())
    versions.update(serverVersion)
    versions['Python'] = sys.version
    return render_title_template('stats.html', bookcounter=book_count, authorcounter=author_count,
                                 versions=versions, categorycounter=category_count,
                                 seriecounter=series_count, title=_(u"Statistics"), page="stat")
|
695
cps/admin.py
Normal file
@ -0,0 +1,695 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import base64
|
||||
import json
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
# try:
|
||||
# from imp import reload
|
||||
# except ImportError:
|
||||
# pass
|
||||
|
||||
from babel import Locale as LC
|
||||
from babel.dates import format_datetime
|
||||
from flask import Blueprint, flash, redirect, url_for, abort, request, make_response, send_from_directory
|
||||
from flask_login import login_required, current_user, logout_user
|
||||
from flask_babel import gettext as _
|
||||
from sqlalchemy import and_
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.sql.expression import func
|
||||
from werkzeug.security import generate_password_hash
|
||||
|
||||
from . import constants, logger, helper, services
|
||||
from . import db, ub, web_server, get_locale, config, updater_thread, babel, gdriveutils
|
||||
from .helper import speaking_language, check_valid_domain, send_test_mail, generate_random_password, send_registration_mail
|
||||
from .gdriveutils import is_gdrive_ready, gdrive_support
|
||||
from .web import admin_required, render_title_template, before_request, unconfigured, login_required_if_no_ano
|
||||
|
||||
# Capabilities available in this installation; templates and handlers use
# this to show or hide the corresponding configuration sections.
feature_support = {
    'ldap': bool(services.ldap),
    'goodreads': bool(services.goodreads),
}

# OAuth needs optional dependencies; probe for them instead of requiring them.
try:
    from .oauth_bb import oauth_check
    feature_support['oauth'] = True
except ImportError:
    feature_support['oauth'] = False
    oauth_check = {}


feature_support['gdrive'] = gdrive_support

admi = Blueprint('admin', __name__)
log = logger.create()
||||
|
||||
@admi.route("/admin")
@login_required
def admin_forbidden():
    """The bare /admin endpoint is never served; the real page is /admin/view."""
    abort(403)
|
||||
|
||||
@admi.route("/shutdown")
@login_required
@admin_required
def shutdown():
    """Handle server lifecycle commands.

    ``?parameter=0`` restarts the server, ``1`` shuts it down, ``2`` only
    reconnects to the calibre database.  A missing, malformed or unknown
    value yields 404 (previously it crashed with an unhandled
    AttributeError/ValueError and returned 500).
    """
    try:
        task = int(request.args.get("parameter").strip())
    except (AttributeError, ValueError):
        abort(404)

    if task in (0, 1):  # valid commandos received
        # close all database connections before the process restarts/exits
        db.dispose()
        ub.dispose()

        showtext = {}
        if task == 0:
            showtext['text'] = _(u'Server restarted, please reload page')
        else:
            showtext['text'] = _(u'Performing shutdown of server, please close window')
        # stop gevent/tornado server
        web_server.stop(task == 0)
        return json.dumps(showtext)

    if task == 2:
        log.warning("reconnecting to calibre database")
        db.setup_db(config)
        return '{}'

    abort(404)
|
||||
|
||||
@admi.route("/admin/view")
@login_required
@admin_required
def admin():
    """Render the admin overview page.

    Shows all users, the mail settings and the installed version.  When the
    updater reports a commit timestamp (ISO-8601, optionally carrying a
    ``+hh:mm``/``-hh:mm`` offset), it is converted to local time for display.
    """
    version = updater_thread.get_current_version_info()
    if version is False:
        commit = _(u'Unknown')
    else:
        if 'datetime' in version:
            commit = version['datetime']

            # local offset from UTC in seconds, honouring DST
            tz = timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
            form_date = datetime.strptime(commit[:19], "%Y-%m-%dT%H:%M:%S")
            if len(commit) > 19:  # check if string has timezone
                # strip the embedded offset so form_date is plain UTC
                if commit[19] == '+':
                    form_date -= timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
                elif commit[19] == '-':
                    form_date += timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
            # UTC minus the westward offset gives local wall-clock time
            commit = format_datetime(form_date - tz, format='short', locale=get_locale())
        else:
            commit = version['version']

    allUser = ub.session.query(ub.User).all()
    email_settings = config.get_mail_settings()
    return render_title_template("admin.html", allUser=allUser, email=email_settings, config=config, commit=commit,
                                 title=_(u"Admin page"), page="admin")
|
||||
|
||||
@admi.route("/admin/config", methods=["GET", "POST"])
@login_required
@admin_required
def configuration():
    """Admin-only configuration page; POST applies changes, GET renders it."""
    if request.method != "POST":
        return _configuration_result()
    return _configuration_update_helper()
|
||||
|
||||
@admi.route("/admin/viewconfig")
@login_required
@admin_required
def view_configuration():
    """Render the UI-configuration page, offering every boolean custom
    column that is not marked for deletion as a candidate read-state column."""
    read_columns = (db.session.query(db.Custom_Columns)
                    .filter(and_(db.Custom_Columns.datatype == 'bool',
                                 db.Custom_Columns.mark_for_delete == 0))
                    .all())
    return render_title_template("config_view_edit.html", conf=config, readColumns=read_columns,
                                 title=_(u"UI Configuration"), page="uiconfig")
|
||||
|
||||
@admi.route("/admin/viewconfig", methods=["POST"])
@login_required
@admin_required
def update_view_configuration():
    """Apply the posted UI settings to the global config.

    Most options take effect immediately; a changed title regex requires a
    server restart, which is triggered after saving.
    """
    to_save = request.form.to_dict()

    def _set_string(key):
        # set_from_dictionary returns True when the stored value changed
        return config.set_from_dictionary(to_save, key, lambda y: y.strip() if y else y)

    def _set_int(key):
        return config.set_from_dictionary(to_save, key, int)

    reboot_required = False
    _set_string("config_calibre_web_title")
    _set_string("config_columns_to_ignore")
    _set_string("config_mature_content_tags")
    reboot_required |= _set_string("config_title_regex")

    for key in ("config_read_column", "config_theme", "config_random_books",
                "config_books_per_page", "config_authors_max"):
        _set_int(key)

    # collapse the posted role checkboxes into a bitmask; anonymous must
    # never be part of the default roles
    config.config_default_role = constants.selected_roles(to_save)
    config.config_default_role &= ~constants.ROLE_ANONYMOUS

    # 'show_<bit>' checkbox names carry the sidebar visibility bit directly
    config.config_default_show = sum(int(k[5:]) for k in to_save if k.startswith('show_'))
    if "Show_mature_content" in to_save:
        config.config_default_show |= constants.MATURE_CONTENT

    config.save()
    flash(_(u"Calibre-Web configuration updated"), category="success")
    before_request()
    if reboot_required:
        db.dispose()
        ub.dispose()
        web_server.stop(True)

    return view_configuration()
|
||||
|
||||
@admi.route("/ajax/editdomain", methods=['POST'])
@login_required
@admin_required
def edit_domain():
    """x-editable callback: update one registration-domain record.

    The POST carries 'pk' (record id) and 'value' (new domain pattern);
    the user-facing wildcards * and ? are stored as the SQL LIKE
    wildcards % and _.
    """
    fields = request.form.to_dict()
    record = ub.session.query(ub.Registration).filter(ub.Registration.id == fields['pk']).first()
    record.domain = fields['value'].replace('*', '%').replace('?', '_').lower()
    ub.session.commit()
    return ""
|
||||
|
||||
@admi.route("/ajax/adddomain", methods=['POST'])
@login_required
@admin_required
def add_domain():
    """Add a registration-domain pattern unless an identical one exists.

    The user-facing wildcards * and ? are stored as the SQL LIKE
    wildcards % and _.
    """
    pattern = request.form.to_dict()['domainname'].replace('*', '%').replace('?', '_').lower()
    duplicate = ub.session.query(ub.Registration).filter(ub.Registration.domain == pattern).first()
    if not duplicate:
        ub.session.add(ub.Registration(domain=pattern))
        ub.session.commit()
    return ""
|
||||
|
||||
@admi.route("/ajax/deletedomain", methods=['POST'])
@login_required
@admin_required
def delete_domain():
    """Delete a registration domain by record id.

    If the last domain was removed, the match-everything pattern is
    re-added so registration remains possible.
    """
    # The posted value is a record id, not a domain pattern -- the wildcard
    # translation copy-pasted from the other domain endpoints did not belong
    # here and has been dropped (it was a no-op for numeric ids).
    domain_id = request.form.to_dict()['domainid']
    ub.session.query(ub.Registration).filter(ub.Registration.id == domain_id).delete()
    ub.session.commit()
    # If last domain was deleted, add all domains by default
    if not ub.session.query(ub.Registration).count():
        ub.session.add(ub.Registration(domain="%.%"))
        ub.session.commit()
    return ""
|
||||
|
||||
@admi.route("/ajax/domainlist")
@login_required
@admin_required
def list_domain():
    """Return all registration domains as JSON, translating the stored SQL
    LIKE wildcards % and _ back to the user-facing * and ?."""
    rows = ub.session.query(ub.Registration).all()
    # Serialize once.  The previous double json.dumps / quote-swap / strip
    # round-trip was an identity transform for well-formed domains and
    # corrupted any entry that contained a quote character.
    payload = json.dumps([{"domain": r.domain.replace('%', '*').replace('_', '?'), "id": r.id}
                          for r in rows])
    response = make_response(payload)
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
|
||||
|
||||
@admi.route("/config", methods=["GET", "POST"])
@unconfigured
def basic_configuration():
    """Initial-setup configuration page, reachable only while the app is
    unconfigured; any logged-in session is terminated first."""
    logout_user()
    if request.method != "POST":
        return _configuration_result()
    return _configuration_update_helper()
|
||||
|
||||
def _configuration_update_helper():
    """Validate and persist the posted basic-configuration form.

    Shared by the admin configuration page and the initial-setup page.
    Each validation failure short-circuits by re-rendering the config page
    with an error; on success the config is saved and, when a setting that
    needs it changed (port, certificates, login type, access log), the web
    server is stopped so it can be restarted.
    """
    reboot_required = False
    db_change = False
    to_save = request.form.to_dict()

    # Each _config_* setter returns True when the stored value actually
    # changed; that signal drives db_change/reboot_required below.
    _config_string = lambda x: config.set_from_dictionary(to_save, x, lambda y: y.strip() if y else y)
    _config_int = lambda x: config.set_from_dictionary(to_save, x, int)
    _config_checkbox = lambda x: config.set_from_dictionary(to_save, x, lambda y: y == "on", False)
    _config_checkbox_int = lambda x: config.set_from_dictionary(to_save, x, lambda y: 1 if (y == "on") else 0, 0)

    db_change |= _config_string("config_calibre_dir")

    # Google drive setup
    if not os.path.isfile(gdriveutils.SETTINGS_YAML):
        config.config_use_google_drive = False

    # get_error_text also fills gdrive_secrets from client_secrets.json
    gdrive_secrets = {}
    gdriveError = gdriveutils.get_error_text(gdrive_secrets)
    if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdriveError:
        if not gdrive_secrets:
            return _configuration_result('client_secrets.json is not configured for web application')
        gdriveutils.update_settings(
            gdrive_secrets['client_id'],
            gdrive_secrets['client_secret'],
            gdrive_secrets['redirect_uris'][0]
        )

    # always show google drive settings, but in case of error deny support
    config.config_use_google_drive = (not gdriveError) and ("config_use_google_drive" in to_save)
    if _config_string("config_google_drive_folder"):
        gdriveutils.deleteDatabaseOnChange()

    reboot_required |= _config_int("config_port")

    reboot_required |= _config_string("config_keyfile")
    if config.config_keyfile and not os.path.isfile(config.config_keyfile):
        return _configuration_result('Keyfile location is not valid, please enter correct path', gdriveError)

    reboot_required |= _config_string("config_certfile")
    if config.config_certfile and not os.path.isfile(config.config_certfile):
        return _configuration_result('Certfile location is not valid, please enter correct path', gdriveError)

    _config_checkbox_int("config_uploading")
    _config_checkbox_int("config_anonbrowse")
    _config_checkbox_int("config_public_reg")

    _config_int("config_ebookconverter")
    _config_string("config_calibre")
    _config_string("config_converterpath")

    # switching away from/to standard login requires a restart
    if _config_int("config_login_type"):
        reboot_required |= config.config_login_type != constants.LOGIN_STANDARD

    #LDAP configurator,
    if config.config_login_type == constants.LOGIN_LDAP:
        _config_string("config_ldap_provider_url")
        _config_int("config_ldap_port")
        _config_string("config_ldap_schema")
        _config_string("config_ldap_dn")
        _config_string("config_ldap_user_object")
        if not config.config_ldap_provider_url or not config.config_ldap_port or not config.config_ldap_dn or not config.config_ldap_user_object:
            return _configuration_result('Please enter a LDAP provider, port, DN and user object identifier', gdriveError)

        _config_string("config_ldap_serv_username")
        if not config.config_ldap_serv_username or "config_ldap_serv_password" not in to_save:
            return _configuration_result('Please enter a LDAP service account and password', gdriveError)
        # NOTE(review): base64 is obfuscation, not encryption, for the stored
        # LDAP service password.
        config.set_from_dictionary(to_save, "config_ldap_serv_password", base64.b64encode)

        _config_checkbox("config_ldap_use_ssl")
        _config_checkbox("config_ldap_use_tls")
        _config_checkbox("config_ldap_openldap")
        _config_checkbox("config_ldap_require_cert")
        _config_string("config_ldap_cert_path")
        if config.config_ldap_cert_path and not os.path.isfile(config.config_ldap_cert_path):
            return _configuration_result('LDAP Certfile location is not valid, please enter correct path', gdriveError)

    # Remote login configuration
    _config_checkbox("config_remote_login")
    if not config.config_remote_login:
        # disabling remote login invalidates all outstanding auth tokens
        ub.session.query(ub.RemoteAuthToken).delete()

    # Goodreads configuration
    _config_checkbox("config_use_goodreads")
    _config_string("config_goodreads_api_key")
    _config_string("config_goodreads_api_secret")
    if services.goodreads:
        services.goodreads.connect(config.config_goodreads_api_key, config.config_goodreads_api_secret, config.config_use_goodreads)

    _config_int("config_updatechannel")

    # GitHub OAuth configuration
    if config.config_login_type == constants.LOGIN_OAUTH_GITHUB:
        _config_string("config_github_oauth_client_id")
        _config_string("config_github_oauth_client_secret")
        if not config.config_github_oauth_client_id or not config.config_github_oauth_client_secret:
            return _configuration_result('Please enter Github oauth credentials', gdriveError)

    # Google OAuth configuration
    if config.config_login_type == constants.LOGIN_OAUTH_GOOGLE:
        _config_string("config_google_oauth_client_id")
        _config_string("config_google_oauth_client_secret")
        if not config.config_google_oauth_client_id or not config.config_google_oauth_client_secret:
            return _configuration_result('Please enter Google oauth credentials', gdriveError)

    _config_int("config_log_level")
    _config_string("config_logfile")
    if not logger.is_valid_logfile(config.config_logfile):
        return _configuration_result('Logfile location is not valid, please enter correct path', gdriveError)

    reboot_required |= _config_checkbox_int("config_access_log")
    reboot_required |= _config_string("config_access_logfile")
    if not logger.is_valid_logfile(config.config_access_logfile):
        return _configuration_result('Access Logfile location is not valid, please enter correct path', gdriveError)

    # Rarfile Content configuration
    _config_string("config_rarfile_location")
    unrar_status = helper.check_unrar(config.config_rarfile_location)
    if unrar_status:
        return _configuration_result(unrar_status, gdriveError)

    try:
        metadata_db = os.path.join(config.config_calibre_dir, "metadata.db")
        if config.config_use_google_drive and is_gdrive_ready() and not os.path.exists(metadata_db):
            gdriveutils.downloadFile(None, "metadata.db", metadata_db)
            db_change = True
    except Exception as e:
        return _configuration_result('%s' % e, gdriveError)

    if db_change:
        # reload(db)
        if not db.setup_db(config):
            return _configuration_result('DB location is not valid, please enter correct path', gdriveError)

    config.save()
    flash(_(u"Calibre-Web configuration updated"), category="success")
    if reboot_required:
        web_server.stop(True)

    return _configuration_result(None, gdriveError)
|
||||
|
||||
def _configuration_result(error_flash=None, gdriveError=None):
    """Render the basic-configuration template.

    When *error_flash* is given it is flashed as an error and the config is
    reloaded from disk, discarding the failed in-memory changes.  A Google
    Drive error (passed in or probed here) disables the folder listing.
    """
    need_gdrive_auth = not is_gdrive_ready()
    folders = []
    if gdriveError is None:
        gdriveError = gdriveutils.get_error_text()
    if gdriveError:
        gdriveError = _(gdriveError)
    else:
        folders = gdriveutils.listRootFolders()

    back_button = current_user.is_authenticated
    login_button = config.db_configured and not current_user.is_authenticated
    if error_flash:
        # discard the partially applied settings and surface the error
        config.load()
        flash(_(error_flash), category="error")
        login_button = False

    return render_title_template("config_edit.html", config=config,
                                 show_back_button=back_button, show_login_button=login_button,
                                 show_authenticate_google_drive=need_gdrive_auth,
                                 gdriveError=gdriveError, gdrivefolders=folders,
                                 feature_support=feature_support,
                                 title=_(u"Basic Configuration"), page="config")
|
||||
|
||||
@admi.route("/admin/user/new", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@admin_required
|
||||
def new_user():
|
||||
content = ub.User()
|
||||
languages = speaking_language()
|
||||
translations = [LC('en')] + babel.list_translations()
|
||||
if request.method == "POST":
|
||||
to_save = request.form.to_dict()
|
||||
content.default_language = to_save["default_language"]
|
||||
content.mature_content = "Show_mature_content" in to_save
|
||||
content.locale = to_save.get("locale", content.locale)
|
||||
|
||||
content.sidebar_view = sum(int(key[5:]) for key in to_save if key.startswith('show_'))
|
||||
if "show_detail_random" in to_save:
|
||||
content.sidebar_view |= constants.DETAIL_RANDOM
|
||||
|
||||
content.role = constants.selected_roles(to_save)
|
||||
|
||||
if not to_save["nickname"] or not to_save["email"] or not to_save["password"]:
|
||||
flash(_(u"Please fill out all fields!"), category="error")
|
||||
return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,
|
||||
registered_oauth=oauth_check, title=_(u"Add new user"))
|
||||
content.password = generate_password_hash(to_save["password"])
|
||||
existing_user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) == to_save["nickname"].lower())\
|
||||
.first()
|
||||
existing_email = ub.session.query(ub.User).filter(ub.User.email == to_save["email"].lower())\
|
||||
.first()
|
||||
if not existing_user and not existing_email:
|
||||
content.nickname = to_save["nickname"]
|
||||
if config.config_public_reg and not check_valid_domain(to_save["email"]):
|
||||
flash(_(u"E-mail is not from valid domain"), category="error")
|
||||
return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,
|
||||
registered_oauth=oauth_check, title=_(u"Add new user"))
|
||||
else:
|
||||
content.email = to_save["email"]
|
||||
else:
|
||||
flash(_(u"Found an existing account for this e-mail address or nickname."), category="error")
|
||||
return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,
|
||||
languages=languages, title=_(u"Add new user"), page="newuser",
|
||||
registered_oauth=oauth_check)
|
||||
try:
|
||||
ub.session.add(content)
|
||||
ub.session.commit()
|
||||
flash(_(u"User '%(user)s' created", user=content.nickname), category="success")
|
||||
return redirect(url_for('admin.admin'))
|
||||
except IntegrityError:
|
||||
ub.session.rollback()
|
||||
flash(_(u"Found an existing account for this e-mail address or nickname."), category="error")
|
||||
else:
|
||||
content.role = config.config_default_role
|
||||
content.sidebar_view = config.config_default_show
|
||||
content.mature_content = bool(config.config_default_show & constants.MATURE_CONTENT)
|
||||
return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,
|
||||
languages=languages, title=_(u"Add new user"), page="newuser",
|
||||
registered_oauth=oauth_check)
|
||||
|
||||
|
||||
@admi.route("/admin/mailsettings")
|
||||
@login_required
|
||||
@admin_required
|
||||
def edit_mailsettings():
|
||||
content = config.get_mail_settings()
|
||||
# log.debug("edit_mailsettings %r", content)
|
||||
return render_title_template("email_edit.html", content=content, title=_(u"Edit e-mail server settings"),
|
||||
page="mailset")
|
||||
|
||||
|
||||
@admi.route("/admin/mailsettings", methods=["POST"])
|
||||
@login_required
|
||||
@admin_required
|
||||
def update_mailsettings():
|
||||
to_save = request.form.to_dict()
|
||||
log.debug("update_mailsettings %r", to_save)
|
||||
|
||||
_config_string = lambda x: config.set_from_dictionary(to_save, x, lambda y: y.strip() if y else y)
|
||||
_config_int = lambda x: config.set_from_dictionary(to_save, x, int)
|
||||
|
||||
_config_string("mail_server")
|
||||
_config_int("mail_port")
|
||||
_config_int("mail_use_ssl")
|
||||
_config_string("mail_login")
|
||||
_config_string("mail_password")
|
||||
_config_string("mail_from")
|
||||
config.save()
|
||||
|
||||
if to_save.get("test"):
|
||||
if current_user.kindle_mail:
|
||||
result = send_test_mail(current_user.kindle_mail, current_user.nickname)
|
||||
if result is None:
|
||||
flash(_(u"Test e-mail successfully send to %(kindlemail)s", kindlemail=current_user.kindle_mail),
|
||||
category="success")
|
||||
else:
|
||||
flash(_(u"There was an error sending the Test e-mail: %(res)s", res=result), category="error")
|
||||
else:
|
||||
flash(_(u"Please configure your kindle e-mail address first..."), category="error")
|
||||
else:
|
||||
flash(_(u"E-mail server settings updated"), category="success")
|
||||
|
||||
return edit_mailsettings()
|
||||
|
||||
|
||||
@admi.route("/admin/user/<int:user_id>", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@admin_required
|
||||
def edit_user(user_id):
|
||||
content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first() # type: ub.User
|
||||
downloads = list()
|
||||
languages = speaking_language()
|
||||
translations = babel.list_translations() + [LC('en')]
|
||||
for book in content.downloads:
|
||||
downloadbook = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
|
||||
if downloadbook:
|
||||
downloads.append(downloadbook)
|
||||
else:
|
||||
ub.delete_download(book.book_id)
|
||||
# ub.session.query(ub.Downloads).filter(book.book_id == ub.Downloads.book_id).delete()
|
||||
# ub.session.commit()
|
||||
if request.method == "POST":
|
||||
to_save = request.form.to_dict()
|
||||
if "delete" in to_save:
|
||||
if ub.session.query(ub.User).filter(and_(ub.User.role.op('&')
|
||||
(constants.ROLE_ADMIN)== constants.ROLE_ADMIN,
|
||||
ub.User.id != content.id)).count():
|
||||
ub.session.query(ub.User).filter(ub.User.id == content.id).delete()
|
||||
ub.session.commit()
|
||||
flash(_(u"User '%(nick)s' deleted", nick=content.nickname), category="success")
|
||||
return redirect(url_for('admin.admin'))
|
||||
else:
|
||||
flash(_(u"No admin user remaining, can't delete user", nick=content.nickname), category="error")
|
||||
return redirect(url_for('admin.admin'))
|
||||
else:
|
||||
if "password" in to_save and to_save["password"]:
|
||||
content.password = generate_password_hash(to_save["password"])
|
||||
|
||||
anonymous = content.is_anonymous
|
||||
content.role = constants.selected_roles(to_save)
|
||||
if anonymous:
|
||||
content.role |= constants.ROLE_ANONYMOUS
|
||||
else:
|
||||
content.role &= ~constants.ROLE_ANONYMOUS
|
||||
|
||||
val = [int(k[5:]) for k in to_save if k.startswith('show_')]
|
||||
sidebar = ub.get_sidebar_config()
|
||||
for element in sidebar:
|
||||
value = element['visibility']
|
||||
if value in val and not content.check_visibility(value):
|
||||
content.sidebar_view |= value
|
||||
elif not value in val and content.check_visibility(value):
|
||||
content.sidebar_view &= ~value
|
||||
|
||||
if "Show_detail_random" in to_save:
|
||||
content.sidebar_view |= constants.DETAIL_RANDOM
|
||||
else:
|
||||
content.sidebar_view &= ~constants.DETAIL_RANDOM
|
||||
|
||||
content.mature_content = "Show_mature_content" in to_save
|
||||
|
||||
if "default_language" in to_save:
|
||||
content.default_language = to_save["default_language"]
|
||||
if "locale" in to_save and to_save["locale"]:
|
||||
content.locale = to_save["locale"]
|
||||
if to_save["email"] and to_save["email"] != content.email:
|
||||
existing_email = ub.session.query(ub.User).filter(ub.User.email == to_save["email"].lower()) \
|
||||
.first()
|
||||
if not existing_email:
|
||||
content.email = to_save["email"]
|
||||
else:
|
||||
flash(_(u"Found an existing account for this e-mail address."), category="error")
|
||||
return render_title_template("user_edit.html", translations=translations, languages=languages,
|
||||
new_user=0, content=content, downloads=downloads, registered_oauth=oauth_check,
|
||||
title=_(u"Edit User %(nick)s", nick=content.nickname), page="edituser")
|
||||
|
||||
if "kindle_mail" in to_save and to_save["kindle_mail"] != content.kindle_mail:
|
||||
content.kindle_mail = to_save["kindle_mail"]
|
||||
try:
|
||||
ub.session.commit()
|
||||
flash(_(u"User '%(nick)s' updated", nick=content.nickname), category="success")
|
||||
except IntegrityError:
|
||||
ub.session.rollback()
|
||||
flash(_(u"An unknown error occured."), category="error")
|
||||
return render_title_template("user_edit.html", translations=translations, languages=languages, new_user=0,
|
||||
content=content, downloads=downloads, registered_oauth=oauth_check,
|
||||
title=_(u"Edit User %(nick)s", nick=content.nickname), page="edituser")
|
||||
|
||||
|
||||
@admi.route("/admin/resetpassword/<int:user_id>")
|
||||
@login_required
|
||||
@admin_required
|
||||
def reset_password(user_id):
|
||||
if not config.config_public_reg:
|
||||
abort(404)
|
||||
if current_user is not None and current_user.is_authenticated:
|
||||
existing_user = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
|
||||
password = generate_random_password()
|
||||
existing_user.password = generate_password_hash(password)
|
||||
try:
|
||||
ub.session.commit()
|
||||
send_registration_mail(existing_user.email, existing_user.nickname, password, True)
|
||||
flash(_(u"Password for user %(user)s reset", user=existing_user.nickname), category="success")
|
||||
except Exception:
|
||||
ub.session.rollback()
|
||||
flash(_(u"An unknown error occurred. Please try again later."), category="error")
|
||||
return redirect(url_for('admin.admin'))
|
||||
|
||||
|
||||
@admi.route("/admin/logfile")
|
||||
@login_required
|
||||
@admin_required
|
||||
def view_logfile():
|
||||
logfiles = {}
|
||||
logfiles[0] = logger.get_logfile(config.config_logfile)
|
||||
logfiles[1] = logger.get_accesslogfile(config.config_access_logfile)
|
||||
return render_title_template("logviewer.html",title=_(u"Logfile viewer"), accesslog_enable=config.config_access_log,
|
||||
logfiles=logfiles, page="logfile")
|
||||
|
||||
|
||||
@admi.route("/ajax/log/<int:logtype>")
|
||||
@login_required
|
||||
@admin_required
|
||||
def send_logfile(logtype):
|
||||
if logtype == 1:
|
||||
logfile = logger.get_accesslogfile(config.config_access_logfile)
|
||||
return send_from_directory(os.path.dirname(logfile),
|
||||
os.path.basename(logfile))
|
||||
if logtype == 0:
|
||||
logfile = logger.get_logfile(config.config_logfile)
|
||||
return send_from_directory(os.path.dirname(logfile),
|
||||
os.path.basename(logfile))
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
@admi.route("/get_update_status", methods=['GET'])
|
||||
@login_required_if_no_ano
|
||||
def get_update_status():
|
||||
return updater_thread.get_available_updates(request.method)
|
||||
|
||||
|
||||
@admi.route("/get_updater_status", methods=['GET', 'POST'])
|
||||
@login_required
|
||||
@admin_required
|
||||
def get_updater_status():
|
||||
status = {}
|
||||
if request.method == "POST":
|
||||
commit = request.form.to_dict()
|
||||
if "start" in commit and commit['start'] == 'True':
|
||||
text = {
|
||||
"1": _(u'Requesting update package'),
|
||||
"2": _(u'Downloading update package'),
|
||||
"3": _(u'Unzipping update package'),
|
||||
"4": _(u'Replacing files'),
|
||||
"5": _(u'Database connections are closed'),
|
||||
"6": _(u'Stopping server'),
|
||||
"7": _(u'Update finished, please press okay and reload page'),
|
||||
"8": _(u'Update failed:') + u' ' + _(u'HTTP Error'),
|
||||
"9": _(u'Update failed:') + u' ' + _(u'Connection error'),
|
||||
"10": _(u'Update failed:') + u' ' + _(u'Timeout while establishing connection'),
|
||||
"11": _(u'Update failed:') + u' ' + _(u'General error')
|
||||
}
|
||||
status['text'] = text
|
||||
updater_thread.status = 0
|
||||
updater_thread.start()
|
||||
status['status'] = updater_thread.get_update_status()
|
||||
elif request.method == "GET":
|
||||
try:
|
||||
status['status'] = updater_thread.get_update_status()
|
||||
if status['status'] == -1:
|
||||
status['status'] = 7
|
||||
except Exception:
|
||||
status['status'] = 11
|
||||
return json.dumps(status)
|
@ -1,217 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2016-2019 lemmsh cervinko Kennyl matthazinski OzzieIsaacs
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import uploader
|
||||
import os
|
||||
from flask_babel import gettext as _
|
||||
import comic
|
||||
|
||||
try:
|
||||
from lxml.etree import LXML_VERSION as lxmlversion
|
||||
except ImportError:
|
||||
lxmlversion = None
|
||||
|
||||
__author__ = 'lemmsh'
|
||||
|
||||
logger = logging.getLogger("book_formats")
|
||||
|
||||
try:
|
||||
from wand.image import Image
|
||||
from wand import version as ImageVersion
|
||||
from wand.exceptions import PolicyError
|
||||
use_generic_pdf_cover = False
|
||||
except (ImportError, RuntimeError) as e:
|
||||
logger.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e)
|
||||
use_generic_pdf_cover = True
|
||||
try:
|
||||
from PyPDF2 import PdfFileReader
|
||||
from PyPDF2 import __version__ as PyPdfVersion
|
||||
use_pdf_meta = True
|
||||
except ImportError as e:
|
||||
logger.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e)
|
||||
use_pdf_meta = False
|
||||
|
||||
try:
|
||||
import epub
|
||||
use_epub_meta = True
|
||||
except ImportError as e:
|
||||
logger.warning('cannot import epub, extracting epub metadata will not work: %s', e)
|
||||
use_epub_meta = False
|
||||
|
||||
try:
|
||||
import fb2
|
||||
use_fb2_meta = True
|
||||
except ImportError as e:
|
||||
logger.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e)
|
||||
use_fb2_meta = False
|
||||
|
||||
try:
|
||||
from PIL import Image
|
||||
from PIL import __version__ as PILversion
|
||||
use_PIL = True
|
||||
except ImportError:
|
||||
use_PIL = False
|
||||
|
||||
|
||||
def process(tmp_file_path, original_file_name, original_file_extension):
|
||||
meta = None
|
||||
try:
|
||||
if ".PDF" == original_file_extension.upper():
|
||||
meta = pdf_meta(tmp_file_path, original_file_name, original_file_extension)
|
||||
if ".EPUB" == original_file_extension.upper() and use_epub_meta is True:
|
||||
meta = epub.get_epub_info(tmp_file_path, original_file_name, original_file_extension)
|
||||
if ".FB2" == original_file_extension.upper() and use_fb2_meta is True:
|
||||
meta = fb2.get_fb2_info(tmp_file_path, original_file_extension)
|
||||
if original_file_extension.upper() in ['.CBZ', '.CBT']:
|
||||
meta = comic.get_comic_info(tmp_file_path, original_file_name, original_file_extension)
|
||||
|
||||
except Exception as ex:
|
||||
logger.warning('cannot parse metadata, using default: %s', ex)
|
||||
|
||||
if meta and meta.title.strip() and meta.author.strip():
|
||||
return meta
|
||||
else:
|
||||
return default_meta(tmp_file_path, original_file_name, original_file_extension)
|
||||
|
||||
|
||||
def default_meta(tmp_file_path, original_file_name, original_file_extension):
|
||||
return uploader.BookMeta(
|
||||
file_path=tmp_file_path,
|
||||
extension=original_file_extension,
|
||||
title=original_file_name,
|
||||
author=u"Unknown",
|
||||
cover=None,
|
||||
description="",
|
||||
tags="",
|
||||
series="",
|
||||
series_id="",
|
||||
languages="")
|
||||
|
||||
|
||||
def pdf_meta(tmp_file_path, original_file_name, original_file_extension):
|
||||
|
||||
if use_pdf_meta:
|
||||
pdf = PdfFileReader(open(tmp_file_path, 'rb'), strict=False)
|
||||
doc_info = pdf.getDocumentInfo()
|
||||
else:
|
||||
doc_info = None
|
||||
|
||||
if doc_info is not None:
|
||||
author = doc_info.author if doc_info.author else u"Unknown"
|
||||
title = doc_info.title if doc_info.title else original_file_name
|
||||
subject = doc_info.subject
|
||||
else:
|
||||
author = u"Unknown"
|
||||
title = original_file_name
|
||||
subject = ""
|
||||
return uploader.BookMeta(
|
||||
file_path=tmp_file_path,
|
||||
extension=original_file_extension,
|
||||
title=title,
|
||||
author=author,
|
||||
cover=pdf_preview(tmp_file_path, original_file_name),
|
||||
description=subject,
|
||||
tags="",
|
||||
series="",
|
||||
series_id="",
|
||||
languages="")
|
||||
|
||||
|
||||
def pdf_preview(tmp_file_path, tmp_dir):
|
||||
if use_generic_pdf_cover:
|
||||
return None
|
||||
else:
|
||||
if use_PIL:
|
||||
try:
|
||||
input1 = PdfFileReader(open(tmp_file_path, 'rb'), strict=False)
|
||||
page0 = input1.getPage(0)
|
||||
xObject = page0['/Resources']['/XObject'].getObject()
|
||||
|
||||
for obj in xObject:
|
||||
if xObject[obj]['/Subtype'] == '/Image':
|
||||
size = (xObject[obj]['/Width'], xObject[obj]['/Height'])
|
||||
data = xObject[obj]._data # xObject[obj].getData()
|
||||
if xObject[obj]['/ColorSpace'] == '/DeviceRGB':
|
||||
mode = "RGB"
|
||||
else:
|
||||
mode = "P"
|
||||
if '/Filter' in xObject[obj]:
|
||||
if xObject[obj]['/Filter'] == '/FlateDecode':
|
||||
img = Image.frombytes(mode, size, data)
|
||||
cover_file_name = os.path.splitext(tmp_file_path)[0] + ".cover.png"
|
||||
img.save(filename=os.path.join(tmp_dir, cover_file_name))
|
||||
return cover_file_name
|
||||
# img.save(obj[1:] + ".png")
|
||||
elif xObject[obj]['/Filter'] == '/DCTDecode':
|
||||
cover_file_name = os.path.splitext(tmp_file_path)[0] + ".cover.jpg"
|
||||
img = open(cover_file_name, "wb")
|
||||
img.write(data)
|
||||
img.close()
|
||||
return cover_file_name
|
||||
elif xObject[obj]['/Filter'] == '/JPXDecode':
|
||||
cover_file_name = os.path.splitext(tmp_file_path)[0] + ".cover.jp2"
|
||||
img = open(cover_file_name, "wb")
|
||||
img.write(data)
|
||||
img.close()
|
||||
return cover_file_name
|
||||
else:
|
||||
img = Image.frombytes(mode, size, data)
|
||||
cover_file_name = os.path.splitext(tmp_file_path)[0] + ".cover.png"
|
||||
img.save(filename=os.path.join(tmp_dir, cover_file_name))
|
||||
return cover_file_name
|
||||
except Exception as ex:
|
||||
print(ex)
|
||||
try:
|
||||
cover_file_name = os.path.splitext(tmp_file_path)[0] + ".cover.jpg"
|
||||
with Image(filename=tmp_file_path + "[0]", resolution=150) as img:
|
||||
img.compression_quality = 88
|
||||
img.save(filename=os.path.join(tmp_dir, cover_file_name))
|
||||
return cover_file_name
|
||||
except PolicyError as ex:
|
||||
logger.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex)
|
||||
return None
|
||||
except Exception as ex:
|
||||
logger.warning('Cannot extract cover image, using default: %s', ex)
|
||||
return None
|
||||
|
||||
def get_versions():
|
||||
if not use_generic_pdf_cover:
|
||||
IVersion = ImageVersion.MAGICK_VERSION
|
||||
WVersion = ImageVersion.VERSION
|
||||
else:
|
||||
IVersion = _(u'not installed')
|
||||
WVersion = _(u'not installed')
|
||||
if use_pdf_meta:
|
||||
PVersion='v'+PyPdfVersion
|
||||
else:
|
||||
PVersion=_(u'not installed')
|
||||
if lxmlversion:
|
||||
XVersion = 'v'+'.'.join(map(str, lxmlversion))
|
||||
else:
|
||||
XVersion = _(u'not installed')
|
||||
if use_PIL:
|
||||
PILVersion = 'v' + PILversion
|
||||
else:
|
||||
PILVersion = _(u'not installed')
|
||||
return {'Image Magick': IVersion,
|
||||
'PyPdf': PVersion,
|
||||
'lxml':XVersion,
|
||||
'Wand': WVersion,
|
||||
'Pillow': PILVersion}
|
@ -17,8 +17,14 @@
|
||||
# Inspired by https://github.com/ChrisTM/Flask-CacheBust
|
||||
# Uses query strings so CSS font files are found without having to resort to absolute URLs
|
||||
|
||||
import hashlib
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import hashlib
|
||||
|
||||
from . import logger
|
||||
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
||||
def init_cache_busting(app):
|
||||
@ -34,7 +40,7 @@ def init_cache_busting(app):
|
||||
|
||||
hash_table = {} # map of file hashes
|
||||
|
||||
app.logger.debug('Computing cache-busting values...')
|
||||
log.debug('Computing cache-busting values...')
|
||||
# compute file hashes
|
||||
for dirpath, __, filenames in os.walk(static_folder):
|
||||
for filename in filenames:
|
||||
@ -47,7 +53,7 @@ def init_cache_busting(app):
|
||||
file_path = rooted_filename.replace(static_folder, "")
|
||||
file_path = file_path.replace("\\", "/") # Convert Windows path to web path
|
||||
hash_table[file_path] = file_hash
|
||||
app.logger.debug('Finished computing cache-busting values')
|
||||
log.debug('Finished computing cache-busting values')
|
||||
|
||||
def bust_filename(filename):
|
||||
return hash_table.get(filename, "")
|
||||
|
95
cps/cli.py
@ -18,30 +18,87 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
from .constants import CONFIG_DIR as _CONFIG_DIR
|
||||
from .constants import STABLE_VERSION as _STABLE_VERSION
|
||||
from .constants import NIGHTLY_VERSION as _NIGHTLY_VERSION
|
||||
|
||||
VALID_CHARACTERS = 'ABCDEFabcdef:0123456789'
|
||||
|
||||
ipv6 = False
|
||||
|
||||
|
||||
def version_info():
|
||||
if _NIGHTLY_VERSION[1].startswith('$Format'):
|
||||
return "Calibre-Web version: %s - unkown git-clone" % _STABLE_VERSION['version']
|
||||
else:
|
||||
return "Calibre-Web version: %s -%s" % (_STABLE_VERSION['version'],_NIGHTLY_VERSION[1])
|
||||
|
||||
|
||||
def validate_ip4(address):
|
||||
address_list = address.split('.')
|
||||
if len(address_list) != 4:
|
||||
return False
|
||||
for val in address_list:
|
||||
if not val.isdigit():
|
||||
return False
|
||||
i = int(val)
|
||||
if i < 0 or i > 255:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def validate_ip6(address):
|
||||
address_list = address.split(':')
|
||||
return (
|
||||
len(address_list) == 8
|
||||
and all(len(current) <= 4 for current in address_list)
|
||||
and all(current in VALID_CHARACTERS for current in address)
|
||||
)
|
||||
|
||||
|
||||
def validate_ip(address):
|
||||
if validate_ip4(address) or ipv6:
|
||||
return address
|
||||
print("IP address is invalid. Exiting")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(description='Calibre Web is a web app'
|
||||
' providing a interface for browsing, reading and downloading eBooks\n', prog='cps.py')
|
||||
parser.add_argument('-p', metavar='path', help='path and name to settings db, e.g. /opt/cw.db')
|
||||
parser.add_argument('-g', metavar='path', help='path and name to gdrive db, e.g. /opt/gd.db')
|
||||
parser.add_argument('-c', metavar='path', help='path and name to SSL certfile, e.g. /opt/test.cert, works only in combination with keyfile')
|
||||
parser.add_argument('-k', metavar='path', help='path and name to SSL keyfile, e.g. /opt/test.key, works only in combination with certfile')
|
||||
parser.add_argument('-c', metavar='path',
|
||||
help='path and name to SSL certfile, e.g. /opt/test.cert, works only in combination with keyfile')
|
||||
parser.add_argument('-k', metavar='path',
|
||||
help='path and name to SSL keyfile, e.g. /opt/test.key, works only in combination with certfile')
|
||||
parser.add_argument('-v', '--version', action='version', help='Shows version number and exits Calibre-web',
|
||||
version=version_info())
|
||||
parser.add_argument('-i', metavar='ip-adress', help='Server IP-Adress to listen')
|
||||
parser.add_argument('-s', metavar='user:pass', help='Sets specific username to new password')
|
||||
args = parser.parse_args()
|
||||
|
||||
generalPath = os.path.normpath(os.getenv("CALIBRE_DBPATH",
|
||||
os.path.dirname(os.path.realpath(__file__)) + os.sep + ".." + os.sep))
|
||||
if args.p:
|
||||
settingspath = args.p
|
||||
else:
|
||||
settingspath = os.path.join(generalPath, "app.db")
|
||||
if sys.version_info < (3, 0):
|
||||
if args.p:
|
||||
args.p = args.p.decode('utf-8')
|
||||
if args.g:
|
||||
args.g = args.g.decode('utf-8')
|
||||
if args.k:
|
||||
args.k = args.k.decode('utf-8')
|
||||
if args.c:
|
||||
args.c = args.c.decode('utf-8')
|
||||
if args.s:
|
||||
args.s = args.s.decode('utf-8')
|
||||
|
||||
if args.g:
|
||||
gdpath = args.g
|
||||
else:
|
||||
gdpath = os.path.join(generalPath, "gdrive.db")
|
||||
|
||||
settingspath = args.p or os.path.join(_CONFIG_DIR, "app.db")
|
||||
gdpath = args.g or os.path.join(_CONFIG_DIR, "gdrive.db")
|
||||
|
||||
# handle and check parameter for ssl encryption
|
||||
certfilepath = None
|
||||
keyfilepath = None
|
||||
if args.c:
|
||||
@ -67,3 +124,13 @@ if (args.k and not args.c) or (not args.k and args.c):
|
||||
|
||||
if args.k is "":
|
||||
keyfilepath = ""
|
||||
|
||||
# handle and check ipadress argument
|
||||
if args.i:
|
||||
ipv6 = validate_ip6(args.i)
|
||||
ipadress = validate_ip(args.i)
|
||||
else:
|
||||
ipadress = None
|
||||
|
||||
# handle and check user password argument
|
||||
user_password = args.s or None
|
||||
|
16
cps/comic.py
@ -17,19 +17,21 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import uploader
|
||||
import logging
|
||||
from iso639 import languages as isoLanguages
|
||||
|
||||
from . import logger, isoLanguages
|
||||
from .constants import BookMeta
|
||||
|
||||
|
||||
logger = logging.getLogger("book_formats")
|
||||
log = logger.create()
|
||||
|
||||
|
||||
try:
|
||||
from comicapi.comicarchive import ComicArchive, MetaDataStyle
|
||||
use_comic_meta = True
|
||||
except ImportError as e:
|
||||
logger.warning('cannot import comicapi, extracting comic metadata will not work: %s', e)
|
||||
log.warning('cannot import comicapi, extracting comic metadata will not work: %s', e)
|
||||
import zipfile
|
||||
import tarfile
|
||||
use_comic_meta = False
|
||||
@ -96,7 +98,7 @@ def get_comic_info(tmp_file_path, original_file_name, original_file_extension):
|
||||
else:
|
||||
loadedMetadata.language = ""
|
||||
|
||||
return uploader.BookMeta(
|
||||
return BookMeta(
|
||||
file_path=tmp_file_path,
|
||||
extension=original_file_extension,
|
||||
title=loadedMetadata.title or original_file_name,
|
||||
@ -109,7 +111,7 @@ def get_comic_info(tmp_file_path, original_file_name, original_file_extension):
|
||||
languages=loadedMetadata.language)
|
||||
else:
|
||||
|
||||
return uploader.BookMeta(
|
||||
return BookMeta(
|
||||
file_path=tmp_file_path,
|
||||
extension=original_file_extension,
|
||||
title=original_file_name,
|
||||
|
287
cps/config_sql.py
Normal file
@ -0,0 +1,287 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2019 OzzieIsaacs, pwr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import json
|
||||
|
||||
from sqlalchemy import exc, Column, String, Integer, SmallInteger, Boolean
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from . import constants, cli, logger
|
||||
|
||||
|
||||
log = logger.create()
|
||||
_Base = declarative_base()
|
||||
|
||||
|
||||
# Baseclass for representing settings in app.db with email server settings and Calibre database settings
|
||||
# (application settings)
|
||||
class _Settings(_Base):
|
||||
__tablename__ = 'settings'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
mail_server = Column(String, default='mail.example.org')
|
||||
mail_port = Column(Integer, default=25)
|
||||
mail_use_ssl = Column(SmallInteger, default=0)
|
||||
mail_login = Column(String, default='mail@example.com')
|
||||
mail_password = Column(String, default='mypassword')
|
||||
mail_from = Column(String, default='automailer <mail@example.com>')
|
||||
config_calibre_dir = Column(String)
|
||||
config_port = Column(Integer, default=constants.DEFAULT_PORT)
|
||||
config_certfile = Column(String)
|
||||
config_keyfile = Column(String)
|
||||
config_calibre_web_title = Column(String, default=u'Calibre-Web')
|
||||
config_books_per_page = Column(Integer, default=60)
|
||||
config_random_books = Column(Integer, default=4)
|
||||
config_authors_max = Column(Integer, default=0)
|
||||
config_read_column = Column(Integer, default=0)
|
||||
config_title_regex = Column(String, default=u'^(A|The|An|Der|Die|Das|Den|Ein|Eine|Einen|Dem|Des|Einem|Eines)\s+')
|
||||
config_log_level = Column(SmallInteger, default=logger.DEFAULT_LOG_LEVEL)
|
||||
config_access_log = Column(SmallInteger, default=0)
|
||||
config_uploading = Column(SmallInteger, default=0)
|
||||
config_anonbrowse = Column(SmallInteger, default=0)
|
||||
config_public_reg = Column(SmallInteger, default=0)
|
||||
config_default_role = Column(SmallInteger, default=0)
|
||||
config_default_show = Column(SmallInteger, default=6143)
|
||||
config_columns_to_ignore = Column(String)
|
||||
config_use_google_drive = Column(Boolean, default=False)
|
||||
config_google_drive_folder = Column(String)
|
||||
config_google_drive_watch_changes_response = Column(String)
|
||||
config_remote_login = Column(Boolean, default=False)
|
||||
config_use_goodreads = Column(Boolean, default=False)
|
||||
config_goodreads_api_key = Column(String)
|
||||
config_goodreads_api_secret = Column(String)
|
||||
config_login_type = Column(Integer, default=0)
|
||||
# config_use_ldap = Column(Boolean)
|
||||
config_ldap_provider_url = Column(String)
|
||||
config_ldap_dn = Column(String)
|
||||
# config_use_github_oauth = Column(Boolean)
|
||||
config_github_oauth_client_id = Column(String)
|
||||
config_github_oauth_client_secret = Column(String)
|
||||
# config_use_google_oauth = Column(Boolean)
|
||||
config_google_oauth_client_id = Column(String)
|
||||
config_google_oauth_client_secret = Column(String)
|
||||
config_ldap_provider_url = Column(String, default='localhost')
|
||||
config_ldap_port = Column(SmallInteger, default=389)
|
||||
config_ldap_schema = Column(String, default='ldap')
|
||||
config_ldap_serv_username = Column(String)
|
||||
config_ldap_serv_password = Column(String)
|
||||
config_ldap_use_ssl = Column(Boolean, default=False)
|
||||
config_ldap_use_tls = Column(Boolean, default=False)
|
||||
config_ldap_require_cert = Column(Boolean, default=False)
|
||||
config_ldap_cert_path = Column(String)
|
||||
config_ldap_dn = Column(String)
|
||||
config_ldap_user_object = Column(String)
|
||||
config_ldap_openldap = Column(Boolean, default=False)
|
||||
config_mature_content_tags = Column(String, default='')
|
||||
config_logfile = Column(String)
|
||||
config_access_logfile = Column(String)
|
||||
config_ebookconverter = Column(Integer, default=0)
|
||||
config_converterpath = Column(String)
|
||||
config_calibre = Column(String)
|
||||
config_rarfile_location = Column(String)
|
||||
config_theme = Column(Integer, default=0)
|
||||
config_updatechannel = Column(Integer, default=constants.UPDATE_STABLE)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__class__.__name__
|
||||
|
||||
|
||||
# Class holds all application specific settings in calibre-web
|
||||
class _ConfigSQL(object):
|
||||
# pylint: disable=no-member
|
||||
def __init__(self, session):
|
||||
self._session = session
|
||||
self._settings = None
|
||||
self.db_configured = None
|
||||
self.config_calibre_dir = None
|
||||
self.load()
|
||||
|
||||
def _read_from_storage(self):
|
||||
if self._settings is None:
|
||||
log.debug("_ConfigSQL._read_from_storage")
|
||||
self._settings = self._session.query(_Settings).first()
|
||||
return self._settings
|
||||
|
||||
def get_config_certfile(self):
|
||||
if cli.certfilepath:
|
||||
return cli.certfilepath
|
||||
if cli.certfilepath == "":
|
||||
return None
|
||||
return self.config_certfile
|
||||
|
||||
def get_config_keyfile(self):
|
||||
if cli.keyfilepath:
|
||||
return cli.keyfilepath
|
||||
if cli.certfilepath == "":
|
||||
return None
|
||||
return self.config_keyfile
|
||||
|
||||
def get_config_ipaddress(self):
|
||||
return cli.ipadress or ""
|
||||
|
||||
def get_ipaddress_type(self):
|
||||
return cli.ipv6
|
||||
|
||||
def _has_role(self, role_flag):
|
||||
return constants.has_flag(self.config_default_role, role_flag)
|
||||
|
||||
def role_admin(self):
|
||||
return self._has_role(constants.ROLE_ADMIN)
|
||||
|
||||
def role_download(self):
|
||||
return self._has_role(constants.ROLE_DOWNLOAD)
|
||||
|
||||
def role_viewer(self):
|
||||
return self._has_role(constants.ROLE_VIEWER)
|
||||
|
||||
def role_upload(self):
|
||||
return self._has_role(constants.ROLE_UPLOAD)
|
||||
|
||||
def role_edit(self):
|
||||
return self._has_role(constants.ROLE_EDIT)
|
||||
|
||||
def role_passwd(self):
|
||||
return self._has_role(constants.ROLE_PASSWD)
|
||||
|
||||
def role_edit_shelfs(self):
|
||||
return self._has_role(constants.ROLE_EDIT_SHELFS)
|
||||
|
||||
def role_delete_books(self):
|
||||
return self._has_role(constants.ROLE_DELETE_BOOKS)
|
||||
|
||||
def show_element_new_user(self, value):
|
||||
return constants.has_flag(self.config_default_show, value)
|
||||
|
||||
def show_detail_random(self):
|
||||
return self.show_element_new_user(constants.DETAIL_RANDOM)
|
||||
|
||||
def show_mature_content(self):
|
||||
return self.show_element_new_user(constants.MATURE_CONTENT)
|
||||
|
||||
def mature_content_tags(self):
|
||||
mct = self.config_mature_content_tags.split(",")
|
||||
return [t.strip() for t in mct]
|
||||
|
||||
def get_log_level(self):
|
||||
return logger.get_level_name(self.config_log_level)
|
||||
|
||||
def get_mail_settings(self):
|
||||
return {k:v for k, v in self.__dict__.items() if k.startswith('mail_')}
|
||||
|
||||
def set_from_dictionary(self, dictionary, field, convertor=None, default=None):
|
||||
'''Possibly updates a field of this object.
|
||||
The new value, if present, is grabbed from the given dictionary, and optionally passed through a convertor.
|
||||
|
||||
:returns: `True` if the field has changed value
|
||||
'''
|
||||
new_value = dictionary.get(field, default)
|
||||
if new_value is None:
|
||||
# log.debug("_ConfigSQL set_from_dictionary field '%s' not found", field)
|
||||
return False
|
||||
|
||||
if field not in self.__dict__:
|
||||
log.warning("_ConfigSQL trying to set unknown field '%s' = %r", field, new_value)
|
||||
return False
|
||||
|
||||
if convertor is not None:
|
||||
new_value = convertor(new_value)
|
||||
|
||||
current_value = self.__dict__.get(field)
|
||||
if current_value == new_value:
|
||||
return False
|
||||
|
||||
# log.debug("_ConfigSQL set_from_dictionary '%s' = %r (was %r)", field, new_value, current_value)
|
||||
setattr(self, field, new_value)
|
||||
return True
|
||||
|
||||
def load(self):
|
||||
'''Load all configuration values from the underlying storage.'''
|
||||
s = self._read_from_storage() # type: _Settings
|
||||
for k, v in s.__dict__.items():
|
||||
if k[0] != '_':
|
||||
if v is None:
|
||||
# if the storage column has no value, apply the (possible) default
|
||||
column = s.__class__.__dict__.get(k)
|
||||
if column.default is not None:
|
||||
v = column.default.arg
|
||||
setattr(self, k, v)
|
||||
|
||||
if self.config_google_drive_watch_changes_response:
|
||||
self.config_google_drive_watch_changes_response = json.loads(self.config_google_drive_watch_changes_response)
|
||||
self.db_configured = (self.config_calibre_dir and
|
||||
(not self.config_use_google_drive or os.path.exists(self.config_calibre_dir + '/metadata.db')))
|
||||
logger.setup(self.config_logfile, self.config_log_level)
|
||||
|
||||
def save(self):
|
||||
'''Apply all configuration values to the underlying storage.'''
|
||||
s = self._read_from_storage() # type: _Settings
|
||||
|
||||
for k, v in self.__dict__.items():
|
||||
if k[0] == '_':
|
||||
continue
|
||||
if hasattr(s, k): # and getattr(s, k, None) != v:
|
||||
# log.debug("_Settings save '%s' = %r", k, v)
|
||||
setattr(s, k, v)
|
||||
|
||||
log.debug("_ConfigSQL updating storage")
|
||||
self._session.merge(s)
|
||||
self._session.commit()
|
||||
self.load()
|
||||
|
||||
def invalidate(self):
|
||||
log.warning("invalidating configuration")
|
||||
self.db_configured = False
|
||||
self.config_calibre_dir = None
|
||||
self.save()
|
||||
|
||||
|
||||
def _migrate_table(session, orm_class):
|
||||
changed = False
|
||||
|
||||
for column_name, column in orm_class.__dict__.items():
|
||||
if column_name[0] != '_':
|
||||
try:
|
||||
session.query(column).first()
|
||||
except exc.OperationalError as err:
|
||||
log.debug("%s: %s", column_name, err)
|
||||
column_default = "" if column.default is None else ("DEFAULT %r" % column.default.arg)
|
||||
alter_table = "ALTER TABLE %s ADD COLUMN `%s` %s %s" % (orm_class.__tablename__, column_name, column.type, column_default)
|
||||
session.execute(alter_table)
|
||||
changed = True
|
||||
|
||||
if changed:
|
||||
session.commit()
|
||||
|
||||
|
||||
def _migrate_database(session):
|
||||
# make sure the table is created, if it does not exist
|
||||
_Base.metadata.create_all(session.bind)
|
||||
_migrate_table(session, _Settings)
|
||||
|
||||
|
||||
def load_configuration(session):
|
||||
_migrate_database(session)
|
||||
|
||||
if not session.query(_Settings).count():
|
||||
session.add(_Settings())
|
||||
session.commit()
|
||||
|
||||
return _ConfigSQL(session)
|
131
cps/constants.py
Normal file
@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2019 OzzieIsaacs, pwr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import os
|
||||
from collections import namedtuple
|
||||
|
||||
|
||||
# Base dir is parent of current file, necessary if called from different folder
|
||||
if sys.version_info < (3, 0):
|
||||
BASE_DIR = os.path.abspath(os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)),os.pardir)).decode('utf-8')
|
||||
else:
|
||||
BASE_DIR = os.path.abspath(os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)),os.pardir))
|
||||
STATIC_DIR = os.path.join(BASE_DIR, 'cps', 'static')
|
||||
TEMPLATES_DIR = os.path.join(BASE_DIR, 'cps', 'templates')
|
||||
TRANSLATIONS_DIR = os.path.join(BASE_DIR, 'cps', 'translations')
|
||||
CONFIG_DIR = os.environ.get('CALIBRE_DBPATH', BASE_DIR)
|
||||
|
||||
|
||||
ROLE_USER = 0 << 0
|
||||
ROLE_ADMIN = 1 << 0
|
||||
ROLE_DOWNLOAD = 1 << 1
|
||||
ROLE_UPLOAD = 1 << 2
|
||||
ROLE_EDIT = 1 << 3
|
||||
ROLE_PASSWD = 1 << 4
|
||||
ROLE_ANONYMOUS = 1 << 5
|
||||
ROLE_EDIT_SHELFS = 1 << 6
|
||||
ROLE_DELETE_BOOKS = 1 << 7
|
||||
ROLE_VIEWER = 1 << 8
|
||||
|
||||
ALL_ROLES = {
|
||||
"admin_role": ROLE_ADMIN,
|
||||
"download_role": ROLE_DOWNLOAD,
|
||||
"upload_role": ROLE_UPLOAD,
|
||||
"edit_role": ROLE_EDIT,
|
||||
"passwd_role": ROLE_PASSWD,
|
||||
"edit_shelf_role": ROLE_EDIT_SHELFS,
|
||||
"delete_role": ROLE_DELETE_BOOKS,
|
||||
"viewer_role": ROLE_VIEWER,
|
||||
}
|
||||
|
||||
DETAIL_RANDOM = 1 << 0
|
||||
SIDEBAR_LANGUAGE = 1 << 1
|
||||
SIDEBAR_SERIES = 1 << 2
|
||||
SIDEBAR_CATEGORY = 1 << 3
|
||||
SIDEBAR_HOT = 1 << 4
|
||||
SIDEBAR_RANDOM = 1 << 5
|
||||
SIDEBAR_AUTHOR = 1 << 6
|
||||
SIDEBAR_BEST_RATED = 1 << 7
|
||||
SIDEBAR_READ_AND_UNREAD = 1 << 8
|
||||
SIDEBAR_RECENT = 1 << 9
|
||||
SIDEBAR_SORTED = 1 << 10
|
||||
MATURE_CONTENT = 1 << 11
|
||||
SIDEBAR_PUBLISHER = 1 << 12
|
||||
SIDEBAR_RATING = 1 << 13
|
||||
SIDEBAR_FORMAT = 1 << 14
|
||||
|
||||
ADMIN_USER_ROLES = sum(r for r in ALL_ROLES.values()) & ~ROLE_EDIT_SHELFS & ~ROLE_ANONYMOUS
|
||||
ADMIN_USER_SIDEBAR = (SIDEBAR_FORMAT << 1) - 1
|
||||
|
||||
UPDATE_STABLE = 0 << 0
|
||||
AUTO_UPDATE_STABLE = 1 << 0
|
||||
UPDATE_NIGHTLY = 1 << 1
|
||||
AUTO_UPDATE_NIGHTLY = 1 << 2
|
||||
|
||||
LOGIN_STANDARD = 0
|
||||
LOGIN_LDAP = 1
|
||||
LOGIN_OAUTH_GITHUB = 2
|
||||
LOGIN_OAUTH_GOOGLE = 3
|
||||
|
||||
|
||||
DEFAULT_PASSWORD = "admin123"
|
||||
DEFAULT_PORT = 8083
|
||||
try:
|
||||
env_CALIBRE_PORT = os.environ.get("CALIBRE_PORT", DEFAULT_PORT)
|
||||
DEFAULT_PORT = int(env_CALIBRE_PORT)
|
||||
except ValueError:
|
||||
print('Environment variable CALIBRE_PORT has invalid value (%s), faling back to default (8083)' % env_CALIBRE_PORT)
|
||||
del env_CALIBRE_PORT
|
||||
|
||||
|
||||
EXTENSIONS_AUDIO = {'mp3', 'm4a', 'm4b'}
|
||||
EXTENSIONS_CONVERT = {'pdf', 'epub', 'mobi', 'azw3', 'docx', 'rtf', 'fb2', 'lit', 'lrf', 'txt', 'htmlz', 'rtf', 'odt'}
|
||||
EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc', 'docx',
|
||||
'fb2', 'html', 'rtf', 'odt', 'mp3', 'm4a', 'm4b'}
|
||||
# EXTENSIONS_READER = set(['txt', 'pdf', 'epub', 'zip', 'cbz', 'tar', 'cbt'] +
|
||||
# (['rar','cbr'] if feature_support['rar'] else []))
|
||||
|
||||
|
||||
def has_flag(value, bit_flag):
|
||||
return bit_flag == (bit_flag & (value or 0))
|
||||
|
||||
def selected_roles(dictionary):
|
||||
return sum(v for k, v in ALL_ROLES.items() if k in dictionary)
|
||||
|
||||
|
||||
# :rtype: BookMeta
|
||||
BookMeta = namedtuple('BookMeta', 'file_path, extension, title, author, cover, description, tags, series, '
|
||||
'series_id, languages')
|
||||
|
||||
STABLE_VERSION = {'version': '0.6.4 Beta'}
|
||||
|
||||
NIGHTLY_VERSION = {}
|
||||
NIGHTLY_VERSION[0] = '$Format:%H$'
|
||||
NIGHTLY_VERSION[1] = '$Format:%cI$'
|
||||
# NIGHTLY_VERSION[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57'
|
||||
# NIGHTLY_VERSION[1] = '2018-09-09T10:13:08+02:00'
|
||||
|
||||
|
||||
# clean-up the module namespace
|
||||
del sys, os, namedtuple
|
||||
|
@ -17,23 +17,21 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import subprocess
|
||||
import ub
|
||||
import re
|
||||
|
||||
from flask_babel import gettext as _
|
||||
|
||||
from . import config
|
||||
from .subproc_wrapper import process_wait
|
||||
|
||||
|
||||
def versionKindle():
|
||||
versions = _(u'not installed')
|
||||
if os.path.exists(ub.config.config_converterpath):
|
||||
if os.path.exists(config.config_converterpath):
|
||||
try:
|
||||
p = subprocess.Popen(ub.config.config_converterpath, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
p.wait()
|
||||
for lines in p.stdout.readlines():
|
||||
if isinstance(lines, bytes):
|
||||
lines = lines.decode('utf-8')
|
||||
for lines in process_wait(config.config_converterpath):
|
||||
if re.search('Amazon kindlegen\(', lines):
|
||||
versions = lines
|
||||
except Exception:
|
||||
@ -43,13 +41,9 @@ def versionKindle():
|
||||
|
||||
def versionCalibre():
|
||||
versions = _(u'not installed')
|
||||
if os.path.exists(ub.config.config_converterpath):
|
||||
if os.path.exists(config.config_converterpath):
|
||||
try:
|
||||
p = subprocess.Popen([ub.config.config_converterpath, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
p.wait()
|
||||
for lines in p.stdout.readlines():
|
||||
if isinstance(lines, bytes):
|
||||
lines = lines.decode('utf-8')
|
||||
for lines in process_wait([config.config_converterpath, '--version']):
|
||||
if re.search('ebook-convert.*\(calibre', lines):
|
||||
versions = lines
|
||||
except Exception:
|
||||
@ -58,9 +52,9 @@ def versionCalibre():
|
||||
|
||||
|
||||
def versioncheck():
|
||||
if ub.config.config_ebookconverter == 1:
|
||||
if config.config_ebookconverter == 1:
|
||||
return versionKindle()
|
||||
elif ub.config.config_ebookconverter == 2:
|
||||
elif config.config_ebookconverter == 2:
|
||||
return versionCalibre()
|
||||
else:
|
||||
return {'ebook_converter':_(u'not configured')}
|
||||
|
131
cps/db.py
@ -18,40 +18,22 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from sqlalchemy import *
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import *
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import ast
|
||||
from ub import config
|
||||
import ub
|
||||
import sys
|
||||
import unidecode
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import Table, Column, ForeignKey
|
||||
from sqlalchemy import String, Integer, Boolean
|
||||
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
|
||||
session = None
|
||||
cc_exceptions = ['datetime', 'comments', 'float', 'composite', 'series']
|
||||
cc_classes = None
|
||||
engine = None
|
||||
|
||||
|
||||
# user defined sort function for calibre databases (Series, etc.)
|
||||
def title_sort(title):
|
||||
# calibre sort stuff
|
||||
title_pat = re.compile(config.config_title_regex, re.IGNORECASE)
|
||||
match = title_pat.search(title)
|
||||
if match:
|
||||
prep = match.group(1)
|
||||
title = title.replace(prep, '') + ', ' + prep
|
||||
return title.strip()
|
||||
|
||||
|
||||
def lcase(s):
|
||||
return unidecode.unidecode(s.lower())
|
||||
|
||||
|
||||
def ucase(s):
|
||||
return s.upper()
|
||||
cc_classes = {}
|
||||
|
||||
|
||||
Base = declarative_base()
|
||||
@ -329,37 +311,45 @@ class Custom_Columns(Base):
|
||||
return display_dict
|
||||
|
||||
|
||||
def setup_db():
|
||||
global engine
|
||||
global session
|
||||
global cc_classes
|
||||
def update_title_sort(config, conn=None):
|
||||
# user defined sort function for calibre databases (Series, etc.)
|
||||
def _title_sort(title):
|
||||
# calibre sort stuff
|
||||
title_pat = re.compile(config.config_title_regex, re.IGNORECASE)
|
||||
match = title_pat.search(title)
|
||||
if match:
|
||||
prep = match.group(1)
|
||||
title = title.replace(prep, '') + ', ' + prep
|
||||
return title.strip()
|
||||
|
||||
if config.config_calibre_dir is None or config.config_calibre_dir == u'':
|
||||
content = ub.session.query(ub.Settings).first()
|
||||
content.config_calibre_dir = None
|
||||
content.db_configured = False
|
||||
ub.session.commit()
|
||||
config.loadSettings()
|
||||
conn = conn or session.connection().connection.connection
|
||||
conn.create_function("title_sort", 1, _title_sort)
|
||||
|
||||
|
||||
def setup_db(config):
|
||||
dispose()
|
||||
|
||||
if not config.config_calibre_dir:
|
||||
config.invalidate()
|
||||
return False
|
||||
|
||||
dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
|
||||
try:
|
||||
if not os.path.exists(dbpath):
|
||||
raise
|
||||
engine = create_engine('sqlite:///' + dbpath, echo=False, isolation_level="SERIALIZABLE", connect_args={'check_same_thread': False})
|
||||
conn = engine.connect()
|
||||
except Exception:
|
||||
content = ub.session.query(ub.Settings).first()
|
||||
content.config_calibre_dir = None
|
||||
content.db_configured = False
|
||||
ub.session.commit()
|
||||
config.loadSettings()
|
||||
if not os.path.exists(dbpath):
|
||||
config.invalidate()
|
||||
return False
|
||||
content = ub.session.query(ub.Settings).first()
|
||||
content.db_configured = True
|
||||
ub.session.commit()
|
||||
config.loadSettings()
|
||||
conn.connection.create_function('title_sort', 1, title_sort)
|
||||
|
||||
try:
|
||||
engine = create_engine('sqlite:///{0}'.format(dbpath),
|
||||
echo=False,
|
||||
isolation_level="SERIALIZABLE",
|
||||
connect_args={'check_same_thread': False})
|
||||
conn = engine.connect()
|
||||
except:
|
||||
config.invalidate()
|
||||
return False
|
||||
|
||||
config.db_configured = True
|
||||
update_title_sort(config, conn.connection)
|
||||
# conn.connection.create_function('lower', 1, lcase)
|
||||
# conn.connection.create_function('upper', 1, ucase)
|
||||
|
||||
@ -368,7 +358,6 @@ def setup_db():
|
||||
|
||||
cc_ids = []
|
||||
books_custom_column_links = {}
|
||||
cc_classes = {}
|
||||
for row in cc:
|
||||
if row.datatype not in cc_exceptions:
|
||||
books_custom_column_links[row.id] = Table('books_custom_column_' + str(row.id) + '_link', Base.metadata,
|
||||
@ -393,7 +382,7 @@ def setup_db():
|
||||
ccdict = {'__tablename__': 'custom_column_' + str(row.id),
|
||||
'id': Column(Integer, primary_key=True),
|
||||
'value': Column(String)}
|
||||
cc_classes[row.id] = type('Custom_Column_' + str(row.id), (Base,), ccdict)
|
||||
cc_classes[row.id] = type(str('Custom_Column_' + str(row.id)), (Base,), ccdict)
|
||||
|
||||
for cc_id in cc_ids:
|
||||
if (cc_id[1] == 'bool') or (cc_id[1] == 'int'):
|
||||
@ -407,8 +396,38 @@ def setup_db():
|
||||
backref='books'))
|
||||
|
||||
|
||||
global session
|
||||
Session = scoped_session(sessionmaker(autocommit=False,
|
||||
autoflush=False,
|
||||
bind=engine))
|
||||
session = Session()
|
||||
return True
|
||||
|
||||
|
||||
def dispose():
|
||||
global session
|
||||
|
||||
engine = None
|
||||
if session:
|
||||
engine = session.bind
|
||||
try: session.close()
|
||||
except: pass
|
||||
session = None
|
||||
|
||||
if engine:
|
||||
try: engine.dispose()
|
||||
except: pass
|
||||
|
||||
for attr in list(Books.__dict__.keys()):
|
||||
if attr.startswith("custom_column_"):
|
||||
delattr(Books, attr)
|
||||
|
||||
for db_class in cc_classes.values():
|
||||
Base.metadata.remove(db_class.__table__)
|
||||
cc_classes.clear()
|
||||
|
||||
for table in reversed(Base.metadata.sorted_tables):
|
||||
name = table.key
|
||||
if name.startswith("custom_column_") or name.startswith("books_custom_column_"):
|
||||
if table is not None:
|
||||
Base.metadata.remove(table)
|
||||
|
711
cps/editbooks.py
Normal file
@ -0,0 +1,711 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import datetime
|
||||
import json
|
||||
from shutil import move, copyfile
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response
|
||||
from flask_babel import gettext as _
|
||||
from flask_login import current_user
|
||||
|
||||
from . import constants, logger, isoLanguages, gdriveutils, uploader, helper
|
||||
from . import config, get_locale, db, ub, global_WorkerThread
|
||||
from .helper import order_authors, common_filters
|
||||
from .web import login_required_if_no_ano, render_title_template, edit_required, upload_required, login_required
|
||||
|
||||
|
||||
editbook = Blueprint('editbook', __name__)
|
||||
log = logger.create()
|
||||
|
||||
|
||||
# Modifies different Database objects, first check if elements have to be added to database, than check
|
||||
# if elements have to be deleted, because they are no longer used
|
||||
def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type):
|
||||
# passing input_elements not as a list may lead to undesired results
|
||||
if not isinstance(input_elements, list):
|
||||
raise TypeError(str(input_elements) + " should be passed as a list")
|
||||
|
||||
input_elements = [x for x in input_elements if x != '']
|
||||
# we have all input element (authors, series, tags) names now
|
||||
# 1. search for elements to remove
|
||||
del_elements = []
|
||||
for c_elements in db_book_object:
|
||||
found = False
|
||||
if db_type == 'languages':
|
||||
type_elements = c_elements.lang_code
|
||||
elif db_type == 'custom':
|
||||
type_elements = c_elements.value
|
||||
else:
|
||||
type_elements = c_elements.name
|
||||
for inp_element in input_elements:
|
||||
if inp_element.lower() == type_elements.lower():
|
||||
# if inp_element == type_elements:
|
||||
found = True
|
||||
break
|
||||
# if the element was not found in the new list, add it to remove list
|
||||
if not found:
|
||||
del_elements.append(c_elements)
|
||||
# 2. search for elements that need to be added
|
||||
add_elements = []
|
||||
for inp_element in input_elements:
|
||||
found = False
|
||||
for c_elements in db_book_object:
|
||||
if db_type == 'languages':
|
||||
type_elements = c_elements.lang_code
|
||||
elif db_type == 'custom':
|
||||
type_elements = c_elements.value
|
||||
else:
|
||||
type_elements = c_elements.name
|
||||
if inp_element == type_elements:
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
add_elements.append(inp_element)
|
||||
# if there are elements to remove, we remove them now
|
||||
if len(del_elements) > 0:
|
||||
for del_element in del_elements:
|
||||
db_book_object.remove(del_element)
|
||||
if len(del_element.books) == 0:
|
||||
db_session.delete(del_element)
|
||||
# if there are elements to add, we add them now!
|
||||
if len(add_elements) > 0:
|
||||
if db_type == 'languages':
|
||||
db_filter = db_object.lang_code
|
||||
elif db_type == 'custom':
|
||||
db_filter = db_object.value
|
||||
else:
|
||||
db_filter = db_object.name
|
||||
for add_element in add_elements:
|
||||
# check if a element with that name exists
|
||||
db_element = db_session.query(db_object).filter(db_filter == add_element).first()
|
||||
# if no element is found add it
|
||||
# if new_element is None:
|
||||
if db_type == 'author':
|
||||
new_element = db_object(add_element, helper.get_sorted_author(add_element.replace('|', ',')), "")
|
||||
elif db_type == 'series':
|
||||
new_element = db_object(add_element, add_element)
|
||||
elif db_type == 'custom':
|
||||
new_element = db_object(value=add_element)
|
||||
elif db_type == 'publisher':
|
||||
new_element = db_object(add_element, None)
|
||||
else: # db_type should be tag or language
|
||||
new_element = db_object(add_element)
|
||||
if db_element is None:
|
||||
db_session.add(new_element)
|
||||
db_book_object.append(new_element)
|
||||
else:
|
||||
if db_type == 'custom':
|
||||
if db_element.value != add_element:
|
||||
new_element.value = add_element
|
||||
# new_element = db_element
|
||||
elif db_type == 'languages':
|
||||
if db_element.lang_code != add_element:
|
||||
db_element.lang_code = add_element
|
||||
# new_element = db_element
|
||||
elif db_type == 'series':
|
||||
if db_element.name != add_element:
|
||||
db_element.name = add_element # = add_element # new_element = db_object(add_element, add_element)
|
||||
db_element.sort = add_element
|
||||
# new_element = db_element
|
||||
elif db_type == 'author':
|
||||
if db_element.name != add_element:
|
||||
db_element.name = add_element
|
||||
db_element.sort = add_element.replace('|', ',')
|
||||
# new_element = db_element
|
||||
elif db_type == 'publisher':
|
||||
if db_element.name != add_element:
|
||||
db_element.name = add_element
|
||||
db_element.sort = None
|
||||
# new_element = db_element
|
||||
elif db_element.name != add_element:
|
||||
db_element.name = add_element
|
||||
# new_element = db_element
|
||||
# add element to book
|
||||
db_book_object.append(db_element)
|
||||
|
||||
|
||||
@editbook.route("/delete/<int:book_id>/", defaults={'book_format': ""})
|
||||
@editbook.route("/delete/<int:book_id>/<string:book_format>/")
|
||||
@login_required
|
||||
def delete_book(book_id, book_format):
|
||||
if current_user.role_delete_books():
|
||||
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
||||
if book:
|
||||
helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
|
||||
if not book_format:
|
||||
# delete book from Shelfs, Downloads, Read list
|
||||
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
|
||||
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
|
||||
ub.delete_download(book_id)
|
||||
ub.session.commit()
|
||||
|
||||
# check if only this book links to:
|
||||
# author, language, series, tags, custom columns
|
||||
modify_database_object([u''], book.authors, db.Authors, db.session, 'author')
|
||||
modify_database_object([u''], book.tags, db.Tags, db.session, 'tags')
|
||||
modify_database_object([u''], book.series, db.Series, db.session, 'series')
|
||||
modify_database_object([u''], book.languages, db.Languages, db.session, 'languages')
|
||||
modify_database_object([u''], book.publishers, db.Publishers, db.session, 'publishers')
|
||||
|
||||
cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
for c in cc:
|
||||
cc_string = "custom_column_" + str(c.id)
|
||||
if not c.is_multiple:
|
||||
if len(getattr(book, cc_string)) > 0:
|
||||
if c.datatype == 'bool' or c.datatype == 'integer':
|
||||
del_cc = getattr(book, cc_string)[0]
|
||||
getattr(book, cc_string).remove(del_cc)
|
||||
db.session.delete(del_cc)
|
||||
elif c.datatype == 'rating':
|
||||
del_cc = getattr(book, cc_string)[0]
|
||||
getattr(book, cc_string).remove(del_cc)
|
||||
if len(del_cc.books) == 0:
|
||||
db.session.delete(del_cc)
|
||||
else:
|
||||
del_cc = getattr(book, cc_string)[0]
|
||||
getattr(book, cc_string).remove(del_cc)
|
||||
db.session.delete(del_cc)
|
||||
else:
|
||||
modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
|
||||
db.session, 'custom')
|
||||
db.session.query(db.Books).filter(db.Books.id == book_id).delete()
|
||||
else:
|
||||
db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == book_format).delete()
|
||||
db.session.commit()
|
||||
else:
|
||||
# book not found
|
||||
log.error('Book with id "%s" could not be deleted: not found', book_id)
|
||||
if book_format:
|
||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||
else:
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
|
||||
def render_edit_book(book_id):
|
||||
db.update_title_sort(config)
|
||||
cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
book = db.session.query(db.Books)\
|
||||
.filter(db.Books.id == book_id).filter(common_filters()).first()
|
||||
|
||||
if not book:
|
||||
flash(_(u"Error opening eBook. File does not exist or file is not accessible"), category="error")
|
||||
return redirect(url_for("web.index"))
|
||||
|
||||
for lang in book.languages:
|
||||
lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
|
||||
|
||||
book = order_authors(book)
|
||||
|
||||
author_names = []
|
||||
for authr in book.authors:
|
||||
author_names.append(authr.name.replace('|', ','))
|
||||
|
||||
# Option for showing convertbook button
|
||||
valid_source_formats=list()
|
||||
if config.config_ebookconverter == 2:
|
||||
for file in book.data:
|
||||
if file.format.lower() in constants.EXTENSIONS_CONVERT:
|
||||
valid_source_formats.append(file.format.lower())
|
||||
|
||||
# Determine what formats don't already exist
|
||||
allowed_conversion_formats = constants.EXTENSIONS_CONVERT.copy()
|
||||
for file in book.data:
|
||||
try:
|
||||
allowed_conversion_formats.remove(file.format.lower())
|
||||
except Exception:
|
||||
log.warning('%s already removed from list.', file.format.lower())
|
||||
|
||||
return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
|
||||
title=_(u"edit metadata"), page="editbook",
|
||||
conversion_formats=allowed_conversion_formats,
|
||||
source_formats=valid_source_formats)
|
||||
|
||||
|
||||
def edit_cc_data(book_id, book, to_save):
|
||||
cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
for c in cc:
|
||||
cc_string = "custom_column_" + str(c.id)
|
||||
if not c.is_multiple:
|
||||
if len(getattr(book, cc_string)) > 0:
|
||||
cc_db_value = getattr(book, cc_string)[0].value
|
||||
else:
|
||||
cc_db_value = None
|
||||
if to_save[cc_string].strip():
|
||||
if c.datatype == 'int' or c.datatype == 'bool':
|
||||
if to_save[cc_string] == 'None':
|
||||
to_save[cc_string] = None
|
||||
elif c.datatype == 'bool':
|
||||
to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
|
||||
|
||||
if to_save[cc_string] != cc_db_value:
|
||||
if cc_db_value is not None:
|
||||
if to_save[cc_string] is not None:
|
||||
setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
|
||||
else:
|
||||
del_cc = getattr(book, cc_string)[0]
|
||||
getattr(book, cc_string).remove(del_cc)
|
||||
db.session.delete(del_cc)
|
||||
else:
|
||||
cc_class = db.cc_classes[c.id]
|
||||
new_cc = cc_class(value=to_save[cc_string], book=book_id)
|
||||
db.session.add(new_cc)
|
||||
|
||||
else:
|
||||
if c.datatype == 'rating':
|
||||
to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
|
||||
if to_save[cc_string].strip() != cc_db_value:
|
||||
if cc_db_value is not None:
|
||||
# remove old cc_val
|
||||
del_cc = getattr(book, cc_string)[0]
|
||||
getattr(book, cc_string).remove(del_cc)
|
||||
if len(del_cc.books) == 0:
|
||||
db.session.delete(del_cc)
|
||||
cc_class = db.cc_classes[c.id]
|
||||
new_cc = db.session.query(cc_class).filter(
|
||||
cc_class.value == to_save[cc_string].strip()).first()
|
||||
# if no cc val is found add it
|
||||
if new_cc is None:
|
||||
new_cc = cc_class(value=to_save[cc_string].strip())
|
||||
db.session.add(new_cc)
|
||||
db.session.flush()
|
||||
new_cc = db.session.query(cc_class).filter(
|
||||
cc_class.value == to_save[cc_string].strip()).first()
|
||||
# add cc value to book
|
||||
getattr(book, cc_string).append(new_cc)
|
||||
else:
|
||||
if cc_db_value is not None:
|
||||
# remove old cc_val
|
||||
del_cc = getattr(book, cc_string)[0]
|
||||
getattr(book, cc_string).remove(del_cc)
|
||||
if len(del_cc.books) == 0:
|
||||
db.session.delete(del_cc)
|
||||
else:
|
||||
input_tags = to_save[cc_string].split(',')
|
||||
input_tags = list(map(lambda it: it.strip(), input_tags))
|
||||
modify_database_object(input_tags, getattr(book, cc_string), db.cc_classes[c.id], db.session,
|
||||
'custom')
|
||||
return cc
|
||||
|
||||
def upload_single_file(request, book, book_id):
|
||||
# Check and handle Uploaded file
|
||||
if 'btn-upload-format' in request.files:
|
||||
requested_file = request.files['btn-upload-format']
|
||||
# check for empty request
|
||||
if requested_file.filename != '':
|
||||
if '.' in requested_file.filename:
|
||||
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
|
||||
if file_ext not in constants.EXTENSIONS_UPLOAD:
|
||||
flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
|
||||
category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
else:
|
||||
flash(_('File to be uploaded must have an extension'), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
|
||||
file_name = book.path.rsplit('/', 1)[-1]
|
||||
filepath = os.path.normpath(os.path.join(config.config_calibre_dir, book.path))
|
||||
saved_filename = os.path.join(filepath, file_name + '.' + file_ext)
|
||||
|
||||
# check if file path exists, otherwise create it, copy file to calibre path and delete temp file
|
||||
if not os.path.exists(filepath):
|
||||
try:
|
||||
os.makedirs(filepath)
|
||||
except OSError:
|
||||
flash(_(u"Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
try:
|
||||
requested_file.save(saved_filename)
|
||||
except OSError:
|
||||
flash(_(u"Failed to store file %(file)s.", file=saved_filename), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
|
||||
file_size = os.path.getsize(saved_filename)
|
||||
is_format = db.session.query(db.Data).filter(db.Data.book == book_id).\
|
||||
filter(db.Data.format == file_ext.upper()).first()
|
||||
|
||||
# Format entry already exists, no need to update the database
|
||||
if is_format:
|
||||
log.warning('Book format %s already existing', file_ext.upper())
|
||||
else:
|
||||
db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
|
||||
db.session.add(db_format)
|
||||
db.session.commit()
|
||||
db.update_title_sort(config)
|
||||
|
||||
# Queue uploader info
|
||||
uploadText=_(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=book.title)
|
||||
global_WorkerThread.add_upload(current_user.nickname,
|
||||
"<a href=\"" + url_for('web.show_book', book_id=book.id) + "\">" + uploadText + "</a>")
|
||||
|
||||
|
||||
def upload_cover(request, book):
|
||||
if 'btn-upload-cover' in request.files:
|
||||
requested_file = request.files['btn-upload-cover']
|
||||
# check for empty request
|
||||
if requested_file.filename != '':
|
||||
if helper.save_cover(requested_file, book.path) is True:
|
||||
return True
|
||||
else:
|
||||
# ToDo Message not always coorect
|
||||
flash(_(u"Cover is not a supported imageformat (jpg/png/webp), can't save"), category="error")
|
||||
return False
|
||||
return None
|
||||
|
||||
|
||||
@editbook.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
|
||||
@login_required_if_no_ano
|
||||
@edit_required
|
||||
def edit_book(book_id):
|
||||
# Show form
|
||||
if request.method != 'POST':
|
||||
return render_edit_book(book_id)
|
||||
|
||||
# create the function for sorting...
|
||||
db.update_title_sort(config)
|
||||
book = db.session.query(db.Books)\
|
||||
.filter(db.Books.id == book_id).filter(common_filters()).first()
|
||||
|
||||
# Book not found
|
||||
if not book:
|
||||
flash(_(u"Error opening eBook. File does not exist or file is not accessible"), category="error")
|
||||
return redirect(url_for("web.index"))
|
||||
|
||||
upload_single_file(request, book, book_id)
|
||||
if upload_cover(request, book) is True:
|
||||
book.has_cover = 1
|
||||
try:
|
||||
to_save = request.form.to_dict()
|
||||
# Update book
|
||||
edited_books_id = None
|
||||
#handle book title
|
||||
if book.title != to_save["book_title"].rstrip().strip():
|
||||
if to_save["book_title"] == '':
|
||||
to_save["book_title"] = _(u'unknown')
|
||||
book.title = to_save["book_title"].rstrip().strip()
|
||||
edited_books_id = book.id
|
||||
|
||||
# handle author(s)
|
||||
input_authors = to_save["author_name"].split('&')
|
||||
input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
|
||||
# we have all author names now
|
||||
if input_authors == ['']:
|
||||
input_authors = [_(u'unknown')] # prevent empty Author
|
||||
|
||||
modify_database_object(input_authors, book.authors, db.Authors, db.session, 'author')
|
||||
|
||||
# Search for each author if author is in database, if not, authorname and sorted authorname is generated new
|
||||
# everything then is assembled for sorted author field in database
|
||||
sort_authors_list = list()
|
||||
for inp in input_authors:
|
||||
stored_author = db.session.query(db.Authors).filter(db.Authors.name == inp).first()
|
||||
if not stored_author:
|
||||
stored_author = helper.get_sorted_author(inp)
|
||||
else:
|
||||
stored_author = stored_author.sort
|
||||
sort_authors_list.append(helper.get_sorted_author(stored_author))
|
||||
sort_authors = ' & '.join(sort_authors_list)
|
||||
if book.author_sort != sort_authors:
|
||||
edited_books_id = book.id
|
||||
book.author_sort = sort_authors
|
||||
|
||||
|
||||
if config.config_use_google_drive:
|
||||
gdriveutils.updateGdriveCalibreFromLocal()
|
||||
|
||||
error = False
|
||||
if edited_books_id:
|
||||
error = helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
|
||||
|
||||
if not error:
|
||||
if to_save["cover_url"]:
|
||||
if helper.save_cover_from_url(to_save["cover_url"], book.path) is True:
|
||||
book.has_cover = 1
|
||||
else:
|
||||
flash(_(u"Cover is not a jpg file, can't save"), category="error")
|
||||
|
||||
if book.series_index != to_save["series_index"]:
|
||||
book.series_index = to_save["series_index"]
|
||||
|
||||
# Handle book comments/description
|
||||
if len(book.comments):
|
||||
book.comments[0].text = to_save["description"]
|
||||
else:
|
||||
book.comments.append(db.Comments(text=to_save["description"], book=book.id))
|
||||
|
||||
# Handle book tags
|
||||
input_tags = to_save["tags"].split(',')
|
||||
input_tags = list(map(lambda it: it.strip(), input_tags))
|
||||
modify_database_object(input_tags, book.tags, db.Tags, db.session, 'tags')
|
||||
|
||||
# Handle book series
|
||||
input_series = [to_save["series"].strip()]
|
||||
input_series = [x for x in input_series if x != '']
|
||||
modify_database_object(input_series, book.series, db.Series, db.session, 'series')
|
||||
|
||||
if to_save["pubdate"]:
|
||||
try:
|
||||
book.pubdate = datetime.datetime.strptime(to_save["pubdate"], "%Y-%m-%d")
|
||||
except ValueError:
|
||||
book.pubdate = db.Books.DEFAULT_PUBDATE
|
||||
else:
|
||||
book.pubdate = db.Books.DEFAULT_PUBDATE
|
||||
|
||||
if to_save["publisher"]:
|
||||
publisher = to_save["publisher"].rstrip().strip()
|
||||
if len(book.publishers) == 0 or (len(book.publishers) > 0 and publisher != book.publishers[0].name):
|
||||
modify_database_object([publisher], book.publishers, db.Publishers, db.session, 'publisher')
|
||||
elif len(book.publishers):
|
||||
modify_database_object([], book.publishers, db.Publishers, db.session, 'publisher')
|
||||
|
||||
|
||||
# handle book languages
|
||||
input_languages = to_save["languages"].split(',')
|
||||
unknown_languages = []
|
||||
input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
|
||||
for l in unknown_languages:
|
||||
log.error('%s is not a valid language', l)
|
||||
flash(_(u"%(langname)s is not a valid language", langname=l), category="error")
|
||||
modify_database_object(list(input_l), book.languages, db.Languages, db.session, 'languages')
|
||||
|
||||
# handle book ratings
|
||||
if to_save["rating"].strip():
|
||||
old_rating = False
|
||||
if len(book.ratings) > 0:
|
||||
old_rating = book.ratings[0].rating
|
||||
ratingx2 = int(float(to_save["rating"]) * 2)
|
||||
if ratingx2 != old_rating:
|
||||
is_rating = db.session.query(db.Ratings).filter(db.Ratings.rating == ratingx2).first()
|
||||
if is_rating:
|
||||
book.ratings.append(is_rating)
|
||||
else:
|
||||
new_rating = db.Ratings(rating=ratingx2)
|
||||
book.ratings.append(new_rating)
|
||||
if old_rating:
|
||||
book.ratings.remove(book.ratings[0])
|
||||
else:
|
||||
if len(book.ratings) > 0:
|
||||
book.ratings.remove(book.ratings[0])
|
||||
|
||||
# handle cc data
|
||||
edit_cc_data(book_id, book, to_save)
|
||||
|
||||
db.session.commit()
|
||||
if config.config_use_google_drive:
|
||||
gdriveutils.updateGdriveCalibreFromLocal()
|
||||
if "detail_view" in to_save:
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
else:
|
||||
flash(_("Metadata successfully updated"), category="success")
|
||||
return render_edit_book(book_id)
|
||||
else:
|
||||
db.session.rollback()
|
||||
flash(error, category="error")
|
||||
return render_edit_book(book_id)
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
db.session.rollback()
|
||||
flash(_("Error editing book, please check logfile for details"), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
|
||||
|
||||
@editbook.route("/upload", methods=["GET", "POST"])
|
||||
@login_required_if_no_ano
|
||||
@upload_required
|
||||
def upload():
|
||||
if not config.config_uploading:
|
||||
abort(404)
|
||||
if request.method == 'POST' and 'btn-upload' in request.files:
|
||||
for requested_file in request.files.getlist("btn-upload"):
|
||||
# create the function for sorting...
|
||||
db.update_title_sort(config)
|
||||
db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
|
||||
|
||||
# check if file extension is correct
|
||||
if '.' in requested_file.filename:
|
||||
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
|
||||
if file_ext not in constants.EXTENSIONS_UPLOAD:
|
||||
flash(
|
||||
_("File extension '%(ext)s' is not allowed to be uploaded to this server",
|
||||
ext=file_ext), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
else:
|
||||
flash(_('File to be uploaded must have an extension'), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
# extract metadata from file
|
||||
meta = uploader.upload(requested_file)
|
||||
title = meta.title
|
||||
authr = meta.author
|
||||
tags = meta.tags
|
||||
series = meta.series
|
||||
series_index = meta.series_id
|
||||
title_dir = helper.get_valid_filename(title)
|
||||
author_dir = helper.get_valid_filename(authr)
|
||||
filepath = os.path.join(config.config_calibre_dir, author_dir, title_dir)
|
||||
saved_filename = os.path.join(filepath, title_dir + meta.extension.lower())
|
||||
|
||||
# check if file path exists, otherwise create it, copy file to calibre path and delete temp file
|
||||
if not os.path.exists(filepath):
|
||||
try:
|
||||
os.makedirs(filepath)
|
||||
except OSError:
|
||||
flash(_(u"Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
try:
|
||||
copyfile(meta.file_path, saved_filename)
|
||||
except OSError:
|
||||
flash(_(u"Failed to store file %(file)s (Permission denied).", file=saved_filename), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
try:
|
||||
os.unlink(meta.file_path)
|
||||
except OSError:
|
||||
flash(_(u"Failed to delete file %(file)s (Permission denied).", file= meta.file_path),
|
||||
category="warning")
|
||||
|
||||
if meta.cover is None:
|
||||
has_cover = 0
|
||||
copyfile(os.path.join(constants.STATIC_DIR, 'generic_cover.jpg'),
|
||||
os.path.join(filepath, "cover.jpg"))
|
||||
else:
|
||||
has_cover = 1
|
||||
move(meta.cover, os.path.join(filepath, "cover.jpg"))
|
||||
|
||||
# handle authors
|
||||
is_author = db.session.query(db.Authors).filter(db.Authors.name == authr).first()
|
||||
if is_author:
|
||||
db_author = is_author
|
||||
else:
|
||||
db_author = db.Authors(authr, helper.get_sorted_author(authr), "")
|
||||
db.session.add(db_author)
|
||||
|
||||
# handle series
|
||||
db_series = None
|
||||
is_series = db.session.query(db.Series).filter(db.Series.name == series).first()
|
||||
if is_series:
|
||||
db_series = is_series
|
||||
elif series != '':
|
||||
db_series = db.Series(series, "")
|
||||
db.session.add(db_series)
|
||||
|
||||
# add language actually one value in list
|
||||
input_language = meta.languages
|
||||
db_language = None
|
||||
if input_language != "":
|
||||
input_language = isoLanguages.get(name=input_language).part3
|
||||
hasLanguage = db.session.query(db.Languages).filter(db.Languages.lang_code == input_language).first()
|
||||
if hasLanguage:
|
||||
db_language = hasLanguage
|
||||
else:
|
||||
db_language = db.Languages(input_language)
|
||||
db.session.add(db_language)
|
||||
|
||||
# combine path and normalize path from windows systems
|
||||
path = os.path.join(author_dir, title_dir).replace('\\', '/')
|
||||
db_book = db.Books(title, "", db_author.sort, datetime.datetime.now(), datetime.datetime(101, 1, 1),
|
||||
series_index, datetime.datetime.now(), path, has_cover, db_author, [], db_language)
|
||||
db_book.authors.append(db_author)
|
||||
if db_series:
|
||||
db_book.series.append(db_series)
|
||||
if db_language is not None:
|
||||
db_book.languages.append(db_language)
|
||||
file_size = os.path.getsize(saved_filename)
|
||||
db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
|
||||
|
||||
# handle tags
|
||||
input_tags = tags.split(',')
|
||||
input_tags = list(map(lambda it: it.strip(), input_tags))
|
||||
if input_tags[0] !="":
|
||||
modify_database_object(input_tags, db_book.tags, db.Tags, db.session, 'tags')
|
||||
|
||||
# flush content, get db_book.id available
|
||||
db_book.data.append(db_data)
|
||||
db.session.add(db_book)
|
||||
db.session.flush()
|
||||
|
||||
# add comment
|
||||
book_id = db_book.id
|
||||
upload_comment = Markup(meta.description).unescape()
|
||||
if upload_comment != "":
|
||||
db.session.add(db.Comments(upload_comment, book_id))
|
||||
|
||||
# save data to database, reread data
|
||||
db.session.commit()
|
||||
db.update_title_sort(config)
|
||||
book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
|
||||
|
||||
# upload book to gdrive if nesseccary and add "(bookid)" to folder name
|
||||
if config.config_use_google_drive:
|
||||
gdriveutils.updateGdriveCalibreFromLocal()
|
||||
error = helper.update_dir_stucture(book.id, config.config_calibre_dir)
|
||||
db.session.commit()
|
||||
if config.config_use_google_drive:
|
||||
gdriveutils.updateGdriveCalibreFromLocal()
|
||||
if error:
|
||||
flash(error, category="error")
|
||||
uploadText=_(u"File %(file)s uploaded", file=book.title)
|
||||
global_WorkerThread.add_upload(current_user.nickname,
|
||||
"<a href=\"" + url_for('web.show_book', book_id=book.id) + "\">" + uploadText + "</a>")
|
||||
|
||||
# create data for displaying display Full language name instead of iso639.part3language
|
||||
if db_language is not None:
|
||||
book.languages[0].language_name = _(meta.languages)
|
||||
author_names = []
|
||||
for author in db_book.authors:
|
||||
author_names.append(author.name)
|
||||
if len(request.files.getlist("btn-upload")) < 2:
|
||||
if current_user.role_edit() or current_user.role_admin():
|
||||
resp = {"location": url_for('editbook.edit_book', book_id=db_book.id)}
|
||||
return Response(json.dumps(resp), mimetype='application/json')
|
||||
else:
|
||||
resp = {"location": url_for('web.show_book', book_id=db_book.id)}
|
||||
return Response(json.dumps(resp), mimetype='application/json')
|
||||
return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
|
||||
|
||||
|
||||
@editbook.route("/admin/book/convert/<int:book_id>", methods=['POST'])
|
||||
@login_required_if_no_ano
|
||||
@edit_required
|
||||
def convert_bookformat(book_id):
|
||||
# check to see if we have form fields to work with - if not send user back
|
||||
book_format_from = request.form.get('book_format_from', None)
|
||||
book_format_to = request.form.get('book_format_to', None)
|
||||
|
||||
if (book_format_from is None) or (book_format_to is None):
|
||||
flash(_(u"Source or destination format for conversion missing"), category="error")
|
||||
return redirect(request.environ["HTTP_REFERER"])
|
||||
|
||||
log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
|
||||
rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
|
||||
book_format_to.upper(), current_user.nickname)
|
||||
|
||||
if rtn is None:
|
||||
flash(_(u"Book successfully queued for converting to %(book_format)s",
|
||||
book_format=book_format_to),
|
||||
category="success")
|
||||
else:
|
||||
flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error")
|
||||
return redirect(request.environ["HTTP_REFERER"])
|
10
cps/epub.py
@ -17,11 +17,13 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import zipfile
|
||||
from lxml import etree
|
||||
import os
|
||||
import uploader
|
||||
import isoLanguages
|
||||
|
||||
from . import isoLanguages
|
||||
from .constants import BookMeta
|
||||
|
||||
|
||||
def extractCover(zipFile, coverFile, coverpath, tmp_file_name):
|
||||
@ -125,7 +127,7 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
|
||||
else:
|
||||
title = epub_metadata['title']
|
||||
|
||||
return uploader.BookMeta(
|
||||
return BookMeta(
|
||||
file_path=tmp_file_path,
|
||||
extension=original_file_extension,
|
||||
title=title.encode('utf-8').decode('utf-8'),
|
||||
|
@ -17,8 +17,10 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
from lxml import etree
|
||||
import uploader
|
||||
|
||||
from .constants import BookMeta
|
||||
|
||||
|
||||
def get_fb2_info(tmp_file_path, original_file_extension):
|
||||
@ -66,7 +68,7 @@ def get_fb2_info(tmp_file_path, original_file_extension):
|
||||
else:
|
||||
description = u''
|
||||
|
||||
return uploader.BookMeta(
|
||||
return BookMeta(
|
||||
file_path=tmp_file_path,
|
||||
extension=original_file_extension,
|
||||
title=title.decode('utf-8'),
|
||||
|
158
cps/gdrive.py
Normal file
@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import hashlib
|
||||
import json
|
||||
import tempfile
|
||||
from uuid import uuid4
|
||||
from time import time
|
||||
from shutil import move, copyfile
|
||||
|
||||
from flask import Blueprint, flash, request, redirect, url_for, abort
|
||||
from flask_babel import gettext as _
|
||||
from flask_login import login_required
|
||||
|
||||
try:
|
||||
from googleapiclient.errors import HttpError
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from . import logger, gdriveutils, config, db
|
||||
from .web import admin_required
|
||||
|
||||
|
||||
gdrive = Blueprint('gdrive', __name__)
|
||||
log = logger.create()
|
||||
|
||||
current_milli_time = lambda: int(round(time() * 1000))
|
||||
|
||||
gdrive_watch_callback_token = 'target=calibreweb-watch_files'
|
||||
|
||||
|
||||
@gdrive.route("/gdrive/authenticate")
|
||||
@login_required
|
||||
@admin_required
|
||||
def authenticate_google_drive():
|
||||
try:
|
||||
authUrl = gdriveutils.Gauth.Instance().auth.GetAuthUrl()
|
||||
except gdriveutils.InvalidConfigError:
|
||||
flash(_(u'Google Drive setup not completed, try to deactivate and activate Google Drive again'),
|
||||
category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
return redirect(authUrl)
|
||||
|
||||
|
||||
@gdrive.route("/gdrive/callback")
|
||||
def google_drive_callback():
|
||||
auth_code = request.args.get('code')
|
||||
if not auth_code:
|
||||
abort(403)
|
||||
try:
|
||||
credentials = gdriveutils.Gauth.Instance().auth.flow.step2_exchange(auth_code)
|
||||
with open(gdriveutils.CREDENTIALS, 'w') as f:
|
||||
f.write(credentials.to_json())
|
||||
except ValueError as error:
|
||||
log.error(error)
|
||||
return redirect(url_for('admin.configuration'))
|
||||
|
||||
|
||||
@gdrive.route("/gdrive/watch/subscribe")
|
||||
@login_required
|
||||
@admin_required
|
||||
def watch_gdrive():
|
||||
if not config.config_google_drive_watch_changes_response:
|
||||
with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
|
||||
filedata = json.load(settings)
|
||||
if filedata['web']['redirect_uris'][0].endswith('/'):
|
||||
filedata['web']['redirect_uris'][0] = filedata['web']['redirect_uris'][0][:-((len('/gdrive/callback')+1))]
|
||||
else:
|
||||
filedata['web']['redirect_uris'][0] = filedata['web']['redirect_uris'][0][:-(len('/gdrive/callback'))]
|
||||
address = '%s/gdrive/watch/callback' % filedata['web']['redirect_uris'][0]
|
||||
notification_id = str(uuid4())
|
||||
try:
|
||||
result = gdriveutils.watchChange(gdriveutils.Gdrive.Instance().drive, notification_id,
|
||||
'web_hook', address, gdrive_watch_callback_token, current_milli_time() + 604800*1000)
|
||||
config.config_google_drive_watch_changes_response = json.dumps(result)
|
||||
# after save(), config_google_drive_watch_changes_response will be a json object, not string
|
||||
config.save()
|
||||
except HttpError as e:
|
||||
reason=json.loads(e.content)['error']['errors'][0]
|
||||
if reason['reason'] == u'push.webhookUrlUnauthorized':
|
||||
flash(_(u'Callback domain is not verified, please follow steps to verify domain in google developer console'), category="error")
|
||||
else:
|
||||
flash(reason['message'], category="error")
|
||||
|
||||
return redirect(url_for('admin.configuration'))
|
||||
|
||||
|
||||
@gdrive.route("/gdrive/watch/revoke")
|
||||
@login_required
|
||||
@admin_required
|
||||
def revoke_watch_gdrive():
|
||||
last_watch_response = config.config_google_drive_watch_changes_response
|
||||
if last_watch_response:
|
||||
try:
|
||||
gdriveutils.stopChannel(gdriveutils.Gdrive.Instance().drive, last_watch_response['id'],
|
||||
last_watch_response['resourceId'])
|
||||
except HttpError:
|
||||
pass
|
||||
config.config_google_drive_watch_changes_response = None
|
||||
config.save()
|
||||
return redirect(url_for('admin.configuration'))
|
||||
|
||||
|
||||
@gdrive.route("/gdrive/watch/callback", methods=['GET', 'POST'])
|
||||
def on_received_watch_confirmation():
|
||||
log.debug('%r', request.headers)
|
||||
if request.headers.get('X-Goog-Channel-Token') == gdrive_watch_callback_token \
|
||||
and request.headers.get('X-Goog-Resource-State') == 'change' \
|
||||
and request.data:
|
||||
|
||||
data = request.data
|
||||
|
||||
def updateMetaData():
|
||||
log.info('Change received from gdrive')
|
||||
log.debug('%r', data)
|
||||
try:
|
||||
j = json.loads(data)
|
||||
log.info('Getting change details')
|
||||
response = gdriveutils.getChangeById(gdriveutils.Gdrive.Instance().drive, j['id'])
|
||||
log.debug('%r', response)
|
||||
if response:
|
||||
dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
|
||||
if not response['deleted'] and response['file']['title'] == 'metadata.db' and response['file']['md5Checksum'] != hashlib.md5(dbpath):
|
||||
tmpDir = tempfile.gettempdir()
|
||||
log.info('Database file updated')
|
||||
copyfile(dbpath, os.path.join(tmpDir, "metadata.db_" + str(current_milli_time())))
|
||||
log.info('Backing up existing and downloading updated metadata.db')
|
||||
gdriveutils.downloadFile(None, "metadata.db", os.path.join(tmpDir, "tmp_metadata.db"))
|
||||
log.info('Setting up new DB')
|
||||
# prevent error on windows, as os.rename does on exisiting files
|
||||
move(os.path.join(tmpDir, "tmp_metadata.db"), dbpath)
|
||||
db.setup_db(config)
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
updateMetaData()
|
||||
return ''
|
@ -17,24 +17,37 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import json
|
||||
import shutil
|
||||
|
||||
from flask import Response, stream_with_context
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import Column, UniqueConstraint
|
||||
from sqlalchemy import String, Integer
|
||||
from sqlalchemy.orm import sessionmaker, scoped_session
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
try:
|
||||
from pydrive.auth import GoogleAuth
|
||||
from pydrive.drive import GoogleDrive
|
||||
from pydrive.auth import RefreshError, InvalidConfigError
|
||||
from pydrive.auth import RefreshError
|
||||
from apiclient import errors
|
||||
gdrive_support = True
|
||||
except ImportError:
|
||||
gdrive_support = False
|
||||
|
||||
import os
|
||||
from ub import config
|
||||
import cli
|
||||
import shutil
|
||||
from flask import Response, stream_with_context
|
||||
from sqlalchemy import *
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import *
|
||||
import web
|
||||
from . import logger, cli, config
|
||||
from .constants import BASE_DIR as _BASE_DIR
|
||||
|
||||
|
||||
SETTINGS_YAML = os.path.join(_BASE_DIR, 'settings.yaml')
|
||||
CREDENTIALS = os.path.join(_BASE_DIR, 'gdrive_credentials')
|
||||
CLIENT_SECRETS = os.path.join(_BASE_DIR, 'client_secrets.json')
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
||||
class Singleton:
|
||||
"""
|
||||
@ -67,6 +80,9 @@ class Singleton:
|
||||
except AttributeError:
|
||||
self._instance = self._decorated()
|
||||
return self._instance
|
||||
except ImportError as e:
|
||||
log.debug(e)
|
||||
return None
|
||||
|
||||
def __call__(self):
|
||||
raise TypeError('Singletons must be accessed through `Instance()`.')
|
||||
@ -78,7 +94,7 @@ class Singleton:
|
||||
@Singleton
|
||||
class Gauth:
|
||||
def __init__(self):
|
||||
self.auth = GoogleAuth(settings_file=os.path.join(config.get_main_dir,'settings.yaml'))
|
||||
self.auth = GoogleAuth(settings_file=SETTINGS_YAML)
|
||||
|
||||
|
||||
@Singleton
|
||||
@ -86,6 +102,9 @@ class Gdrive:
|
||||
def __init__(self):
|
||||
self.drive = getDrive(gauth=Gauth.Instance().auth)
|
||||
|
||||
def is_gdrive_ready():
|
||||
return os.path.exists(SETTINGS_YAML) and os.path.exists(CREDENTIALS)
|
||||
|
||||
|
||||
engine = create_engine('sqlite:///{0}'.format(cli.gdpath), echo=False)
|
||||
Base = declarative_base()
|
||||
@ -146,17 +165,17 @@ migrate()
|
||||
def getDrive(drive=None, gauth=None):
|
||||
if not drive:
|
||||
if not gauth:
|
||||
gauth = GoogleAuth(settings_file=os.path.join(config.get_main_dir,'settings.yaml'))
|
||||
gauth = GoogleAuth(settings_file=SETTINGS_YAML)
|
||||
# Try to load saved client credentials
|
||||
gauth.LoadCredentialsFile(os.path.join(config.get_main_dir,'gdrive_credentials'))
|
||||
gauth.LoadCredentialsFile(CREDENTIALS)
|
||||
if gauth.access_token_expired:
|
||||
# Refresh them if expired
|
||||
try:
|
||||
gauth.Refresh()
|
||||
except RefreshError as e:
|
||||
web.app.logger.error("Google Drive error: " + e.message)
|
||||
log.error("Google Drive error: %s", e)
|
||||
except Exception as e:
|
||||
web.app.logger.exception(e)
|
||||
log.exception(e)
|
||||
else:
|
||||
# Initialize the saved creds
|
||||
gauth.Authorize()
|
||||
@ -166,7 +185,7 @@ def getDrive(drive=None, gauth=None):
|
||||
try:
|
||||
drive.auth.Refresh()
|
||||
except RefreshError as e:
|
||||
web.app.logger.error("Google Drive error: " + e.message)
|
||||
log.error("Google Drive error: %s", e)
|
||||
return drive
|
||||
|
||||
def listRootFolders():
|
||||
@ -203,7 +222,7 @@ def getEbooksFolderId(drive=None):
|
||||
try:
|
||||
gDriveId.gdrive_id = getEbooksFolder(drive)['id']
|
||||
except Exception:
|
||||
web.app.logger.error('Error gDrive, root ID not found')
|
||||
log.error('Error gDrive, root ID not found')
|
||||
gDriveId.path = '/'
|
||||
session.merge(gDriveId)
|
||||
session.commit()
|
||||
@ -443,10 +462,10 @@ def getChangeById (drive, change_id):
|
||||
change = drive.auth.service.changes().get(changeId=change_id).execute()
|
||||
return change
|
||||
except (errors.HttpError) as error:
|
||||
web.app.logger.info(error.message)
|
||||
log.error(error)
|
||||
return None
|
||||
except Exception as e:
|
||||
web.app.logger.info(e)
|
||||
log.error(e)
|
||||
return None
|
||||
|
||||
|
||||
@ -516,6 +535,54 @@ def do_gdrive_download(df, headers):
|
||||
if resp.status == 206:
|
||||
yield content
|
||||
else:
|
||||
web.app.logger.info('An error occurred: %s' % resp)
|
||||
log.warning('An error occurred: %s', resp)
|
||||
return
|
||||
return Response(stream_with_context(stream()), headers=headers)
|
||||
|
||||
|
||||
_SETTINGS_YAML_TEMPLATE = """
|
||||
client_config_backend: settings
|
||||
client_config_file: %(client_file)s
|
||||
client_config:
|
||||
client_id: %(client_id)s
|
||||
client_secret: %(client_secret)s
|
||||
redirect_uri: %(redirect_uri)s
|
||||
|
||||
save_credentials: True
|
||||
save_credentials_backend: file
|
||||
save_credentials_file: %(credential)s
|
||||
|
||||
get_refresh_token: True
|
||||
|
||||
oauth_scope:
|
||||
- https://www.googleapis.com/auth/drive
|
||||
"""
|
||||
|
||||
def update_settings(client_id, client_secret, redirect_uri):
|
||||
if redirect_uri.endswith('/'):
|
||||
redirect_uri = redirect_uri[:-1]
|
||||
config_params = {
|
||||
'client_file': CLIENT_SECRETS,
|
||||
'client_id': client_id,
|
||||
'client_secret': client_secret,
|
||||
'redirect_uri': redirect_uri,
|
||||
'credential': CREDENTIALS
|
||||
}
|
||||
|
||||
with open(SETTINGS_YAML, 'w') as f:
|
||||
f.write(_SETTINGS_YAML_TEMPLATE % config_params)
|
||||
|
||||
|
||||
def get_error_text(client_secrets=None):
|
||||
if not gdrive_support:
|
||||
return 'Import of optional Google Drive requirements missing'
|
||||
|
||||
if not os.path.isfile(CLIENT_SECRETS):
|
||||
return 'client_secrets.json is missing or not readable'
|
||||
|
||||
with open(CLIENT_SECRETS, 'r') as settings:
|
||||
filedata = json.load(settings)
|
||||
if 'web' not in filedata:
|
||||
return 'client_secrets.json is not configured for web application'
|
||||
if client_secrets:
|
||||
client_secrets.update(filedata['web'])
|
||||
|
362
cps/helper.py
@ -18,33 +18,35 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import db
|
||||
import ub
|
||||
from flask import current_app as app
|
||||
from tempfile import gettempdir
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import io
|
||||
import os
|
||||
import io
|
||||
import json
|
||||
import mimetypes
|
||||
import random
|
||||
import re
|
||||
import unicodedata
|
||||
import worker
|
||||
import shutil
|
||||
import time
|
||||
import unicodedata
|
||||
from datetime import datetime, timedelta
|
||||
from tempfile import gettempdir
|
||||
|
||||
import requests
|
||||
from babel import Locale as LC
|
||||
from babel.core import UnknownLocaleError
|
||||
from babel.dates import format_datetime
|
||||
from babel.units import format_unit
|
||||
from flask import send_from_directory, make_response, redirect, abort
|
||||
from flask_babel import gettext as _
|
||||
from flask_login import current_user
|
||||
from babel.dates import format_datetime
|
||||
from babel.units import format_unit
|
||||
from datetime import datetime, timedelta
|
||||
import shutil
|
||||
import requests
|
||||
from sqlalchemy.sql.expression import true, false, and_, or_, text, func
|
||||
from werkzeug.datastructures import Headers
|
||||
|
||||
try:
|
||||
import gdriveutils as gd
|
||||
from urllib.parse import quote
|
||||
except ImportError:
|
||||
pass
|
||||
import web
|
||||
import random
|
||||
import subprocess
|
||||
from urllib import quote
|
||||
|
||||
try:
|
||||
import unidecode
|
||||
@ -58,10 +60,16 @@ try:
|
||||
except ImportError:
|
||||
use_PIL = False
|
||||
|
||||
# Global variables
|
||||
# updater_thread = None
|
||||
global_WorkerThread = worker.WorkerThread()
|
||||
global_WorkerThread.start()
|
||||
from . import logger, config, global_WorkerThread, get_locale, db, ub, isoLanguages
|
||||
from . import gdriveutils as gd
|
||||
from .constants import STATIC_DIR as _STATIC_DIR
|
||||
from .pagination import Pagination
|
||||
from .subproc_wrapper import process_wait
|
||||
from .worker import STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS
|
||||
from .worker import TASK_EMAIL, TASK_CONVERT, TASK_UPLOAD, TASK_CONVERT_ANY
|
||||
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
||||
def update_download(book_id, user_id):
|
||||
@ -78,9 +86,9 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
|
||||
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == old_book_format).first()
|
||||
if not data:
|
||||
error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id)
|
||||
app.logger.error("convert_book_format: " + error_message)
|
||||
log.error("convert_book_format: %s", error_message)
|
||||
return error_message
|
||||
if ub.config.config_use_google_drive:
|
||||
if config.config_use_google_drive:
|
||||
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + old_book_format.lower())
|
||||
if df:
|
||||
datafile = os.path.join(calibrepath, book.path, data.name + u"." + old_book_format.lower())
|
||||
@ -95,7 +103,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
|
||||
if os.path.exists(file_path + "." + old_book_format.lower()):
|
||||
# read settings and append converter task to queue
|
||||
if kindle_mail:
|
||||
settings = ub.get_mail_settings()
|
||||
settings = config.get_mail_settings()
|
||||
settings['subject'] = _('Send to Kindle') # pretranslate Subject for e-mail
|
||||
settings['body'] = _(u'This e-mail has been sent via Calibre-Web.')
|
||||
# text = _(u"%(format)s: %(book)s", format=new_book_format, book=book.title)
|
||||
@ -113,7 +121,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
|
||||
|
||||
|
||||
def send_test_mail(kindle_mail, user_name):
|
||||
global_WorkerThread.add_email(_(u'Calibre-Web test e-mail'),None, None, ub.get_mail_settings(),
|
||||
global_WorkerThread.add_email(_(u'Calibre-Web test e-mail'),None, None, config.get_mail_settings(),
|
||||
kindle_mail, user_name, _(u"Test e-mail"),
|
||||
_(u'This e-mail has been sent via Calibre-Web.'))
|
||||
return
|
||||
@ -130,17 +138,18 @@ def send_registration_mail(e_mail, user_name, default_password, resend=False):
|
||||
text += "Don't forget to change your password after first login.\r\n"
|
||||
text += "Sincerely\r\n\r\n"
|
||||
text += "Your Calibre-Web team"
|
||||
global_WorkerThread.add_email(_(u'Get Started with Calibre-Web'),None, None, ub.get_mail_settings(),
|
||||
global_WorkerThread.add_email(_(u'Get Started with Calibre-Web'),None, None, config.get_mail_settings(),
|
||||
e_mail, None, _(u"Registration e-mail for user: %(name)s", name=user_name), text)
|
||||
return
|
||||
|
||||
|
||||
def check_send_to_kindle(entry):
|
||||
"""
|
||||
returns all available book formats for sending to Kindle
|
||||
"""
|
||||
if len(entry.data):
|
||||
bookformats=list()
|
||||
if ub.config.config_ebookconverter == 0:
|
||||
if config.config_ebookconverter == 0:
|
||||
# no converter - only for mobi and pdf formats
|
||||
for ele in iter(entry.data):
|
||||
if 'MOBI' in ele.format:
|
||||
@ -161,17 +170,17 @@ def check_send_to_kindle(entry):
|
||||
bookformats.append({'format': 'Azw','convert':0,'text':_('Send %(format)s to Kindle',format='Azw')})
|
||||
if 'PDF' in formats:
|
||||
bookformats.append({'format': 'Pdf','convert':0,'text':_('Send %(format)s to Kindle',format='Pdf')})
|
||||
if ub.config.config_ebookconverter >= 1:
|
||||
if config.config_ebookconverter >= 1:
|
||||
if 'EPUB' in formats and not 'MOBI' in formats:
|
||||
bookformats.append({'format': 'Mobi','convert':1,
|
||||
'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Mobi')})
|
||||
'''if ub.config.config_ebookconverter == 2:
|
||||
'''if config.config_ebookconverter == 2:
|
||||
if 'EPUB' in formats and not 'AZW3' in formats:
|
||||
bookformats.append({'format': 'Azw3','convert':1,
|
||||
'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Azw3')})'''
|
||||
return bookformats
|
||||
else:
|
||||
app.logger.error(u'Cannot find book entry %d', entry.id)
|
||||
log.error(u'Cannot find book entry %d', entry.id)
|
||||
return None
|
||||
|
||||
|
||||
@ -202,7 +211,7 @@ def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id):
|
||||
for entry in iter(book.data):
|
||||
if entry.format.upper() == book_format.upper():
|
||||
result = entry.name + '.' + book_format.lower()
|
||||
global_WorkerThread.add_email(_(u"Send to Kindle"), book.path, result, ub.get_mail_settings(),
|
||||
global_WorkerThread.add_email(_(u"Send to Kindle"), book.path, result, config.get_mail_settings(),
|
||||
kindle_mail, user_id, _(u"E-mail: %(book)s", book=book.title),
|
||||
_(u'This e-mail has been sent via Calibre-Web.'))
|
||||
return
|
||||
@ -256,8 +265,8 @@ def get_sorted_author(value):
|
||||
value2 = value[-1] + ", " + " ".join(value[:-1])
|
||||
else:
|
||||
value2 = value
|
||||
except Exception:
|
||||
web.app.logger.error("Sorting author " + str(value) + "failed")
|
||||
except Exception as ex:
|
||||
log.error("Sorting author %s failed: %s", value, ex)
|
||||
value2 = value
|
||||
return value2
|
||||
|
||||
@ -274,13 +283,12 @@ def delete_book_file(book, calibrepath, book_format=None):
|
||||
else:
|
||||
if os.path.isdir(path):
|
||||
if len(next(os.walk(path))[1]):
|
||||
web.app.logger.error(
|
||||
"Deleting book " + str(book.id) + " failed, path has subfolders: " + book.path)
|
||||
log.error("Deleting book %s failed, path has subfolders: %s", book.id, book.path)
|
||||
return False
|
||||
shutil.rmtree(path, ignore_errors=True)
|
||||
return True
|
||||
else:
|
||||
web.app.logger.error("Deleting book " + str(book.id) + " failed, book path not valid: " + book.path)
|
||||
log.error("Deleting book %s failed, book path not valid: %s", book.id, book.path)
|
||||
return False
|
||||
|
||||
|
||||
@ -303,16 +311,16 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
|
||||
if not os.path.exists(new_title_path):
|
||||
os.renames(path, new_title_path)
|
||||
else:
|
||||
web.app.logger.info("Copying title: " + path + " into existing: " + new_title_path)
|
||||
for dir_name, subdir_list, file_list in os.walk(path):
|
||||
log.info("Copying title: %s into existing: %s", path, new_title_path)
|
||||
for dir_name, __, file_list in os.walk(path):
|
||||
for file in file_list:
|
||||
os.renames(os.path.join(dir_name, file),
|
||||
os.path.join(new_title_path + dir_name[len(path):], file))
|
||||
path = new_title_path
|
||||
localbook.path = localbook.path.split('/')[0] + '/' + new_titledir
|
||||
except OSError as ex:
|
||||
web.app.logger.error("Rename title from: " + path + " to " + new_title_path + ": " + str(ex))
|
||||
web.app.logger.debug(ex, exc_info=True)
|
||||
log.error("Rename title from: %s to %s: %s", path, new_title_path, ex)
|
||||
log.debug(ex, exc_info=True)
|
||||
return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||
src=path, dest=new_title_path, error=str(ex))
|
||||
if authordir != new_authordir:
|
||||
@ -321,8 +329,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
|
||||
os.renames(path, new_author_path)
|
||||
localbook.path = new_authordir + '/' + localbook.path.split('/')[1]
|
||||
except OSError as ex:
|
||||
web.app.logger.error("Rename author from: " + path + " to " + new_author_path + ": " + str(ex))
|
||||
web.app.logger.debug(ex, exc_info=True)
|
||||
log.error("Rename author from: %s to %s: %s", path, new_author_path, ex)
|
||||
log.debug(ex, exc_info=True)
|
||||
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||
src=path, dest=new_author_path, error=str(ex))
|
||||
# Rename all files from old names to new names
|
||||
@ -335,8 +343,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
|
||||
os.path.join(path_name, new_name + '.' + file_format.format.lower()))
|
||||
file_format.name = new_name
|
||||
except OSError as ex:
|
||||
web.app.logger.error("Rename file in path " + path + " to " + new_name + ": " + str(ex))
|
||||
web.app.logger.debug(ex, exc_info=True)
|
||||
log.error("Rename file in path %s to %s: %s", path, new_name, ex)
|
||||
log.debug(ex, exc_info=True)
|
||||
return _("Rename file in path '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||
src=path, dest=new_name, error=str(ex))
|
||||
return False
|
||||
@ -415,37 +423,45 @@ def generate_random_password():
|
||||
################################## External interface
|
||||
|
||||
def update_dir_stucture(book_id, calibrepath, first_author = None):
|
||||
if ub.config.config_use_google_drive:
|
||||
if config.config_use_google_drive:
|
||||
return update_dir_structure_gdrive(book_id, first_author)
|
||||
else:
|
||||
return update_dir_structure_file(book_id, calibrepath, first_author)
|
||||
|
||||
|
||||
def delete_book(book, calibrepath, book_format):
|
||||
if ub.config.config_use_google_drive:
|
||||
if config.config_use_google_drive:
|
||||
return delete_book_gdrive(book, book_format)
|
||||
else:
|
||||
return delete_book_file(book, calibrepath, book_format)
|
||||
|
||||
|
||||
def get_book_cover(cover_path):
|
||||
if ub.config.config_use_google_drive:
|
||||
try:
|
||||
if not web.is_gdrive_ready():
|
||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
|
||||
path=gd.get_cover_via_gdrive(cover_path)
|
||||
if path:
|
||||
return redirect(path)
|
||||
def get_book_cover(book_id):
|
||||
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
||||
if book.has_cover:
|
||||
|
||||
if config.config_use_google_drive:
|
||||
try:
|
||||
if not gd.is_gdrive_ready():
|
||||
return send_from_directory(_STATIC_DIR, "generic_cover.jpg")
|
||||
path=gd.get_cover_via_gdrive(book.path)
|
||||
if path:
|
||||
return redirect(path)
|
||||
else:
|
||||
log.error('%s/cover.jpg not found on Google Drive', book.path)
|
||||
return send_from_directory(_STATIC_DIR, "generic_cover.jpg")
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
# traceback.print_exc()
|
||||
return send_from_directory(_STATIC_DIR,"generic_cover.jpg")
|
||||
else:
|
||||
cover_file_path = os.path.join(config.config_calibre_dir, book.path)
|
||||
if os.path.isfile(os.path.join(cover_file_path, "cover.jpg")):
|
||||
return send_from_directory(cover_file_path, "cover.jpg")
|
||||
else:
|
||||
web.app.logger.error(cover_path + '/cover.jpg not found on Google Drive')
|
||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
|
||||
except Exception as e:
|
||||
web.app.logger.error("Error Message: " + e.message)
|
||||
web.app.logger.exception(e)
|
||||
# traceback.print_exc()
|
||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"),"generic_cover.jpg")
|
||||
return send_from_directory(_STATIC_DIR,"generic_cover.jpg")
|
||||
else:
|
||||
return send_from_directory(os.path.join(ub.config.config_calibre_dir, cover_path), "cover.jpg")
|
||||
return send_from_directory(_STATIC_DIR,"generic_cover.jpg")
|
||||
|
||||
|
||||
# saves book cover from url
|
||||
@ -455,7 +471,7 @@ def save_cover_from_url(url, book_path):
|
||||
|
||||
|
||||
def save_cover_from_filestorage(filepath, saved_filename, img):
|
||||
if hasattr(img,'_content'):
|
||||
if hasattr(img, '_content'):
|
||||
f = open(os.path.join(filepath, saved_filename), "wb")
|
||||
f.write(img._content)
|
||||
f.close()
|
||||
@ -465,15 +481,15 @@ def save_cover_from_filestorage(filepath, saved_filename, img):
|
||||
try:
|
||||
os.makedirs(filepath)
|
||||
except OSError:
|
||||
web.app.logger.error(u"Failed to create path for cover")
|
||||
log.error(u"Failed to create path for cover")
|
||||
return False
|
||||
try:
|
||||
img.save(os.path.join(filepath, saved_filename))
|
||||
except OSError:
|
||||
web.app.logger.error(u"Failed to store cover-file")
|
||||
return False
|
||||
except IOError:
|
||||
web.app.logger.error(u"Cover-file is not a valid image file")
|
||||
log.error(u"Cover-file is not a valid image file")
|
||||
return False
|
||||
except OSError:
|
||||
log.error(u"Failed to store cover-file")
|
||||
return False
|
||||
return True
|
||||
|
||||
@ -484,7 +500,7 @@ def save_cover(img, book_path):
|
||||
|
||||
if use_PIL:
|
||||
if content_type not in ('image/jpeg', 'image/png', 'image/webp'):
|
||||
web.app.logger.error("Only jpg/jpeg/png/webp files are supported as coverfile")
|
||||
log.error("Only jpg/jpeg/png/webp files are supported as coverfile")
|
||||
return False
|
||||
# convert to jpg because calibre only supports jpg
|
||||
if content_type in ('image/png', 'image/webp'):
|
||||
@ -498,7 +514,7 @@ def save_cover(img, book_path):
|
||||
img._content = tmp_bytesio.getvalue()
|
||||
else:
|
||||
if content_type not in ('image/jpeg'):
|
||||
web.app.logger.error("Only jpg/jpeg files are supported as coverfile")
|
||||
log.error("Only jpg/jpeg files are supported as coverfile")
|
||||
return False
|
||||
|
||||
if ub.config.config_use_google_drive:
|
||||
@ -506,29 +522,29 @@ def save_cover(img, book_path):
|
||||
if save_cover_from_filestorage(tmpDir, "uploaded_cover.jpg", img) is True:
|
||||
gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'),
|
||||
os.path.join(tmpDir, "uploaded_cover.jpg"))
|
||||
web.app.logger.info("Cover is saved on Google Drive")
|
||||
log.info("Cover is saved on Google Drive")
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return save_cover_from_filestorage(os.path.join(ub.config.config_calibre_dir, book_path), "cover.jpg", img)
|
||||
return save_cover_from_filestorage(os.path.join(config.config_calibre_dir, book_path), "cover.jpg", img)
|
||||
|
||||
|
||||
|
||||
def do_download_file(book, book_format, data, headers):
|
||||
if ub.config.config_use_google_drive:
|
||||
if config.config_use_google_drive:
|
||||
startTime = time.time()
|
||||
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
|
||||
web.app.logger.debug(time.time() - startTime)
|
||||
log.debug('%s', time.time() - startTime)
|
||||
if df:
|
||||
return gd.do_gdrive_download(df, headers)
|
||||
else:
|
||||
abort(404)
|
||||
else:
|
||||
filename = os.path.join(ub.config.config_calibre_dir, book.path)
|
||||
filename = os.path.join(config.config_calibre_dir, book.path)
|
||||
if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)):
|
||||
# ToDo: improve error handling
|
||||
web.app.logger.error('File not found: %s' % os.path.join(filename, data.name + "." + book_format))
|
||||
log.error('File not found: %s', os.path.join(filename, data.name + "." + book_format))
|
||||
response = make_response(send_from_directory(filename, data.name + "." + book_format))
|
||||
response.headers = headers
|
||||
return response
|
||||
@ -538,27 +554,23 @@ def do_download_file(book, book_format, data, headers):
|
||||
|
||||
|
||||
def check_unrar(unrarLocation):
|
||||
error = False
|
||||
if os.path.exists(unrarLocation):
|
||||
try:
|
||||
if sys.version_info < (3, 0):
|
||||
unrarLocation = unrarLocation.encode(sys.getfilesystemencoding())
|
||||
p = subprocess.Popen(unrarLocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
p.wait()
|
||||
for lines in p.stdout.readlines():
|
||||
if isinstance(lines, bytes):
|
||||
lines = lines.decode('utf-8')
|
||||
value=re.search('UNRAR (.*) freeware', lines)
|
||||
if value:
|
||||
version = value.group(1)
|
||||
except OSError as e:
|
||||
error = True
|
||||
web.app.logger.exception(e)
|
||||
version =_(u'Error excecuting UnRar')
|
||||
else:
|
||||
version = _(u'Unrar binary file not found')
|
||||
error=True
|
||||
return (error, version)
|
||||
if not unrarLocation:
|
||||
return
|
||||
|
||||
if not os.path.exists(unrarLocation):
|
||||
return 'Unrar binary file not found'
|
||||
|
||||
try:
|
||||
if sys.version_info < (3, 0):
|
||||
unrarLocation = unrarLocation.encode(sys.getfilesystemencoding())
|
||||
for lines in process_wait(unrarLocation):
|
||||
value = re.search('UNRAR (.*) freeware', lines)
|
||||
if value:
|
||||
version = value.group(1)
|
||||
log.debug("unrar version %s", version)
|
||||
except OSError as err:
|
||||
log.exception(err)
|
||||
return 'Error excecuting UnRar'
|
||||
|
||||
|
||||
|
||||
@ -574,6 +586,7 @@ def json_serial(obj):
|
||||
'seconds': obj.seconds,
|
||||
'microseconds': obj.microseconds,
|
||||
}
|
||||
# return obj.isoformat()
|
||||
raise TypeError ("Type %s not serializable" % type(obj))
|
||||
|
||||
|
||||
@ -581,7 +594,7 @@ def json_serial(obj):
|
||||
def format_runtime(runtime):
|
||||
retVal = ""
|
||||
if runtime.days:
|
||||
retVal = format_unit(runtime.days, 'duration-day', length="long", locale=web.get_locale()) + ', '
|
||||
retVal = format_unit(runtime.days, 'duration-day', length="long", locale=get_locale()) + ', '
|
||||
mins, seconds = divmod(runtime.seconds, 60)
|
||||
hours, minutes = divmod(mins, 60)
|
||||
# ToDo: locale.number_symbols._data['timeSeparator'] -> localize time separator ?
|
||||
@ -600,7 +613,8 @@ def render_task_status(tasklist):
|
||||
for task in tasklist:
|
||||
if task['user'] == current_user.nickname or current_user.role_admin():
|
||||
if task['formStarttime']:
|
||||
task['starttime'] = format_datetime(task['formStarttime'], format='short', locale=web.get_locale())
|
||||
task['starttime'] = format_datetime(task['formStarttime'], format='short', locale=get_locale())
|
||||
# task2['formStarttime'] = ""
|
||||
else:
|
||||
if 'starttime' not in task:
|
||||
task['starttime'] = ""
|
||||
@ -612,26 +626,26 @@ def render_task_status(tasklist):
|
||||
|
||||
# localize the task status
|
||||
if isinstance( task['stat'], int ):
|
||||
if task['stat'] == worker.STAT_WAITING:
|
||||
if task['stat'] == STAT_WAITING:
|
||||
task['status'] = _(u'Waiting')
|
||||
elif task['stat'] == worker.STAT_FAIL:
|
||||
elif task['stat'] == STAT_FAIL:
|
||||
task['status'] = _(u'Failed')
|
||||
elif task['stat'] == worker.STAT_STARTED:
|
||||
elif task['stat'] == STAT_STARTED:
|
||||
task['status'] = _(u'Started')
|
||||
elif task['stat'] == worker.STAT_FINISH_SUCCESS:
|
||||
elif task['stat'] == STAT_FINISH_SUCCESS:
|
||||
task['status'] = _(u'Finished')
|
||||
else:
|
||||
task['status'] = _(u'Unknown Status')
|
||||
|
||||
# localize the task type
|
||||
if isinstance( task['taskType'], int ):
|
||||
if task['taskType'] == worker.TASK_EMAIL:
|
||||
if task['taskType'] == TASK_EMAIL:
|
||||
task['taskMessage'] = _(u'E-mail: ') + task['taskMess']
|
||||
elif task['taskType'] == worker.TASK_CONVERT:
|
||||
elif task['taskType'] == TASK_CONVERT:
|
||||
task['taskMessage'] = _(u'Convert: ') + task['taskMess']
|
||||
elif task['taskType'] == worker.TASK_UPLOAD:
|
||||
elif task['taskType'] == TASK_UPLOAD:
|
||||
task['taskMessage'] = _(u'Upload: ') + task['taskMess']
|
||||
elif task['taskType'] == worker.TASK_CONVERT_ANY:
|
||||
elif task['taskType'] == TASK_CONVERT_ANY:
|
||||
task['taskMessage'] = _(u'Convert: ') + task['taskMess']
|
||||
else:
|
||||
task['taskMessage'] = _(u'Unknown Task: ') + task['taskMess']
|
||||
@ -639,3 +653,135 @@ def render_task_status(tasklist):
|
||||
renderedtasklist.append(task)
|
||||
|
||||
return renderedtasklist
|
||||
|
||||
|
||||
# Language and content filters for displaying in the UI
|
||||
def common_filters():
|
||||
if current_user.filter_language() != "all":
|
||||
lang_filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
|
||||
else:
|
||||
lang_filter = true()
|
||||
content_rating_filter = false() if current_user.mature_content else \
|
||||
db.Books.tags.any(db.Tags.name.in_(config.mature_content_tags()))
|
||||
return and_(lang_filter, ~content_rating_filter)
|
||||
|
||||
|
||||
# Creates for all stored languages a translated speaking name in the array for the UI
|
||||
def speaking_language(languages=None):
|
||||
if not languages:
|
||||
languages = db.session.query(db.Languages).all()
|
||||
for lang in languages:
|
||||
try:
|
||||
cur_l = LC.parse(lang.lang_code)
|
||||
lang.name = cur_l.get_language_name(get_locale())
|
||||
except UnknownLocaleError:
|
||||
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
|
||||
return languages
|
||||
|
||||
# checks if domain is in database (including wildcards)
|
||||
# example SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
|
||||
# from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/
|
||||
def check_valid_domain(domain_text):
|
||||
domain_text = domain_text.split('@', 1)[-1].lower()
|
||||
sql = "SELECT * FROM registration WHERE :domain LIKE domain;"
|
||||
result = ub.session.query(ub.Registration).from_statement(text(sql)).params(domain=domain_text).all()
|
||||
return len(result)
|
||||
|
||||
|
||||
# Orders all Authors in the list according to authors sort
|
||||
def order_authors(entry):
|
||||
sort_authors = entry.author_sort.split('&')
|
||||
authors_ordered = list()
|
||||
error = False
|
||||
for auth in sort_authors:
|
||||
# ToDo: How to handle not found authorname
|
||||
result = db.session.query(db.Authors).filter(db.Authors.sort == auth.lstrip().strip()).first()
|
||||
if not result:
|
||||
error = True
|
||||
break
|
||||
authors_ordered.append(result)
|
||||
if not error:
|
||||
entry.authors = authors_ordered
|
||||
return entry
|
||||
|
||||
|
||||
# Fill indexpage with all requested data from database
|
||||
def fill_indexpage(page, database, db_filter, order, *join):
|
||||
if current_user.show_detail_random():
|
||||
randm = db.session.query(db.Books).filter(common_filters())\
|
||||
.order_by(func.random()).limit(config.config_random_books)
|
||||
else:
|
||||
randm = false()
|
||||
off = int(int(config.config_books_per_page) * (page - 1))
|
||||
pagination = Pagination(page, config.config_books_per_page,
|
||||
len(db.session.query(database).filter(db_filter).filter(common_filters()).all()))
|
||||
entries = db.session.query(database).join(*join, isouter=True).filter(db_filter).filter(common_filters()).\
|
||||
order_by(*order).offset(off).limit(config.config_books_per_page).all()
|
||||
for book in entries:
|
||||
book = order_authors(book)
|
||||
return entries, randm, pagination
|
||||
|
||||
|
||||
def get_typeahead(database, query, replace=('','')):
|
||||
db.session.connection().connection.connection.create_function("lower", 1, lcase)
|
||||
entries = db.session.query(database).filter(func.lower(database.name).ilike("%" + query + "%")).all()
|
||||
json_dumps = json.dumps([dict(name=r.name.replace(*replace)) for r in entries])
|
||||
return json_dumps
|
||||
|
||||
# read search results from calibre-database and return it (function is used for feed and simple search
|
||||
def get_search_results(term):
|
||||
db.session.connection().connection.connection.create_function("lower", 1, lcase)
|
||||
q = list()
|
||||
authorterms = re.split("[, ]+", term)
|
||||
for authorterm in authorterms:
|
||||
q.append(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + authorterm + "%")))
|
||||
|
||||
db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + term + "%"))
|
||||
|
||||
return db.session.query(db.Books).filter(common_filters()).filter(
|
||||
or_(db.Books.tags.any(func.lower(db.Tags.name).ilike("%" + term + "%")),
|
||||
db.Books.series.any(func.lower(db.Series.name).ilike("%" + term + "%")),
|
||||
db.Books.authors.any(and_(*q)),
|
||||
db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + term + "%")),
|
||||
func.lower(db.Books.title).ilike("%" + term + "%")
|
||||
)).all()
|
||||
|
||||
def get_cc_columns():
|
||||
tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
if config.config_columns_to_ignore:
|
||||
cc = []
|
||||
for col in tmpcc:
|
||||
r = re.compile(config.config_columns_to_ignore)
|
||||
if r.match(col.label):
|
||||
cc.append(col)
|
||||
else:
|
||||
cc = tmpcc
|
||||
return cc
|
||||
|
||||
def get_download_link(book_id, book_format):
|
||||
book_format = book_format.split(".")[0]
|
||||
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
||||
data = db.session.query(db.Data).filter(db.Data.book == book.id)\
|
||||
.filter(db.Data.format == book_format.upper()).first()
|
||||
if data:
|
||||
# collect downloaded books only for registered user and not for anonymous user
|
||||
if current_user.is_authenticated:
|
||||
ub.update_download(book_id, int(current_user.id))
|
||||
file_name = book.title
|
||||
if len(book.authors) > 0:
|
||||
file_name = book.authors[0].name + '_' + file_name
|
||||
file_name = get_valid_filename(file_name)
|
||||
headers = Headers()
|
||||
headers["Content-Type"] = mimetypes.types_map.get('.' + book_format, "application/octet-stream")
|
||||
headers["Content-Disposition"] = "attachment; filename*=UTF-8''%s.%s" % (quote(file_name.encode('utf-8')),
|
||||
book_format)
|
||||
return do_download_file(book, book_format, data, headers)
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
|
||||
|
||||
############### Database Helper functions
|
||||
|
||||
def lcase(s):
|
||||
return unidecode.unidecode(s.lower())
|
||||
|
@ -17,6 +17,17 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import os
|
||||
try:
|
||||
import cPickle
|
||||
except ImportError:
|
||||
import pickle as cPickle
|
||||
|
||||
from .constants import TRANSLATIONS_DIR as _TRANSLATIONS_DIR
|
||||
|
||||
|
||||
try:
|
||||
from iso639 import languages, __version__
|
||||
get = languages.get
|
||||
@ -30,14 +41,43 @@ except ImportError:
|
||||
__version__ = "? (PyCountry)"
|
||||
|
||||
def _copy_fields(l):
|
||||
l.part1 = l.alpha_2
|
||||
l.part3 = l.alpha_3
|
||||
l.part1 = getattr(l, 'alpha_2', None)
|
||||
l.part3 = getattr(l, 'alpha_3', None)
|
||||
return l
|
||||
|
||||
def get(name=None, part1=None, part3=None):
|
||||
if (part3 is not None):
|
||||
if part3 is not None:
|
||||
return _copy_fields(pyc_languages.get(alpha_3=part3))
|
||||
if (part1 is not None):
|
||||
if part1 is not None:
|
||||
return _copy_fields(pyc_languages.get(alpha_2=part1))
|
||||
if (name is not None):
|
||||
if name is not None:
|
||||
return _copy_fields(pyc_languages.get(name=name))
|
||||
|
||||
|
||||
try:
|
||||
with open(os.path.join(_TRANSLATIONS_DIR, 'iso639.pickle'), 'rb') as f:
|
||||
_LANGUAGES = cPickle.load(f)
|
||||
except cPickle.UnpicklingError as error:
|
||||
print("Can't read file cps/translations/iso639.pickle: %s" % error)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_language_names(locale):
|
||||
return _LANGUAGES.get(locale)
|
||||
|
||||
|
||||
def get_language_name(locale, lang_code):
|
||||
return get_language_names(locale)[lang_code]
|
||||
|
||||
|
||||
def get_language_codes(locale, language_names, remainder=None):
|
||||
language_names = set(x.strip().lower() for x in language_names if x)
|
||||
|
||||
for k, v in get_language_names(locale).items():
|
||||
v = v.lower()
|
||||
if v in language_names:
|
||||
language_names.remove(v)
|
||||
yield k
|
||||
|
||||
if remainder is not None:
|
||||
remainder.extend(language_names)
|
||||
|
117
cps/jinjia.py
Normal file
@ -0,0 +1,117 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# custom jinja filters
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import datetime
|
||||
import mimetypes
|
||||
import re
|
||||
|
||||
from babel.dates import format_date
|
||||
from flask import Blueprint, request, url_for
|
||||
from flask_babel import get_locale
|
||||
from flask_login import current_user
|
||||
|
||||
from . import logger
|
||||
|
||||
|
||||
jinjia = Blueprint('jinjia', __name__)
|
||||
log = logger.create()
|
||||
|
||||
|
||||
# pagination links in jinja
|
||||
@jinjia.app_template_filter('url_for_other_page')
|
||||
def url_for_other_page(page):
|
||||
args = request.view_args.copy()
|
||||
args['page'] = page
|
||||
return url_for(request.endpoint, **args)
|
||||
|
||||
|
||||
# shortentitles to at longest nchar, shorten longer words if necessary
@jinjia.app_template_filter('shortentitle')
def shortentitle_filter(s, nchar=20):
    """Shorten a title to roughly 60 characters, truncating words longer than *nchar*."""
    chunks = []
    consumed = 0  # running total of characters emitted so far
    for word in s.split():
        if consumed >= 60:
            chunks.append('...')
            break
        # truncate over-long words and mark the cut; otherwise emit the word whole
        if len(word) > nchar:
            chunks.append(word[:(nchar - 3)] + '[..] ')
            consumed += nchar + 3
        else:
            chunks.append(word + ' ')
            consumed += len(word) + 1
    return ''.join(chunks).strip()
|
||||
|
||||
|
||||
@jinjia.app_template_filter('mimetype')
def mimetype_filter(val):
    """Map a file extension (without the leading dot) to its MIME type.

    Unknown extensions fall back to 'application/octet-stream'.
    """
    return mimetypes.types_map.get('.' + val, 'application/octet-stream')
|
||||
|
||||
|
||||
@jinjia.app_template_filter('formatdate')
def formatdate_filter(val):
    """Render a Calibre timestamp string as a locale-aware medium date.

    Falls back to the unformatted input value when Babel cannot format the
    date for the current user's locale.
    """
    try:
        # strip separators so strptime can parse a plain "YYYYMMDD HHMMSS"
        conformed_timestamp = re.sub(r"[:]|([-](?!((\d{2}[:]\d{2})|(\d{4}))$))", '', val)
        formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S")
        return format_date(formatdate, format='medium', locale=get_locale())
    except AttributeError as e:
        log.error('Babel error: %s, Current user locale: %s, Current User: %s', e,
                  current_user.locale, current_user.nickname)
        # Bug fix: the previous code returned 'formatdate', which is unbound
        # (NameError) when the error occurs before the strptime assignment;
        # return the original value so the template still renders something.
        return val
|
||||
|
||||
@jinjia.app_template_filter('formatdateinput')
def format_date_input(val):
    """Convert a Calibre timestamp into the YYYY-MM-DD form used by date inputs."""
    stripped = re.sub(r"[:]|([-](?!((\d{2}[:]\d{2})|(\d{4}))$))", '', val)
    parsed = datetime.datetime.strptime(stripped[:15], "%Y%m%d %H%M%S")
    # isoformat()+split instead of strftime: strftime breaks for years < 1900
    input_date = parsed.isoformat().split('T', 1)[0]
    return '' if input_date == "0101-01-01" else input_date
|
||||
|
||||
|
||||
@jinjia.app_template_filter('strftime')
def timestamptodate(date, fmt=None):
    """Format a JavaScript timestamp (milliseconds since the epoch) as a date string."""
    stamp = datetime.datetime.fromtimestamp(int(date) / 1000)
    naive = stamp.replace(tzinfo=None)
    # NOTE(review): '%H:%S' (hour:second) looks like a typo for '%H:%M',
    # but the default format is kept unchanged here — confirm before fixing.
    time_format = fmt if fmt else '%d %m %Y - %H:%S'
    return naive.strftime(time_format)
|
||||
|
||||
|
||||
@jinjia.app_template_filter('yesno')
def yesno(value, yes, no):
    """Jinja equivalent of Django's 'yesno': pick *yes* or *no* by truthiness."""
    return yes if value else no
|
||||
|
||||
|
||||
'''@jinjia.app_template_filter('canread')
|
||||
def canread(ext):
|
||||
if isinstance(ext, db.Data):
|
||||
ext = ext.format
|
||||
return ext.lower() in EXTENSIONS_READER'''
|
164
cps/logger.py
Normal file
@ -0,0 +1,164 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2019 pwr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import os
|
||||
import inspect
|
||||
import logging
|
||||
from logging import Formatter, StreamHandler
|
||||
from logging.handlers import RotatingFileHandler
|
||||
|
||||
from .constants import BASE_DIR as _BASE_DIR
|
||||
|
||||
|
||||
# Access-log formats: gevent prepends its own timestamp, tornado does not.
ACCESS_FORMATTER_GEVENT = Formatter("%(message)s")
ACCESS_FORMATTER_TORNADO = Formatter("[%(asctime)s] %(message)s")

# Application log line layout: timestamp, level, logger:line, message.
FORMATTER = Formatter("[%(asctime)s] %(levelname)5s {%(name)s:%(lineno)d} %(message)s")
DEFAULT_LOG_LEVEL = logging.INFO
DEFAULT_LOG_FILE = os.path.join(_BASE_DIR, "calibre-web.log")
DEFAULT_ACCESS_LOG = os.path.join(_BASE_DIR, "access.log")
# Sentinel path meaning "log to stderr instead of a file" (see setup()).
LOG_TO_STDERR = '/dev/stderr'

# Use 4-letter level names so log columns line up with %(levelname)5s.
logging.addLevelName(logging.WARNING, "WARN")
logging.addLevelName(logging.CRITICAL, "CRIT")
|
||||
|
||||
|
||||
def get(name=None):
    """Return the logger registered under *name* (the root logger when None)."""
    return logging.getLogger(name)
|
||||
|
||||
|
||||
def create():
    """Return a logger named after the module of the immediate caller."""
    caller = inspect.stack(0)[1]
    # inspect.stack() yields FrameInfo objects on Python 3, plain tuples on Python 2
    frame = caller.frame if hasattr(caller, 'frame') else caller[0]
    return get(inspect.getmodule(frame).__name__)
|
||||
|
||||
|
||||
def is_debug_enabled():
    """True when the root logger is configured at DEBUG level or lower."""
    return logging.root.level <= logging.DEBUG
|
||||
|
||||
def is_info_enabled(logger):
    """True when the named logger's own level is INFO or lower (NOTSET counts)."""
    return logging.getLogger(logger).level <= logging.INFO
|
||||
|
||||
|
||||
def get_level_name(level):
    """Translate a numeric logging level to its display name (e.g. 20 -> "INFO")."""
    return logging.getLevelName(level)
|
||||
|
||||
|
||||
def is_valid_logfile(file_path):
    """Check that *file_path* is usable as a log file.

    An empty path is acceptable (the default will be used); a path that is
    itself a directory is not; otherwise the parent directory must exist
    (a bare file name has no parent and is fine).
    """
    if not file_path:
        return True
    if os.path.isdir(file_path):
        return False
    parent = os.path.dirname(file_path)
    return not parent or os.path.isdir(parent)
|
||||
|
||||
|
||||
def _absolute_log_file(log_file, default_log_file):
|
||||
if log_file:
|
||||
if not os.path.dirname(log_file):
|
||||
log_file = os.path.join(_BASE_DIR, log_file)
|
||||
return os.path.abspath(log_file)
|
||||
|
||||
return default_log_file
|
||||
|
||||
|
||||
def get_logfile(log_file):
    """Resolve the application log path, defaulting to calibre-web.log in the base dir."""
    return _absolute_log_file(log_file, DEFAULT_LOG_FILE)
|
||||
|
||||
|
||||
def get_accesslogfile(log_file):
    """Resolve the access log path, defaulting to access.log in the base dir."""
    return _absolute_log_file(log_file, DEFAULT_ACCESS_LOG)
|
||||
|
||||
|
||||
def setup(log_file, log_level=None):
    '''
    Configure the logging output.
    May be called multiple times.
    '''
    log_file = _absolute_log_file(log_file, DEFAULT_LOG_FILE)

    r = logging.root
    r.setLevel(log_level or DEFAULT_LOG_LEVEL)

    previous_handler = r.handlers[0] if r.handlers else None
    if previous_handler:
        # if the log_file has not changed, don't create a new handler
        if getattr(previous_handler, 'baseFilename', None) == log_file:
            return
        r.debug("logging to %s level %s", log_file, r.level)

    if log_file == LOG_TO_STDERR:
        file_handler = StreamHandler()
        # fake a baseFilename so the change-detection above also works for stderr
        file_handler.baseFilename = LOG_TO_STDERR
    else:
        try:
            file_handler = RotatingFileHandler(log_file, maxBytes=50000, backupCount=2)
        except IOError:
            # requested file not writable: fall back to the default location,
            # unless the default itself failed (then there is nothing to do)
            if log_file == DEFAULT_LOG_FILE:
                raise
            file_handler = RotatingFileHandler(DEFAULT_LOG_FILE, maxBytes=50000, backupCount=2)
    file_handler.setFormatter(FORMATTER)

    # replace all existing root handlers with the new one
    for h in r.handlers:
        r.removeHandler(h)
        h.close()
    r.addHandler(file_handler)
|
||||
|
||||
|
||||
def create_access_log(log_file, log_name, formatter):
    '''
    One-time configuration for the web server's access log.

    :param log_file: target path (empty -> DEFAULT_ACCESS_LOG)
    :param log_name: name of the logger the web server writes to
    :param formatter: one of the ACCESS_FORMATTER_* formats above
    '''
    log_file = _absolute_log_file(log_file, DEFAULT_ACCESS_LOG)
    logging.debug("access log: %s", log_file)

    access_log = logging.getLogger(log_name)
    # keep access entries out of the application log
    access_log.propagate = False
    access_log.setLevel(logging.INFO)

    file_handler = RotatingFileHandler(log_file, maxBytes=50000, backupCount=2)
    file_handler.setFormatter(formatter)
    access_log.addHandler(file_handler)
    return access_log
|
||||
|
||||
|
||||
# Enable logging of smtp lib debug output
class StderrLogger(object):
    """File-like adapter that forwards writes to a logger, line by line.

    smtplib writes its debug output to a stream; this class buffers the
    incoming fragments and emits one DEBUG record per completed line.
    """

    def __init__(self, name=None):
        self.buffer = ''
        self.log = get(name or self.__class__.__name__)

    def write(self, message):
        try:
            if message != '\n':
                self.buffer += message
                return
            # a lone newline marks a complete line: flush the buffer as one record
            self.log.debug(self.buffer.replace('\n', '\\n'))
            self.buffer = ''
        except Exception:
            self.log.debug("Logging Error")
|
||||
|
||||
|
||||
# if debugging, start logging to stderr immediately
# (runs at import time, before the app has loaded its log configuration)
if os.environ.get('FLASK_DEBUG', None):
    setup(LOG_TO_STDERR, logging.DEBUG)
|
140
cps/oauth.py
Normal file
@ -0,0 +1,140 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
from flask import session
|
||||
|
||||
|
||||
try:
    from flask_dance.consumer.backend.sqla import SQLAlchemyBackend, first, _get_real_user
    from sqlalchemy.orm.exc import NoResultFound

    class OAuthBackend(SQLAlchemyBackend):
        """
        Stores and retrieves OAuth tokens using a relational database through
        the `SQLAlchemy`_ ORM.

        Extends flask-dance's SQLAlchemyBackend so that tokens parked in the
        Flask session (before a user is bound) are honoured as well.

        .. _SQLAlchemy: http://www.sqlalchemy.org/
        """
        def __init__(self, model, session,
                     user=None, user_id=None, user_required=None, anon_user=None,
                     cache=None):
            # all parameters are passed straight through to SQLAlchemyBackend
            super(OAuthBackend, self).__init__(model, session, user, user_id, user_required, anon_user, cache)

        def get(self, blueprint, user=None, user_id=None):
            """Return the stored token: session first, then cache, then database."""
            if blueprint.name + '_oauth_token' in session and session[blueprint.name + '_oauth_token'] != '':
                return session[blueprint.name + '_oauth_token']
            # check cache
            cache_key = self.make_cache_key(blueprint=blueprint, user=user, user_id=user_id)
            token = self.cache.get(cache_key)
            if token:
                return token

            # if not cached, make database queries
            query = (
                self.session.query(self.model)
                .filter_by(provider=blueprint.name)
            )
            uid = first([user_id, self.user_id, blueprint.config.get("user_id")])
            u = first(_get_real_user(ref, self.anon_user)
                      for ref in (user, self.user, blueprint.config.get("user")))

            # a provider user id parked in the session can identify the row
            # even before the token is bound to a local user
            use_provider_user_id = False
            if blueprint.name + '_oauth_user_id' in session and session[blueprint.name + '_oauth_user_id'] != '':
                query = query.filter_by(provider_user_id=session[blueprint.name + '_oauth_user_id'])
                use_provider_user_id = True

            if self.user_required and not u and not uid and not use_provider_user_id:
                #raise ValueError("Cannot get OAuth token without an associated user")
                return None
            # check for user ID
            if hasattr(self.model, "user_id") and uid:
                query = query.filter_by(user_id=uid)
            # check for user (relationship property)
            elif hasattr(self.model, "user") and u:
                query = query.filter_by(user=u)
            # if we have the property, but not value, filter by None
            elif hasattr(self.model, "user_id"):
                query = query.filter_by(user_id=None)
            # run query
            try:
                token = query.one().token
            except NoResultFound:
                token = None

            # cache the result
            self.cache.set(cache_key, token)

            return token

        def set(self, blueprint, token, user=None, user_id=None):
            """Replace the stored token for this provider/user pair."""
            uid = first([user_id, self.user_id, blueprint.config.get("user_id")])
            u = first(_get_real_user(ref, self.anon_user)
                      for ref in (user, self.user, blueprint.config.get("user")))

            if self.user_required and not u and not uid:
                raise ValueError("Cannot set OAuth token without an associated user")

            # if there was an existing model, delete it
            existing_query = (
                self.session.query(self.model)
                .filter_by(provider=blueprint.name)
            )
            # check for user ID
            has_user_id = hasattr(self.model, "user_id")
            if has_user_id and uid:
                existing_query = existing_query.filter_by(user_id=uid)
            # check for user (relationship property)
            has_user = hasattr(self.model, "user")
            if has_user and u:
                existing_query = existing_query.filter_by(user=u)
            # queue up delete query -- won't be run until commit()
            existing_query.delete()
            # create a new model for this token
            kwargs = {
                "provider": blueprint.name,
                "token": token,
            }
            if has_user_id and uid:
                kwargs["user_id"] = uid
            if has_user and u:
                kwargs["user"] = u
            self.session.add(self.model(**kwargs))
            # commit to delete and add simultaneously
            self.session.commit()
            # invalidate cache
            self.cache.delete(self.make_cache_key(
                blueprint=blueprint, user=user, user_id=user_id
            ))

        def delete(self, blueprint, user=None, user_id=None):
            """Remove the stored token for this provider/user pair."""
            query = (
                self.session.query(self.model)
                .filter_by(provider=blueprint.name)
            )
            uid = first([user_id, self.user_id, blueprint.config.get("user_id")])
            u = first(_get_real_user(ref, self.anon_user)
                      for ref in (user, self.user, blueprint.config.get("user")))

            if self.user_required and not u and not uid:
                raise ValueError("Cannot delete OAuth token without an associated user")

            # check for user ID
            if hasattr(self.model, "user_id") and uid:
                query = query.filter_by(user_id=uid)
            # check for user (relationship property)
            elif hasattr(self.model, "user") and u:
                query = query.filter_by(user=u)
            # if we have the property, but not value, filter by None
            elif hasattr(self.model, "user_id"):
                query = query.filter_by(user_id=None)
            # run query
            query.delete()
            self.session.commit()
            # invalidate cache
            self.cache.delete(self.make_cache_key(
                blueprint=blueprint, user=user, user_id=user_id,
            ))

except ImportError:
    # flask-dance (or its SQLAlchemy backend) is optional; without it the
    # OAuth login features are simply unavailable
    pass
|
344
cps/oauth_bb.py
Normal file
@ -0,0 +1,344 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import json
|
||||
from functools import wraps
|
||||
|
||||
from flask import session, request, make_response, abort
|
||||
from flask import Blueprint, flash, redirect, url_for
|
||||
from flask_babel import gettext as _
|
||||
from flask_dance.consumer import oauth_authorized, oauth_error
|
||||
from flask_dance.contrib.github import make_github_blueprint, github
|
||||
from flask_dance.contrib.google import make_google_blueprint, google
|
||||
from flask_login import login_user, current_user
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
|
||||
from . import constants, logger, config, app, ub
|
||||
from .web import login_required
|
||||
from .oauth import OAuthBackend
|
||||
# from .web import github_oauth_required
|
||||
|
||||
|
||||
# registered OAuth provider name -> human-readable display name
oauth_check = {}
oauth = Blueprint('oauth', __name__)
log = logger.create()
|
||||
|
||||
|
||||
def github_oauth_required(f):
    """Allow the wrapped view only when GitHub OAuth login is configured.

    Otherwise respond with 404 — as JSON for XHR requests, as a plain
    abort for everything else.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        if config.config_login_type == constants.LOGIN_OAUTH_GITHUB:
            return f(*args, **kwargs)
        if not request.is_xhr:
            abort(404)
        payload = {'status': 'error', 'message': 'Not Found'}
        response = make_response(json.dumps(payload, ensure_ascii=False))
        response.headers["Content-Type"] = "application/json; charset=utf-8"
        return response, 404

    return inner
|
||||
|
||||
|
||||
def google_oauth_required(f):
    """Allow the wrapped view only when Google OAuth login is configured.

    Otherwise respond with 404 — as JSON for XHR requests, as a plain
    abort for everything else.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        # Bug fix: this previously compared 'config.config_use_google_oauth',
        # while the configured login mode lives in 'config_login_type'
        # (exactly as github_oauth_required checks it).
        if config.config_login_type == constants.LOGIN_OAUTH_GOOGLE:
            return f(*args, **kwargs)
        if request.is_xhr:
            data = {'status': 'error', 'message': 'Not Found'}
            response = make_response(json.dumps(data, ensure_ascii=False))
            response.headers["Content-Type"] = "application/json; charset=utf-8"
            return response, 404
        abort(404)

    return inner
|
||||
|
||||
|
||||
def register_oauth_blueprint(blueprint, show_name):
    """Record an OAuth blueprint's display name so status checks can find it."""
    if blueprint.name != "":
        oauth_check[blueprint.name] = show_name
|
||||
|
||||
|
||||
def register_user_with_oauth(user=None):
    """Bind OAuth logins parked in the session to *user*.

    With user=None only a flash message listing the pending providers is
    shown (registration form case); otherwise each pending provider's token
    row is re-bound to the given user and committed.
    """
    all_oauth = {}
    # collect providers whose OAuth user id is parked in the session
    # NOTE(review): the loop variable shadows the module-level 'oauth' blueprint
    for oauth in oauth_check.keys():
        if oauth + '_oauth_user_id' in session and session[oauth + '_oauth_user_id'] != '':
            all_oauth[oauth] = oauth_check[oauth]
    if len(all_oauth.keys()) == 0:
        return
    if user is None:
        flash(_(u"Register with %(provider)s", provider=", ".join(list(all_oauth.values()))), category="success")
    else:
        for oauth in all_oauth.keys():
            # Find this OAuth token in the database, or create it
            query = ub.session.query(ub.OAuth).filter_by(
                provider=oauth,
                provider_user_id=session[oauth + "_oauth_user_id"],
            )
            try:
                oauth = query.one()
                oauth.user_id = user.id
            except NoResultFound:
                # no found, return error
                return
            try:
                ub.session.commit()
            except Exception as e:
                log.exception(e)
                ub.session.rollback()
|
||||
|
||||
|
||||
def logout_oauth_user():
    """Drop every registered provider's cached OAuth user id from the session."""
    for provider in oauth_check:
        session_key = provider + '_oauth_user_id'
        if session_key in session:
            session.pop(session_key)
|
||||
|
||||
if ub.oauth_support:
    # build the flask-dance blueprints for each supported provider
    github_blueprint = make_github_blueprint(
        client_id=config.config_github_oauth_client_id,
        client_secret=config.config_github_oauth_client_secret,
        redirect_to="oauth.github_login",)

    google_blueprint = make_google_blueprint(
        client_id=config.config_google_oauth_client_id,
        client_secret=config.config_google_oauth_client_secret,
        redirect_to="oauth.google_login",
        scope=[
            "https://www.googleapis.com/auth/plus.me",
            "https://www.googleapis.com/auth/userinfo.email",
        ]
    )

    app.register_blueprint(google_blueprint, url_prefix="/login")
    app.register_blueprint(github_blueprint, url_prefix='/login')

    # persist tokens through the app's own OAuth table instead of memory
    github_blueprint.backend = OAuthBackend(ub.OAuth, ub.session, user=current_user, user_required=True)
    google_blueprint.backend = OAuthBackend(ub.OAuth, ub.session, user=current_user, user_required=True)

    # only the provider matching the configured login type is advertised
    if config.config_login_type == constants.LOGIN_OAUTH_GITHUB:
        register_oauth_blueprint(github_blueprint, 'GitHub')
    if config.config_login_type == constants.LOGIN_OAUTH_GOOGLE:
        register_oauth_blueprint(google_blueprint, 'Google')
|
||||
|
||||
|
||||
@oauth_authorized.connect_via(github_blueprint)
def github_logged_in(blueprint, token):
    """Signal handler: persist the GitHub token once authorization succeeds."""
    if not token:
        flash(_(u"Failed to log in with GitHub."), category="error")
        return False

    resp = blueprint.session.get("/user")
    if not resp.ok:
        flash(_(u"Failed to fetch user info from GitHub."), category="error")
        return False

    account = resp.json()
    return oauth_update_token(blueprint, token, str(account["id"]))
|
||||
|
||||
|
||||
@oauth_authorized.connect_via(google_blueprint)
def google_logged_in(blueprint, token):
    """Signal handler: persist the Google token once authorization succeeds."""
    if not token:
        flash(_(u"Failed to log in with Google."), category="error")
        return False

    resp = blueprint.session.get("/oauth2/v2/userinfo")
    if not resp.ok:
        flash(_(u"Failed to fetch user info from Google."), category="error")
        return False

    account = resp.json()
    return oauth_update_token(blueprint, token, str(account["id"]))
|
||||
|
||||
|
||||
def oauth_update_token(blueprint, token, provider_user_id):
    """Store the fresh OAuth *token* in the session and in the database.

    Always returns False so flask-dance does not additionally persist the
    token through its own default storage.
    """
    session[blueprint.name + "_oauth_user_id"] = provider_user_id
    session[blueprint.name + "_oauth_token"] = token

    # Find this OAuth token in the database, or create it
    query = ub.session.query(ub.OAuth).filter_by(
        provider=blueprint.name,
        provider_user_id=provider_user_id,
    )
    try:
        oauth = query.one()
        # update token
        oauth.token = token
    except NoResultFound:
        oauth = ub.OAuth(
            provider=blueprint.name,
            provider_user_id=provider_user_id,
            token=token,
        )
    try:
        ub.session.add(oauth)
        ub.session.commit()
    except Exception as e:
        log.exception(e)
        ub.session.rollback()

    # Disable Flask-Dance's default behavior for saving the OAuth token
    return False
|
||||
|
||||
|
||||
def bind_oauth_or_register(provider, provider_user_id, redirect_url):
    """Log in via an existing OAuth link, bind it to the current user, or bounce.

    :param provider: provider name (blueprint name)
    :param provider_user_id: the provider-side account id
    :param redirect_url: endpoint to redirect to when no token row exists yet
    """
    query = ub.session.query(ub.OAuth).filter_by(
        provider=provider,
        provider_user_id=provider_user_id,
    )
    try:
        oauth = query.one()
        # already bind with user, just login
        if oauth.user:
            login_user(oauth.user)
            return redirect(url_for('web.index'))
        else:
            # bind to current user
            if current_user and current_user.is_authenticated:
                oauth.user = current_user
                try:
                    ub.session.add(oauth)
                    ub.session.commit()
                except Exception as e:
                    log.exception(e)
                    ub.session.rollback()
            return redirect(url_for('web.login'))
            #if config.config_public_reg:
            #    return redirect(url_for('web.register'))
            #else:
            #    flash(_(u"Public registration is not enabled"), category="error")
            #    return redirect(url_for(redirect_url))
    except NoResultFound:
        # no token row yet: restart the provider's login flow
        return redirect(url_for(redirect_url))
|
||||
|
||||
|
||||
def get_oauth_status():
    """Return the list of OAuth provider names linked to the current user."""
    query = ub.session.query(ub.OAuth).filter_by(
        user_id=current_user.id,
    )
    # Query.all() returns a (possibly empty) list and never raises
    # NoResultFound, so the former except-branch was unreachable dead code.
    return [oauth.provider for oauth in query.all()]
|
||||
|
||||
|
||||
def unlink_oauth(provider):
    """Remove the link between the current user and *provider*, then show the profile.

    Flashes success or failure; a missing link is reported as an error.
    """
    # (removed: a dead 'if request.host_url ... : pass' referrer check that had no effect)
    query = ub.session.query(ub.OAuth).filter_by(
        provider=provider,
        user_id=current_user.id,
    )
    try:
        oauth = query.one()
        if current_user and current_user.is_authenticated:
            oauth.user = current_user
            try:
                ub.session.delete(oauth)
                ub.session.commit()
                logout_oauth_user()
                flash(_(u"Unlink to %(oauth)s success.", oauth=oauth_check[provider]), category="success")
            except Exception as e:
                log.exception(e)
                ub.session.rollback()
                flash(_(u"Unlink to %(oauth)s failed.", oauth=oauth_check[provider]), category="error")
    except NoResultFound:
        # typo fix in the log message: "fount" -> "found"
        log.warning("oauth %s for user %d not found", provider, current_user.id)
        flash(_(u"Not linked to %(oauth)s.", oauth=oauth_check[provider]), category="error")
    return redirect(url_for('web.profile'))
|
||||
|
||||
|
||||
# notify on OAuth provider error
@oauth_error.connect_via(github_blueprint)
def github_error(blueprint, error, error_description=None, error_uri=None):
    """Flash a notification when the GitHub OAuth provider reports an error."""
    template = (
        u"OAuth error from {name}! "
        u"error={error} description={description} uri={uri}"
    )
    flash(template.format(name=blueprint.name,
                          error=error,
                          description=error_description,
                          uri=error_uri),
          category="error")  # ToDo: Translate
|
||||
|
||||
|
||||
@oauth.route('/github')
@github_oauth_required
def github_login():
    """Start or complete the GitHub OAuth login flow."""
    if not github.authorized:
        return redirect(url_for('github.login'))
    account_info = github.get('/user')
    if account_info.ok:
        return bind_oauth_or_register(github_blueprint.name,
                                      account_info.json()['id'],
                                      'github.login')
    flash(_(u"GitHub Oauth error, please retry later."), category="error")
    return redirect(url_for('web.login'))
|
||||
|
||||
|
||||
@oauth.route('/unlink/github', methods=["GET"])
@login_required
def github_login_unlink():
    """Remove the GitHub link from the current user's account."""
    return unlink_oauth(github_blueprint.name)
|
||||
|
||||
|
||||
@oauth.route('/login/google')
@google_oauth_required
def google_login():
    """Start or complete the Google OAuth login flow."""
    if not google.authorized:
        return redirect(url_for("google.login"))
    resp = google.get("/oauth2/v2/userinfo")
    if resp.ok:
        return bind_oauth_or_register(google_blueprint.name,
                                      resp.json()['id'],
                                      'google.login')
    flash(_(u"Google Oauth error, please retry later."), category="error")
    return redirect(url_for('web.login'))
|
||||
|
||||
|
||||
@oauth_error.connect_via(google_blueprint)
def google_error(blueprint, error, error_description=None, error_uri=None):
    """Flash a notification when the Google OAuth provider reports an error."""
    template = (
        u"OAuth error from {name}! "
        u"error={error} description={description} uri={uri}"
    )
    flash(template.format(name=blueprint.name,
                          error=error,
                          description=error_description,
                          uri=error_uri),
          category="error")  # ToDo: Translate
|
||||
|
||||
|
||||
@oauth.route('/unlink/google', methods=["GET"])
@login_required
def google_login_unlink():
    """Remove the Google link from the current user's account."""
    return unlink_oauth(google_blueprint.name)
|
328
cps/opds.py
Normal file
@ -0,0 +1,328 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import datetime
|
||||
from functools import wraps
|
||||
|
||||
from flask import Blueprint, request, render_template, Response, g, make_response
|
||||
from flask_login import current_user
|
||||
from sqlalchemy.sql.expression import func, text, or_, and_
|
||||
from werkzeug.security import check_password_hash
|
||||
|
||||
from . import constants, logger, config, db, ub, services
|
||||
from .helper import fill_indexpage, get_download_link, get_book_cover
|
||||
from .pagination import Pagination
|
||||
from .web import common_filters, get_search_results, render_read_books, download_required
|
||||
|
||||
|
||||
opds = Blueprint('opds', __name__)
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
||||
def requires_basic_auth_if_no_ano(f):
    """Protect an OPDS view with HTTP Basic auth unless anonymous browsing is on.

    When LDAP login is configured and available, authentication is delegated
    to the LDAP service's own basic-auth wrapper instead.
    (check_auth/authenticate are defined elsewhere in this module.)
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        if config.config_anonbrowse != 1:
            # anonymous browsing disabled: demand valid Basic credentials
            if not auth or not check_auth(auth.username, auth.password):
                return authenticate()
        return f(*args, **kwargs)
    if config.config_login_type == constants.LOGIN_LDAP and services.ldap:
        return services.ldap.basic_auth_required(f)
    return decorated
|
||||
|
||||
|
||||
@opds.route("/opds/")
@requires_basic_auth_if_no_ano
def feed_index():
    """OPDS catalog root."""
    return render_xml_template('index.xml')
|
||||
|
||||
|
||||
@opds.route("/opds/osd")
@requires_basic_auth_if_no_ano
def feed_osd():
    """OpenSearch description document for OPDS clients."""
    return render_xml_template('osd.xml', lang='en-EN')
|
||||
|
||||
|
||||
@opds.route("/opds/search", defaults={'query': ""})
@opds.route("/opds/search/<query>")
@requires_basic_auth_if_no_ano
def feed_cc_search(query):
    """Calibre-Companion style search: the term comes from the URL path."""
    return feed_search(query.strip())
|
||||
|
||||
|
||||
@opds.route("/opds/search", methods=["GET"])
@requires_basic_auth_if_no_ano
def feed_normal_search():
    """OPDS search reading the term from the 'query' request parameter."""
    # Bug fix: request.args.get("query") is None when the parameter is
    # absent, and None.strip() raised AttributeError (HTTP 500); default
    # to an empty search term instead.
    return feed_search(request.args.get("query", "").strip())
|
||||
|
||||
|
||||
@opds.route("/opds/new")
@requires_basic_auth_if_no_ano
def feed_new():
    """OPDS feed of the most recently added books (paged via ?offset=)."""
    off = request.args.get("offset") or 0
    page = int(off) / int(config.config_books_per_page) + 1
    entries, __, pagination = fill_indexpage(page, db.Books, True,
                                             [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/discover")
@requires_basic_auth_if_no_ano
def feed_discover():
    """OPDS feed of randomly picked books."""
    per_page = config.config_books_per_page
    entries = db.session.query(db.Books).filter(common_filters()) \
        .order_by(func.random()).limit(per_page)
    pagination = Pagination(1, per_page, int(per_page))
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/rated")
@requires_basic_auth_if_no_ano
def feed_best_rated():
    """OPDS feed of books rated above 4.5 stars (rating > 9 of 10)."""
    off = request.args.get("offset") or 0
    page = int(off) / int(config.config_books_per_page) + 1
    entries, __, pagination = fill_indexpage(page, db.Books,
                                             db.Books.ratings.any(db.Ratings.rating > 9),
                                             [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/hot")
@requires_basic_auth_if_no_ano
def feed_hot():
    """OPDS feed of the most downloaded books (paged via ?offset=)."""
    off = request.args.get("offset") or 0
    # download rows grouped per book, most-downloaded first
    all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by(
        func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
    hot_books = all_books.offset(off).limit(config.config_books_per_page)
    entries = list()
    for book in hot_books:
        downloadBook = db.session.query(db.Books).filter(db.Books.id == book.Downloads.book_id).first()
        if downloadBook:
            # NOTE(review): the re-query below additionally applies common_filters();
            # if the book is excluded by those filters, .first() is None and a None
            # entry is appended — confirm this is intended
            entries.append(
                db.session.query(db.Books).filter(common_filters())
                .filter(db.Books.id == book.Downloads.book_id).first()
            )
        else:
            # download record points at a book that no longer exists: purge it
            ub.delete_download(book.Downloads.book_id)
            # ub.session.query(ub.Downloads).filter(book.Downloads.book_id == ub.Downloads.book_id).delete()
            # ub.session.commit()
    numBooks = entries.__len__()
    pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1),
                            config.config_books_per_page, numBooks)
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/author")
@requires_basic_auth_if_no_ano
def feed_authorindex():
    """OPDS index of authors (paged via ?offset=)."""
    off = request.args.get("offset") or 0
    per_page = config.config_books_per_page
    entries = db.session.query(db.Authors).join(db.books_authors_link).join(db.Books) \
        .filter(common_filters()) \
        .group_by(text('books_authors_link.author')) \
        .order_by(db.Authors.sort).limit(per_page).offset(off)
    pagination = Pagination(int(off) / int(per_page) + 1, per_page,
                            len(db.session.query(db.Authors).all()))
    return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_author', pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/author/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_author(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
|
||||
db.Books, db.Books.authors.any(db.Authors.id == book_id), [db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/publisher")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_publisherindex():
|
||||
off = request.args.get("offset") or 0
|
||||
entries = db.session.query(db.Publishers).join(db.books_publishers_link).join(db.Books).filter(common_filters())\
|
||||
.group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort).limit(config.config_books_per_page).offset(off)
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(db.session.query(db.Publishers).all()))
|
||||
return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_publisher', pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/publisher/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_publisher(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
|
||||
db.Books, db.Books.publishers.any(db.Publishers.id == book_id),
|
||||
[db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/category")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_categoryindex():
|
||||
off = request.args.get("offset") or 0
|
||||
entries = db.session.query(db.Tags).join(db.books_tags_link).join(db.Books).filter(common_filters())\
|
||||
.group_by(text('books_tags_link.tag')).order_by(db.Tags.name).offset(off).limit(config.config_books_per_page)
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(db.session.query(db.Tags).all()))
|
||||
return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_category', pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/category/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_category(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
|
||||
db.Books, db.Books.tags.any(db.Tags.id == book_id), [db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/series")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_seriesindex():
|
||||
off = request.args.get("offset") or 0
|
||||
entries = db.session.query(db.Series).join(db.books_series_link).join(db.Books).filter(common_filters())\
|
||||
.group_by(text('books_series_link.series')).order_by(db.Series.sort).offset(off).all()
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(db.session.query(db.Series).all()))
|
||||
return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_series', pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/series/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_series(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
|
||||
db.Books, db.Books.series.any(db.Series.id == book_id), [db.Books.series_index])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/shelfindex/", defaults={'public': 0})
|
||||
@opds.route("/opds/shelfindex/<string:public>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_shelfindex(public):
|
||||
off = request.args.get("offset") or 0
|
||||
if public is not 0:
|
||||
shelf = g.public_shelfes
|
||||
number = len(shelf)
|
||||
else:
|
||||
shelf = g.user.shelf
|
||||
number = shelf.count()
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
number)
|
||||
return render_xml_template('feed.xml', listelements=shelf, folder='opds.feed_shelf', pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/shelf/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_shelf(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
if current_user.is_anonymous:
|
||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == book_id).first()
|
||||
else:
|
||||
shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
|
||||
ub.Shelf.id == book_id),
|
||||
and_(ub.Shelf.is_public == 1,
|
||||
ub.Shelf.id == book_id))).first()
|
||||
result = list()
|
||||
# user is allowed to access shelf
|
||||
if shelf:
|
||||
books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == book_id).order_by(
|
||||
ub.BookShelf.order.asc()).all()
|
||||
for book in books_in_shelf:
|
||||
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
|
||||
result.append(cur_book)
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(result))
|
||||
return render_xml_template('feed.xml', entries=result, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/download/<book_id>/<book_format>/")
|
||||
@requires_basic_auth_if_no_ano
|
||||
@download_required
|
||||
def opds_download_link(book_id, book_format):
|
||||
return get_download_link(book_id,book_format)
|
||||
|
||||
|
||||
@opds.route("/ajax/book/<string:uuid>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def get_metadata_calibre_companion(uuid):
|
||||
entry = db.session.query(db.Books).filter(db.Books.uuid.like("%" + uuid + "%")).first()
|
||||
if entry is not None:
|
||||
js = render_template('json.txt', entry=entry)
|
||||
response = make_response(js)
|
||||
response.headers["Content-Type"] = "application/json; charset=utf-8"
|
||||
return response
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
def feed_search(term):
|
||||
if term:
|
||||
term = term.strip().lower()
|
||||
entries = get_search_results( term)
|
||||
entriescount = len(entries) if len(entries) > 0 else 1
|
||||
pagination = Pagination(1, entriescount, entriescount)
|
||||
return render_xml_template('feed.xml', searchterm=term, entries=entries, pagination=pagination)
|
||||
else:
|
||||
return render_xml_template('feed.xml', searchterm="")
|
||||
|
||||
def check_auth(username, password):
|
||||
if sys.version_info.major == 3:
|
||||
username=username.encode('windows-1252')
|
||||
user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) ==
|
||||
username.decode('utf-8').lower()).first()
|
||||
return bool(user and check_password_hash(user.password, password))
|
||||
|
||||
|
||||
def authenticate():
|
||||
return Response(
|
||||
'Could not verify your access level for that URL.\n'
|
||||
'You have to login with proper credentials', 401,
|
||||
{'WWW-Authenticate': 'Basic realm="Login Required"'})
|
||||
|
||||
|
||||
def render_xml_template(*args, **kwargs):
|
||||
#ToDo: return time in current timezone similar to %z
|
||||
currtime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00")
|
||||
xml = render_template(current_time=currtime, instance=config.config_calibre_web_title, *args, **kwargs)
|
||||
response = make_response(xml)
|
||||
response.headers["Content-Type"] = "application/atom+xml; charset=utf-8"
|
||||
return response
|
||||
|
||||
@opds.route("/opds/thumb_240_240/<book_id>")
|
||||
@opds.route("/opds/cover_240_240/<book_id>")
|
||||
@opds.route("/opds/cover_90_90/<book_id>")
|
||||
@opds.route("/opds/cover/<book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_get_cover(book_id):
|
||||
return get_book_cover(book_id)
|
||||
|
||||
@opds.route("/opds/readbooks/")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_read_books():
|
||||
off = request.args.get("offset") or 0
|
||||
return render_read_books(int(off) / (int(config.config_books_per_page)) + 1, True, True)
|
||||
|
||||
|
||||
@opds.route("/opds/unreadbooks/")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_unread_books():
|
||||
off = request.args.get("offset") or 0
|
||||
return render_read_books(int(off) / (int(config.config_books_per_page)) + 1, False, True)
|
77
cps/pagination.py
Normal file
@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
from math import ceil
|
||||
|
||||
|
||||
# simple pagination for the feed
|
||||
class Pagination(object):
|
||||
def __init__(self, page, per_page, total_count):
|
||||
self.page = int(page)
|
||||
self.per_page = int(per_page)
|
||||
self.total_count = int(total_count)
|
||||
|
||||
@property
|
||||
def next_offset(self):
|
||||
return int(self.page * self.per_page)
|
||||
|
||||
@property
|
||||
def previous_offset(self):
|
||||
return int((self.page - 2) * self.per_page)
|
||||
|
||||
@property
|
||||
def last_offset(self):
|
||||
last = int(self.total_count) - int(self.per_page)
|
||||
if last < 0:
|
||||
last = 0
|
||||
return int(last)
|
||||
|
||||
@property
|
||||
def pages(self):
|
||||
return int(ceil(self.total_count / float(self.per_page)))
|
||||
|
||||
@property
|
||||
def has_prev(self):
|
||||
return self.page > 1
|
||||
|
||||
@property
|
||||
def has_next(self):
|
||||
return self.page < self.pages
|
||||
|
||||
# right_edge: last right_edges count of all pages are shown as number, means, if 10 pages are paginated -> 9,10 shwn
|
||||
# left_edge: first left_edges count of all pages are shown as number -> 1,2 shwn
|
||||
# left_current: left_current count below current page are shown as number, means if current page 5 -> 3,4 shwn
|
||||
# left_current: right_current count above current page are shown as number, means if current page 5 -> 6,7 shwn
|
||||
def iter_pages(self, left_edge=2, left_current=2,
|
||||
right_current=4, right_edge=2):
|
||||
last = 0
|
||||
left_current = self.page - left_current - 1
|
||||
right_current = self.page + right_current + 1
|
||||
right_edge = self.pages - right_edge
|
||||
for num in range(1, (self.pages + 1)):
|
||||
if num <= left_edge or (left_current < num < right_current) or num > right_edge:
|
||||
if last + 1 != num:
|
||||
yield None
|
||||
yield num
|
||||
last = num
|
@ -28,10 +28,12 @@
|
||||
|
||||
# http://flask.pocoo.org/snippets/62/
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
try:
|
||||
from urllib.parse import urlparse, urljoin
|
||||
except ImportError:
|
||||
from urlparse import urlparse, urljoin
|
||||
|
||||
from flask import request, url_for, redirect
|
||||
|
||||
|
||||
|
@ -37,6 +37,9 @@
|
||||
#
|
||||
# Inspired by http://flask.pocoo.org/snippets/35/
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
|
||||
|
||||
class ReverseProxied(object):
|
||||
"""Wrap the application in this middleware and configure the
|
||||
front-end server to add these headers, to let you quietly bind
|
||||
|
269
cps/server.py
@ -17,149 +17,190 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from socket import error as SocketError
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import sys
|
||||
import os
|
||||
import errno
|
||||
import signal
|
||||
import web
|
||||
import socket
|
||||
|
||||
try:
|
||||
from gevent.pywsgi import WSGIServer
|
||||
from gevent.pool import Pool
|
||||
from gevent import __version__ as geventVersion
|
||||
gevent_present = True
|
||||
from gevent import __version__ as _version
|
||||
VERSION = {'Gevent': 'v' + _version}
|
||||
_GEVENT = True
|
||||
except ImportError:
|
||||
from tornado.wsgi import WSGIContainer
|
||||
from tornado.httpserver import HTTPServer
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado import version as tornadoVersion
|
||||
gevent_present = False
|
||||
from tornado import version as _version
|
||||
VERSION = {'Tornado': 'v' + _version}
|
||||
_GEVENT = False
|
||||
|
||||
from . import logger, global_WorkerThread
|
||||
|
||||
|
||||
log = logger.create()
|
||||
|
||||
class server:
|
||||
|
||||
wsgiserver = None
|
||||
restart= False
|
||||
class WebServer:
|
||||
|
||||
def __init__(self):
|
||||
signal.signal(signal.SIGINT, self.killServer)
|
||||
signal.signal(signal.SIGTERM, self.killServer)
|
||||
signal.signal(signal.SIGINT, self._killServer)
|
||||
signal.signal(signal.SIGTERM, self._killServer)
|
||||
|
||||
def start_gevent(self):
|
||||
self.wsgiserver = None
|
||||
self.access_logger = None
|
||||
self.restart = False
|
||||
self.app = None
|
||||
self.listen_address = None
|
||||
self.listen_port = None
|
||||
self.IPV6 = False
|
||||
self.unix_socket_file = None
|
||||
self.ssl_args = None
|
||||
|
||||
def init_app(self, application, config):
|
||||
self.app = application
|
||||
self.listen_address = config.get_config_ipaddress()
|
||||
self.IPV6 = config.get_ipaddress_type()
|
||||
self.listen_port = config.config_port
|
||||
|
||||
if config.config_access_log:
|
||||
log_name = "gevent.access" if _GEVENT else "tornado.access"
|
||||
formatter = logger.ACCESS_FORMATTER_GEVENT if _GEVENT else logger.ACCESS_FORMATTER_TORNADO
|
||||
self.access_logger = logger.create_access_log(config.config_access_logfile, log_name, formatter)
|
||||
else:
|
||||
if not _GEVENT:
|
||||
logger.get('tornado.access').disabled = True
|
||||
|
||||
certfile_path = config.get_config_certfile()
|
||||
keyfile_path = config.get_config_keyfile()
|
||||
if certfile_path and keyfile_path:
|
||||
if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
|
||||
self.ssl_args = {"certfile": certfile_path,
|
||||
"keyfile": keyfile_path}
|
||||
else:
|
||||
log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.')
|
||||
log.warning('Cert path: %s', certfile_path)
|
||||
log.warning('Key path: %s', keyfile_path)
|
||||
|
||||
def _make_gevent_unix_socket(self, socket_file):
|
||||
# the socket file must not exist prior to bind()
|
||||
if os.path.exists(socket_file):
|
||||
# avoid nuking regular files and symbolic links (could be a mistype or security issue)
|
||||
if os.path.isfile(socket_file) or os.path.islink(socket_file):
|
||||
raise OSError(errno.EEXIST, os.strerror(errno.EEXIST), socket_file)
|
||||
os.remove(socket_file)
|
||||
|
||||
unix_sock = WSGIServer.get_listener(socket_file, family=socket.AF_UNIX)
|
||||
self.unix_socket_file = socket_file
|
||||
|
||||
# ensure current user and group have r/w permissions, no permissions for other users
|
||||
# this way the socket can be shared in a semi-secure manner
|
||||
# between the user running calibre-web and the user running the fronting webserver
|
||||
os.chmod(socket_file, 0o660)
|
||||
|
||||
return unix_sock
|
||||
|
||||
def _make_gevent_socket(self):
|
||||
if os.name != 'nt':
|
||||
unix_socket_file = os.environ.get("CALIBRE_UNIX_SOCKET")
|
||||
if unix_socket_file:
|
||||
output = "socket:" + unix_socket_file + ":" + str(self.listen_port)
|
||||
return self._make_gevent_unix_socket(unix_socket_file), output
|
||||
|
||||
if self.listen_address:
|
||||
return (self.listen_address, self.listen_port), self._get_readable_listen_address()
|
||||
|
||||
if os.name == 'nt':
|
||||
self.listen_address = '0.0.0.0'
|
||||
return (self.listen_address, self.listen_port), self._get_readable_listen_address()
|
||||
|
||||
address = ('', self.listen_port)
|
||||
try:
|
||||
ssl_args = dict()
|
||||
certfile_path = web.ub.config.get_config_certfile()
|
||||
keyfile_path = web.ub.config.get_config_keyfile()
|
||||
if certfile_path and keyfile_path:
|
||||
if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
|
||||
ssl_args = {"certfile": certfile_path,
|
||||
"keyfile": keyfile_path}
|
||||
else:
|
||||
web.app.logger.info('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl. Cert path: %s | Key path: %s' % (certfile_path, keyfile_path))
|
||||
if os.name == 'nt':
|
||||
self.wsgiserver= WSGIServer(('0.0.0.0', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
|
||||
else:
|
||||
self.wsgiserver = WSGIServer(('', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
|
||||
web.py3_gevent_link = self.wsgiserver
|
||||
sock = WSGIServer.get_listener(address, family=socket.AF_INET6)
|
||||
output = self._get_readable_listen_address(True)
|
||||
except socket.error as ex:
|
||||
log.error('%s', ex)
|
||||
log.warning('Unable to listen on "", trying on IPv4 only...')
|
||||
output = self._get_readable_listen_address(False)
|
||||
sock = WSGIServer.get_listener(address, family=socket.AF_INET)
|
||||
return sock, output
|
||||
|
||||
def _start_gevent(self):
|
||||
ssl_args = self.ssl_args or {}
|
||||
|
||||
try:
|
||||
sock, output = self._make_gevent_socket()
|
||||
log.info('Starting Gevent server on %s', output)
|
||||
self.wsgiserver = WSGIServer(sock, self.app, log=self.access_logger, spawn=Pool(), **ssl_args)
|
||||
self.wsgiserver.serve_forever()
|
||||
finally:
|
||||
if self.unix_socket_file:
|
||||
os.remove(self.unix_socket_file)
|
||||
self.unix_socket_file = None
|
||||
|
||||
except SocketError:
|
||||
try:
|
||||
web.app.logger.info('Unable to listen on \'\', trying on IPv4 only...')
|
||||
self.wsgiserver = WSGIServer(('0.0.0.0', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
|
||||
web.py3_gevent_link = self.wsgiserver
|
||||
self.wsgiserver.serve_forever()
|
||||
except (OSError, SocketError) as e:
|
||||
web.app.logger.info("Error starting server: %s" % e.strerror)
|
||||
print("Error starting server: %s" % e.strerror)
|
||||
web.helper.global_WorkerThread.stop()
|
||||
sys.exit(1)
|
||||
except Exception:
|
||||
web.app.logger.info("Unknown error while starting gevent")
|
||||
def _start_tornado(self):
|
||||
log.info('Starting Tornado server on %s', self._get_readable_listen_address())
|
||||
|
||||
def startServer(self):
|
||||
if gevent_present:
|
||||
web.app.logger.info('Starting Gevent server')
|
||||
# leave subprocess out to allow forking for fetchers and processors
|
||||
self.start_gevent()
|
||||
# Max Buffersize set to 200MB )
|
||||
http_server = HTTPServer(WSGIContainer(self.app),
|
||||
max_buffer_size = 209700000,
|
||||
ssl_options=self.ssl_args)
|
||||
http_server.listen(self.listen_port, self.listen_address)
|
||||
self.wsgiserver=IOLoop.instance()
|
||||
self.wsgiserver.start()
|
||||
# wait for stop signal
|
||||
self.wsgiserver.close(True)
|
||||
|
||||
def _get_readable_listen_address(self, ipV6=False):
|
||||
if self.listen_address == "":
|
||||
listen_string = '""'
|
||||
else:
|
||||
try:
|
||||
ssl = None
|
||||
web.app.logger.info('Starting Tornado server')
|
||||
certfile_path = web.ub.config.get_config_certfile()
|
||||
keyfile_path = web.ub.config.get_config_keyfile()
|
||||
if certfile_path and keyfile_path:
|
||||
if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
|
||||
ssl = {"certfile": certfile_path,
|
||||
"keyfile": keyfile_path}
|
||||
else:
|
||||
web.app.logger.info('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl. Cert path: %s | Key path: %s' % (certfile_path, keyfile_path))
|
||||
ipV6 = self.IPV6
|
||||
listen_string = self.listen_address
|
||||
if ipV6:
|
||||
adress = "[" + listen_string + "]"
|
||||
else:
|
||||
adress = listen_string
|
||||
return adress + ":" + str(self.listen_port)
|
||||
|
||||
# Max Buffersize set to 200MB
|
||||
http_server = HTTPServer(WSGIContainer(web.app),
|
||||
max_buffer_size = 209700000,
|
||||
ssl_options=ssl)
|
||||
http_server.listen(web.ub.config.config_port)
|
||||
self.wsgiserver=IOLoop.instance()
|
||||
web.py3_gevent_link = self.wsgiserver
|
||||
self.wsgiserver.start()
|
||||
# wait for stop signal
|
||||
self.wsgiserver.close(True)
|
||||
except SocketError as e:
|
||||
web.app.logger.info("Error starting server: %s" % e.strerror)
|
||||
print("Error starting server: %s" % e.strerror)
|
||||
web.helper.global_WorkerThread.stop()
|
||||
sys.exit(1)
|
||||
|
||||
# ToDo: Somehow caused by circular import under python3 refactor
|
||||
if sys.version_info > (3, 0):
|
||||
self.restart = web.py3_restart_Typ
|
||||
if self.restart == True:
|
||||
web.app.logger.info("Performing restart of Calibre-Web")
|
||||
web.helper.global_WorkerThread.stop()
|
||||
if os.name == 'nt':
|
||||
arguments = ["\"" + sys.executable + "\""]
|
||||
for e in sys.argv:
|
||||
arguments.append("\"" + e + "\"")
|
||||
os.execv(sys.executable, arguments)
|
||||
def start(self):
|
||||
try:
|
||||
if _GEVENT:
|
||||
# leave subprocess out to allow forking for fetchers and processors
|
||||
self._start_gevent()
|
||||
else:
|
||||
os.execl(sys.executable, sys.executable, *sys.argv)
|
||||
else:
|
||||
web.app.logger.info("Performing shutdown of Calibre-Web")
|
||||
web.helper.global_WorkerThread.stop()
|
||||
sys.exit(0)
|
||||
self._start_tornado()
|
||||
except Exception as ex:
|
||||
log.error("Error starting server: %s", ex)
|
||||
print("Error starting server: %s" % ex)
|
||||
return False
|
||||
finally:
|
||||
self.wsgiserver = None
|
||||
global_WorkerThread.stop()
|
||||
|
||||
def setRestartTyp(self,starttyp):
|
||||
self.restart = starttyp
|
||||
# ToDo: Somehow caused by circular import under python3 refactor
|
||||
web.py3_restart_Typ = starttyp
|
||||
if not self.restart:
|
||||
log.info("Performing shutdown of Calibre-Web")
|
||||
return True
|
||||
|
||||
def killServer(self, signum, frame):
|
||||
self.stopServer()
|
||||
log.info("Performing restart of Calibre-Web")
|
||||
arguments = list(sys.argv)
|
||||
arguments.insert(0, sys.executable)
|
||||
if os.name == 'nt':
|
||||
arguments = ["\"%s\"" % a for a in arguments]
|
||||
os.execv(sys.executable, arguments)
|
||||
return True
|
||||
|
||||
def stopServer(self):
|
||||
# ToDo: Somehow caused by circular import under python3 refactor
|
||||
if sys.version_info > (3, 0):
|
||||
if not self.wsgiserver:
|
||||
# if gevent_present:
|
||||
self.wsgiserver = web.py3_gevent_link
|
||||
#else:
|
||||
# self.wsgiserver = IOLoop.instance()
|
||||
def _killServer(self, signum, frame):
|
||||
self.stop()
|
||||
|
||||
def stop(self, restart=False):
|
||||
log.info("webserver stop (restart=%s)", restart)
|
||||
self.restart = restart
|
||||
if self.wsgiserver:
|
||||
if gevent_present:
|
||||
if _GEVENT:
|
||||
self.wsgiserver.close()
|
||||
else:
|
||||
self.wsgiserver.add_callback(self.wsgiserver.stop)
|
||||
|
||||
@staticmethod
|
||||
def getNameVersion():
|
||||
if gevent_present:
|
||||
return {'Gevent':'v'+geventVersion}
|
||||
else:
|
||||
return {'Tornado':'v'+tornadoVersion}
|
||||
|
||||
|
||||
# Start Instance of Server
|
||||
Server=server()
|
||||
|
36
cps/services/__init__.py
Normal file
@ -0,0 +1,36 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2019 pwr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
|
||||
from .. import logger
|
||||
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
||||
try: from . import goodreads
|
||||
except ImportError as err:
|
||||
log.warning("goodreads: %s", err)
|
||||
goodreads = None
|
||||
|
||||
|
||||
try: from . import simpleldap as ldap
|
||||
except ImportError as err:
|
||||
log.warning("simpleldap: %s", err)
|
||||
ldap = None
|
106
cps/services/goodreads.py
Normal file
@ -0,0 +1,106 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, pwr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import time
|
||||
from functools import reduce
|
||||
|
||||
from goodreads.client import GoodreadsClient
|
||||
|
||||
try: import Levenshtein
|
||||
except ImportError: Levenshtein = False
|
||||
|
||||
from .. import logger
|
||||
|
||||
|
||||
log = logger.create()
|
||||
_client = None # type: GoodreadsClient
|
||||
|
||||
# GoodReads TOS allows for 24h caching of data
|
||||
_CACHE_TIMEOUT = 23 * 60 * 60 # 23 hours (in seconds)
|
||||
_AUTHORS_CACHE = {}
|
||||
|
||||
|
||||
def connect(key=None, secret=None, enabled=True):
|
||||
global _client
|
||||
|
||||
if not enabled or not key or not secret:
|
||||
_client = None
|
||||
return
|
||||
|
||||
if _client:
|
||||
# make sure the configuration has not changed since last we used the client
|
||||
if _client.client_key != key or _client.client_secret != secret:
|
||||
_client = None
|
||||
|
||||
if not _client:
|
||||
_client = GoodreadsClient(key, secret)
|
||||
|
||||
|
||||
def get_author_info(author_name):
|
||||
now = time.time()
|
||||
author_info = _AUTHORS_CACHE.get(author_name, None)
|
||||
if author_info:
|
||||
if now < author_info._timestamp + _CACHE_TIMEOUT:
|
||||
return author_info
|
||||
# clear expired entries
|
||||
del _AUTHORS_CACHE[author_name]
|
||||
|
||||
if not _client:
|
||||
log.warning("failed to get a Goodreads client")
|
||||
return
|
||||
|
||||
try:
|
||||
author_info = _client.find_author(author_name=author_name)
|
||||
except Exception as ex:
|
||||
# Skip goodreads, if site is down/inaccessible
|
||||
log.warning('Goodreads website is down/inaccessible? %s', ex)
|
||||
return
|
||||
|
||||
if author_info:
|
||||
author_info._timestamp = now
|
||||
_AUTHORS_CACHE[author_name] = author_info
|
||||
return author_info
|
||||
|
||||
|
||||
def get_other_books(author_info, library_books=None):
|
||||
# Get all identifiers (ISBN, Goodreads, etc) and filter author's books by that list so we show fewer duplicates
|
||||
# Note: Not all images will be shown, even though they're available on Goodreads.com.
|
||||
# See https://www.goodreads.com/topic/show/18213769-goodreads-book-images
|
||||
|
||||
if not author_info:
|
||||
return
|
||||
|
||||
identifiers = []
|
||||
library_titles = []
|
||||
if library_books:
|
||||
identifiers = list(reduce(lambda acc, book: acc + [i.val for i in book.identifiers if i.val], library_books, []))
|
||||
library_titles = [book.title for book in library_books]
|
||||
|
||||
for book in author_info.books:
|
||||
if book.isbn in identifiers:
|
||||
continue
|
||||
if book.gid["#text"] in identifiers:
|
||||
continue
|
||||
|
||||
if Levenshtein and library_titles:
|
||||
goodreads_title = book._book_dict['title_without_series']
|
||||
if any(Levenshtein.ratio(goodreads_title, title) > 0.7 for title in library_titles):
|
||||
continue
|
||||
|
||||
yield book
|
82
cps/services/simpleldap.py
Normal file
@ -0,0 +1,82 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, pwr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
import base64
|
||||
|
||||
from flask_simpleldap import LDAP, LDAPException
|
||||
|
||||
from .. import constants, logger
|
||||
|
||||
|
||||
log = logger.create()
|
||||
_ldap = LDAP()
|
||||
|
||||
|
||||
def init_app(app, config):
|
||||
global _ldap
|
||||
|
||||
if config.config_login_type != constants.LOGIN_LDAP:
|
||||
_ldap = None
|
||||
return
|
||||
|
||||
app.config['LDAP_HOST'] = config.config_ldap_provider_url
|
||||
app.config['LDAP_PORT'] = config.config_ldap_port
|
||||
app.config['LDAP_SCHEMA'] = config.config_ldap_schema
|
||||
app.config['LDAP_USERNAME'] = config.config_ldap_user_object.replace('%s', config.config_ldap_serv_username)\
|
||||
+ ',' + config.config_ldap_dn
|
||||
app.config['LDAP_PASSWORD'] = base64.b64decode(config.config_ldap_serv_password)
|
||||
app.config['LDAP_REQUIRE_CERT'] = bool(config.config_ldap_require_cert)
|
||||
if config.config_ldap_require_cert:
|
||||
app.config['LDAP_CERT_PATH'] = config.config_ldap_cert_path
|
||||
app.config['LDAP_BASE_DN'] = config.config_ldap_dn
|
||||
app.config['LDAP_USER_OBJECT_FILTER'] = config.config_ldap_user_object
|
||||
app.config['LDAP_USE_SSL'] = bool(config.config_ldap_use_ssl)
|
||||
app.config['LDAP_USE_TLS'] = bool(config.config_ldap_use_tls)
|
||||
app.config['LDAP_OPENLDAP'] = bool(config.config_ldap_openldap)
|
||||
|
||||
# app.config['LDAP_BASE_DN'] = 'ou=users,dc=yunohost,dc=org'
|
||||
# app.config['LDAP_USER_OBJECT_FILTER'] = '(uid=%s)'
|
||||
_ldap.init_app(app)
|
||||
|
||||
|
||||
|
||||
def basic_auth_required(func):
|
||||
return _ldap.basic_auth_required(func)
|
||||
|
||||
|
||||
def bind_user(username, password):
|
||||
# ulf= _ldap.get_object_details('admin')
|
||||
'''Attempts a LDAP login.
|
||||
|
||||
:returns: True if login succeeded, False if login failed, None if server unavailable.
|
||||
'''
|
||||
try:
|
||||
result = _ldap.bind_user(username, password)
|
||||
log.debug("LDAP login '%s': %r", username, result)
|
||||
return result is not None
|
||||
except LDAPException as ex:
|
||||
if ex.message == 'Invalid credentials':
|
||||
log.info("LDAP login '%s' failed: %s", username, ex)
|
||||
return False
|
||||
if ex.message == "Can't contact LDAP server":
|
||||
log.warning('LDAP Server down: %s', ex)
|
||||
return None
|
||||
else:
|
||||
log.warning('LDAP Server error: %s', ex.message)
|
||||
return None
|
331
cps/shelf.py
Normal file
@ -0,0 +1,331 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
|
||||
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
|
||||
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
|
||||
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
|
||||
# apetresc, nanu-c, mutschler
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division, print_function, unicode_literals
|
||||
|
||||
from flask import Blueprint, request, flash, redirect, url_for
|
||||
from flask_babel import gettext as _
|
||||
from flask_login import login_required, current_user
|
||||
from sqlalchemy.sql.expression import func, or_, and_
|
||||
|
||||
from . import logger, ub, searched_ids, db
|
||||
from .web import render_title_template
|
||||
|
||||
|
||||
shelf = Blueprint('shelf', __name__)
|
||||
log = logger.create()
|
||||
|
||||
|
||||
@shelf.route("/shelf/add/<int:shelf_id>/<int:book_id>")
|
||||
@login_required
|
||||
def add_to_shelf(shelf_id, book_id):
|
||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
|
||||
if shelf is None:
|
||||
log.error("Invalid shelf specified: %s", shelf_id)
|
||||
if not request.is_xhr:
|
||||
flash(_(u"Invalid shelf specified"), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
return "Invalid shelf specified", 400
|
||||
|
||||
if not shelf.is_public and not shelf.user_id == int(current_user.id):
|
||||
log.error("User %s not allowed to add a book to %s", current_user, shelf)
|
||||
if not request.is_xhr:
|
||||
flash(_(u"Sorry you are not allowed to add a book to the the shelf: %(shelfname)s", shelfname=shelf.name),
|
||||
category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403
|
||||
|
||||
if shelf.is_public and not current_user.role_edit_shelfs():
|
||||
log.info("User %s not allowed to edit public shelves", current_user)
|
||||
if not request.is_xhr:
|
||||
flash(_(u"You are not allowed to edit public shelves"), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
return "User is not allowed to edit public shelves", 403
|
||||
|
||||
book_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
|
||||
ub.BookShelf.book_id == book_id).first()
|
||||
if book_in_shelf:
|
||||
log.error("Book %s is already part of %s", book_id, shelf)
|
||||
if not request.is_xhr:
|
||||
flash(_(u"Book is already part of the shelf: %(shelfname)s", shelfname=shelf.name), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
return "Book is already part of the shelf: %s" % shelf.name, 400
|
||||
|
||||
maxOrder = ub.session.query(func.max(ub.BookShelf.order)).filter(ub.BookShelf.shelf == shelf_id).first()
|
||||
if maxOrder[0] is None:
|
||||
maxOrder = 0
|
||||
else:
|
||||
maxOrder = maxOrder[0]
|
||||
|
||||
ins = ub.BookShelf(shelf=shelf.id, book_id=book_id, order=maxOrder + 1)
|
||||
ub.session.add(ins)
|
||||
ub.session.commit()
|
||||
if not request.is_xhr:
|
||||
flash(_(u"Book has been added to shelf: %(sname)s", sname=shelf.name), category="success")
|
||||
if "HTTP_REFERER" in request.environ:
|
||||
return redirect(request.environ["HTTP_REFERER"])
|
||||
else:
|
||||
return redirect(url_for('web.index'))
|
||||
return "", 204
|
||||
|
||||
|
||||
@shelf.route("/shelf/massadd/<int:shelf_id>")
|
||||
@login_required
|
||||
def search_to_shelf(shelf_id):
|
||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
|
||||
if shelf is None:
|
||||
log.error("Invalid shelf specified: %s", shelf_id)
|
||||
flash(_(u"Invalid shelf specified"), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
if not shelf.is_public and not shelf.user_id == int(current_user.id):
|
||||
log.error("User %s not allowed to add a book to %s", current_user, shelf)
|
||||
flash(_(u"You are not allowed to add a book to the the shelf: %(name)s", name=shelf.name), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
if shelf.is_public and not current_user.role_edit_shelfs():
|
||||
log.error("User %s not allowed to edit public shelves", current_user)
|
||||
flash(_(u"User is not allowed to edit public shelves"), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
if current_user.id in searched_ids and searched_ids[current_user.id]:
|
||||
books_for_shelf = list()
|
||||
books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).all()
|
||||
if books_in_shelf:
|
||||
book_ids = list()
|
||||
for book_id in books_in_shelf:
|
||||
book_ids.append(book_id.book_id)
|
||||
for searchid in searched_ids[current_user.id]:
|
||||
if searchid not in book_ids:
|
||||
books_for_shelf.append(searchid)
|
||||
else:
|
||||
books_for_shelf = searched_ids[current_user.id]
|
||||
|
||||
if not books_for_shelf:
|
||||
log.error("Books are already part of %s", shelf)
|
||||
flash(_(u"Books are already part of the shelf: %(name)s", name=shelf.name), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
maxOrder = ub.session.query(func.max(ub.BookShelf.order)).filter(ub.BookShelf.shelf == shelf_id).first()
|
||||
if maxOrder[0] is None:
|
||||
maxOrder = 0
|
||||
else:
|
||||
maxOrder = maxOrder[0]
|
||||
|
||||
for book in books_for_shelf:
|
||||
maxOrder = maxOrder + 1
|
||||
ins = ub.BookShelf(shelf=shelf.id, book_id=book, order=maxOrder)
|
||||
ub.session.add(ins)
|
||||
ub.session.commit()
|
||||
flash(_(u"Books have been added to shelf: %(sname)s", sname=shelf.name), category="success")
|
||||
else:
|
||||
flash(_(u"Could not add books to shelf: %(sname)s", sname=shelf.name), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
|
||||
@shelf.route("/shelf/remove/<int:shelf_id>/<int:book_id>")
|
||||
@login_required
|
||||
def remove_from_shelf(shelf_id, book_id):
|
||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
|
||||
if shelf is None:
|
||||
log.error("Invalid shelf specified: %s", shelf_id)
|
||||
if not request.is_xhr:
|
||||
return redirect(url_for('web.index'))
|
||||
return "Invalid shelf specified", 400
|
||||
|
||||
# if shelf is public and use is allowed to edit shelfs, or if shelf is private and user is owner
|
||||
# allow editing shelfs
|
||||
# result shelf public user allowed user owner
|
||||
# false 1 0 x
|
||||
# true 1 1 x
|
||||
# true 0 x 1
|
||||
# false 0 x 0
|
||||
|
||||
if (not shelf.is_public and shelf.user_id == int(current_user.id)) \
|
||||
or (shelf.is_public and current_user.role_edit_shelfs()):
|
||||
book_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
|
||||
ub.BookShelf.book_id == book_id).first()
|
||||
|
||||
if book_shelf is None:
|
||||
log.error("Book %s already removed from %s", book_id, shelf)
|
||||
if not request.is_xhr:
|
||||
return redirect(url_for('web.index'))
|
||||
return "Book already removed from shelf", 410
|
||||
|
||||
ub.session.delete(book_shelf)
|
||||
ub.session.commit()
|
||||
|
||||
if not request.is_xhr:
|
||||
flash(_(u"Book has been removed from shelf: %(sname)s", sname=shelf.name), category="success")
|
||||
return redirect(request.environ["HTTP_REFERER"])
|
||||
return "", 204
|
||||
else:
|
||||
log.error("User %s not allowed to remove a book from %s", current_user, shelf)
|
||||
if not request.is_xhr:
|
||||
flash(_(u"Sorry you are not allowed to remove a book from this shelf: %(sname)s", sname=shelf.name),
|
||||
category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
return "Sorry you are not allowed to remove a book from this shelf: %s" % shelf.name, 403
|
||||
|
||||
|
||||
|
||||
@shelf.route("/shelf/create", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def create_shelf():
|
||||
shelf = ub.Shelf()
|
||||
if request.method == "POST":
|
||||
to_save = request.form.to_dict()
|
||||
if "is_public" in to_save:
|
||||
shelf.is_public = 1
|
||||
shelf.name = to_save["title"]
|
||||
shelf.user_id = int(current_user.id)
|
||||
existing_shelf = ub.session.query(ub.Shelf).filter(
|
||||
or_((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1),
|
||||
(ub.Shelf.name == to_save["title"]) & (ub.Shelf.user_id == int(current_user.id)))).first()
|
||||
if existing_shelf:
|
||||
flash(_(u"A shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
|
||||
else:
|
||||
try:
|
||||
ub.session.add(shelf)
|
||||
ub.session.commit()
|
||||
flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
|
||||
except Exception:
|
||||
flash(_(u"There was an error"), category="error")
|
||||
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"create a shelf"), page="shelfcreate")
|
||||
else:
|
||||
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"create a shelf"), page="shelfcreate")
|
||||
|
||||
|
||||
@shelf.route("/shelf/edit/<int:shelf_id>", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def edit_shelf(shelf_id):
|
||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
|
||||
if request.method == "POST":
|
||||
to_save = request.form.to_dict()
|
||||
existing_shelf = ub.session.query(ub.Shelf).filter(
|
||||
or_((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1),
|
||||
(ub.Shelf.name == to_save["title"]) & (ub.Shelf.user_id == int(current_user.id)))).filter(
|
||||
ub.Shelf.id != shelf_id).first()
|
||||
if existing_shelf:
|
||||
flash(_(u"A shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
|
||||
else:
|
||||
shelf.name = to_save["title"]
|
||||
if "is_public" in to_save:
|
||||
shelf.is_public = 1
|
||||
else:
|
||||
shelf.is_public = 0
|
||||
try:
|
||||
ub.session.commit()
|
||||
flash(_(u"Shelf %(title)s changed", title=to_save["title"]), category="success")
|
||||
except Exception:
|
||||
flash(_(u"There was an error"), category="error")
|
||||
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
|
||||
else:
|
||||
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
|
||||
|
||||
|
||||
@shelf.route("/shelf/delete/<int:shelf_id>")
|
||||
@login_required
|
||||
def delete_shelf(shelf_id):
|
||||
cur_shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
|
||||
deleted = None
|
||||
if current_user.role_admin():
|
||||
deleted = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).delete()
|
||||
else:
|
||||
if (not cur_shelf.is_public and cur_shelf.user_id == int(current_user.id)) \
|
||||
or (cur_shelf.is_public and current_user.role_edit_shelfs()):
|
||||
deleted = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
|
||||
ub.Shelf.id == shelf_id),
|
||||
and_(ub.Shelf.is_public == 1,
|
||||
ub.Shelf.id == shelf_id))).delete()
|
||||
|
||||
if deleted:
|
||||
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
|
||||
ub.session.commit()
|
||||
log.info("successfully deleted %s", cur_shelf)
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
# @shelf.route("/shelfdown/<int:shelf_id>")
|
||||
@shelf.route("/shelf/<int:shelf_id>", defaults={'shelf_type': 1})
|
||||
@shelf.route("/shelf/<int:shelf_id>/<int:shelf_type>")
|
||||
@login_required
|
||||
def show_shelf(shelf_type, shelf_id):
|
||||
if current_user.is_anonymous:
|
||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
|
||||
else:
|
||||
shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
|
||||
ub.Shelf.id == shelf_id),
|
||||
and_(ub.Shelf.is_public == 1,
|
||||
ub.Shelf.id == shelf_id))).first()
|
||||
result = list()
|
||||
# user is allowed to access shelf
|
||||
if shelf:
|
||||
page = "shelf.html" if shelf_type == 1 else 'shelfdown.html'
|
||||
|
||||
books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).order_by(
|
||||
ub.BookShelf.order.asc()).all()
|
||||
for book in books_in_shelf:
|
||||
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
|
||||
if cur_book:
|
||||
result.append(cur_book)
|
||||
else:
|
||||
log.info('Not existing book %s in %s deleted', book.book_id, shelf)
|
||||
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book.book_id).delete()
|
||||
ub.session.commit()
|
||||
return render_title_template(page, entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name),
|
||||
shelf=shelf, page="shelf")
|
||||
else:
|
||||
flash(_(u"Error opening shelf. Shelf does not exist or is not accessible"), category="error")
|
||||
return redirect(url_for("web.index"))
|
||||
|
||||
|
||||
|
||||
@shelf.route("/shelf/order/<int:shelf_id>", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def order_shelf(shelf_id):
|
||||
if request.method == "POST":
|
||||
to_save = request.form.to_dict()
|
||||
books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).order_by(
|
||||
ub.BookShelf.order.asc()).all()
|
||||
counter = 0
|
||||
for book in books_in_shelf:
|
||||
setattr(book, 'order', to_save[str(book.book_id)])
|
||||
counter += 1
|
||||
ub.session.commit()
|
||||
if current_user.is_anonymous:
|
||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
|
||||
else:
|
||||
shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
|
||||
ub.Shelf.id == shelf_id),
|
||||
and_(ub.Shelf.is_public == 1,
|
||||
ub.Shelf.id == shelf_id))).first()
|
||||
result = list()
|
||||
if shelf:
|
||||
books_in_shelf2 = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id) \
|
||||
.order_by(ub.BookShelf.order.asc()).all()
|
||||
for book in books_in_shelf2:
|
||||
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
|
||||
result.append(cur_book)
|
||||
return render_title_template('shelf_order.html', entries=result,
|
||||
title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name),
|
||||
shelf=shelf, page="shelforder")
|
BIN
cps/static/css/images/black-10.png
Normal file
After Width: | Height: | Size: 88 B |
BIN
cps/static/css/images/black-25.png
Normal file
After Width: | Height: | Size: 88 B |
BIN
cps/static/css/images/black-33.png
Normal file
After Width: | Height: | Size: 88 B |
6
cps/static/css/images/icomoon/credits.txt
Normal file
@ -0,0 +1,6 @@
|
||||
SVG icons via Icomoon
|
||||
https://icomoon.io/app
|
||||
|
||||
Icons used from the following sets:
|
||||
* Entypo - Creative Commons BY-SA 3.0 http://creativecommons.org/licenses/by-sa/3.0/us/
|
||||
* IcoMoon - Free (GPL) http://www.gnu.org/licenses/gpl.html
|
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/arrow.png
Normal file
After Width: | Height: | Size: 160 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/cart.png
Normal file
After Width: | Height: | Size: 230 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/first.png
Normal file
After Width: | Height: | Size: 172 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/last.png
Normal file
After Width: | Height: | Size: 167 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/list.png
Normal file
After Width: | Height: | Size: 117 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/list2.png
Normal file
After Width: | Height: | Size: 98 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/loop.png
Normal file
After Width: | Height: | Size: 190 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/music.png
Normal file
After Width: | Height: | Size: 191 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/pause.png
Normal file
After Width: | Height: | Size: 152 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/play.png
Normal file
After Width: | Height: | Size: 162 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/shuffle.png
Normal file
After Width: | Height: | Size: 280 B |
BIN
cps/static/css/images/icomoon/entypo-25px-000000/PNG/volume.png
Normal file
After Width: | Height: | Size: 169 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M20.002 7.75h-13.5v-2.75l-5.25 4.5 5.25 4.5v-2.75h12.5v5.25h-16.25v3.5h17.25c1.38 0 2.5-1.12 2.5-2.5v-7.25c0-1.381-1.121-2.5-2.5-2.5z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 491 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M5 21.25c0 1.38 1.12 2.5 2.5 2.5 1.381 0 2.5-1.12 2.5-2.5s-1.119-2.5-2.5-2.5c-1.38 0-2.5 1.12-2.5 2.5zM17.5 21.25c0 1.38 1.12 2.5 2.5 2.5 1.381 0 2.5-1.12 2.5-2.5s-1.119-2.5-2.5-2.5c-1.38 0-2.5 1.12-2.5 2.5zM9.434 15.34l13.836-3.952c0.264-0.076 0.48-0.363 0.48-0.638v-6.875h-17.625v-2.125c0-0.275-0.224-0.5-0.5-0.5h-3.875c-0.275 0-0.5 0.225-0.5 0.5v2h2.429l2.46 11.321 0.238 1.179v1.875c0 0.275 0.225 0.5 0.499 0.5h16.376c0.274 0 0.499-0.225 0.499-0.5v-1.875h-14.060c-1.436 0-1.466-0.562-0.256-0.91z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 857 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M9.356 12.5c0 0.323 0.327 0.552 0.327 0.552l9.094 5.711c0.674 0.441 1.223 0.116 1.223-0.719v-11.090c0-0.836-0.549-1.161-1.223-0.72l-9.094 5.713c0 0.001-0.327 0.23-0.327 0.552zM5 6.696v11.607c0 1.11 0.765 1.45 1.875 1.45s1.875-0.34 1.875-1.449v-11.609c0-1.111-0.765-1.451-1.875-1.451s-1.875 0.341-1.875 1.451z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 666 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M15.316 11.948l-9.094-5.713c-0.673-0.44-1.223-0.116-1.223 0.721v11.089c0 0.837 0.55 1.16 1.223 0.72l9.094-5.711c0 0 0.329-0.231 0.329-0.552s-0.329-0.554-0.329-0.554zM18.125 5.245c-1.11 0-1.875 0.341-1.875 1.451v11.607c0 1.111 0.765 1.452 1.875 1.452s1.875-0.341 1.875-1.452v-11.607c0-1.11-0.765-1.451-1.875-1.451z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 671 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M6.25 16.25h-1.25c-0.69 0-1.25 0.559-1.25 1.25s0.56 1.25 1.25 1.25h1.25c0.69 0 1.25-0.559 1.25-1.25s-0.56-1.25-1.25-1.25zM6.25 11.25h-1.25c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h1.25c0.69 0 1.25-0.56 1.25-1.25s-0.56-1.25-1.25-1.25zM6.25 6.25h-1.25c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h1.25c0.69 0 1.25-0.56 1.25-1.25s-0.56-1.25-1.25-1.25zM11.25 8.75h8.75c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25h-8.75c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25zM20 11.25h-8.75c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h8.75c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25zM20 16.25h-8.75c-0.69 0-1.25 0.559-1.25 1.25s0.56 1.25 1.25 1.25h8.75c0.691 0 1.25-0.559 1.25-1.25s-0.559-1.25-1.25-1.25z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 1.1 KiB |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M20 11.25h-15c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h15c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25zM5 8.75h15c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25h-15c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25zM20 16.25h-15c-0.69 0-1.25 0.559-1.25 1.25s0.56 1.25 1.25 1.25h15c0.691 0 1.25-0.559 1.25-1.25s-0.559-1.25-1.25-1.25z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 703 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M21.25 7.75h-4.75v3.5h3.75v5.25h-15.5v-5.25h5.25v2.75l5-4.5-5-4.5v2.75h-6.25c-1.38 0-2.5 1.119-2.5 2.5v7.25c0 1.38 1.12 2.5 2.5 2.5h17.5c1.381 0 2.5-1.12 2.5-2.5v-7.25c0-1.381-1.119-2.5-2.5-2.5z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 552 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M18.75 1.25h-12.5c-1.375 0-2.5 1.125-2.5 2.5v17.5c0 1.375 1.125 2.5 2.5 2.5h12.5c1.376 0 2.5-1.125 2.5-2.5v-17.5c0-1.375-1.124-2.5-2.5-2.5zM15.995 13.151c-0.296 0.463-0.454 0.265-0.365 0 0.235-0.698 0.226-2.809-1.74-3.118v6.516c0 1.215-0.7 1.977-2.188 2.455-1.445 0.462-3.063-0.019-3.441-1.057-0.377-1.037 0.473-2.3 1.899-2.82 0.796-0.289 1.604-0.286 2.226-0.045v-9.193h1.504c0 1.579 4.226 3.961 2.105 7.261z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 766 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M16.875 3.75c-1.243 0-2.25 0.382-2.25 1.625v14.25c0 1.243 1.007 1.625 2.25 1.625s2.25-0.382 2.25-1.625v-14.25c0-1.243-1.007-1.625-2.25-1.625zM8.125 3.75c-1.243 0-2.25 0.382-2.25 1.625v14.25c0 1.243 1.007 1.625 2.25 1.625s2.25-0.382 2.25-1.625v-14.25c0-1.243-1.007-1.625-2.25-1.625z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 639 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M20.873 11.859l-10.701-6.63c-0.78-0.511-1.418-0.134-1.418 0.838v12.87c0 0.971 0.639 1.348 1.418 0.836l10.702-6.63c0 0 0.38-0.268 0.38-0.643-0.001-0.374-0.381-0.641-0.381-0.641z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 534 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M18.866 8.368h0.884v2.516l5.25-4.5-5.25-4.5v2.984h-0.884c-4.616 0-7.22 3.421-9.518 6.44-2.066 2.715-3.85 5.060-6.732 5.060h-2.616v3.501h2.616c4.618 0 7.22-3.423 9.519-6.441 2.065-2.716 3.849-5.060 6.731-5.060zM6.764 10.573c0.196-0.255 0.395-0.515 0.596-0.779 0.489-0.643 1.005-1.32 1.564-1.995-1.651-1.543-3.644-2.681-6.307-2.681h-2.616v3.5h2.616c1.659 0 2.954 0.779 4.148 1.955zM19.75 16.616h-0.884c-1.759 0-3.107-0.874-4.363-2.173-0.126 0.164-0.251 0.33-0.379 0.498-0.551 0.723-1.145 1.501-1.799 2.27 1.696 1.655 3.751 2.905 6.54 2.905h0.884v3l5.25-4.5-5.25-4.502v2.502z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 930 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M23.709 16.798c0 1.101-0.901 2-2.001 2h-19.5c-1.1 0-1.229-0.461-0.285-1.027l20.070-11.318c0.944-0.566 1.716-0.129 1.716 0.971v9.374z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 490 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/arrow.png
Normal file
After Width: | Height: | Size: 165 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/cart.png
Normal file
After Width: | Height: | Size: 236 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/first.png
Normal file
After Width: | Height: | Size: 179 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/last.png
Normal file
After Width: | Height: | Size: 170 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/list.png
Normal file
After Width: | Height: | Size: 117 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/list2.png
Normal file
After Width: | Height: | Size: 99 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/loop.png
Normal file
After Width: | Height: | Size: 201 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/music.png
Normal file
After Width: | Height: | Size: 195 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/pause.png
Normal file
After Width: | Height: | Size: 154 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/play.png
Normal file
After Width: | Height: | Size: 166 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/shuffle.png
Normal file
After Width: | Height: | Size: 291 B |
BIN
cps/static/css/images/icomoon/entypo-25px-ffffff/PNG/volume.png
Normal file
After Width: | Height: | Size: 165 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M20.002 7.75h-13.5v-2.75l-5.25 4.5 5.25 4.5v-2.75h12.5v5.25h-16.25v3.5h17.25c1.38 0 2.5-1.12 2.5-2.5v-7.25c0-1.381-1.121-2.5-2.5-2.5z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 491 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M5 21.25c0 1.38 1.12 2.5 2.5 2.5 1.381 0 2.5-1.12 2.5-2.5s-1.119-2.5-2.5-2.5c-1.38 0-2.5 1.12-2.5 2.5zM17.5 21.25c0 1.38 1.12 2.5 2.5 2.5 1.381 0 2.5-1.12 2.5-2.5s-1.119-2.5-2.5-2.5c-1.38 0-2.5 1.12-2.5 2.5zM9.434 15.34l13.836-3.952c0.264-0.076 0.48-0.363 0.48-0.638v-6.875h-17.625v-2.125c0-0.275-0.224-0.5-0.5-0.5h-3.875c-0.275 0-0.5 0.225-0.5 0.5v2h2.429l2.46 11.321 0.238 1.179v1.875c0 0.275 0.225 0.5 0.499 0.5h16.376c0.274 0 0.499-0.225 0.499-0.5v-1.875h-14.060c-1.436 0-1.466-0.562-0.256-0.91z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 857 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M9.356 12.5c0 0.323 0.327 0.552 0.327 0.552l9.094 5.711c0.674 0.441 1.223 0.116 1.223-0.719v-11.090c0-0.836-0.549-1.161-1.223-0.72l-9.094 5.713c0 0.001-0.327 0.23-0.327 0.552zM5 6.696v11.607c0 1.11 0.765 1.45 1.875 1.45s1.875-0.34 1.875-1.449v-11.609c0-1.111-0.765-1.451-1.875-1.451s-1.875 0.341-1.875 1.451z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 666 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M15.316 11.948l-9.094-5.713c-0.673-0.44-1.223-0.116-1.223 0.721v11.089c0 0.837 0.55 1.16 1.223 0.72l9.094-5.711c0 0 0.329-0.231 0.329-0.552s-0.329-0.554-0.329-0.554zM18.125 5.245c-1.11 0-1.875 0.341-1.875 1.451v11.607c0 1.111 0.765 1.452 1.875 1.452s1.875-0.341 1.875-1.452v-11.607c0-1.11-0.765-1.451-1.875-1.451z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 671 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M6.25 16.25h-1.25c-0.69 0-1.25 0.559-1.25 1.25s0.56 1.25 1.25 1.25h1.25c0.69 0 1.25-0.559 1.25-1.25s-0.56-1.25-1.25-1.25zM6.25 11.25h-1.25c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h1.25c0.69 0 1.25-0.56 1.25-1.25s-0.56-1.25-1.25-1.25zM6.25 6.25h-1.25c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h1.25c0.69 0 1.25-0.56 1.25-1.25s-0.56-1.25-1.25-1.25zM11.25 8.75h8.75c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25h-8.75c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25zM20 11.25h-8.75c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h8.75c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25zM20 16.25h-8.75c-0.69 0-1.25 0.559-1.25 1.25s0.56 1.25 1.25 1.25h8.75c0.691 0 1.25-0.559 1.25-1.25s-0.559-1.25-1.25-1.25z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 1.1 KiB |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M20 11.25h-15c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25h15c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25zM5 8.75h15c0.691 0 1.25-0.56 1.25-1.25s-0.559-1.25-1.25-1.25h-15c-0.69 0-1.25 0.56-1.25 1.25s0.56 1.25 1.25 1.25zM20 16.25h-15c-0.69 0-1.25 0.559-1.25 1.25s0.56 1.25 1.25 1.25h15c0.691 0 1.25-0.559 1.25-1.25s-0.559-1.25-1.25-1.25z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 703 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M21.25 7.75h-4.75v3.5h3.75v5.25h-15.5v-5.25h5.25v2.75l5-4.5-5-4.5v2.75h-6.25c-1.38 0-2.5 1.119-2.5 2.5v7.25c0 1.38 1.12 2.5 2.5 2.5h17.5c1.381 0 2.5-1.12 2.5-2.5v-7.25c0-1.381-1.119-2.5-2.5-2.5z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 552 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M18.75 1.25h-12.5c-1.375 0-2.5 1.125-2.5 2.5v17.5c0 1.375 1.125 2.5 2.5 2.5h12.5c1.376 0 2.5-1.125 2.5-2.5v-17.5c0-1.375-1.124-2.5-2.5-2.5zM15.995 13.151c-0.296 0.463-0.454 0.265-0.365 0 0.235-0.698 0.226-2.809-1.74-3.118v6.516c0 1.215-0.7 1.977-2.188 2.455-1.445 0.462-3.063-0.019-3.441-1.057-0.377-1.037 0.473-2.3 1.899-2.82 0.796-0.289 1.604-0.286 2.226-0.045v-9.193h1.504c0 1.579 4.226 3.961 2.105 7.261z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 766 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M16.875 3.75c-1.243 0-2.25 0.382-2.25 1.625v14.25c0 1.243 1.007 1.625 2.25 1.625s2.25-0.382 2.25-1.625v-14.25c0-1.243-1.007-1.625-2.25-1.625zM8.125 3.75c-1.243 0-2.25 0.382-2.25 1.625v14.25c0 1.243 1.007 1.625 2.25 1.625s2.25-0.382 2.25-1.625v-14.25c0-1.243-1.007-1.625-2.25-1.625z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 639 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M20.873 11.859l-10.701-6.63c-0.78-0.511-1.418-0.134-1.418 0.838v12.87c0 0.971 0.639 1.348 1.418 0.836l10.702-6.63c0 0 0.38-0.268 0.38-0.643-0.001-0.374-0.381-0.641-0.381-0.641z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 534 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M18.866 8.368h0.884v2.516l5.25-4.5-5.25-4.5v2.984h-0.884c-4.616 0-7.22 3.421-9.518 6.44-2.066 2.715-3.85 5.060-6.732 5.060h-2.616v3.501h2.616c4.618 0 7.22-3.423 9.519-6.441 2.065-2.716 3.849-5.060 6.731-5.060zM6.764 10.573c0.196-0.255 0.395-0.515 0.596-0.779 0.489-0.643 1.005-1.32 1.564-1.995-1.651-1.543-3.644-2.681-6.307-2.681h-2.616v3.5h2.616c1.659 0 2.954 0.779 4.148 1.955zM19.75 16.616h-0.884c-1.759 0-3.107-0.874-4.363-2.173-0.126 0.164-0.251 0.33-0.379 0.498-0.551 0.723-1.145 1.501-1.799 2.27 1.696 1.655 3.751 2.905 6.54 2.905h0.884v3l5.25-4.5-5.25-4.502v2.502z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 930 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M23.709 16.798c0 1.101-0.901 2-2.001 2h-19.5c-1.1 0-1.229-0.461-0.285-1.027l20.070-11.318c0.944-0.566 1.716-0.129 1.716 0.971v9.374z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 490 B |
BIN
cps/static/css/images/icomoon/free-25px-000000/PNG/spinner.png
Normal file
After Width: | Height: | Size: 293 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M25 12.5c-0.031-1.632-0.385-3.26-1.039-4.748-0.652-1.489-1.598-2.838-2.76-3.953-1.161-1.116-2.538-1.998-4.028-2.579-1.489-0.584-3.091-0.863-4.673-0.829-1.582 0.031-3.158 0.375-4.598 1.010-1.442 0.632-2.748 1.55-3.827 2.675-1.080 1.125-1.933 2.459-2.495 3.901-0.564 1.442-0.833 2.991-0.799 4.523 0.031 1.532 0.365 3.055 0.98 4.448 0.612 1.394 1.501 2.657 2.591 3.7 1.089 1.044 2.38 1.868 3.775 2.41 1.394 0.545 2.892 0.803 4.374 0.77 1.482-0.031 2.953-0.355 4.299-0.95 1.346-0.593 2.566-1.452 3.573-2.506 1.008-1.053 1.803-2.301 2.326-3.648 0.318-0.817 0.534-1.67 0.648-2.534 0.030 0.002 0.061 0.003 0.092 0.003 0.863 0 1.563-0.7 1.563-1.563 0-0.044-0.002-0.087-0.006-0.13h0.006zM22.517 16.649c-0.573 1.299-1.403 2.476-2.421 3.447-1.017 0.972-2.222 1.738-3.522 2.241-1.3 0.505-2.693 0.743-4.074 0.71-1.382-0.032-2.749-0.335-4-0.89-1.251-0.553-2.385-1.355-3.32-2.337-0.936-0.981-1.673-2.143-2.156-3.395-0.485-1.252-0.714-2.593-0.68-3.925 0.032-1.332 0.325-2.647 0.861-3.85 0.534-1.204 1.306-2.294 2.252-3.194 0.946-0.9 2.064-1.608 3.268-2.072 1.205-0.465 2.494-0.684 3.775-0.65 1.282 0.032 2.545 0.315 3.7 0.831 1.156 0.514 2.204 1.257 3.067 2.168 0.864 0.91 1.543 1.985 1.987 3.142 0.446 1.157 0.654 2.394 0.621 3.625h0.006c-0.004 0.043-0.006 0.086-0.006 0.13 0 0.806 0.61 1.469 1.394 1.553-0.152 0.85-0.404 1.68-0.751 2.466z" fill="#000000" />
|
||||
</svg>
|
After Width: | Height: | Size: 1.6 KiB |
BIN
cps/static/css/images/icomoon/free-25px-ffffff/PNG/spinner.png
Normal file
After Width: | Height: | Size: 299 B |
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="25" viewBox="0 0 25 25">
|
||||
<g>
|
||||
</g>
|
||||
<path d="M25 12.5c-0.031-1.632-0.385-3.26-1.039-4.748-0.652-1.489-1.598-2.838-2.76-3.953-1.161-1.116-2.538-1.998-4.028-2.579-1.489-0.584-3.091-0.863-4.673-0.829-1.582 0.031-3.158 0.375-4.598 1.010-1.442 0.632-2.748 1.55-3.827 2.675-1.080 1.125-1.933 2.459-2.495 3.901-0.564 1.442-0.833 2.991-0.799 4.523 0.031 1.532 0.365 3.055 0.98 4.448 0.612 1.394 1.501 2.657 2.591 3.7 1.089 1.044 2.38 1.868 3.775 2.41 1.394 0.545 2.892 0.803 4.374 0.77 1.482-0.031 2.953-0.355 4.299-0.95 1.346-0.593 2.566-1.452 3.573-2.506 1.008-1.053 1.803-2.301 2.326-3.648 0.318-0.817 0.534-1.67 0.648-2.534 0.030 0.002 0.061 0.003 0.092 0.003 0.863 0 1.563-0.7 1.563-1.563 0-0.044-0.002-0.087-0.006-0.13h0.006zM22.517 16.649c-0.573 1.299-1.403 2.476-2.421 3.447-1.017 0.972-2.222 1.738-3.522 2.241-1.3 0.505-2.693 0.743-4.074 0.71-1.382-0.032-2.749-0.335-4-0.89-1.251-0.553-2.385-1.355-3.32-2.337-0.936-0.981-1.673-2.143-2.156-3.395-0.485-1.252-0.714-2.593-0.68-3.925 0.032-1.332 0.325-2.647 0.861-3.85 0.534-1.204 1.306-2.294 2.252-3.194 0.946-0.9 2.064-1.608 3.268-2.072 1.205-0.465 2.494-0.684 3.775-0.65 1.282 0.032 2.545 0.315 3.7 0.831 1.156 0.514 2.204 1.257 3.067 2.168 0.864 0.91 1.543 1.985 1.987 3.142 0.446 1.157 0.654 2.394 0.621 3.625h0.006c-0.004 0.043-0.006 0.086-0.006 0.13 0 0.806 0.61 1.469 1.394 1.553-0.152 0.85-0.404 1.68-0.751 2.466z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 1.6 KiB |
2
cps/static/css/images/patterns/credits.txt
Normal file
@ -0,0 +1,2 @@
|
||||
Patterns from subtlepatterns.com.
|
||||
"If you need more, that's where to get 'em."
|
BIN
cps/static/css/images/patterns/pinstriped_suit_vertical.png
Normal file
After Width: | Height: | Size: 11 KiB |
BIN
cps/static/css/images/patterns/pool_table.png
Normal file
After Width: | Height: | Size: 40 KiB |
BIN
cps/static/css/images/patterns/rubber_grip.png
Normal file
After Width: | Height: | Size: 123 B |
BIN
cps/static/css/images/patterns/tasky_pattern.png
Normal file
After Width: | Height: | Size: 104 B |
BIN
cps/static/css/images/patterns/textured_paper.png
Normal file
After Width: | Height: | Size: 127 KiB |
BIN
cps/static/css/images/patterns/tweed.png
Normal file
After Width: | Height: | Size: 21 KiB |
BIN
cps/static/css/images/patterns/wood_pattern.png
Normal file
After Width: | Height: | Size: 101 KiB |