diff --git a/.gitignore b/.gitignore index 614e9936..14da8a03 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,7 @@ vendor/ # calibre-web *.db *.log +cps/cache .idea/ *.bak diff --git a/cps.py b/cps.py index 2db11e2b..e4f9c520 100755 --- a/cps.py +++ b/cps.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) -# Copyright (C) 2012-2019 OzzieIsaacs +# Copyright (C) 2022 OzzieIsaacs # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -17,66 +17,18 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import sys import os +import sys -# Insert local directories into path -sys.path.append(os.path.dirname(os.path.abspath(__file__))) -sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'vendor')) - - -from cps import create_app -from cps import web_server -from cps.opds import opds -from cps.web import web -from cps.jinjia import jinjia -from cps.about import about -from cps.shelf import shelf -from cps.admin import admi -from cps.gdrive import gdrive -from cps.editbooks import EditBook -from cps.remotelogin import remotelogin -from cps.search_metadata import meta -from cps.error_handler import init_errorhandler - -try: - from cps.kobo import kobo, get_kobo_activated - from cps.kobo_auth import kobo_auth - kobo_available = get_kobo_activated() -except (ImportError, AttributeError): # Catch also error for not installed flask-WTF (missing csrf decorator) - kobo_available = False - -try: - from cps.oauth_bb import oauth - oauth_available = True -except ImportError: - oauth_available = False - - -def main(): - app = create_app() - - init_errorhandler() - - app.register_blueprint(web) - app.register_blueprint(opds) - app.register_blueprint(jinjia) - app.register_blueprint(about) - app.register_blueprint(shelf) - app.register_blueprint(admi) - app.register_blueprint(remotelogin) - app.register_blueprint(meta) - app.register_blueprint(gdrive) - app.register_blueprint(EditBook) - if kobo_available: - app.register_blueprint(kobo) - app.register_blueprint(kobo_auth) - if oauth_available: - app.register_blueprint(oauth) - success = web_server.start() - sys.exit(0 if success else 1) +# Add local path to sys.path so we can import cps +path = os.path.dirname(os.path.abspath(__file__)) +sys.path.insert(0, path) +from cps.main import main if __name__ == '__main__': main() + + + diff --git a/cps/__init__.py b/cps/__init__.py index 62e45e72..1ba1f778 100644 --- a/cps/__init__.py +++ b/cps/__init__.py @@ -25,24 +25,21 @@ import sys import os import mimetypes -from babel import Locale as LC -from babel import negotiate_locale -from babel.core import UnknownLocaleError -from flask import Flask, request, g +from flask import Flask from .MyLoginManager import MyLoginManager -from flask_babel import Babel from flask_principal import Principal -from . import config_sql, logger, cache_buster, cli, ub, db +from . import logger +from .cli import CliParameter +from .constants import CONFIG_DIR from .reverseproxy import ReverseProxied from .server import WebServer from .dep_check import dependency_check - -try: - import lxml - lxml_present = True -except ImportError: - lxml_present = False +from .updater import Updater +from .babel import babel +from . import config_sql +from . import cache_buster +from . 
import ub, db try: from flask_wtf.csrf import CSRFProtect @@ -50,6 +47,7 @@ try: except ImportError: wtf_present = False + mimetypes.init() mimetypes.add_type('application/xhtml+xml', '.xhtml') mimetypes.add_type('application/epub+zip', '.epub') @@ -71,6 +69,8 @@ mimetypes.add_type('application/ogg', '.oga') mimetypes.add_type('text/css', '.css') mimetypes.add_type('text/javascript; charset=UTF-8', '.js') +log = logger.create() + app = Flask(__name__) app.config.update( SESSION_COOKIE_HTTPONLY=True, @@ -79,61 +79,72 @@ app.config.update( WTF_CSRF_SSL_STRICT=False ) - lm = MyLoginManager() -lm.login_view = 'web.login' -lm.anonymous_user = ub.Anonymous -lm.session_protection = 'strong' + +config = config_sql._ConfigSQL() + +cli_param = CliParameter() if wtf_present: csrf = CSRFProtect() - csrf.init_app(app) else: csrf = None -ub.init_db(cli.settings_path) -# pylint: disable=no-member -config = config_sql.load_configuration(ub.session) +calibre_db = db.CalibreDB() web_server = WebServer() -babel = Babel() -_BABEL_TRANSLATIONS = set() +updater_thread = Updater() -log = logger.create() - - -from . import services - -db.CalibreDB.update_config(config) -db.CalibreDB.setup_db(config.config_calibre_dir, cli.settings_path) - - -calibre_db = db.CalibreDB() def create_app(): + lm.login_view = 'web.login' + lm.anonymous_user = ub.Anonymous + lm.session_protection = 'strong' + + if csrf: + csrf.init_app(app) + + cli_param.init() + + ub.init_db(cli_param.settings_path, cli_param.user_credentials) + + # pylint: disable=no-member + config_sql.load_configuration(config, ub.session, cli_param) + + db.CalibreDB.update_config(config) + db.CalibreDB.setup_db(config.config_calibre_dir, cli_param.settings_path) + calibre_db.init_db() + + updater_thread.init_updater(config, web_server) + # Perform dry run of updater and exit afterwards + if cli_param.dry_run: + updater_thread.dry_run() + sys.exit(0) + updater_thread.start() + if sys.version_info < (3, 0): log.info( - '*** Python2 is EOL since end of 2019, this version of Calibre-Web is no longer supporting Python2, please update your installation to Python3 ***') + '*** Python2 is EOL since end of 2019, this version of Calibre-Web is no longer supporting Python2, ' + 'please update your installation to Python3 ***') print( - '*** Python2 is EOL since end of 2019, this version of Calibre-Web is no longer supporting Python2, please update your installation to Python3 ***') + '*** Python2 is EOL since end of 2019, this version of Calibre-Web is no longer supporting Python2, ' + 'please update your installation to Python3 ***') web_server.stop(True) sys.exit(5) - if not lxml_present: - log.info('*** "lxml" is needed for calibre-web to run. Please install it using pip: "pip install lxml" ***') - print('*** "lxml" is needed for calibre-web to run. Please install it using pip: "pip install lxml" ***') - web_server.stop(True) - sys.exit(6) if not wtf_present: - log.info('*** "flask-WTF" is needed for calibre-web to run. Please install it using pip: "pip install flask-WTF" ***') - print('*** "flask-WTF" is needed for calibre-web to run. Please install it using pip: "pip install flask-WTF" ***') + log.info('*** "flask-WTF" is needed for calibre-web to run. ' + 'Please install it using pip: "pip install flask-WTF" ***') + print('*** "flask-WTF" is needed for calibre-web to run. 
' + 'Please install it using pip: "pip install flask-WTF" ***') web_server.stop(True) sys.exit(7) for res in dependency_check() + dependency_check(True): - log.info('*** "{}" version does not fit the requirements. Should: {}, Found: {}, please consider installing required version ***' - .format(res['name'], - res['target'], - res['found'])) + log.info('*** "{}" version does not fit the requirements. ' + 'Should: {}, Found: {}, please consider installing required version ***' + .format(res['name'], + res['target'], + res['found'])) app.wsgi_app = ReverseProxied(app.wsgi_app) if os.environ.get('FLASK_DEBUG'): @@ -147,8 +158,8 @@ def create_app(): web_server.init_app(app, config) babel.init_app(app) - _BABEL_TRANSLATIONS.update(str(item) for item in babel.list_translations()) - _BABEL_TRANSLATIONS.add('en') + + from . import services if services.ldap: services.ldap.init_app(app, config) @@ -156,39 +167,12 @@ def create_app(): services.goodreads_support.connect(config.config_goodreads_api_key, config.config_goodreads_api_secret, config.config_use_goodreads) - config.store_calibre_uuid(calibre_db, db.LibraryId) + config.store_calibre_uuid(calibre_db, db.Library_Id) + # Register scheduled tasks + from .schedule import register_scheduled_tasks, register_startup_tasks + register_scheduled_tasks(config.schedule_reconnect) + register_startup_tasks() + return app -@babel.localeselector -def get_locale(): - # if a user is logged in, use the locale from the user settings - user = getattr(g, 'user', None) - if user is not None and hasattr(user, "locale"): - if user.name != 'Guest': # if the account is the guest account bypass the config lang settings - return user.locale - preferred = list() - if request.accept_languages: - for x in request.accept_languages.values(): - try: - preferred.append(str(LC.parse(x.replace('-', '_')))) - except (UnknownLocaleError, ValueError) as e: - log.debug('Could not parse locale "%s": %s', x, e) - - return negotiate_locale(preferred or ['en'], _BABEL_TRANSLATIONS) - - -@babel.timezoneselector -def get_timezone(): - user = getattr(g, 'user', None) - return user.timezone if user else None - - -from .updater import Updater -updater_thread = Updater() - -# Perform dry run of updater and exit afterwards -if cli.dry_run: - updater_thread.dry_run() - sys.exit(0) -updater_thread.start() diff --git a/cps/about.py b/cps/about.py index 8f2bf715..1b68818d 100644 --- a/cps/about.py +++ b/cps/about.py @@ -65,13 +65,13 @@ _VERSIONS = OrderedDict( SQLite=sqlite3.sqlite_version, ) _VERSIONS.update(ret) -_VERSIONS.update(uploader.get_versions(False)) +_VERSIONS.update(uploader.get_versions()) def collect_stats(): - _VERSIONS['ebook converter'] = _(converter.get_calibre_version()) - _VERSIONS['unrar'] = _(converter.get_unrar_version()) - _VERSIONS['kepubify'] = _(converter.get_kepubify_version()) + _VERSIONS['ebook converter'] = converter.get_calibre_version() + _VERSIONS['unrar'] = converter.get_unrar_version() + _VERSIONS['kepubify'] = converter.get_kepubify_version() return _VERSIONS diff --git a/cps/admin.py b/cps/admin.py index 1004ee78..76275922 100644 --- a/cps/admin.py +++ b/cps/admin.py @@ -24,29 +24,31 @@ import os import re import base64 import json -import time import operator -from datetime import datetime, timedelta +from datetime import datetime, timedelta, time from functools import wraps -from babel import Locale -from babel.dates import format_datetime + from flask import Blueprint, flash, redirect, url_for, abort, request, make_response, send_from_directory, g, Response 
from flask_login import login_required, current_user, logout_user, confirm_login from flask_babel import gettext as _ +from flask_babel import get_locale, format_time, format_datetime, format_timedelta from flask import session as flask_session from sqlalchemy import and_ from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.exc import IntegrityError, OperationalError, InvalidRequestError from sqlalchemy.sql.expression import func, or_, text -from . import constants, logger, helper, services, cli -from . import db, calibre_db, ub, web_server, get_locale, config, updater_thread, babel, gdriveutils, kobo_sync_status +from . import constants, logger, helper, services, cli_param +from . import db, calibre_db, ub, web_server, config, updater_thread, gdriveutils, \ + kobo_sync_status, schedule from .helper import check_valid_domain, send_test_mail, reset_password, generate_password_hash, check_email, \ valid_email, check_username from .gdriveutils import is_gdrive_ready, gdrive_support from .render_template import render_title_template, get_sidebar_config -from . import debug_info, _BABEL_TRANSLATIONS +from .services.worker import WorkerThread +from .babel import get_available_translations, get_available_locale, get_user_locale_language +from . import debug_info log = logger.create() @@ -56,7 +58,9 @@ feature_support = { 'goodreads': bool(services.goodreads_support), 'kobo': bool(services.kobo), 'updater': constants.UPDATER_AVAILABLE, - 'gmail': bool(services.gmail) + 'gmail': bool(services.gmail), + 'scheduler': schedule.use_APScheduler, + 'gdrive': gdrive_support } try: @@ -75,7 +79,6 @@ except ImportError as err: oauth_check = {} -feature_support['gdrive'] = gdrive_support admi = Blueprint('admin', __name__) @@ -159,7 +162,7 @@ def shutdown(): # needed for docker applications, as changes on metadata.db from host are not visible to application @admi.route("/reconnect", methods=['GET']) def reconnect(): - if cli.reconnect_enable: + if cli_param.reconnect_enable: calibre_db.reconnect_db(config, ub.app_DB_path) return json.dumps({}) else: @@ -167,6 +170,17 @@ def reconnect(): abort(404) +@admi.route("/ajax/updateThumbnails", methods=['POST']) +@admin_required +@login_required +def update_thumbnails(): + content = config.get_scheduled_task_settings() + if content['schedule_generate_book_covers']: + log.info("Update of Cover cache requested") + helper.update_thumbnail_cache() + return "" + + @admi.route("/admin/view") @login_required @admin_required @@ -185,15 +199,19 @@ def admin(): form_date -= timedelta(hours=int(commit[20:22]), minutes=int(commit[23:])) elif commit[19] == '-': form_date += timedelta(hours=int(commit[20:22]), minutes=int(commit[23:])) - commit = format_datetime(form_date - tz, format='short', locale=get_locale()) + commit = format_datetime(form_date - tz, format='short') else: commit = version['version'] all_user = ub.session.query(ub.User).all() email_settings = config.get_mail_settings() - kobo_support = feature_support['kobo'] and config.config_kobo_sync + schedule_time = format_time(time(hour=config.schedule_start_time), format="short") + t = timedelta(hours=config.schedule_duration // 60, minutes=config.schedule_duration % 60) + schedule_duration = format_timedelta(t, threshold=.99) + return render_title_template("admin.html", allUser=all_user, email=email_settings, config=config, commit=commit, - feature_support=feature_support, kobo_support=kobo_support, + feature_support=feature_support, schedule_time=schedule_time, + schedule_duration=schedule_duration, 
title=_(u"Admin page"), page="admin") @@ -247,7 +265,7 @@ def view_configuration(): restrict_columns = calibre_db.session.query(db.CustomColumns)\ .filter(and_(db.CustomColumns.datatype == 'text', db.CustomColumns.mark_for_delete == 0)).all() languages = calibre_db.speaking_language() - translations = [Locale('en')] + babel.list_translations() + translations = get_available_locale() return render_title_template("config_view_edit.html", conf=config, readColumns=read_column, restrictColumns=restrict_columns, languages=languages, @@ -261,7 +279,7 @@ def view_configuration(): def edit_user_table(): visibility = current_user.view_settings.get('useredit', {}) languages = calibre_db.speaking_language() - translations = babel.list_translations() + [Locale('en')] + translations = get_available_locale() all_user = ub.session.query(ub.User) tags = calibre_db.session.query(db.Tags)\ .join(db.books_tags_link)\ @@ -332,7 +350,7 @@ def list_users(): if user.default_language == "all": user.default = _("All") else: - user.default = Locale.parse(user.default_language).get_language_name(get_locale()) + user.default = get_user_locale_language(user.default_language) table_entries = {'totalNotFiltered': total_count, 'total': filtered_count, "rows": users} js_list = json.dumps(table_entries, cls=db.AlchemyEncoder) @@ -380,7 +398,7 @@ def delete_user(): @login_required @admin_required def table_get_locale(): - locale = babel.list_translations() + [Locale('en')] + locale = get_available_locale() ret = list() current_locale = get_locale() for loc in locale: @@ -481,7 +499,7 @@ def edit_list_user(param): elif param == 'locale': if user.name == "Guest": raise Exception(_("Guest's Locale is determined automatically and can't be set")) - if vals['value'] in _BABEL_TRANSLATIONS: + if vals['value'] in get_available_translations(): user.locale = vals['value'] else: raise Exception(_("No Valid Locale Given")) @@ -522,22 +540,6 @@ def update_table_settings(): return "" -def check_valid_read_column(column): - if column != "0": - if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \ - .filter(and_(db.CustomColumns.datatype == 'bool', db.CustomColumns.mark_for_delete == 0)).all(): - return False - return True - - -def check_valid_restricted_column(column): - if column != "0": - if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \ - .filter(and_(db.CustomColumns.datatype == 'text', db.CustomColumns.mark_for_delete == 0)).all(): - return False - return True - - @admi.route("/admin/viewconfig", methods=["POST"]) @login_required @admin_required @@ -610,6 +612,8 @@ def load_dialogtexts(element_id): texts["main"] = _('Are you sure you want to change shelf sync behavior for the selected user(s)?') elif element_id == "db_submit": texts["main"] = _('Are you sure you want to change Calibre library location?') + elif element_id == "admin_refresh_cover_cache": + texts["main"] = _('Calibre-Web will search for updated Covers and update Cover Thumbnails, this may take a while?') elif element_id == "btnfullsync": texts["main"] = _("Are you sure you want delete Calibre-Web's sync database " "to force a full sync with your Kobo Reader?") @@ -740,43 +744,6 @@ def edit_restriction(res_type, user_id): ub.session_commit("Changed denied columns of user {} to {}".format(usr.name, usr.denied_column_value)) return "" - -def restriction_addition(element, list_func): - elementlist = list_func() - if elementlist == ['']: - elementlist = [] - if not element['add_element'] in 
elementlist: - elementlist += [element['add_element']] - return ','.join(elementlist) - - -def restriction_deletion(element, list_func): - elementlist = list_func() - if element['Element'] in elementlist: - elementlist.remove(element['Element']) - return ','.join(elementlist) - - -def prepare_tags(user, action, tags_name, id_list): - if "tags" in tags_name: - tags = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(id_list)).all() - if not tags: - raise Exception(_("Tag not found")) - new_tags_list = [x.name for x in tags] - else: - tags = calibre_db.session.query(db.cc_classes[config.config_restricted_column])\ - .filter(db.cc_classes[config.config_restricted_column].id.in_(id_list)).all() - new_tags_list = [x.value for x in tags] - saved_tags_list = user.__dict__[tags_name].split(",") if len(user.__dict__[tags_name]) else [] - if action == "remove": - saved_tags_list = [x for x in saved_tags_list if x not in new_tags_list] - elif action == "add": - saved_tags_list.extend(x for x in new_tags_list if x not in saved_tags_list) - else: - raise Exception(_("Invalid Action")) - return ",".join(saved_tags_list) - - @admi.route("/ajax/addrestriction/", methods=['POST']) @login_required @admin_required @@ -945,6 +912,58 @@ def ajax_pathchooser(): return pathchooser() +def check_valid_read_column(column): + if column != "0": + if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \ + .filter(and_(db.CustomColumns.datatype == 'bool', db.CustomColumns.mark_for_delete == 0)).all(): + return False + return True + + +def check_valid_restricted_column(column): + if column != "0": + if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \ + .filter(and_(db.CustomColumns.datatype == 'text', db.CustomColumns.mark_for_delete == 0)).all(): + return False + return True + + +def restriction_addition(element, list_func): + elementlist = list_func() + if elementlist == ['']: + elementlist = [] + if not element['add_element'] in elementlist: + elementlist += [element['add_element']] + return ','.join(elementlist) + + +def restriction_deletion(element, list_func): + elementlist = list_func() + if element['Element'] in elementlist: + elementlist.remove(element['Element']) + return ','.join(elementlist) + + +def prepare_tags(user, action, tags_name, id_list): + if "tags" in tags_name: + tags = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(id_list)).all() + if not tags: + raise Exception(_("Tag not found")) + new_tags_list = [x.name for x in tags] + else: + tags = calibre_db.session.query(db.cc_classes[config.config_restricted_column])\ + .filter(db.cc_classes[config.config_restricted_column].id.in_(id_list)).all() + new_tags_list = [x.value for x in tags] + saved_tags_list = user.__dict__[tags_name].split(",") if len(user.__dict__[tags_name]) else [] + if action == "remove": + saved_tags_list = [x for x in saved_tags_list if x not in new_tags_list] + elif action == "add": + saved_tags_list.extend(x for x in new_tags_list if x not in saved_tags_list) + else: + raise Exception(_("Invalid Action")) + return ",".join(saved_tags_list) + + def pathchooser(): browse_for = "folder" folder_only = request.args.get('folder', False) == "true" @@ -1188,6 +1207,420 @@ def simulatedbchange(): return Response(json.dumps({"change": db_change, "valid": db_valid}), mimetype='application/json') +@admi.route("/admin/user/new", methods=["GET", "POST"]) +@login_required +@admin_required +def new_user(): + content = ub.User() + languages = 
calibre_db.speaking_language() + translations = get_available_locale() + kobo_support = feature_support['kobo'] and config.config_kobo_sync + if request.method == "POST": + to_save = request.form.to_dict() + _handle_new_user(to_save, content, languages, translations, kobo_support) + else: + content.role = config.config_default_role + content.sidebar_view = config.config_default_show + content.locale = config.config_default_locale + content.default_language = config.config_default_language + return render_title_template("user_edit.html", new_user=1, content=content, + config=config, translations=translations, + languages=languages, title=_(u"Add new user"), page="newuser", + kobo_support=kobo_support, registered_oauth=oauth_check) + + +@admi.route("/admin/mailsettings") +@login_required +@admin_required +def edit_mailsettings(): + content = config.get_mail_settings() + return render_title_template("email_edit.html", content=content, title=_(u"Edit E-mail Server Settings"), + page="mailset", feature_support=feature_support) + + +@admi.route("/admin/mailsettings", methods=["POST"]) +@login_required +@admin_required +def update_mailsettings(): + to_save = request.form.to_dict() + _config_int(to_save, "mail_server_type") + if to_save.get("invalidate"): + config.mail_gmail_token = {} + try: + flag_modified(config, "mail_gmail_token") + except AttributeError: + pass + elif to_save.get("gmail"): + try: + config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token) + flash(_(u"Gmail Account Verification Successful"), category="success") + except Exception as ex: + flash(str(ex), category="error") + log.error(ex) + return edit_mailsettings() + + else: + _config_string(to_save, "mail_server") + _config_int(to_save, "mail_port") + _config_int(to_save, "mail_use_ssl") + _config_string(to_save, "mail_login") + _config_string(to_save, "mail_password") + _config_string(to_save, "mail_from") + _config_int(to_save, "mail_size", lambda y: int(y)*1024*1024) + try: + config.save() + except (OperationalError, InvalidRequestError) as e: + ub.session.rollback() + log.error_or_exception("Settings Database error: {}".format(e)) + flash(_(u"Database error: %(error)s.", error=e.orig), category="error") + return edit_mailsettings() + + if to_save.get("test"): + if current_user.email: + result = send_test_mail(current_user.email, current_user.name) + if result is None: + flash(_(u"Test e-mail queued for sending to %(email)s, please check Tasks for result", + email=current_user.email), category="info") + else: + flash(_(u"There was an error sending the Test e-mail: %(res)s", res=result), category="error") + else: + flash(_(u"Please configure your e-mail address first..."), category="error") + else: + flash(_(u"E-mail server settings updated"), category="success") + + return edit_mailsettings() + + +@admi.route("/admin/scheduledtasks") +@login_required +@admin_required +def edit_scheduledtasks(): + content = config.get_scheduled_task_settings() + time_field = list() + duration_field = list() + + for n in range(24): + time_field.append((n , format_time(time(hour=n), format="short",))) + for n in range(5, 65, 5): + t = timedelta(hours=n // 60, minutes=n % 60) + duration_field.append((n, format_timedelta(t, threshold=.9))) + + return render_title_template("schedule_edit.html", + config=content, + starttime=time_field, + duration=duration_field, + title=_(u"Edit Scheduled Tasks Settings")) + + +@admi.route("/admin/scheduledtasks", methods=["POST"]) +@login_required +@admin_required +def 
update_scheduledtasks(): + error = False + to_save = request.form.to_dict() + if 0 <= int(to_save.get("schedule_start_time")) <= 23: + _config_int(to_save, "schedule_start_time") + else: + flash(_(u"Invalid start time for task specified"), category="error") + error = True + if 0 < int(to_save.get("schedule_duration")) <= 60: + _config_int(to_save, "schedule_duration") + else: + flash(_(u"Invalid duration for task specified"), category="error") + error = True + _config_checkbox(to_save, "schedule_generate_book_covers") + _config_checkbox(to_save, "schedule_generate_series_covers") + _config_checkbox(to_save, "schedule_reconnect") + + if not error: + try: + config.save() + flash(_(u"Scheduled tasks settings updated"), category="success") + + # Cancel any running tasks + schedule.end_scheduled_tasks() + + # Re-register tasks with new settings + schedule.register_scheduled_tasks(config.schedule_reconnect) + except IntegrityError: + ub.session.rollback() + log.error("An unknown error occurred while saving scheduled tasks settings") + flash(_(u"An unknown error occurred. Please try again later."), category="error") + except OperationalError: + ub.session.rollback() + log.error("Settings DB is not Writeable") + flash(_("Settings DB is not Writeable"), category="error") + + return edit_scheduledtasks() + + +@admi.route("/admin/user/", methods=["GET", "POST"]) +@login_required +@admin_required +def edit_user(user_id): + content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first() # type: ub.User + if not content or (not config.config_anonbrowse and content.name == "Guest"): + flash(_(u"User not found"), category="error") + return redirect(url_for('admin.admin')) + languages = calibre_db.speaking_language(return_all_languages=True) + translations = get_available_locale() + kobo_support = feature_support['kobo'] and config.config_kobo_sync + if request.method == "POST": + to_save = request.form.to_dict() + resp = _handle_edit_user(to_save, content, languages, translations, kobo_support) + if resp: + return resp + return render_title_template("user_edit.html", + translations=translations, + languages=languages, + new_user=0, + content=content, + config=config, + registered_oauth=oauth_check, + mail_configured=config.get_mail_server_configured(), + kobo_support=kobo_support, + title=_(u"Edit User %(nick)s", nick=content.name), + page="edituser") + + +@admi.route("/admin/resetpassword/", methods=["POST"]) +@login_required +@admin_required +def reset_user_password(user_id): + if current_user is not None and current_user.is_authenticated: + ret, message = reset_password(user_id) + if ret == 1: + log.debug(u"Password for user %s reset", message) + flash(_(u"Password for user %(user)s reset", user=message), category="success") + elif ret == 0: + log.error(u"An unknown error occurred. Please try again later.") + flash(_(u"An unknown error occurred. 
Please try again later."), category="error") + else: + log.error(u"Please configure the SMTP mail settings first...") + flash(_(u"Please configure the SMTP mail settings first..."), category="error") + return redirect(url_for('admin.admin')) + + +@admi.route("/admin/logfile") +@login_required +@admin_required +def view_logfile(): + logfiles = {0: logger.get_logfile(config.config_logfile), + 1: logger.get_accesslogfile(config.config_access_logfile)} + return render_title_template("logviewer.html", + title=_(u"Logfile viewer"), + accesslog_enable=config.config_access_log, + log_enable=bool(config.config_logfile != logger.LOG_TO_STDOUT), + logfiles=logfiles, + page="logfile") + + +@admi.route("/ajax/log/") +@login_required +@admin_required +def send_logfile(logtype): + if logtype == 1: + logfile = logger.get_accesslogfile(config.config_access_logfile) + return send_from_directory(os.path.dirname(logfile), + os.path.basename(logfile)) + if logtype == 0: + logfile = logger.get_logfile(config.config_logfile) + return send_from_directory(os.path.dirname(logfile), + os.path.basename(logfile)) + else: + return "" + + +@admi.route("/admin/logdownload/") +@login_required +@admin_required +def download_log(logtype): + if logtype == 0: + file_name = logger.get_logfile(config.config_logfile) + elif logtype == 1: + file_name = logger.get_accesslogfile(config.config_access_logfile) + else: + abort(404) + if logger.is_valid_logfile(file_name): + return debug_info.assemble_logfiles(file_name) + abort(404) + + +@admi.route("/admin/debug") +@login_required +@admin_required +def download_debug(): + return debug_info.send_debug() + + +@admi.route("/get_update_status", methods=['GET']) +@login_required +@admin_required +def get_update_status(): + if feature_support['updater']: + log.info(u"Update status requested") + return updater_thread.get_available_updates(request.method) + else: + return '' + + +@admi.route("/get_updater_status", methods=['GET', 'POST']) +@login_required +@admin_required +def get_updater_status(): + status = {} + if feature_support['updater']: + if request.method == "POST": + commit = request.form.to_dict() + if "start" in commit and commit['start'] == 'True': + txt = { + "1": _(u'Requesting update package'), + "2": _(u'Downloading update package'), + "3": _(u'Unzipping update package'), + "4": _(u'Replacing files'), + "5": _(u'Database connections are closed'), + "6": _(u'Stopping server'), + "7": _(u'Update finished, please press okay and reload page'), + "8": _(u'Update failed:') + u' ' + _(u'HTTP Error'), + "9": _(u'Update failed:') + u' ' + _(u'Connection error'), + "10": _(u'Update failed:') + u' ' + _(u'Timeout while establishing connection'), + "11": _(u'Update failed:') + u' ' + _(u'General error'), + "12": _(u'Update failed:') + u' ' + _(u'Update file could not be saved in temp dir'), + "13": _(u'Update failed:') + u' ' + _(u'Files could not be replaced during update') + } + status['text'] = txt + updater_thread.status = 0 + updater_thread.resume() + status['status'] = updater_thread.get_update_status() + elif request.method == "GET": + try: + status['status'] = updater_thread.get_update_status() + if status['status'] == -1: + status['status'] = 7 + except Exception: + status['status'] = 11 + return json.dumps(status) + return '' + + +def ldap_import_create_user(user, user_data): + user_login_field = extract_dynamic_field_from_filter(user, config.config_ldap_user_object) + + try: + username = user_data[user_login_field][0].decode('utf-8') + except KeyError as ex: + 
log.error("Failed to extract LDAP user: %s - %s", user, ex) + message = _(u'Failed to extract at least One LDAP User') + return 0, message + + # check for duplicate username + if ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).first(): + # if ub.session.query(ub.User).filter(ub.User.name == username).first(): + log.warning("LDAP User %s Already in Database", user_data) + return 0, None + + kindlemail = '' + if 'mail' in user_data: + useremail = user_data['mail'][0].decode('utf-8') + if len(user_data['mail']) > 1: + kindlemail = user_data['mail'][1].decode('utf-8') + + else: + log.debug('No Mail Field Found in LDAP Response') + useremail = username + '@email.com' + + try: + # check for duplicate email + useremail = check_email(useremail) + except Exception as ex: + log.warning("LDAP Email Error: {}, {}".format(user_data, ex)) + return 0, None + content = ub.User() + content.name = username + content.password = '' # dummy password which will be replaced by ldap one + content.email = useremail + content.kindle_mail = kindlemail + content.default_language = config.config_default_language + content.locale = config.config_default_locale + content.role = config.config_default_role + content.sidebar_view = config.config_default_show + content.allowed_tags = config.config_allowed_tags + content.denied_tags = config.config_denied_tags + content.allowed_column_value = config.config_allowed_column_value + content.denied_column_value = config.config_denied_column_value + ub.session.add(content) + try: + ub.session.commit() + return 1, None # increase no of users + except Exception as ex: + log.warning("Failed to create LDAP user: %s - %s", user, ex) + ub.session.rollback() + message = _(u'Failed to Create at Least One LDAP User') + return 0, message + + +@admi.route('/import_ldap_users', methods=["POST"]) +@login_required +@admin_required +def import_ldap_users(): + showtext = {} + try: + new_users = services.ldap.get_group_members(config.config_ldap_group_name) + except (services.ldap.LDAPException, TypeError, AttributeError, KeyError) as e: + log.error_or_exception(e) + showtext['text'] = _(u'Error: %(ldaperror)s', ldaperror=e) + return json.dumps(showtext) + if not new_users: + log.debug('LDAP empty response') + showtext['text'] = _(u'Error: No user returned in response of LDAP server') + return json.dumps(showtext) + + imported = 0 + for username in new_users: + user = username.decode('utf-8') + if '=' in user: + # if member object field is empty take user object as filter + if config.config_ldap_member_user_object: + query_filter = config.config_ldap_member_user_object + else: + query_filter = config.config_ldap_user_object + try: + user_identifier = extract_user_identifier(user, query_filter) + except Exception as ex: + log.warning(ex) + continue + else: + user_identifier = user + query_filter = None + try: + user_data = services.ldap.get_object_details(user=user_identifier, query_filter=query_filter) + except AttributeError as ex: + log.error_or_exception(ex) + continue + if user_data: + user_count, message = ldap_import_create_user(user, user_data) + if message: + showtext['text'] = message + else: + imported += user_count + else: + log.warning("LDAP User: %s Not Found", user) + showtext['text'] = _(u'At Least One LDAP User Not Found in Database') + if not showtext: + showtext['text'] = _(u'{} User Successfully Imported'.format(imported)) + return json.dumps(showtext) + + +@admi.route("/ajax/canceltask", methods=['POST']) +@login_required +@admin_required +def 
cancel_task(): + task_id = request.get_json().get('task_id', None) + worker = WorkerThread.get_instance() + worker.end_task(task_id) + return "" + + def _db_simulate_change(): param = request.form.to_dict() to_save = dict() @@ -1234,7 +1667,7 @@ def _db_configuration_update_helper(): if not calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path): return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'), gdrive_error) - config.store_calibre_uuid(calibre_db, db.LibraryId) + config.store_calibre_uuid(calibre_db, db.Library_Id) # if db changed -> delete shelfs, delete download books, delete read books, kobo sync... if db_change: log.info("Calibre Database changed, all Calibre-Web info related to old Database gets deleted") @@ -1246,6 +1679,7 @@ def _db_configuration_update_helper(): ub.session.query(ub.KoboReadingState).delete() ub.session.query(ub.KoboStatistics).delete() ub.session.query(ub.KoboSyncedBooks).delete() + helper.delete_thumbnail_cache() ub.session_commit() _config_string(to_save, "config_calibre_dir") calibre_db.update_config(config) @@ -1555,347 +1989,6 @@ def _handle_edit_user(to_save, content, languages, translations, kobo_support): return "" -@admi.route("/admin/user/new", methods=["GET", "POST"]) -@login_required -@admin_required -def new_user(): - content = ub.User() - languages = calibre_db.speaking_language() - translations = [Locale('en')] + babel.list_translations() - kobo_support = feature_support['kobo'] and config.config_kobo_sync - if request.method == "POST": - to_save = request.form.to_dict() - _handle_new_user(to_save, content, languages, translations, kobo_support) - else: - content.role = config.config_default_role - content.sidebar_view = config.config_default_show - content.locale = config.config_default_locale - content.default_language = config.config_default_language - return render_title_template("user_edit.html", new_user=1, content=content, - config=config, translations=translations, - languages=languages, title=_(u"Add new user"), page="newuser", - kobo_support=kobo_support, registered_oauth=oauth_check) - - -@admi.route("/admin/mailsettings") -@login_required -@admin_required -def edit_mailsettings(): - content = config.get_mail_settings() - return render_title_template("email_edit.html", content=content, title=_(u"Edit E-mail Server Settings"), - page="mailset", feature_support=feature_support) - - -@admi.route("/admin/mailsettings", methods=["POST"]) -@login_required -@admin_required -def update_mailsettings(): - to_save = request.form.to_dict() - _config_int(to_save, "mail_server_type") - if to_save.get("invalidate"): - config.mail_gmail_token = {} - try: - flag_modified(config, "mail_gmail_token") - except AttributeError: - pass - elif to_save.get("gmail"): - try: - config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token) - flash(_(u"Gmail Account Verification Successful"), category="success") - except Exception as ex: - flash(str(ex), category="error") - log.error(ex) - return edit_mailsettings() - - else: - _config_string(to_save, "mail_server") - _config_int(to_save, "mail_port") - _config_int(to_save, "mail_use_ssl") - _config_string(to_save, "mail_login") - _config_string(to_save, "mail_password") - _config_string(to_save, "mail_from") - _config_int(to_save, "mail_size", lambda y: int(y)*1024*1024) - try: - config.save() - except (OperationalError, InvalidRequestError) as e: - ub.session.rollback() - log.error_or_exception("Settings Database error: {}".format(e)) - 
flash(_(u"Database error: %(error)s.", error=e.orig), category="error") - return edit_mailsettings() - - if to_save.get("test"): - if current_user.email: - result = send_test_mail(current_user.email, current_user.name) - if result is None: - flash(_(u"Test e-mail queued for sending to %(email)s, please check Tasks for result", - email=current_user.email), category="info") - else: - flash(_(u"There was an error sending the Test e-mail: %(res)s", res=result), category="error") - else: - flash(_(u"Please configure your e-mail address first..."), category="error") - else: - flash(_(u"E-mail server settings updated"), category="success") - - return edit_mailsettings() - - -@admi.route("/admin/user/", methods=["GET", "POST"]) -@login_required -@admin_required -def edit_user(user_id): - content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first() # type: ub.User - if not content or (not config.config_anonbrowse and content.name == "Guest"): - flash(_(u"User not found"), category="error") - return redirect(url_for('admin.admin')) - languages = calibre_db.speaking_language(return_all_languages=True) - translations = babel.list_translations() + [Locale('en')] - kobo_support = feature_support['kobo'] and config.config_kobo_sync - if request.method == "POST": - to_save = request.form.to_dict() - resp = _handle_edit_user(to_save, content, languages, translations, kobo_support) - if resp: - return resp - return render_title_template("user_edit.html", - translations=translations, - languages=languages, - new_user=0, - content=content, - config=config, - registered_oauth=oauth_check, - mail_configured=config.get_mail_server_configured(), - kobo_support=kobo_support, - title=_(u"Edit User %(nick)s", nick=content.name), - page="edituser") - - -@admi.route("/admin/resetpassword/", methods=["POST"]) -@login_required -@admin_required -def reset_user_password(user_id): - if current_user is not None and current_user.is_authenticated: - ret, message = reset_password(user_id) - if ret == 1: - log.debug(u"Password for user %s reset", message) - flash(_(u"Password for user %(user)s reset", user=message), category="success") - elif ret == 0: - log.error(u"An unknown error occurred. Please try again later.") - flash(_(u"An unknown error occurred. 
Please try again later."), category="error") - else: - log.error(u"Please configure the SMTP mail settings first...") - flash(_(u"Please configure the SMTP mail settings first..."), category="error") - return redirect(url_for('admin.admin')) - - -@admi.route("/admin/logfile") -@login_required -@admin_required -def view_logfile(): - logfiles = {0: logger.get_logfile(config.config_logfile), - 1: logger.get_accesslogfile(config.config_access_logfile)} - return render_title_template("logviewer.html", - title=_(u"Logfile viewer"), - accesslog_enable=config.config_access_log, - log_enable=bool(config.config_logfile != logger.LOG_TO_STDOUT), - logfiles=logfiles, - page="logfile") - - -@admi.route("/ajax/log/") -@login_required -@admin_required -def send_logfile(logtype): - if logtype == 1: - logfile = logger.get_accesslogfile(config.config_access_logfile) - return send_from_directory(os.path.dirname(logfile), - os.path.basename(logfile)) - if logtype == 0: - logfile = logger.get_logfile(config.config_logfile) - return send_from_directory(os.path.dirname(logfile), - os.path.basename(logfile)) - else: - return "" - - -@admi.route("/admin/logdownload/") -@login_required -@admin_required -def download_log(logtype): - if logtype == 0: - file_name = logger.get_logfile(config.config_logfile) - elif logtype == 1: - file_name = logger.get_accesslogfile(config.config_access_logfile) - else: - abort(404) - if logger.is_valid_logfile(file_name): - return debug_info.assemble_logfiles(file_name) - abort(404) - - -@admi.route("/admin/debug") -@login_required -@admin_required -def download_debug(): - return debug_info.send_debug() - - -@admi.route("/get_update_status", methods=['GET']) -@login_required -@admin_required -def get_update_status(): - if feature_support['updater']: - log.info(u"Update status requested") - return updater_thread.get_available_updates(request.method, locale=get_locale()) - else: - return '' - - -@admi.route("/get_updater_status", methods=['GET', 'POST']) -@login_required -@admin_required -def get_updater_status(): - status = {} - if feature_support['updater']: - if request.method == "POST": - commit = request.form.to_dict() - if "start" in commit and commit['start'] == 'True': - txt = { - "1": _(u'Requesting update package'), - "2": _(u'Downloading update package'), - "3": _(u'Unzipping update package'), - "4": _(u'Replacing files'), - "5": _(u'Database connections are closed'), - "6": _(u'Stopping server'), - "7": _(u'Update finished, please press okay and reload page'), - "8": _(u'Update failed:') + u' ' + _(u'HTTP Error'), - "9": _(u'Update failed:') + u' ' + _(u'Connection error'), - "10": _(u'Update failed:') + u' ' + _(u'Timeout while establishing connection'), - "11": _(u'Update failed:') + u' ' + _(u'General error'), - "12": _(u'Update failed:') + u' ' + _(u'Update file could not be saved in temp dir'), - "13": _(u'Update failed:') + u' ' + _(u'Files could not be replaced during update') - } - status['text'] = txt - updater_thread.status = 0 - updater_thread.resume() - status['status'] = updater_thread.get_update_status() - elif request.method == "GET": - try: - status['status'] = updater_thread.get_update_status() - if status['status'] == -1: - status['status'] = 7 - except Exception: - status['status'] = 11 - return json.dumps(status) - return '' - - -def ldap_import_create_user(user, user_data): - user_login_field = extract_dynamic_field_from_filter(user, config.config_ldap_user_object) - - try: - username = user_data[user_login_field][0].decode('utf-8') - except KeyError as 
ex: - log.error("Failed to extract LDAP user: %s - %s", user, ex) - message = _(u'Failed to extract at least One LDAP User') - return 0, message - - # check for duplicate username - if ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).first(): - # if ub.session.query(ub.User).filter(ub.User.name == username).first(): - log.warning("LDAP User %s Already in Database", user_data) - return 0, None - - kindlemail = '' - if 'mail' in user_data: - useremail = user_data['mail'][0].decode('utf-8') - if len(user_data['mail']) > 1: - kindlemail = user_data['mail'][1].decode('utf-8') - - else: - log.debug('No Mail Field Found in LDAP Response') - useremail = username + '@email.com' - - try: - # check for duplicate email - useremail = check_email(useremail) - except Exception as ex: - log.warning("LDAP Email Error: {}, {}".format(user_data, ex)) - return 0, None - content = ub.User() - content.name = username - content.password = '' # dummy password which will be replaced by ldap one - content.email = useremail - content.kindle_mail = kindlemail - content.default_language = config.config_default_language - content.locale = config.config_default_locale - content.role = config.config_default_role - content.sidebar_view = config.config_default_show - content.allowed_tags = config.config_allowed_tags - content.denied_tags = config.config_denied_tags - content.allowed_column_value = config.config_allowed_column_value - content.denied_column_value = config.config_denied_column_value - ub.session.add(content) - try: - ub.session.commit() - return 1, None # increase no of users - except Exception as ex: - log.warning("Failed to create LDAP user: %s - %s", user, ex) - ub.session.rollback() - message = _(u'Failed to Create at Least One LDAP User') - return 0, message - - -@admi.route('/import_ldap_users', methods=["POST"]) -@login_required -@admin_required -def import_ldap_users(): - showtext = {} - try: - new_users = services.ldap.get_group_members(config.config_ldap_group_name) - except (services.ldap.LDAPException, TypeError, AttributeError, KeyError) as e: - log.error_or_exception(e) - showtext['text'] = _(u'Error: %(ldaperror)s', ldaperror=e) - return json.dumps(showtext) - if not new_users: - log.debug('LDAP empty response') - showtext['text'] = _(u'Error: No user returned in response of LDAP server') - return json.dumps(showtext) - - imported = 0 - for username in new_users: - user = username.decode('utf-8') - if '=' in user: - # if member object field is empty take user object as filter - if config.config_ldap_member_user_object: - query_filter = config.config_ldap_member_user_object - else: - query_filter = config.config_ldap_user_object - try: - user_identifier = extract_user_identifier(user, query_filter) - except Exception as ex: - log.warning(ex) - continue - else: - user_identifier = user - query_filter = None - try: - user_data = services.ldap.get_object_details(user=user_identifier, query_filter=query_filter) - except AttributeError as ex: - log.error_or_exception(ex) - continue - if user_data: - user_count, message = ldap_import_create_user(user, user_data) - if message: - showtext['text'] = message - else: - imported += user_count - else: - log.warning("LDAP User: %s Not Found", user) - showtext['text'] = _(u'At Least One LDAP User Not Found in Database') - if not showtext: - showtext['text'] = _(u'{} User Successfully Imported'.format(imported)) - return json.dumps(showtext) - - def extract_user_data_from_field(user, field): match = re.search(field + 
r"=([\.\d\s\w-]+)", user, re.IGNORECASE | re.UNICODE)
     if match:
diff --git a/cps/babel.py b/cps/babel.py
new file mode 100644
index 00000000..f5ecaf5a
--- /dev/null
+++ b/cps/babel.py
@@ -0,0 +1,39 @@
+from babel import negotiate_locale
+from flask_babel import Babel, Locale
+from babel.core import UnknownLocaleError
+from flask import request, g
+
+from . import logger
+
+log = logger.create()
+
+babel = Babel()
+
+
+@babel.localeselector
+def get_locale():
+    # if a user is logged in, use the locale from the user settings
+    user = getattr(g, 'user', None)
+    if user is not None and hasattr(user, "locale"):
+        if user.name != 'Guest':  # if the account is the guest account bypass the config lang settings
+            return user.locale
+
+    preferred = list()
+    if request.accept_languages:
+        for x in request.accept_languages.values():
+            try:
+                preferred.append(str(Locale.parse(x.replace('-', '_'))))
+            except (UnknownLocaleError, ValueError) as e:
+                log.debug('Could not parse locale "%s": %s', x, e)
+
+    return negotiate_locale(preferred or ['en'], get_available_translations())
+
+
+def get_user_locale_language(user_language):
+    return Locale.parse(user_language).get_language_name(get_locale())
+
+def get_available_locale():
+    return [Locale('en')] + babel.list_translations()
+
+def get_available_translations():
+    return set(str(item) for item in get_available_locale())
diff --git a/cps/cli.py b/cps/cli.py
index 629fd76d..cf4f36fb 100644
--- a/cps/cli.py
+++ b/cps/cli.py
@@ -31,96 +31,99 @@ def version_info():
         return "Calibre-Web version: %s - unkown git-clone" % _STABLE_VERSION['version']
     return "Calibre-Web version: %s -%s" % (_STABLE_VERSION['version'], _NIGHTLY_VERSION[1])
 
+class CliParameter(object):
 
-parser = argparse.ArgumentParser(description='Calibre Web is a web app'
-                                 ' providing a interface for browsing, reading and downloading eBooks\n', prog='cps.py')
-parser.add_argument('-p', metavar='path', help='path and name to settings db, e.g. /opt/cw.db')
-parser.add_argument('-g', metavar='path', help='path and name to gdrive db, e.g. /opt/gd.db')
-parser.add_argument('-c', metavar='path',
-                    help='path and name to SSL certfile, e.g. /opt/test.cert, works only in combination with keyfile')
-parser.add_argument('-k', metavar='path',
-                    help='path and name to SSL keyfile, e.g. 
/opt/test.key, works only in combination with certfile')
-parser.add_argument('-v', '--version', action='version', help='Shows version number and exits Calibre-Web',
-                    version=version_info())
-parser.add_argument('-i', metavar='ip-address', help='Server IP-Address to listen')
-parser.add_argument('-s', metavar='user:pass', help='Sets specific username to new password and exits Calibre-Web')
-parser.add_argument('-f', action='store_true', help='Flag is depreciated and will be removed in next version')
-parser.add_argument('-l', action='store_true', help='Allow loading covers from localhost')
-parser.add_argument('-d', action='store_true', help='Dry run of updater to check file permissions in advance '
-                                                     'and exits Calibre-Web')
-parser.add_argument('-r', action='store_true', help='Enable public database reconnect route under /reconnect')
-args = parser.parse_args()
+    def init(self):
+        self.arg_parser()
 
-settings_path = args.p or os.path.join(_CONFIG_DIR, DEFAULT_SETTINGS_FILE)
-gd_path = args.g or os.path.join(_CONFIG_DIR, DEFAULT_GDRIVE_FILE)
+    def arg_parser(self):
+        parser = argparse.ArgumentParser(description='Calibre Web is a web app'
+                                                     ' providing an interface for browsing, reading and downloading eBooks\n',
+                                         prog='cps.py')
+        parser.add_argument('-p', metavar='path', help='path and name to settings db, e.g. /opt/cw.db')
+        parser.add_argument('-g', metavar='path', help='path and name to gdrive db, e.g. /opt/gd.db')
+        parser.add_argument('-c', metavar='path',
+                            help='path and name to SSL certfile, e.g. /opt/test.cert, works only in combination with keyfile')
+        parser.add_argument('-k', metavar='path',
+                            help='path and name to SSL keyfile, e.g. /opt/test.key, works only in combination with certfile')
+        parser.add_argument('-v', '--version', action='version', help='Shows version number and exits Calibre-Web',
+                            version=version_info())
+        parser.add_argument('-i', metavar='ip-address', help='Server IP-Address to listen')
+        parser.add_argument('-s', metavar='user:pass',
+                            help='Sets specific username to new password and exits Calibre-Web')
+        parser.add_argument('-f', action='store_true', help='Flag is deprecated and will be removed in next version')
+        parser.add_argument('-l', action='store_true', help='Allow loading covers from localhost')
+        parser.add_argument('-d', action='store_true', help='Dry run of updater to check file permissions in advance '
+                                                            'and exits Calibre-Web')
+        parser.add_argument('-r', action='store_true', help='Enable public database reconnect route under /reconnect')
+        args = parser.parse_args()
 
-if os.path.isdir(settings_path):
-    settings_path = os.path.join(settings_path, DEFAULT_SETTINGS_FILE)
-
-if os.path.isdir(gd_path):
-    gd_path = os.path.join(gd_path, DEFAULT_GDRIVE_FILE)
+        self.settings_path = args.p or os.path.join(_CONFIG_DIR, DEFAULT_SETTINGS_FILE)
+        self.gd_path = args.g or os.path.join(_CONFIG_DIR, DEFAULT_GDRIVE_FILE)
+
+        if os.path.isdir(self.settings_path):
+            self.settings_path = os.path.join(self.settings_path, DEFAULT_SETTINGS_FILE)
+
+        if os.path.isdir(self.gd_path):
+            self.gd_path = os.path.join(self.gd_path, DEFAULT_GDRIVE_FILE)
 
-# handle and check parameter for ssl encryption
-certfilepath = None
-keyfilepath = None
-if args.c:
-    if os.path.isfile(args.c):
-        certfilepath = args.c
-    else:
-        print("Certfile path is invalid. Exiting...")
-        sys.exit(1)
-
-if args.c == "":
-    certfilepath = ""
-
-if args.k:
-    if os.path.isfile(args.k):
-        keyfilepath = args.k
-    else:
-        print("Keyfile path is invalid. Exiting...")
-        sys.exit(1)
-
-if (args.k and not args.c) or (not args.k and args.c):
-    print("Certfile and Keyfile have to be used together. Exiting...")
-    sys.exit(1)
-
-if args.k == "":
-    keyfilepath = ""
-
-
-# dry run updater
-dry_run = args.d or None
-# enable reconnect endpoint for docker database reconnect
-reconnect_enable = args.r or os.environ.get("CALIBRE_RECONNECT", None)
-# load covers from localhost
-allow_localhost = args.l or os.environ.get("CALIBRE_LOCALHOST", None)
-# handle and check ip address argument
-ip_address = args.i or None
-
-
-if ip_address:
-    try:
-        # try to parse the given ip address with socket
-        if hasattr(socket, 'inet_pton'):
-            if ':' in ip_address:
-                socket.inet_pton(socket.AF_INET6, ip_address)
-            else:
-                socket.inet_pton(socket.AF_INET, ip_address)
-        else:
-            # on windows python < 3.4, inet_pton is not available
-            # inet_atom only handles IPv4 addresses
-            socket.inet_aton(ip_address)
-    except socket.error as err:
-        print(ip_address, ':', err)
-        sys.exit(1)
-
-# handle and check user password argument
-user_credentials = args.s or None
-if user_credentials and ":" not in user_credentials:
-    print("No valid 'username:password' format")
-    sys.exit(3)
-
-if args.f:
-    print("Warning: -f flag is depreciated and will be removed in next version")
+        # handle and check parameter for ssl encryption
+        self.certfilepath = None
+        self.keyfilepath = None
+        if args.c:
+            if os.path.isfile(args.c):
+                self.certfilepath = args.c
+            else:
+                print("Certfile path is invalid. Exiting...")
+                sys.exit(1)
+
+        if args.c == "":
+            self.certfilepath = ""
+
+        if args.k:
+            if os.path.isfile(args.k):
+                self.keyfilepath = args.k
+            else:
+                print("Keyfile path is invalid. Exiting...")
+                sys.exit(1)
+        if (args.k and not args.c) or (not args.k and args.c):
+            print("Certfile and Keyfile have to be used together. Exiting...")
+            sys.exit(1)
+
+        if args.k == "":
+            self.keyfilepath = ""
+
+        # dry run updater
+        self.dry_run = args.d or None
+        # enable reconnect endpoint for docker database reconnect
+        self.reconnect_enable = args.r or os.environ.get("CALIBRE_RECONNECT", None)
+        # load covers from localhost
+        self.allow_localhost = args.l or os.environ.get("CALIBRE_LOCALHOST", None)
+        # handle and check ip address argument
+        self.ip_address = args.i or None
+        if self.ip_address:
+            try:
+                # try to parse the given ip address with socket
+                if hasattr(socket, 'inet_pton'):
+                    if ':' in self.ip_address:
+                        socket.inet_pton(socket.AF_INET6, self.ip_address)
+                    else:
+                        socket.inet_pton(socket.AF_INET, self.ip_address)
+                else:
+                    # on windows python < 3.4, inet_pton is not available
+                    # inet_atom only handles IPv4 addresses
+                    socket.inet_aton(self.ip_address)
+            except socket.error as err:
+                print(self.ip_address, ':', err)
+                sys.exit(1)
+
+        # handle and check user password argument
+        self.user_credentials = args.s or None
+        if self.user_credentials and ":" not in self.user_credentials:
+            print("No valid 'username:password' format")
+            sys.exit(3)
+
+        if args.f:
+            print("Warning: -f flag is deprecated and will be removed in next version")
diff --git a/cps/config_sql.py b/cps/config_sql.py
index 01523a01..743b2ce7 100644
--- a/cps/config_sql.py
+++ b/cps/config_sql.py
@@ -29,7 +29,7 @@ try:
 except ImportError:
     from sqlalchemy.ext.declarative import declarative_base
 
-from . import constants, cli, logger
+from . 
import constants, logger log = logger.create() @@ -134,13 +134,19 @@ class _Settings(_Base): config_calibre = Column(String) config_rarfile_location = Column(String, default=None) config_upload_formats = Column(String, default=','.join(constants.EXTENSIONS_UPLOAD)) - config_unicode_filename =Column(Boolean, default=False) + config_unicode_filename = Column(Boolean, default=False) config_updatechannel = Column(Integer, default=constants.UPDATE_STABLE) config_reverse_proxy_login_header_name = Column(String) config_allow_reverse_proxy_header_login = Column(Boolean, default=False) + schedule_start_time = Column(Integer, default=4) + schedule_duration = Column(Integer, default=10) + schedule_generate_book_covers = Column(Boolean, default=False) + schedule_generate_series_covers = Column(Boolean, default=False) + schedule_reconnect = Column(Boolean, default=False) + def __repr__(self): return self.__class__.__name__ @@ -148,12 +154,16 @@ class _Settings(_Base): # Class holds all application specific settings in calibre-web class _ConfigSQL(object): # pylint: disable=no-member - def __init__(self, session): + def __init__(self): + pass + + def init_config(self, session, cli): self._session = session self._settings = None self.db_configured = None self.config_calibre_dir = None self.load() + self.cli = cli change = False if self.config_converterpath == None: # pylint: disable=access-member-before-definition @@ -171,7 +181,6 @@ class _ConfigSQL(object): if change: self.save() - def _read_from_storage(self): if self._settings is None: log.debug("_ConfigSQL._read_from_storage") @@ -179,22 +188,21 @@ class _ConfigSQL(object): return self._settings def get_config_certfile(self): - if cli.certfilepath: - return cli.certfilepath - if cli.certfilepath == "": + if self.cli.certfilepath: + return self.cli.certfilepath + if self.cli.certfilepath == "": return None return self.config_certfile def get_config_keyfile(self): - if cli.keyfilepath: - return cli.keyfilepath - if cli.certfilepath == "": + if self.cli.keyfilepath: + return self.cli.keyfilepath + if self.cli.certfilepath == "": return None return self.config_keyfile - @staticmethod - def get_config_ipaddress(): - return cli.ip_address or "" + def get_config_ipaddress(self): + return self.cli.ip_address or "" def _has_role(self, role_flag): return constants.has_flag(self.config_default_role, role_flag) @@ -255,6 +263,8 @@ class _ConfigSQL(object): return bool((self.mail_server != constants.DEFAULT_MAIL_SERVER and self.mail_server_type == 0) or (self.mail_gmail_token != {} and self.mail_server_type == 1)) + def get_scheduled_task_settings(self): + return {k:v for k, v in self.__dict__.items() if k.startswith('schedule_')} def set_from_dictionary(self, dictionary, field, convertor=None, default=None, encode=None): """Possibly updates a field of this object. 
@@ -286,11 +296,10 @@ class _ConfigSQL(object): def toDict(self): storage = {} for k, v in self.__dict__.items(): - if k[0] != '_' and not k.endswith("password") and not k.endswith("secret"): + if k[0] != '_' and not k.endswith("password") and not k.endswith("secret") and not k == "cli": storage[k] = v return storage - def load(self): '''Load all configuration values from the underlying storage.''' s = self._read_from_storage() # type: _Settings @@ -411,6 +420,7 @@ def autodetect_calibre_binary(): return element return "" + def autodetect_unrar_binary(): if sys.platform == "win32": calibre_path = ["C:\\program files\\WinRar\\unRAR.exe", @@ -422,6 +432,7 @@ def autodetect_unrar_binary(): return element return "" + def autodetect_kepubify_binary(): if sys.platform == "win32": calibre_path = ["C:\\program files\\kepubify\\kepubify-windows-64Bit.exe", @@ -433,6 +444,7 @@ def autodetect_kepubify_binary(): return element return "" + def _migrate_database(session): # make sure the table is created, if it does not exist _Base.metadata.create_all(session.bind) @@ -440,14 +452,15 @@ def _migrate_database(session): _migrate_table(session, _Flask_Settings) -def load_configuration(session): +def load_configuration(conf, session, cli): _migrate_database(session) if not session.query(_Settings).count(): session.add(_Settings()) session.commit() - conf = _ConfigSQL(session) - return conf + # conf = _ConfigSQL() + conf.init_config(session, cli) + # return conf def get_flask_session_key(_session): flask_settings = _session.query(_Flask_Settings).one_or_none() diff --git a/cps/constants.py b/cps/constants.py index cd0864c8..0f3b2530 100644 --- a/cps/constants.py +++ b/cps/constants.py @@ -23,6 +23,9 @@ from sqlalchemy import __version__ as sql_version sqlalchemy_version2 = ([int(x) for x in sql_version.split('.')] >= [2, 0, 0]) +# APP_MODE - production, development, or test +APP_MODE = os.environ.get('APP_MODE', 'production') + # if installed via pip this variable is set to true (empty file with name .HOMEDIR present) HOME_CONFIG = os.path.isfile(os.path.join(os.path.dirname(os.path.abspath(__file__)), '.HOMEDIR')) @@ -35,6 +38,10 @@ STATIC_DIR = os.path.join(BASE_DIR, 'cps', 'static') TEMPLATES_DIR = os.path.join(BASE_DIR, 'cps', 'templates') TRANSLATIONS_DIR = os.path.join(BASE_DIR, 'cps', 'translations') +# Cache dir - use CACHE_DIR environment variable, otherwise use the default directory: cps/cache +DEFAULT_CACHE_DIR = os.path.join(BASE_DIR, 'cps', 'cache') +CACHE_DIR = os.environ.get('CACHE_DIR', DEFAULT_CACHE_DIR) + if HOME_CONFIG: home_dir = os.path.join(os.path.expanduser("~"), ".calibre-web") if not os.path.exists(home_dir): @@ -164,6 +171,19 @@ NIGHTLY_VERSION[1] = '$Format:%cI$' # NIGHTLY_VERSION[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57' # NIGHTLY_VERSION[1] = '2018-09-09T10:13:08+02:00' +# CACHE +CACHE_TYPE_THUMBNAILS = 'thumbnails' + +# Thumbnail Types +THUMBNAIL_TYPE_COVER = 1 +THUMBNAIL_TYPE_SERIES = 2 +THUMBNAIL_TYPE_AUTHOR = 3 + +# Thumbnails Sizes +COVER_THUMBNAIL_ORIGINAL = 0 +COVER_THUMBNAIL_SMALL = 1 +COVER_THUMBNAIL_MEDIUM = 2 +COVER_THUMBNAIL_LARGE = 3 # clean-up the module namespace del sys, os, namedtuple diff --git a/cps/converter.py b/cps/converter.py index bb197467..af2a6c09 100644 --- a/cps/converter.py +++ b/cps/converter.py @@ -18,7 +18,8 @@ import os import re -from flask_babel import gettext as _ + +from flask_babel import lazy_gettext as N_ from . 
import config, logger from .subproc_wrapper import process_wait @@ -26,9 +27,9 @@ from .subproc_wrapper import process_wait log = logger.create() -# _() necessary to make babel aware of string for translation -_NOT_INSTALLED = _('not installed') -_EXECUTION_ERROR = _('Execution permissions missing') +# strings getting translated when used +_NOT_INSTALLED = N_('not installed') +_EXECUTION_ERROR = N_('Execution permissions missing') def _get_command_version(path, pattern, argument=None): diff --git a/cps/db.py b/cps/db.py index 68e02b81..f4dc4ccb 100644 --- a/cps/db.py +++ b/cps/db.py @@ -25,6 +25,7 @@ from datetime import datetime from urllib.parse import quote import unidecode +from sqlite3 import OperationalError as sqliteOperationalError from sqlalchemy import create_engine from sqlalchemy import Table, Column, ForeignKey, CheckConstraint from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float @@ -42,6 +43,7 @@ from sqlalchemy.sql.expression import and_, true, false, text, func, or_ from sqlalchemy.ext.associationproxy import association_proxy from flask_login import current_user from flask_babel import gettext as _ +from flask_babel import get_locale from flask import flash from . import logger, ub, isoLanguages @@ -88,7 +90,7 @@ books_publishers_link = Table('books_publishers_link', Base.metadata, ) -class LibraryId(Base): +class Library_Id(Base): __tablename__ = 'library_id' id = Column(Integer, primary_key=True) uuid = Column(String, nullable=False) @@ -439,10 +441,15 @@ class CalibreDB: # instances alive once they reach the end of their respective scopes instances = WeakSet() - def __init__(self, expire_on_commit=True): + def __init__(self, expire_on_commit=True, init=False): """ Initialize a new CalibreDB session """ self.session = None + if init: + self.init_db(expire_on_commit) + + + def init_db(self, expire_on_commit=True): if self._init: self.init_session(expire_on_commit) @@ -542,7 +549,7 @@ class CalibreDB: connection.execute(text("attach database '{}' as app_settings;".format(app_db_path))) local_session = scoped_session(sessionmaker()) local_session.configure(bind=connection) - database_uuid = local_session().query(LibraryId).one_or_none() + database_uuid = local_session().query(Library_Id).one_or_none() # local_session.dispose() check_engine.connect() @@ -895,7 +902,6 @@ class CalibreDB: # Creates for all stored languages a translated speaking name in the array for the UI def speaking_language(self, languages=None, return_all_languages=False, with_count=False, reverse_order=False): - from . 
import get_locale if with_count: if not languages: @@ -916,7 +922,7 @@ class CalibreDB: .count()) if no_lang_count: tags.append([Category(_("None"), "none"), no_lang_count]) - return sorted(tags, key=lambda x: x[0].name, reverse=reverse_order) + return sorted(tags, key=lambda x: x[0].name.lower(), reverse=reverse_order) else: if not languages: languages = self.session.query(Languages) \ @@ -940,7 +946,10 @@ class CalibreDB: return title.strip() conn = conn or self.session.connection().connection.connection - conn.create_function("title_sort", 1, _title_sort) + try: + conn.create_function("title_sort", 1, _title_sort) + except sqliteOperationalError: + pass @classmethod def dispose(cls): diff --git a/cps/dep_check.py b/cps/dep_check.py index 84e16e85..929185c2 100644 --- a/cps/dep_check.py +++ b/cps/dep_check.py @@ -5,7 +5,7 @@ import json from .constants import BASE_DIR try: - from importlib_metadata import version + from importlib.metadata import version importlib = True ImportNotFound = BaseException except ImportError: diff --git a/cps/editbooks.py b/cps/editbooks.py index 07b8cfe0..d3615050 100755 --- a/cps/editbooks.py +++ b/cps/editbooks.py @@ -25,7 +25,7 @@ from datetime import datetime import json from shutil import copyfile from uuid import uuid4 -from markupsafe import escape +from markupsafe import escape # dependency of flask from functools import wraps try: @@ -35,12 +35,13 @@ except ImportError: from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response from flask_babel import gettext as _ +from flask_babel import lazy_gettext as N_ +from flask_babel import get_locale from flask_login import current_user, login_required from sqlalchemy.exc import OperationalError, IntegrityError -from sqlite3 import OperationalError as sqliteOperationalError + from . import constants, logger, isoLanguages, gdriveutils, uploader, helper, kobo_sync_status -from . import config, get_locale, ub, db -from . import calibre_db +from . 
import config, ub, db, calibre_db from .services.worker import WorkerThread from .tasks.upload import TaskUpload from .render_template import render_title_template @@ -48,7 +49,7 @@ from .usermanagement import login_required_if_no_ano from .kobo_sync_status import change_archived_books -EditBook = Blueprint('edit-book', __name__) +editbook = Blueprint('edit-book', __name__) log = logger.create() @@ -72,176 +73,692 @@ def edit_required(f): return inner -def search_objects_remove(db_book_object, db_type, input_elements): - del_elements = [] - for c_elements in db_book_object: - found = False - if db_type == 'languages': - type_elements = c_elements.lang_code - elif db_type == 'custom': - type_elements = c_elements.value - else: - type_elements = c_elements.name - for inp_element in input_elements: - if inp_element.lower() == type_elements.lower(): - found = True - break - # if the element was not found in the new list, add it to remove list - if not found: - del_elements.append(c_elements) - return del_elements - - -def search_objects_add(db_book_object, db_type, input_elements): - add_elements = [] - for inp_element in input_elements: - found = False - for c_elements in db_book_object: - if db_type == 'languages': - type_elements = c_elements.lang_code - elif db_type == 'custom': - type_elements = c_elements.value - else: - type_elements = c_elements.name - if inp_element == type_elements: - found = True - break - if not found: - add_elements.append(inp_element) - return add_elements - - -def remove_objects(db_book_object, db_session, del_elements): - changed = False - if len(del_elements) > 0: - for del_element in del_elements: - db_book_object.remove(del_element) - changed = True - if len(del_element.books) == 0: - db_session.delete(del_element) - return changed - - -def add_objects(db_book_object, db_object, db_session, db_type, add_elements): - changed = False - if db_type == 'languages': - db_filter = db_object.lang_code - elif db_type == 'custom': - db_filter = db_object.value - else: - db_filter = db_object.name - for add_element in add_elements: - # check if an element with that name exists - db_element = db_session.query(db_object).filter(db_filter == add_element).first() - # if no element is found add it - if db_type == 'author': - new_element = db_object(add_element, helper.get_sorted_author(add_element.replace('|', ',')), "") - elif db_type == 'series': - new_element = db_object(add_element, add_element) - elif db_type == 'custom': - new_element = db_object(value=add_element) - elif db_type == 'publisher': - new_element = db_object(add_element, None) - else: # db_type should be tag or language - new_element = db_object(add_element) - if db_element is None: - changed = True - db_session.add(new_element) - db_book_object.append(new_element) - else: - db_element = create_objects_for_addition(db_element, add_element, db_type) - # add element to book - changed = True - db_book_object.append(db_element) - return changed - - -def create_objects_for_addition(db_element, add_element, db_type): - if db_type == 'custom': - if db_element.value != add_element: - db_element.value = add_element - elif db_type == 'languages': - if db_element.lang_code != add_element: - db_element.lang_code = add_element - elif db_type == 'series': - if db_element.name != add_element: - db_element.name = add_element - db_element.sort = add_element - elif db_type == 'author': - if db_element.name != add_element: - db_element.name = add_element - db_element.sort = helper.get_sorted_author(add_element.replace('|', 
',')) - elif db_type == 'publisher': - if db_element.name != add_element: - db_element.name = add_element - db_element.sort = None - elif db_element.name != add_element: - db_element.name = add_element - return db_element - - -# Modifies different Database objects, first check if elements have to be deleted, -# because they are no longer used, than check if elements have to be added to database -def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type): - # passing input_elements not as a list may lead to undesired results - if not isinstance(input_elements, list): - raise TypeError(str(input_elements) + " should be passed as a list") - input_elements = [x for x in input_elements if x != ''] - # we have all input element (authors, series, tags) names now - # 1. search for elements to remove - del_elements = search_objects_remove(db_book_object, db_type, input_elements) - # 2. search for elements that need to be added - add_elements = search_objects_add(db_book_object, db_type, input_elements) - # if there are elements to remove, we remove them now - changed = remove_objects(db_book_object, db_session, del_elements) - # if there are elements to add, we add them now! - if len(add_elements) > 0: - changed |= add_objects(db_book_object, db_object, db_session, db_type, add_elements) - return changed - - -def modify_identifiers(input_identifiers, db_identifiers, db_session): - """Modify Identifiers to match input information. - input_identifiers is a list of read-to-persist Identifiers objects. - db_identifiers is a list of already persisted list of Identifiers objects.""" - changed = False - error = False - input_dict = dict([(identifier.type.lower(), identifier) for identifier in input_identifiers]) - if len(input_identifiers) != len(input_dict): - error = True - db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers]) - # delete db identifiers not present in input or modify them with input val - for identifier_type, identifier in db_dict.items(): - if identifier_type not in input_dict.keys(): - db_session.delete(identifier) - changed = True - else: - input_identifier = input_dict[identifier_type] - identifier.type = input_identifier.type - identifier.val = input_identifier.val - # add input identifiers not present in db - for identifier_type, identifier in input_dict.items(): - if identifier_type not in db_dict.keys(): - db_session.add(identifier) - changed = True - return changed, error - - -@EditBook.route("/ajax/delete/", methods=["POST"]) +@editbook.route("/ajax/delete/", methods=["POST"]) @login_required def delete_book_from_details(book_id): return Response(delete_book_from_table(book_id, "", True), mimetype='application/json') -@EditBook.route("/delete/", defaults={'book_format': ""}, methods=["POST"]) -@EditBook.route("/delete//", methods=["POST"]) +@editbook.route("/delete/", defaults={'book_format': ""}, methods=["POST"]) +@editbook.route("/delete//", methods=["POST"]) @login_required def delete_book_ajax(book_id, book_format): return delete_book_from_table(book_id, book_format, False) +@editbook.route("/admin/book/", methods=['GET']) +@login_required_if_no_ano +@edit_required +def show_edit_book(book_id): + return render_edit_book(book_id) + + +@editbook.route("/admin/book/", methods=['POST']) +@login_required_if_no_ano +@edit_required +def edit_book(book_id): + modify_date = False + edit_error = False + + # create the function for sorting... 
+ calibre_db.update_title_sort(config) + + book = calibre_db.get_filtered_book(book_id, allow_show_archived=True) + # Book not found + if not book: + flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), + category="error") + return redirect(url_for("web.index")) + + to_save = request.form.to_dict() + + try: + # Update folder of book on local disk + edited_books_id = None + title_author_error = None + # handle book title change + title_change = handle_title_on_edit(book, to_save["book_title"]) + # handle book author change + input_authors, author_change, renamed = handle_author_on_edit(book, to_save["author_name"]) + if author_change or title_change: + edited_books_id = book.id + modify_date = True + title_author_error = helper.update_dir_structure(edited_books_id, + config.config_calibre_dir, + input_authors[0], + renamed_author=renamed) + if title_author_error: + flash(title_author_error, category="error") + calibre_db.session.rollback() + book = calibre_db.get_filtered_book(book_id, allow_show_archived=True) + + # handle upload other formats from local disk + meta = upload_single_file(request, book, book_id) + # only merge metadata if file was uploaded and no error occurred (meta equals not false or none) + if meta: + merge_metadata(to_save, meta) + # handle upload covers from local disk + cover_upload_success = upload_cover(request, book) + if cover_upload_success: + book.has_cover = 1 + modify_date = True + + # upload new covers or new file formats to google drive + if config.config_use_google_drive: + gdriveutils.updateGdriveCalibreFromLocal() + + if to_save.get("cover_url", None): + if not current_user.role_upload(): + edit_error = True + flash(_(u"User has no rights to upload cover"), category="error") + if to_save["cover_url"].endswith('/static/generic_cover.jpg'): + book.has_cover = 0 + else: + result, error = helper.save_cover_from_url(to_save["cover_url"].strip(), book.path) + if result is True: + book.has_cover = 1 + modify_date = True + helper.replace_cover_thumbnail_cache(book.id) + else: + flash(error, category="error") + + # Add default series_index to book + modify_date |= edit_book_series_index(to_save["series_index"], book) + # Handle book comments/description + modify_date |= edit_book_comments(Markup(to_save['description']).unescape(), book) + # Handle identifiers + input_identifiers = identifier_list(to_save, book) + modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session) + if warning: + flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning") + modify_date |= modification + # Handle book tags + modify_date |= edit_book_tags(to_save['tags'], book) + # Handle book series + modify_date |= edit_book_series(to_save["series"], book) + # handle book publisher + modify_date |= edit_book_publisher(to_save['publisher'], book) + # handle book languages + try: + modify_date |= edit_book_languages(to_save['languages'], book) + except ValueError as e: + flash(str(e), category="error") + edit_error = True + # handle book ratings + modify_date |= edit_book_ratings(to_save, book) + # handle cc data + modify_date |= edit_all_cc_data(book_id, book, to_save) + + if to_save.get("pubdate", None): + try: + book.pubdate = datetime.strptime(to_save["pubdate"], "%Y-%m-%d") + except ValueError as e: + book.pubdate = db.Books.DEFAULT_PUBDATE + flash(str(e), category="error") + edit_error = True + else: + book.pubdate = db.Books.DEFAULT_PUBDATE + + if modify_date: + 
book.last_modified = datetime.utcnow() + kobo_sync_status.remove_synced_book(edited_books_id, all=True) + + calibre_db.session.merge(book) + calibre_db.session.commit() + if config.config_use_google_drive: + gdriveutils.updateGdriveCalibreFromLocal() + if meta is not False \ + and edit_error is not True \ + and title_author_error is not True \ + and cover_upload_success is not False: + flash(_("Metadata successfully updated"), category="success") + if "detail_view" in to_save: + return redirect(url_for('web.show_book', book_id=book.id)) + else: + return render_edit_book(book_id) + except ValueError as e: + log.error_or_exception("Error: {}".format(e)) + calibre_db.session.rollback() + flash(str(e), category="error") + return redirect(url_for('web.show_book', book_id=book.id)) + except (OperationalError, IntegrityError) as e: + log.error_or_exception("Database error: {}".format(e)) + calibre_db.session.rollback() + flash(_(u"Database error: %(error)s.", error=e.orig), category="error") + return redirect(url_for('web.show_book', book_id=book.id)) + except Exception as ex: + log.error_or_exception(ex) + calibre_db.session.rollback() + flash(_("Error editing book: {}".format(ex)), category="error") + return redirect(url_for('web.show_book', book_id=book.id)) + + +@editbook.route("/upload", methods=["POST"]) +@login_required_if_no_ano +@upload_required +def upload(): + if not config.config_uploading: + abort(404) + if request.method == 'POST' and 'btn-upload' in request.files: + for requested_file in request.files.getlist("btn-upload"): + try: + modify_date = False + # create the function for sorting... + calibre_db.update_title_sort(config) + calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4())) + + meta, error = file_handling_on_upload(requested_file) + if error: + return error + + db_book, input_authors, title_dir, renamed_authors = create_book_on_upload(modify_date, meta) + + # Comments need book id therefore only possible after flush + modify_date |= edit_book_comments(Markup(meta.description).unescape(), db_book) + + book_id = db_book.id + title = db_book.title + if config.config_use_google_drive: + helper.upload_new_file_gdrive(book_id, + input_authors[0], + renamed_authors, + title, + title_dir, + meta.file_path, + meta.extension.lower()) + else: + error = helper.update_dir_structure(book_id, + config.config_calibre_dir, + input_authors[0], + meta.file_path, + title_dir + meta.extension.lower(), + renamed_author=renamed_authors) + + move_coverfile(meta, db_book) + + # save data to database, reread data + calibre_db.session.commit() + + if config.config_use_google_drive: + gdriveutils.updateGdriveCalibreFromLocal() + if error: + flash(error, category="error") + link = '{}'.format(url_for('web.show_book', book_id=book_id), escape(title)) + upload_text = N_(u"File %(file)s uploaded", file=link) + WorkerThread.add(current_user.name, TaskUpload(upload_text, escape(title))) + helper.add_book_to_thumbnail_cache(book_id) + + if len(request.files.getlist("btn-upload")) < 2: + if current_user.role_edit() or current_user.role_admin(): + resp = {"location": url_for('edit-book.show_edit_book', book_id=book_id)} + return Response(json.dumps(resp), mimetype='application/json') + else: + resp = {"location": url_for('web.show_book', book_id=book_id)} + return Response(json.dumps(resp), mimetype='application/json') + except (OperationalError, IntegrityError) as e: + calibre_db.session.rollback() + log.error_or_exception("Database error: {}".format(e)) + 
flash(_(u"Database error: %(error)s.", error=e.orig), category="error") + return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') + + +@editbook.route("/admin/book/convert/", methods=['POST']) +@login_required_if_no_ano +@edit_required +def convert_bookformat(book_id): + # check to see if we have form fields to work with - if not send user back + book_format_from = request.form.get('book_format_from', None) + book_format_to = request.form.get('book_format_to', None) + + if (book_format_from is None) or (book_format_to is None): + flash(_(u"Source or destination format for conversion missing"), category="error") + return redirect(url_for('edit-book.show_edit_book', book_id=book_id)) + + log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to) + rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(), + book_format_to.upper(), current_user.name) + + if rtn is None: + flash(_(u"Book successfully queued for converting to %(book_format)s", + book_format=book_format_to), + category="success") + else: + flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error") + return redirect(url_for('edit-book.show_edit_book', book_id=book_id)) + + +@editbook.route("/ajax/getcustomenum/") +@login_required +def table_get_custom_enum(c_id): + ret = list() + cc = (calibre_db.session.query(db.CustomColumns) + .filter(db.CustomColumns.id == c_id) + .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions)).one_or_none()) + ret.append({'value': "", 'text': ""}) + for idx, en in enumerate(cc.get_display_dict()['enum_values']): + ret.append({'value': en, 'text': en}) + return json.dumps(ret) + + +@editbook.route("/ajax/editbooks/", methods=['POST']) +@login_required_if_no_ano +@edit_required +def edit_list_book(param): + vals = request.form.to_dict() + book = calibre_db.get_book(vals['pk']) + sort_param = "" + ret = "" + try: + if param == 'series_index': + edit_book_series_index(vals['value'], book) + ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json') + elif param == 'tags': + edit_book_tags(vals['value'], book) + ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}), + mimetype='application/json') + elif param == 'series': + edit_book_series(vals['value'], book) + ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}), + mimetype='application/json') + elif param == 'publishers': + edit_book_publisher(vals['value'], book) + ret = Response(json.dumps({'success': True, + 'newValue': ', '.join([publisher.name for publisher in book.publishers])}), + mimetype='application/json') + elif param == 'languages': + invalid = list() + edit_book_languages(vals['value'], book, invalid=invalid) + if invalid: + ret = Response(json.dumps({'success': False, + 'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}), + mimetype='application/json') + else: + lang_names = list() + for lang in book.languages: + lang_names.append(isoLanguages.get_language_name(get_locale(), lang.lang_code)) + ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}), + mimetype='application/json') + elif param == 'author_sort': + book.author_sort = vals['value'] + ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}), + mimetype='application/json') + elif param == 'title': + sort_param = book.sort + if 
handle_title_on_edit(book, vals.get('value', "")): + rename_error = helper.update_dir_structure(book.id, config.config_calibre_dir) + if not rename_error: + ret = Response(json.dumps({'success': True, 'newValue': book.title}), + mimetype='application/json') + else: + ret = Response(json.dumps({'success': False, + 'msg': rename_error}), + mimetype='application/json') + elif param == 'sort': + book.sort = vals['value'] + ret = Response(json.dumps({'success': True, 'newValue': book.sort}), + mimetype='application/json') + elif param == 'comments': + edit_book_comments(vals['value'], book) + ret = Response(json.dumps({'success': True, 'newValue': book.comments[0].text}), + mimetype='application/json') + elif param == 'authors': + input_authors, __, renamed = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true") + rename_error = helper.update_dir_structure(book.id, config.config_calibre_dir, input_authors[0], + renamed_author=renamed) + if not rename_error: + ret = Response(json.dumps({ + 'success': True, + 'newValue': ' & '.join([author.replace('|', ',') for author in input_authors])}), + mimetype='application/json') + else: + ret = Response(json.dumps({'success': False, + 'msg': rename_error}), + mimetype='application/json') + elif param == 'is_archived': + is_archived = change_archived_books(book.id, vals['value'] == "True", + message="Book {} archive bit set to: {}".format(book.id, vals['value'])) + if is_archived: + kobo_sync_status.remove_synced_book(book.id) + return "" + elif param == 'read_status': + ret = helper.edit_book_read_status(book.id, vals['value'] == "True") + if ret: + return ret, 400 + elif param.startswith("custom_column_"): + new_val = dict() + new_val[param] = vals['value'] + edit_single_cc_data(book.id, book, param[14:], new_val) + # ToDo: Very hacky find better solution + if vals['value'] in ["True", "False"]: + ret = "" + else: + ret = Response(json.dumps({'success': True, 'newValue': vals['value']}), + mimetype='application/json') + else: + return _("Parameter not found"), 400 + book.last_modified = datetime.utcnow() + + calibre_db.session.commit() + # revert change for sort if automatic fields link is deactivated + if param == 'title' and vals.get('checkT') == "false": + book.sort = sort_param + calibre_db.session.commit() + except (OperationalError, IntegrityError) as e: + calibre_db.session.rollback() + log.error_or_exception("Database error: {}".format(e)) + ret = Response(json.dumps({'success': False, + 'msg': 'Database error: {}'.format(e.orig)}), + mimetype='application/json') + return ret + + +@editbook.route("/ajax/sort_value//") +@login_required +def get_sorted_entry(field, bookid): + if field in ['title', 'authors', 'sort', 'author_sort']: + book = calibre_db.get_filtered_book(bookid) + if book: + if field == 'title': + return json.dumps({'sort': book.sort}) + elif field == 'authors': + return json.dumps({'author_sort': book.author_sort}) + if field == 'sort': + return json.dumps({'sort': book.title}) + if field == 'author_sort': + return json.dumps({'author_sort': book.author}) + return "" + + +@editbook.route("/ajax/simulatemerge", methods=['POST']) +@login_required +@edit_required +def simulate_merge_list_book(): + vals = request.get_json().get('Merge_books') + if vals: + to_book = calibre_db.get_book(vals[0]).title + vals.pop(0) + if to_book: + from_book = [] + for book_id in vals: + from_book.append(calibre_db.get_book(book_id).title) + return json.dumps({'to': to_book, 'from': from_book}) + return "" + + 
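
The simulate-merge route added above expects a JSON body whose Merge_books list names the target book id first and the source ids after it, and it answers with the resolved titles. A hedged client-side sketch, assuming a locally running instance; the host, port, and prior login/CSRF handling are assumptions and are omitted, not part of the patch.

# Hedged sketch: previewing a merge via the new /ajax/simulatemerge route.
# The base URL is an assumption; authentication and CSRF handling are omitted.
import requests

session = requests.Session()
# ... log in here first so the session carries a valid cookie (omitted) ...
resp = session.post(
    "http://localhost:8083/ajax/simulatemerge",   # assumed host/port
    json={"Merge_books": [5, 9, 12]},             # merge books 9 and 12 into book 5
)
print(resp.json())
# e.g. {'to': 'Title of book 5', 'from': ['Title of book 9', 'Title of book 12']}
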
+@editbook.route("/ajax/mergebooks", methods=['POST']) +@login_required +@edit_required +def merge_list_book(): + vals = request.get_json().get('Merge_books') + to_file = list() + if vals: + # load all formats from target book + to_book = calibre_db.get_book(vals[0]) + vals.pop(0) + if to_book: + for file in to_book.data: + to_file.append(file.format) + to_name = helper.get_valid_filename(to_book.title, + chars=96) + ' - ' + helper.get_valid_filename(to_book.authors[0].name, + chars=96) + for book_id in vals: + from_book = calibre_db.get_book(book_id) + if from_book: + for element in from_book.data: + if element.format not in to_file: + # create new data entry with: book_id, book_format, uncompressed_size, name + filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir, + to_book.path, + to_name + "." + element.format.lower())) + filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir, + from_book.path, + element.name + "." + element.format.lower())) + copyfile(filepath_old, filepath_new) + to_book.data.append(db.Data(to_book.id, + element.format, + element.uncompressed_size, + to_name)) + delete_book_from_table(from_book.id, "", True) + return json.dumps({'success': True}) + return "" + + +@editbook.route("/ajax/xchange", methods=['POST']) +@login_required +@edit_required +def table_xchange_author_title(): + vals = request.get_json().get('xchange') + edited_books_id = False + if vals: + for val in vals: + modify_date = False + book = calibre_db.get_book(val) + authors = book.title + book.authors = calibre_db.order_authors([book]) + author_names = [] + for authr in book.authors: + author_names.append(authr.name.replace('|', ',')) + + title_change = handle_title_on_edit(book, " ".join(author_names)) + input_authors, author_change, renamed = handle_author_on_edit(book, authors) + if author_change or title_change: + edited_books_id = book.id + modify_date = True + + if config.config_use_google_drive: + gdriveutils.updateGdriveCalibreFromLocal() + + if edited_books_id: + # toDo: Handle error + edit_error = helper.update_dir_structure(edited_books_id, config.config_calibre_dir, input_authors[0], + renamed_author=renamed) + if modify_date: + book.last_modified = datetime.utcnow() + try: + calibre_db.session.commit() + except (OperationalError, IntegrityError) as e: + calibre_db.session.rollback() + log.error_or_exception("Database error: %s", e) + return json.dumps({'success': False}) + + if config.config_use_google_drive: + gdriveutils.updateGdriveCalibreFromLocal() + return json.dumps({'success': True}) + return "" + + +def merge_metadata(to_save, meta): + if to_save.get('author_name', "") == _(u'Unknown'): + to_save['author_name'] = '' + if to_save.get('book_title', "") == _(u'Unknown'): + to_save['book_title'] = '' + for s_field, m_field in [ + ('tags', 'tags'), ('author_name', 'author'), ('series', 'series'), + ('series_index', 'series_id'), ('languages', 'languages'), + ('book_title', 'title')]: + to_save[s_field] = to_save[s_field] or getattr(meta, m_field, '') + to_save["description"] = to_save["description"] or Markup( + getattr(meta, 'description', '')).unescape() + + +def identifier_list(to_save, book): + """Generate a list of Identifiers from form information""" + id_type_prefix = 'identifier-type-' + id_val_prefix = 'identifier-val-' + result = [] + for type_key, type_value in to_save.items(): + if not type_key.startswith(id_type_prefix): + continue + val_key = id_val_prefix + type_key[len(id_type_prefix):] + if val_key not in to_save.keys(): + continue 
+ result.append(db.Identifiers(to_save[val_key], type_value, book.id)) + return result + + +def prepare_authors(authr): + # handle authors + input_authors = authr.split('&') + # handle_authors(input_authors) + input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors)) + # Remove duplicates in authors list + input_authors = helper.uniq(input_authors) + + # we have all author names now + if input_authors == ['']: + input_authors = [_(u'Unknown')] # prevent empty Author + + renamed = list() + for in_aut in input_authors: + renamed_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == in_aut).first() + if renamed_author and in_aut != renamed_author.name: + renamed.append(renamed_author.name) + all_books = calibre_db.session.query(db.Books) \ + .filter(db.Books.authors.any(db.Authors.name == renamed_author.name)).all() + sorted_renamed_author = helper.get_sorted_author(renamed_author.name) + sorted_old_author = helper.get_sorted_author(in_aut) + for one_book in all_books: + one_book.author_sort = one_book.author_sort.replace(sorted_renamed_author, sorted_old_author) + return input_authors, renamed + + +def prepare_authors_on_upload(title, authr): + if title != _(u'Unknown') and authr != _(u'Unknown'): + entry = calibre_db.check_exists_book(authr, title) + if entry: + log.info("Uploaded book probably exists in library") + flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ") + + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning") + + input_authors, renamed = prepare_authors(authr) + + sort_authors_list = list() + db_author = None + for inp in input_authors: + stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first() + if not stored_author: + if not db_author: + db_author = db.Authors(inp, helper.get_sorted_author(inp), "") + calibre_db.session.add(db_author) + calibre_db.session.commit() + sort_author = helper.get_sorted_author(inp) + else: + if not db_author: + db_author = stored_author + sort_author = stored_author.sort + sort_authors_list.append(sort_author) + sort_authors = ' & '.join(sort_authors_list) + return sort_authors, input_authors, db_author, renamed + + +def create_book_on_upload(modify_date, meta): + title = meta.title + authr = meta.author + sort_authors, input_authors, db_author, renamed_authors = prepare_authors_on_upload(title, authr) + + title_dir = helper.get_valid_filename(title, chars=96) + author_dir = helper.get_valid_filename(db_author.name, chars=96) + + # combine path and normalize path from Windows systems + path = os.path.join(author_dir, title_dir).replace('\\', '/') + + try: + pubdate = datetime.strptime(meta.pubdate[:10], "%Y-%m-%d") + except ValueError: + pubdate = datetime(101, 1, 1) + + # Calibre adds books with utc as timezone + db_book = db.Books(title, "", sort_authors, datetime.utcnow(), pubdate, + '1', datetime.utcnow(), path, meta.cover, db_author, [], "") + + modify_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session, + 'author') + + # Add series_index to book + modify_date |= edit_book_series_index(meta.series_id, db_book) + + # add languages + invalid = [] + modify_date |= edit_book_languages(meta.languages, db_book, upload_mode=True, invalid=invalid) + if invalid: + for lang in invalid: + flash(_(u"'%(langname)s' is not a valid language", langname=lang), category="warning") + + # handle tags + modify_date |= edit_book_tags(meta.tags, db_book) + + # handle 
publisher + modify_date |= edit_book_publisher(meta.publisher, db_book) + + # handle series + modify_date |= edit_book_series(meta.series, db_book) + + # Add file to book + file_size = os.path.getsize(meta.file_path) + db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir) + db_book.data.append(db_data) + calibre_db.session.add(db_book) + + # flush content, get db_book.id available + calibre_db.session.flush() + + # Handle identifiers now that db_book.id is available + identifier_list = [] + for type_key, type_value in meta.identifiers: + identifier_list.append(db.Identifiers(type_value, type_key, db_book.id)) + modification, warning = modify_identifiers(identifier_list, db_book.identifiers, calibre_db.session) + if warning: + flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning") + modify_date |= modification + + return db_book, input_authors, title_dir, renamed_authors + + +def file_handling_on_upload(requested_file): + # check if file extension is correct + if '.' in requested_file.filename: + file_ext = requested_file.filename.rsplit('.', 1)[-1].lower() + if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD: + flash( + _("File extension '%(ext)s' is not allowed to be uploaded to this server", + ext=file_ext), category="error") + return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') + else: + flash(_('File to be uploaded must have an extension'), category="error") + return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') + + # extract metadata from file + try: + meta = uploader.upload(requested_file, config.config_rarfile_location) + except (IOError, OSError): + log.error("File %s could not saved to temp dir", requested_file.filename) + flash(_(u"File %(filename)s could not saved to temp dir", + filename=requested_file.filename), category="error") + return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') + return meta, None + + +def move_coverfile(meta, db_book): + # move cover to final directory, including book id + if meta.cover: + cover_file = meta.cover + else: + cover_file = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg') + new_cover_path = os.path.join(config.config_calibre_dir, db_book.path) + try: + os.makedirs(new_cover_path, exist_ok=True) + copyfile(cover_file, os.path.join(new_cover_path, "cover.jpg")) + if meta.cover: + os.unlink(meta.cover) + except OSError as e: + log.error("Failed to move cover file %s: %s", new_cover_path, e) + flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=new_cover_path, + error=e), + category="error") + + def delete_whole_book(book_id, book): - # delete book from Shelfs, Downloads, Read list + # delete book from shelves, Downloads, Read list ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete() ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete() ub.delete_download(book_id) @@ -383,7 +900,7 @@ def render_edit_book(book_id): for authr in book.authors: author_names.append(authr.name.replace('|', ',')) - # Option for showing convertbook button + # Option for showing convert_book button valid_source_formats = list() allowed_conversion_formats = list() kepub_possible = None @@ -413,11 +930,11 @@ def render_edit_book(book_id): def edit_book_ratings(to_save, book): changed = False - if to_save.get("rating","").strip(): + if to_save.get("rating", "").strip(): 
old_rating = False if len(book.ratings) > 0: old_rating = book.ratings[0].rating - rating_x2 = int(float(to_save.get("rating","")) * 2) + rating_x2 = int(float(to_save.get("rating", "")) * 2) if rating_x2 != old_rating: changed = True is_rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == rating_x2).first() @@ -622,8 +1139,9 @@ def edit_cc_data(book_id, book, to_save, cc): 'custom') return changed + # returns None if no file is uploaded -# returns False if an error occours, in all other cases the ebook metadata is returned +# returns False if an error occurs, in all other cases the ebook metadata is returned def upload_single_file(file_request, book, book_id): # Check and handle Uploaded file requested_file = file_request.files.get('btn-upload-format', None) @@ -676,11 +1194,11 @@ def upload_single_file(file_request, book, book_id): calibre_db.session.rollback() log.error_or_exception("Database error: {}".format(e)) flash(_(u"Database error: %(error)s.", error=e.orig), category="error") - return False # return redirect(url_for('web.show_book', book_id=book.id)) + return False # return redirect(url_for('web.show_book', book_id=book.id)) # Queue uploader info link = '{}'.format(url_for('web.show_book', book_id=book.id), escape(book.title)) - upload_text = _(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=link) + upload_text = N_(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=link) WorkerThread.add(current_user.name, TaskUpload(upload_text, escape(book.title))) return uploader.process( @@ -688,6 +1206,7 @@ def upload_single_file(file_request, book, book_id): rarExecutable=config.config_rarfile_location) return None + def upload_cover(cover_request, book): requested_file = cover_request.files.get('btn-upload-cover', None) if requested_file: @@ -698,6 +1217,7 @@ def upload_cover(cover_request, book): return False ret, message = helper.save_cover(requested_file, book.path) if ret is True: + helper.replace_cover_thumbnail_cache(book.id) return True else: flash(message, category="error") @@ -738,674 +1258,157 @@ def handle_author_on_edit(book, author_name, update_stored=True): change = True return input_authors, change, renamed -@EditBook.route("/admin/book/", methods=['GET']) -@login_required_if_no_ano -@edit_required -def show_edit_book(book_id): - return render_edit_book(book_id) + +def search_objects_remove(db_book_object, db_type, input_elements): + del_elements = [] + for c_elements in db_book_object: + found = False + if db_type == 'languages': + type_elements = c_elements.lang_code + elif db_type == 'custom': + type_elements = c_elements.value + else: + type_elements = c_elements.name + for inp_element in input_elements: + if inp_element.lower() == type_elements.lower(): + found = True + break + # if the element was not found in the new list, add it to remove list + if not found: + del_elements.append(c_elements) + return del_elements -@EditBook.route("/admin/book/", methods=['POST']) -@login_required_if_no_ano -@edit_required -def edit_book(book_id): - modify_date = False - edit_error = False - - # create the function for sorting... - try: - calibre_db.update_title_sort(config) - except sqliteOperationalError as e: - log.error_or_exception(e) - calibre_db.session.rollback() - - book = calibre_db.get_filtered_book(book_id, allow_show_archived=True) - # Book not found - if not book: - flash(_(u"Oops! Selected book title is unavailable. 
File does not exist or is not accessible"), - category="error") - return redirect(url_for("web.index")) - - to_save = request.form.to_dict() - - try: - # Update folder of book on local disk - edited_books_id = None - title_author_error = None - # handle book title change - title_change = handle_title_on_edit(book, to_save["book_title"]) - # handle book author change - input_authors, author_change, renamed = handle_author_on_edit(book, to_save["author_name"]) - if author_change or title_change: - edited_books_id = book.id - modify_date = True - title_author_error = helper.update_dir_structure(edited_books_id, - config.config_calibre_dir, - input_authors[0], - renamed_author=renamed) - if title_author_error: - flash(title_author_error, category="error") - calibre_db.session.rollback() - book = calibre_db.get_filtered_book(book_id, allow_show_archived=True) - - # handle upload other formats from local disk - meta = upload_single_file(request, book, book_id) - # only merge metadata if file was uploaded and no error occurred (meta equals not false or none) - if meta: - merge_metadata(to_save, meta) - # handle upload covers from local disk - cover_upload_success = upload_cover(request, book) - if cover_upload_success: - book.has_cover = 1 - modify_date = True - - # upload new covers or new file formats to google drive - if config.config_use_google_drive: - gdriveutils.updateGdriveCalibreFromLocal() - - if to_save.get("cover_url", None): - if not current_user.role_upload(): - edit_error = True - flash(_(u"User has no rights to upload cover"), category="error") - if to_save["cover_url"].endswith('/static/generic_cover.jpg'): - book.has_cover = 0 +def search_objects_add(db_book_object, db_type, input_elements): + add_elements = [] + for inp_element in input_elements: + found = False + for c_elements in db_book_object: + if db_type == 'languages': + type_elements = c_elements.lang_code + elif db_type == 'custom': + type_elements = c_elements.value else: - result, error = helper.save_cover_from_url(to_save["cover_url"].strip(), book.path) - if result is True: - book.has_cover = 1 - modify_date = True - else: - flash(error, category="error") - - # Add default series_index to book - modify_date |= edit_book_series_index(to_save["series_index"], book) - # Handle book comments/description - modify_date |= edit_book_comments(Markup(to_save['description']).unescape(), book) - # Handle identifiers - input_identifiers = identifier_list(to_save, book) - modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session) - if warning: - flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning") - modify_date |= modification - # Handle book tags - modify_date |= edit_book_tags(to_save['tags'], book) - # Handle book series - modify_date |= edit_book_series(to_save["series"], book) - # handle book publisher - modify_date |= edit_book_publisher(to_save['publisher'], book) - # handle book languages - try: - modify_date |= edit_book_languages(to_save['languages'], book) - except ValueError as e: - flash(str(e), category="error") - edit_error = True - # handle book ratings - modify_date |= edit_book_ratings(to_save, book) - # handle cc data - modify_date |= edit_all_cc_data(book_id, book, to_save) - - if to_save.get("pubdate", None): - try: - book.pubdate = datetime.strptime(to_save["pubdate"], "%Y-%m-%d") - except ValueError as e: - book.pubdate = db.Books.DEFAULT_PUBDATE - flash(str(e), category="error") - edit_error = True - else: - book.pubdate = 
db.Books.DEFAULT_PUBDATE - - if modify_date: - book.last_modified = datetime.utcnow() - kobo_sync_status.remove_synced_book(edited_books_id, all=True) - - calibre_db.session.merge(book) - calibre_db.session.commit() - if config.config_use_google_drive: - gdriveutils.updateGdriveCalibreFromLocal() - if meta is not False \ - and edit_error is not True \ - and title_author_error is not True \ - and cover_upload_success is not False: - flash(_("Metadata successfully updated"), category="success") - if "detail_view" in to_save: - return redirect(url_for('web.show_book', book_id=book.id)) - else: - return render_edit_book(book_id) - except ValueError as e: - log.error_or_exception("Error: {}".format(e)) - calibre_db.session.rollback() - flash(str(e), category="error") - return redirect(url_for('web.show_book', book_id=book.id)) - except (OperationalError, IntegrityError) as e: - log.error_or_exception("Database error: {}".format(e)) - calibre_db.session.rollback() - flash(_(u"Database error: %(error)s.", error=e.orig), category="error") - return redirect(url_for('web.show_book', book_id=book.id)) - except Exception as ex: - log.error_or_exception(ex) - calibre_db.session.rollback() - flash(_("Error editing book: {}".format(ex)), category="error") - return redirect(url_for('web.show_book', book_id=book.id)) + type_elements = c_elements.name + if inp_element == type_elements: + found = True + break + if not found: + add_elements.append(inp_element) + return add_elements -def merge_metadata(to_save, meta): - if to_save.get('author_name', "") == _(u'Unknown'): - to_save['author_name'] = '' - if to_save.get('book_title', "") == _(u'Unknown'): - to_save['book_title'] = '' - for s_field, m_field in [ - ('tags', 'tags'), ('author_name', 'author'), ('series', 'series'), - ('series_index', 'series_id'), ('languages', 'languages'), - ('book_title', 'title')]: - to_save[s_field] = to_save[s_field] or getattr(meta, m_field, '') - to_save["description"] = to_save["description"] or Markup( - getattr(meta, 'description', '')).unescape() +def remove_objects(db_book_object, db_session, del_elements): + changed = False + if len(del_elements) > 0: + for del_element in del_elements: + db_book_object.remove(del_element) + changed = True + if len(del_element.books) == 0: + db_session.delete(del_element) + return changed -def identifier_list(to_save, book): - """Generate a list of Identifiers from form information""" - id_type_prefix = 'identifier-type-' - id_val_prefix = 'identifier-val-' - result = [] - for type_key, type_value in to_save.items(): - if not type_key.startswith(id_type_prefix): - continue - val_key = id_val_prefix + type_key[len(id_type_prefix):] - if val_key not in to_save.keys(): - continue - result.append(db.Identifiers(to_save[val_key], type_value, book.id)) - return result - - -def prepare_authors(authr): - # handle authors - input_authors = authr.split('&') - # handle_authors(input_authors) - input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors)) - # Remove duplicates in authors list - input_authors = helper.uniq(input_authors) - - # we have all author names now - if input_authors == ['']: - input_authors = [_(u'Unknown')] # prevent empty Author - - renamed = list() - for in_aut in input_authors: - renamed_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == in_aut).first() - if renamed_author and in_aut != renamed_author.name: - renamed.append(renamed_author.name) - all_books = calibre_db.session.query(db.Books) \ - 
.filter(db.Books.authors.any(db.Authors.name == renamed_author.name)).all() - sorted_renamed_author = helper.get_sorted_author(renamed_author.name) - sorted_old_author = helper.get_sorted_author(in_aut) - for one_book in all_books: - one_book.author_sort = one_book.author_sort.replace(sorted_renamed_author, sorted_old_author) - return input_authors, renamed - - -def prepare_authors_on_upload(title, authr): - if title != _(u'Unknown') and authr != _(u'Unknown'): - entry = calibre_db.check_exists_book(authr, title) - if entry: - log.info("Uploaded book probably exists in library") - flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ") - + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning") - - input_authors, renamed = prepare_authors(authr) - - sort_authors_list = list() - db_author = None - for inp in input_authors: - stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first() - if not stored_author: - if not db_author: - db_author = db.Authors(inp, helper.get_sorted_author(inp), "") - calibre_db.session.add(db_author) - calibre_db.session.commit() - sort_author = helper.get_sorted_author(inp) - else: - if not db_author: - db_author = stored_author - sort_author = stored_author.sort - sort_authors_list.append(sort_author) - sort_authors = ' & '.join(sort_authors_list) - return sort_authors, input_authors, db_author, renamed - - -def create_book_on_upload(modify_date, meta): - title = meta.title - authr = meta.author - sort_authors, input_authors, db_author, renamed_authors = prepare_authors_on_upload(title, authr) - - title_dir = helper.get_valid_filename(title, chars=96) - author_dir = helper.get_valid_filename(db_author.name, chars=96) - - # combine path and normalize path from Windows systems - path = os.path.join(author_dir, title_dir).replace('\\', '/') - - try: - pubdate = datetime.strptime(meta.pubdate[:10], "%Y-%m-%d") - except: - pubdate = datetime(101, 1, 1) - - # Calibre adds books with utc as timezone - db_book = db.Books(title, "", sort_authors, datetime.utcnow(), pubdate, - '1', datetime.utcnow(), path, meta.cover, db_author, [], "") - - modify_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session, - 'author') - - # Add series_index to book - modify_date |= edit_book_series_index(meta.series_id, db_book) - - # add languages - invalid = [] - modify_date |= edit_book_languages(meta.languages, db_book, upload_mode=True, invalid=invalid) - if invalid: - for lang in invalid: - flash(_(u"'%(langname)s' is not a valid language", langname=lang), category="warning") - - # handle tags - modify_date |= edit_book_tags(meta.tags, db_book) - - # handle publisher - modify_date |= edit_book_publisher(meta.publisher, db_book) - - # handle series - modify_date |= edit_book_series(meta.series, db_book) - - # Add file to book - file_size = os.path.getsize(meta.file_path) - db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir) - db_book.data.append(db_data) - calibre_db.session.add(db_book) - - # flush content, get db_book.id available - calibre_db.session.flush() - - # Handle identifiers now that db_book.id is available - identifier_list = [] - for type_key, type_value in meta.identifiers: - identifier_list.append(db.Identifiers(type_value, type_key, db_book.id)) - modification, warning = modify_identifiers(identifier_list, db_book.identifiers, calibre_db.session) - if warning: - flash(_("Identifiers are not Case 
Sensitive, Overwriting Old Identifier"), category="warning") - modify_date |= modification - - return db_book, input_authors, title_dir, renamed_authors - - -def file_handling_on_upload(requested_file): - # check if file extension is correct - if '.' in requested_file.filename: - file_ext = requested_file.filename.rsplit('.', 1)[-1].lower() - if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD: - flash( - _("File extension '%(ext)s' is not allowed to be uploaded to this server", - ext=file_ext), category="error") - return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') +def add_objects(db_book_object, db_object, db_session, db_type, add_elements): + changed = False + if db_type == 'languages': + db_filter = db_object.lang_code + elif db_type == 'custom': + db_filter = db_object.value else: - flash(_('File to be uploaded must have an extension'), category="error") - return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') - - # extract metadata from file - try: - meta = uploader.upload(requested_file, config.config_rarfile_location) - except (IOError, OSError): - log.error("File %s could not saved to temp dir", requested_file.filename) - flash(_(u"File %(filename)s could not saved to temp dir", - filename=requested_file.filename), category="error") - return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') - return meta, None - - -def move_coverfile(meta, db_book): - # move cover to final directory, including book id - if meta.cover: - coverfile = meta.cover - else: - coverfile = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg') - new_coverpath = os.path.join(config.config_calibre_dir, db_book.path) - try: - os.makedirs(new_coverpath, exist_ok=True) - copyfile(coverfile, os.path.join(new_coverpath, "cover.jpg")) - if meta.cover: - os.unlink(meta.cover) - except OSError as e: - log.error("Failed to move cover file %s: %s", new_coverpath, e) - flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=new_coverpath, - error=e), - category="error") - - -@EditBook.route("/upload", methods=["POST"]) -@login_required_if_no_ano -@upload_required -def upload(): - if not config.config_uploading: - abort(404) - if request.method == 'POST' and 'btn-upload' in request.files: - for requested_file in request.files.getlist("btn-upload"): - try: - modify_date = False - # create the function for sorting... 
- calibre_db.update_title_sort(config) - calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4())) - - meta, error = file_handling_on_upload(requested_file) - if error: - return error - - db_book, input_authors, title_dir, renamed_authors = create_book_on_upload(modify_date, meta) - - # Comments need book id therefore only possible after flush - modify_date |= edit_book_comments(Markup(meta.description).unescape(), db_book) - - book_id = db_book.id - title = db_book.title - if config.config_use_google_drive: - helper.upload_new_file_gdrive(book_id, - input_authors[0], - renamed_authors, - title, - title_dir, - meta.file_path, - meta.extension.lower()) - else: - error = helper.update_dir_structure(book_id, - config.config_calibre_dir, - input_authors[0], - meta.file_path, - title_dir + meta.extension.lower(), - renamed_author=renamed_authors) - - move_coverfile(meta, db_book) - - # save data to database, reread data - calibre_db.session.commit() - - if config.config_use_google_drive: - gdriveutils.updateGdriveCalibreFromLocal() - if error: - flash(error, category="error") - link = '{}'.format(url_for('web.show_book', book_id=book_id), escape(title)) - upload_text = _(u"File %(file)s uploaded", file=link) - WorkerThread.add(current_user.name, TaskUpload(upload_text, escape(title))) - - if len(request.files.getlist("btn-upload")) < 2: - if current_user.role_edit() or current_user.role_admin(): - resp = {"location": url_for('edit-book.show_edit_book', book_id=book_id)} - return Response(json.dumps(resp), mimetype='application/json') - else: - resp = {"location": url_for('web.show_book', book_id=book_id)} - return Response(json.dumps(resp), mimetype='application/json') - except (OperationalError, IntegrityError) as e: - calibre_db.session.rollback() - log.error_or_exception("Database error: {}".format(e)) - flash(_(u"Database error: %(error)s.", error=e.orig), category="error") - return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') - - -@EditBook.route("/admin/book/convert/", methods=['POST']) -@login_required_if_no_ano -@edit_required -def convert_bookformat(book_id): - # check to see if we have form fields to work with - if not send user back - book_format_from = request.form.get('book_format_from', None) - book_format_to = request.form.get('book_format_to', None) - - if (book_format_from is None) or (book_format_to is None): - flash(_(u"Source or destination format for conversion missing"), category="error") - return redirect(url_for('edit-book.show_edit_book', book_id=book_id)) - - log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to) - rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(), - book_format_to.upper(), current_user.name) - - if rtn is None: - flash(_(u"Book successfully queued for converting to %(book_format)s", - book_format=book_format_to), - category="success") - else: - flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error") - return redirect(url_for('edit-book.show_edit_book', book_id=book_id)) - - -@EditBook.route("/ajax/getcustomenum/") -@login_required -def table_get_custom_enum(c_id): - ret = list() - cc = (calibre_db.session.query(db.CustomColumns) - .filter(db.CustomColumns.id == c_id) - .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions)).one_or_none()) - ret.append({'value': "", 'text': ""}) - for idx, en in enumerate(cc.get_display_dict()['enum_values']): - 
ret.append({'value': en, 'text': en}) - return json.dumps(ret) - - -@EditBook.route("/ajax/editbooks/", methods=['POST']) -@login_required_if_no_ano -@edit_required -def edit_list_book(param): - vals = request.form.to_dict() - book = calibre_db.get_book(vals['pk']) - sort_param = "" - # ret = "" - try: - if param == 'series_index': - edit_book_series_index(vals['value'], book) - ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json') - elif param == 'tags': - edit_book_tags(vals['value'], book) - ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}), - mimetype='application/json') - elif param == 'series': - edit_book_series(vals['value'], book) - ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}), - mimetype='application/json') - elif param == 'publishers': - edit_book_publisher(vals['value'], book) - ret = Response(json.dumps({'success': True, - 'newValue': ', '.join([publisher.name for publisher in book.publishers])}), - mimetype='application/json') - elif param == 'languages': - invalid = list() - edit_book_languages(vals['value'], book, invalid=invalid) - if invalid: - ret = Response(json.dumps({'success': False, - 'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}), - mimetype='application/json') - else: - lang_names = list() - for lang in book.languages: - lang_names.append(isoLanguages.get_language_name(get_locale(), lang.lang_code)) - ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}), - mimetype='application/json') - elif param == 'author_sort': - book.author_sort = vals['value'] - ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}), - mimetype='application/json') - elif param == 'title': - sort_param = book.sort - if handle_title_on_edit(book, vals.get('value', "")): - rename_error = helper.update_dir_structure(book.id, config.config_calibre_dir) - if not rename_error: - ret = Response(json.dumps({'success': True, 'newValue': book.title}), - mimetype='application/json') - else: - ret = Response(json.dumps({'success': False, - 'msg': rename_error}), - mimetype='application/json') - elif param == 'sort': - book.sort = vals['value'] - ret = Response(json.dumps({'success': True, 'newValue': book.sort}), - mimetype='application/json') - elif param == 'comments': - edit_book_comments(vals['value'], book) - ret = Response(json.dumps({'success': True, 'newValue': book.comments[0].text}), - mimetype='application/json') - elif param == 'authors': - input_authors, __, renamed = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true") - rename_error = helper.update_dir_structure(book.id, config.config_calibre_dir, input_authors[0], - renamed_author=renamed) - if not rename_error: - ret = Response(json.dumps({ - 'success': True, - 'newValue': ' & '.join([author.replace('|', ',') for author in input_authors])}), - mimetype='application/json') - else: - ret = Response(json.dumps({'success': False, - 'msg': rename_error}), - mimetype='application/json') - elif param == 'is_archived': - is_archived = change_archived_books(book.id, vals['value'] == "True", - message="Book {} archive bit set to: {}".format(book.id, vals['value'])) - if is_archived: - kobo_sync_status.remove_synced_book(book.id) - return "" - elif param == 'read_status': - ret = helper.edit_book_read_status(book.id, vals['value'] == "True") - if ret: - return ret, 400 - elif 
param.startswith("custom_column_"): - new_val = dict() - new_val[param] = vals['value'] - edit_single_cc_data(book.id, book, param[14:], new_val) - # ToDo: Very hacky find better solution - if vals['value'] in ["True", "False"]: - ret = "" - else: - ret = Response(json.dumps({'success': True, 'newValue': vals['value']}), - mimetype='application/json') + db_filter = db_object.name + for add_element in add_elements: + # check if an element with that name exists + db_element = db_session.query(db_object).filter(db_filter == add_element).first() + # if no element is found add it + if db_type == 'author': + new_element = db_object(add_element, helper.get_sorted_author(add_element.replace('|', ',')), "") + elif db_type == 'series': + new_element = db_object(add_element, add_element) + elif db_type == 'custom': + new_element = db_object(value=add_element) + elif db_type == 'publisher': + new_element = db_object(add_element, None) + else: # db_type should be tag or language + new_element = db_object(add_element) + if db_element is None: + changed = True + db_session.add(new_element) + db_book_object.append(new_element) else: - return _("Parameter not found"), 400 - book.last_modified = datetime.utcnow() - - calibre_db.session.commit() - # revert change for sort if automatic fields link is deactivated - if param == 'title' and vals.get('checkT') == "false": - book.sort = sort_param - calibre_db.session.commit() - except (OperationalError, IntegrityError) as e: - calibre_db.session.rollback() - log.error_or_exception("Database error: {}".format(e)) - ret = Response(json.dumps({'success': False, - 'msg': 'Database error: {}'.format(e.orig)}), - mimetype='application/json') - return ret + db_element = create_objects_for_addition(db_element, add_element, db_type) + # add element to book + changed = True + db_book_object.append(db_element) + return changed -@EditBook.route("/ajax/sort_value//") -@login_required -def get_sorted_entry(field, bookid): - if field in ['title', 'authors', 'sort', 'author_sort']: - book = calibre_db.get_filtered_book(bookid) - if book: - if field == 'title': - return json.dumps({'sort': book.sort}) - elif field == 'authors': - return json.dumps({'author_sort': book.author_sort}) - if field == 'sort': - return json.dumps({'sort': book.title}) - if field == 'author_sort': - return json.dumps({'author_sort': book.author}) - return "" +def create_objects_for_addition(db_element, add_element, db_type): + if db_type == 'custom': + if db_element.value != add_element: + db_element.value = add_element + elif db_type == 'languages': + if db_element.lang_code != add_element: + db_element.lang_code = add_element + elif db_type == 'series': + if db_element.name != add_element: + db_element.name = add_element + db_element.sort = add_element + elif db_type == 'author': + if db_element.name != add_element: + db_element.name = add_element + db_element.sort = helper.get_sorted_author(add_element.replace('|', ',')) + elif db_type == 'publisher': + if db_element.name != add_element: + db_element.name = add_element + db_element.sort = None + elif db_element.name != add_element: + db_element.name = add_element + return db_element -@EditBook.route("/ajax/simulatemerge", methods=['POST']) -@login_required -@edit_required -def simulate_merge_list_book(): - vals = request.get_json().get('Merge_books') - if vals: - to_book = calibre_db.get_book(vals[0]).title - vals.pop(0) - if to_book: - from_book = [] - for book_id in vals: - from_book.append(calibre_db.get_book(book_id).title) - return 
json.dumps({'to': to_book, 'from': from_book}) - return "" +# Modifies different Database objects, first check if elements have to be deleted, +# because they are no longer used, than check if elements have to be added to database +def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type): + # passing input_elements not as a list may lead to undesired results + if not isinstance(input_elements, list): + raise TypeError(str(input_elements) + " should be passed as a list") + input_elements = [x for x in input_elements if x != ''] + # we have all input element (authors, series, tags) names now + # 1. search for elements to remove + del_elements = search_objects_remove(db_book_object, db_type, input_elements) + # 2. search for elements that need to be added + add_elements = search_objects_add(db_book_object, db_type, input_elements) + # if there are elements to remove, we remove them now + changed = remove_objects(db_book_object, db_session, del_elements) + # if there are elements to add, we add them now! + if len(add_elements) > 0: + changed |= add_objects(db_book_object, db_object, db_session, db_type, add_elements) + return changed -@EditBook.route("/ajax/mergebooks", methods=['POST']) -@login_required -@edit_required -def merge_list_book(): - vals = request.get_json().get('Merge_books') - to_file = list() - if vals: - # load all formats from target book - to_book = calibre_db.get_book(vals[0]) - vals.pop(0) - if to_book: - for file in to_book.data: - to_file.append(file.format) - to_name = helper.get_valid_filename(to_book.title, - chars=96) + ' - ' + helper.get_valid_filename(to_book.authors[0].name, - chars=96) - for book_id in vals: - from_book = calibre_db.get_book(book_id) - if from_book: - for element in from_book.data: - if element.format not in to_file: - # create new data entry with: book_id, book_format, uncompressed_size, name - filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir, - to_book.path, - to_name + "." + element.format.lower())) - filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir, - from_book.path, - element.name + "." 
+ element.format.lower())
-                        copyfile(filepath_old, filepath_new)
-                        to_book.data.append(db.Data(to_book.id,
-                                                    element.format,
-                                                    element.uncompressed_size,
-                                                    to_name))
-            delete_book_from_table(from_book.id, "", True)
-        return json.dumps({'success': True})
-    return ""
-
-
-@EditBook.route("/ajax/xchange", methods=['POST'])
-@login_required
-@edit_required
-def table_xchange_author_title():
-    vals = request.get_json().get('xchange')
-    edited_books_id = False
-    if vals:
-        for val in vals:
-            modify_date = False
-            book = calibre_db.get_book(val)
-            authors = book.title
-            book.authors = calibre_db.order_authors([book])
-            author_names = []
-            for authr in book.authors:
-                author_names.append(authr.name.replace('|', ','))
-
-            title_change = handle_title_on_edit(book, " ".join(author_names))
-            input_authors, author_change, renamed = handle_author_on_edit(book, authors)
-            if author_change or title_change:
-                edited_books_id = book.id
-                modify_date = True
-
-            if config.config_use_google_drive:
-                gdriveutils.updateGdriveCalibreFromLocal()
-
-            if edited_books_id:
-                # toDo: Handle error
-                edit_error = helper.update_dir_structure(edited_books_id, config.config_calibre_dir, input_authors[0],
-                                                         renamed_author=renamed)
-            if modify_date:
-                book.last_modified = datetime.utcnow()
-            try:
-                calibre_db.session.commit()
-            except (OperationalError, IntegrityError) as e:
-                calibre_db.session.rollback()
-                log.error_or_exception("Database error: %s", e)
-                return json.dumps({'success': False})
-
-            if config.config_use_google_drive:
-                gdriveutils.updateGdriveCalibreFromLocal()
-        return json.dumps({'success': True})
-    return ""
+def modify_identifiers(input_identifiers, db_identifiers, db_session):
+    """Modify Identifiers to match input information.
+       input_identifiers is a list of ready-to-persist Identifiers objects.
+       db_identifiers is a list of already persisted Identifiers objects."""
+    changed = False
+    error = False
+    input_dict = dict([(identifier.type.lower(), identifier) for identifier in input_identifiers])
+    if len(input_identifiers) != len(input_dict):
+        error = True
+    db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers])
+    # delete db identifiers not present in input or modify them with input val
+    for identifier_type, identifier in db_dict.items():
+        if identifier_type not in input_dict.keys():
+            db_session.delete(identifier)
+            changed = True
+        else:
+            input_identifier = input_dict[identifier_type]
+            identifier.type = input_identifier.type
+            identifier.val = input_identifier.val
+    # add input identifiers not present in db
+    for identifier_type, identifier in input_dict.items():
+        if identifier_type not in db_dict.keys():
+            db_session.add(identifier)
+            changed = True
+    return changed, error
diff --git a/cps/error_handler.py b/cps/error_handler.py
index 67252a66..7c003bdb 100644
--- a/cps/error_handler.py
+++ b/cps/error_handler.py
@@ -17,6 +17,7 @@
 # along with this program. If not, see .
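# Illustrative sketch, not part of the patch: driving modify_identifiers() above. The
# db.Identifiers constructor arguments shown here are assumed for illustration only.
isbn = db.Identifiers("9783161484100", "isbn", book.id)
changed, error = modify_identifiers([isbn], book.identifiers, calibre_db.session)
# "changed" is True when identifiers were added, removed or updated; "error" flags two
# input identifiers sharing the same (case-insensitive) type.
if changed and not error:
    calibre_db.session.commit()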
import traceback + from flask import render_template from werkzeug.exceptions import default_exceptions try: diff --git a/cps/fs.py b/cps/fs.py new file mode 100644 index 00000000..996499c3 --- /dev/null +++ b/cps/fs.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- + +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2020 mmonkey +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from . import logger +from .constants import CACHE_DIR +from os import makedirs, remove +from os.path import isdir, isfile, join +from shutil import rmtree + + +class FileSystem: + _instance = None + _cache_dir = CACHE_DIR + + def __new__(cls): + if cls._instance is None: + cls._instance = super(FileSystem, cls).__new__(cls) + cls.log = logger.create() + return cls._instance + + def get_cache_dir(self, cache_type=None): + if not isdir(self._cache_dir): + try: + makedirs(self._cache_dir) + except OSError: + self.log.info(f'Failed to create path {self._cache_dir} (Permission denied).') + raise + + path = join(self._cache_dir, cache_type) + if cache_type and not isdir(path): + try: + makedirs(path) + except OSError: + self.log.info(f'Failed to create path {path} (Permission denied).') + raise + + return path if cache_type else self._cache_dir + + def get_cache_file_dir(self, filename, cache_type=None): + path = join(self.get_cache_dir(cache_type), filename[:2]) + if not isdir(path): + try: + makedirs(path) + except OSError: + self.log.info(f'Failed to create path {path} (Permission denied).') + raise + + return path + + def get_cache_file_path(self, filename, cache_type=None): + return join(self.get_cache_file_dir(filename, cache_type), filename) if filename else None + + def get_cache_file_exists(self, filename, cache_type=None): + path = self.get_cache_file_path(filename, cache_type) + return isfile(path) + + def delete_cache_dir(self, cache_type=None): + if not cache_type and isdir(self._cache_dir): + try: + rmtree(self._cache_dir) + except OSError: + self.log.info(f'Failed to delete path {self._cache_dir} (Permission denied).') + raise + + path = join(self._cache_dir, cache_type) + if cache_type and isdir(path): + try: + rmtree(path) + except OSError: + self.log.info(f'Failed to delete path {path} (Permission denied).') + raise + + def delete_cache_file(self, filename, cache_type=None): + path = self.get_cache_file_path(filename, cache_type) + if isfile(path): + try: + remove(path) + except OSError: + self.log.info(f'Failed to delete path {path} (Permission denied).') + raise diff --git a/cps/gdriveutils.py b/cps/gdriveutils.py index ee8ee953..e2e0a536 100644 --- a/cps/gdriveutils.py +++ b/cps/gdriveutils.py @@ -63,7 +63,7 @@ except ImportError as err: importError = err gdrive_support = False -from . import logger, cli, config +from . 
import logger, cli_param, config from .constants import CONFIG_DIR as _CONFIG_DIR @@ -142,7 +142,7 @@ def is_gdrive_ready(): return os.path.exists(SETTINGS_YAML) and os.path.exists(CREDENTIALS) -engine = create_engine('sqlite:///{0}'.format(cli.gd_path), echo=False) +engine = create_engine('sqlite:///{0}'.format(cli_param.gd_path), echo=False) Base = declarative_base() # Open session for database connection @@ -190,11 +190,11 @@ def migrate(): session.execute('ALTER TABLE gdrive_ids2 RENAME to gdrive_ids') break -if not os.path.exists(cli.gd_path): +if not os.path.exists(cli_param.gd_path): try: Base.metadata.create_all(engine) except Exception as ex: - log.error("Error connect to database: {} - {}".format(cli.gd_path, ex)) + log.error("Error connect to database: {} - {}".format(cli_param.gd_path, ex)) raise migrate() @@ -544,6 +544,7 @@ def deleteDatabaseOnChange(): except (OperationalError, InvalidRequestError) as ex: session.rollback() log.error_or_exception('Database error: {}'.format(ex)) + session.rollback() def updateGdriveCalibreFromLocal(): @@ -679,8 +680,3 @@ def get_error_text(client_secrets=None): return 'Callback url (redirect url) is missing in client_secrets.json' if client_secrets: client_secrets.update(filedata['web']) - - -def get_versions(): - return { # 'six': six_version, - 'httplib2': httplib2_version} diff --git a/cps/helper.py b/cps/helper.py index 69b3e2f8..ed11e1c0 100644 --- a/cps/helper.py +++ b/cps/helper.py @@ -29,19 +29,17 @@ from tempfile import gettempdir import requests import unidecode -from babel.dates import format_datetime -from babel.units import format_unit from flask import send_from_directory, make_response, redirect, abort, url_for from flask_babel import gettext as _ +from flask_babel import lazy_gettext as N_ from flask_login import current_user -from sqlalchemy.sql.expression import true, false, and_, text, func +from sqlalchemy.sql.expression import true, false, and_, or_, text, func from sqlalchemy.exc import InvalidRequestError, OperationalError from werkzeug.datastructures import Headers from werkzeug.security import generate_password_hash from markupsafe import escape from urllib.parse import quote - try: import advocate from advocate.exceptions import UnacceptableAddressException @@ -51,14 +49,15 @@ except ImportError: advocate = requests UnacceptableAddressException = MissingSchema = BaseException -from . import calibre_db, cli +from . import calibre_db, cli_param from .tasks.convert import TaskConvert -from . import logger, config, get_locale, db, ub +from . import logger, config, db, ub, fs from . 
import gdriveutils as gd -from .constants import STATIC_DIR as _STATIC_DIR +from .constants import STATIC_DIR as _STATIC_DIR, CACHE_TYPE_THUMBNAILS, THUMBNAIL_TYPE_COVER, THUMBNAIL_TYPE_SERIES from .subproc_wrapper import process_wait -from .services.worker import WorkerThread, STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS +from .services.worker import WorkerThread from .tasks.mail import TaskEmail +from .tasks.thumbnail import TaskClearCoverThumbnailCache, TaskGenerateCoverThumbnails log = logger.create() @@ -73,10 +72,10 @@ except (ImportError, RuntimeError) as e: # Convert existing book entry to new format -def convert_book_format(book_id, calibrepath, old_book_format, new_book_format, user_id, kindle_mail=None): +def convert_book_format(book_id, calibre_path, old_book_format, new_book_format, user_id, kindle_mail=None): book = calibre_db.get_book(book_id) data = calibre_db.get_book_format(book.id, old_book_format) - file_path = os.path.join(calibrepath, book.path, data.name) + file_path = os.path.join(calibre_path, book.path, data.name) if not data: error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id) log.error("convert_book_format: %s", error_message) @@ -109,9 +108,10 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format, return None +# Texts are not lazy translated as they are supposed to get send out as is def send_test_mail(kindle_mail, user_name): WorkerThread.add(user_name, TaskEmail(_(u'Calibre-Web test e-mail'), None, None, - config.get_mail_settings(), kindle_mail, _(u"Test e-mail"), + config.get_mail_settings(), kindle_mail, N_(u"Test e-mail"), _(u'This e-mail has been sent via Calibre-Web.'))) return @@ -133,27 +133,27 @@ def send_registration_mail(e_mail, user_name, default_password, resend=False): attachment=None, settings=config.get_mail_settings(), recipient=e_mail, - taskMessage=_(u"Registration e-mail for user: %(name)s", name=user_name), + task_message=N_(u"Registration e-mail for user: %(name)s", name=user_name), text=txt )) return def check_send_to_kindle_with_converter(formats): - bookformats = list() + book_formats = list() if 'EPUB' in formats and 'MOBI' not in formats: - bookformats.append({'format': 'Mobi', - 'convert': 1, - 'text': _('Convert %(orig)s to %(format)s and send to Kindle', - orig='Epub', - format='Mobi')}) + book_formats.append({'format': 'Mobi', + 'convert': 1, + 'text': _('Convert %(orig)s to %(format)s and send to Kindle', + orig='Epub', + format='Mobi')}) if 'AZW3' in formats and 'MOBI' not in formats: - bookformats.append({'format': 'Mobi', - 'convert': 2, - 'text': _('Convert %(orig)s to %(format)s and send to Kindle', - orig='Azw3', - format='Mobi')}) - return bookformats + book_formats.append({'format': 'Mobi', + 'convert': 2, + 'text': _('Convert %(orig)s to %(format)s and send to Kindle', + orig='Azw3', + format='Mobi')}) + return book_formats def check_send_to_kindle(entry): @@ -161,26 +161,26 @@ def check_send_to_kindle(entry): returns all available book formats for sending to Kindle """ formats = list() - bookformats = list() + book_formats = list() if len(entry.data): for ele in iter(entry.data): if ele.uncompressed_size < config.mail_size: formats.append(ele.format) if 'MOBI' in formats: - bookformats.append({'format': 'Mobi', - 'convert': 0, - 'text': _('Send %(format)s to Kindle', format='Mobi')}) + book_formats.append({'format': 'Mobi', + 'convert': 0, + 'text': _('Send %(format)s to Kindle', format='Mobi')}) if 'PDF' in formats: - 
bookformats.append({'format': 'Pdf', - 'convert': 0, - 'text': _('Send %(format)s to Kindle', format='Pdf')}) + book_formats.append({'format': 'Pdf', + 'convert': 0, + 'text': _('Send %(format)s to Kindle', format='Pdf')}) if 'AZW' in formats: - bookformats.append({'format': 'Azw', - 'convert': 0, - 'text': _('Send %(format)s to Kindle', format='Azw')}) + book_formats.append({'format': 'Azw', + 'convert': 0, + 'text': _('Send %(format)s to Kindle', format='Azw')}) if config.config_converterpath: - bookformats.extend(check_send_to_kindle_with_converter(formats)) - return bookformats + book_formats.extend(check_send_to_kindle_with_converter(formats)) + return book_formats else: log.error(u'Cannot find book entry %d', entry.id) return None @@ -190,12 +190,12 @@ def check_send_to_kindle(entry): # list with supported formats def check_read_formats(entry): extensions_reader = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR', 'DJVU'} - bookformats = list() + book_formats = list() if len(entry.data): for ele in iter(entry.data): if ele.format.upper() in extensions_reader: - bookformats.append(ele.format.lower()) - return bookformats + book_formats.append(ele.format.lower()) + return book_formats # Files are processed in the following order/priority: @@ -217,7 +217,7 @@ def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id): if entry.format.upper() == book_format.upper(): converted_file_name = entry.name + '.' + book_format.lower() link = '{}'.format(url_for('web.show_book', book_id=book_id), escape(book.title)) - email_text = _(u"%(book)s send to Kindle", book=link) + email_text = N_(u"%(book)s send to Kindle", book=link) WorkerThread.add(user_id, TaskEmail(_(u"Send to Kindle"), book.path, converted_file_name, config.get_mail_settings(), kindle_mail, email_text, _(u'This e-mail has been sent via Calibre-Web.'))) @@ -225,23 +225,11 @@ def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id): return _(u"The requested file could not be read. Maybe wrong permissions?") -def shorten_component(s, by_what): - l = len(s) - if l < by_what: - return s - l = (l - by_what)//2 - if l <= 0: - return s - return s[:l] + s[-l:] - - def get_valid_filename(value, replace_whitespace=True, chars=128): """ Returns the given string converted to a string that can be used for a clean filename. Limits num characters to 128 max. 
""" - - if value[-1:] == u'.': value = value[:-1]+u'_' value = value.replace("/", "_").replace(":", "_").strip('\0') @@ -354,9 +342,9 @@ def edit_book_read_status(book_id, read_status=None): return "" -# Deletes a book fro the local filestorage, returns True if deleting is successfull, otherwise false +# Deletes a book from the local filestorage, returns True if deleting is successful, otherwise false def delete_book_file(book, calibrepath, book_format=None): - # check that path is 2 elements deep, check that target path has no subfolders + # check that path is 2 elements deep, check that target path has no sub folders if book.path.count('/') == 1: path = os.path.join(calibrepath, book.path) if book_format: @@ -511,6 +499,7 @@ def upload_new_file_gdrive(book_id, first_author, renamed_author, title, title_d return rename_files_on_change(first_author, renamed_author, local_book=book, gdrive=True) + def update_dir_structure_gdrive(book_id, first_author, renamed_author): book = calibre_db.get_book(book_id) @@ -690,6 +679,8 @@ def update_dir_structure(book_id, def delete_book(book, calibrepath, book_format): + if not book_format: + clear_cover_thumbnail_cache(book.id) ## here it breaks if config.config_use_google_drive: return delete_book_gdrive(book, book_format) else: @@ -706,19 +697,30 @@ def get_cover_on_failure(use_generic_cover): abort(404) -def get_book_cover(book_id): +def get_book_cover(book_id, resolution=None): book = calibre_db.get_filtered_book(book_id, allow_show_archived=True) - return get_book_cover_internal(book, use_generic_cover_on_failure=True) + return get_book_cover_internal(book, use_generic_cover_on_failure=True, resolution=resolution) -def get_book_cover_with_uuid(book_uuid, - use_generic_cover_on_failure=True): +# Called only by kobo sync -> cover not found should be answered with 404 and not with default cover +def get_book_cover_with_uuid(book_uuid, resolution=None): book = calibre_db.get_book_by_uuid(book_uuid) - return get_book_cover_internal(book, use_generic_cover_on_failure) + return get_book_cover_internal(book, use_generic_cover_on_failure=False, resolution=resolution) -def get_book_cover_internal(book, use_generic_cover_on_failure): +def get_book_cover_internal(book, use_generic_cover_on_failure, resolution=None): if book and book.has_cover: + + # Send the book cover thumbnail if it exists in cache + if resolution: + thumbnail = get_book_cover_thumbnail(book, resolution) + if thumbnail: + cache = fs.FileSystem() + if cache.get_cache_file_exists(thumbnail.filename, CACHE_TYPE_THUMBNAILS): + return send_from_directory(cache.get_cache_file_dir(thumbnail.filename, CACHE_TYPE_THUMBNAILS), + thumbnail.filename) + + # Send the book cover from Google Drive if configured if config.config_use_google_drive: try: if not gd.is_gdrive_ready(): @@ -732,6 +734,8 @@ def get_book_cover_internal(book, use_generic_cover_on_failure): except Exception as ex: log.error_or_exception(ex) return get_cover_on_failure(use_generic_cover_on_failure) + + # Send the book cover from the Calibre directory else: cover_file_path = os.path.join(config.config_calibre_dir, book.path) if os.path.isfile(os.path.join(cover_file_path, "cover.jpg")): @@ -742,20 +746,67 @@ def get_book_cover_internal(book, use_generic_cover_on_failure): return get_cover_on_failure(use_generic_cover_on_failure) +def get_book_cover_thumbnail(book, resolution): + if book and book.has_cover: + return ub.session \ + .query(ub.Thumbnail) \ + .filter(ub.Thumbnail.type == THUMBNAIL_TYPE_COVER) \ + .filter(ub.Thumbnail.entity_id 
== book.id) \ + .filter(ub.Thumbnail.resolution == resolution) \ + .filter(or_(ub.Thumbnail.expiration.is_(None), ub.Thumbnail.expiration > datetime.utcnow())) \ + .first() + + +def get_series_thumbnail_on_failure(series_id, resolution): + book = calibre_db.session \ + .query(db.Books) \ + .join(db.books_series_link) \ + .join(db.Series) \ + .filter(db.Series.id == series_id) \ + .filter(db.Books.has_cover == 1) \ + .first() + + return get_book_cover_internal(book, use_generic_cover_on_failure=True, resolution=resolution) + + +def get_series_cover_thumbnail(series_id, resolution=None): + return get_series_cover_internal(series_id, resolution) + + +def get_series_cover_internal(series_id, resolution=None): + # Send the series thumbnail if it exists in cache + if resolution: + thumbnail = get_series_thumbnail(series_id, resolution) + if thumbnail: + cache = fs.FileSystem() + if cache.get_cache_file_exists(thumbnail.filename, CACHE_TYPE_THUMBNAILS): + return send_from_directory(cache.get_cache_file_dir(thumbnail.filename, CACHE_TYPE_THUMBNAILS), + thumbnail.filename) + + return get_series_thumbnail_on_failure(series_id, resolution) + + +def get_series_thumbnail(series_id, resolution): + return ub.session \ + .query(ub.Thumbnail) \ + .filter(ub.Thumbnail.type == THUMBNAIL_TYPE_SERIES) \ + .filter(ub.Thumbnail.entity_id == series_id) \ + .filter(ub.Thumbnail.resolution == resolution) \ + .filter(or_(ub.Thumbnail.expiration.is_(None), ub.Thumbnail.expiration > datetime.utcnow())) \ + .first() + + # saves book cover from url def save_cover_from_url(url, book_path): try: - if cli.allow_localhost: + if cli_param.allow_localhost: img = requests.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling elif use_advocate: img = advocate.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling else: - log.error("python modul advocate is not installed but is needed") - return False, _("Python modul 'advocate' is not installed but is needed for cover downloads") + log.error("python module advocate is not installed but is needed") + return False, _("Python module 'advocate' is not installed but is needed for cover downloads") img.raise_for_status() - # # cover_processing() - # move_coverfile(meta, db_book) - return save_cover(img, book_path) except (socket.gaierror, requests.exceptions.HTTPError, @@ -904,54 +955,6 @@ def json_serial(obj): raise TypeError("Type %s not serializable" % type(obj)) -# helper function for displaying the runtime of tasks -def format_runtime(runtime): - ret_val = "" - if runtime.days: - ret_val = format_unit(runtime.days, 'duration-day', length="long", locale=get_locale()) + ', ' - mins, seconds = divmod(runtime.seconds, 60) - hours, minutes = divmod(mins, 60) - # ToDo: locale.number_symbols._data['timeSeparator'] -> localize time separator ? 
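# Illustrative sketch, not part of the patch: how a cached cover thumbnail is resolved,
# combining get_book_cover_thumbnail() above with the FileSystem cache from cps/fs.py.
# "book" and the resolution constant are placeholders here.
thumbnail = get_book_cover_thumbnail(book, COVER_THUMBNAIL_SMALL)
if thumbnail:
    cache = fs.FileSystem()
    if cache.get_cache_file_exists(thumbnail.filename, CACHE_TYPE_THUMBNAILS):
        # cached JPEGs are sharded into sub folders named after the first two filename characters
        cover_dir = cache.get_cache_file_dir(thumbnail.filename, CACHE_TYPE_THUMBNAILS)
        # send_from_directory(cover_dir, thumbnail.filename) would then serve the cached file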
- if hours: - ret_val += '{:d}:{:02d}:{:02d}s'.format(hours, minutes, seconds) - elif minutes: - ret_val += '{:2d}:{:02d}s'.format(minutes, seconds) - else: - ret_val += '{:2d}s'.format(seconds) - return ret_val - - -# helper function to apply localize status information in tasklist entries -def render_task_status(tasklist): - renderedtasklist = list() - for __, user, __, task in tasklist: - if user == current_user.name or current_user.role_admin(): - ret = {} - if task.start_time: - ret['starttime'] = format_datetime(task.start_time, format='short', locale=get_locale()) - ret['runtime'] = format_runtime(task.runtime) - - # localize the task status - if isinstance(task.stat, int): - if task.stat == STAT_WAITING: - ret['status'] = _(u'Waiting') - elif task.stat == STAT_FAIL: - ret['status'] = _(u'Failed') - elif task.stat == STAT_STARTED: - ret['status'] = _(u'Started') - elif task.stat == STAT_FINISH_SUCCESS: - ret['status'] = _(u'Finished') - else: - ret['status'] = _(u'Unknown Status') - - ret['taskMessage'] = "{}: {}".format(_(task.name), task.message) - ret['progress'] = "{} %".format(int(task.progress * 100)) - ret['user'] = escape(user) # prevent xss - renderedtasklist.append(ret) - - return renderedtasklist - - def tags_filters(): negtags_list = current_user.list_denied_tags() postags_list = current_user.list_allowed_tags() @@ -998,3 +1001,28 @@ def get_download_link(book_id, book_format, client): return do_download_file(book, book_format, client, data1, headers) else: abort(404) + + +def clear_cover_thumbnail_cache(book_id): + if config.schedule_generate_book_covers: + WorkerThread.add(None, TaskClearCoverThumbnailCache(book_id), hidden=True) + + +def replace_cover_thumbnail_cache(book_id): + if config.schedule_generate_book_covers: + WorkerThread.add(None, TaskClearCoverThumbnailCache(book_id), hidden=True) + WorkerThread.add(None, TaskGenerateCoverThumbnails(book_id), hidden=True) + + +def delete_thumbnail_cache(): + WorkerThread.add(None, TaskClearCoverThumbnailCache(-1)) + + +def add_book_to_thumbnail_cache(book_id): + if config.schedule_generate_book_covers: + WorkerThread.add(None, TaskGenerateCoverThumbnails(book_id), hidden=True) + + +def update_thumbnail_cache(): + if config.schedule_generate_book_covers: + WorkerThread.add(None, TaskGenerateCoverThumbnails()) diff --git a/cps/isoLanguages.py b/cps/isoLanguages.py index 50447aca..31e3dade 100644 --- a/cps/isoLanguages.py +++ b/cps/isoLanguages.py @@ -49,7 +49,7 @@ except ImportError: def get_language_names(locale): - return _LANGUAGE_NAMES.get(locale) + return _LANGUAGE_NAMES.get(str(locale)) def get_language_name(locale, lang_code): diff --git a/cps/jinjia.py b/cps/jinjia.py index 06e99141..e42c650c 100644 --- a/cps/jinjia.py +++ b/cps/jinjia.py @@ -22,17 +22,17 @@ # custom jinja filters +from markupsafe import escape import datetime import mimetypes from uuid import uuid4 -from babel.dates import format_date +# from babel.dates import format_date from flask import Blueprint, request, url_for -from flask_babel import get_locale +from flask_babel import format_date from flask_login import current_user -from markupsafe import escape -from . import logger +from . 
import constants, logger jinjia = Blueprint('jinjia', __name__) log = logger.create() @@ -77,7 +77,7 @@ def mimetype_filter(val): @jinjia.app_template_filter('formatdate') def formatdate_filter(val): try: - return format_date(val, format='medium', locale=get_locale()) + return format_date(val, format='medium') except AttributeError as e: log.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, @@ -128,12 +128,55 @@ def formatseriesindex_filter(series_index): return series_index return 0 + @jinjia.app_template_filter('escapedlink') def escapedlink_filter(url, text): return "{}".format(url, escape(text)) + @jinjia.app_template_filter('uuidfilter') def uuidfilter(var): return uuid4() +@jinjia.app_template_filter('cache_timestamp') +def cache_timestamp(rolling_period='month'): + if rolling_period == 'day': + return str(int(datetime.datetime.today().replace(hour=1, minute=1).timestamp())) + elif rolling_period == 'year': + return str(int(datetime.datetime.today().replace(day=1).timestamp())) + else: + return str(int(datetime.datetime.today().replace(month=1, day=1).timestamp())) + + +@jinjia.app_template_filter('last_modified') +def book_last_modified(book): + return str(int(book.last_modified.timestamp())) + + +@jinjia.app_template_filter('get_cover_srcset') +def get_cover_srcset(book): + srcset = list() + resolutions = { + constants.COVER_THUMBNAIL_SMALL: 'sm', + constants.COVER_THUMBNAIL_MEDIUM: 'md', + constants.COVER_THUMBNAIL_LARGE: 'lg' + } + for resolution, shortname in resolutions.items(): + url = url_for('web.get_cover', book_id=book.id, resolution=shortname, c=book_last_modified(book)) + srcset.append(f'{url} {resolution}x') + return ', '.join(srcset) + + +@jinjia.app_template_filter('get_series_srcset') +def get_cover_srcset(series): + srcset = list() + resolutions = { + constants.COVER_THUMBNAIL_SMALL: 'sm', + constants.COVER_THUMBNAIL_MEDIUM: 'md', + constants.COVER_THUMBNAIL_LARGE: 'lg' + } + for resolution, shortname in resolutions.items(): + url = url_for('web.get_series_cover', series_id=series.id, resolution=shortname, c=cache_timestamp()) + srcset.append(f'{url} {resolution}x') + return ', '.join(srcset) diff --git a/cps/kobo.py b/cps/kobo.py index d02660b2..46e68acb 100644 --- a/cps/kobo.py +++ b/cps/kobo.py @@ -45,7 +45,7 @@ import requests from . 
import config, logger, kobo_auth, db, calibre_db, helper, shelf as shelf_lib, ub, csrf, kobo_sync_status -from .constants import sqlalchemy_version2 +from .constants import sqlalchemy_version2, COVER_THUMBNAIL_SMALL from .helper import get_download_link from .services import SyncToken as SyncToken from .web import download_required @@ -148,8 +148,8 @@ def HandleSyncRequest(): sync_token.books_last_created = datetime.datetime.min sync_token.reading_state_last_modified = datetime.datetime.min - new_books_last_modified = sync_token.books_last_modified # needed for sync selected shelfs only - new_books_last_created = sync_token.books_last_created # needed to distinguish between new and changed entitlement + new_books_last_modified = sync_token.books_last_modified # needed for sync selected shelfs only + new_books_last_created = sync_token.books_last_created # needed to distinguish between new and changed entitlement new_reading_state_last_modified = sync_token.reading_state_last_modified new_archived_last_modified = datetime.datetime.min @@ -176,18 +176,17 @@ def HandleSyncRequest(): .join(db.Data).outerjoin(ub.ArchivedBook, and_(db.Books.id == ub.ArchivedBook.book_id, ub.ArchivedBook.user_id == current_user.id)) .filter(db.Books.id.notin_(calibre_db.session.query(ub.KoboSyncedBooks.book_id) - .filter(ub.KoboSyncedBooks.user_id == current_user.id))) - .filter(ub.BookShelf.date_added > sync_token.books_last_modified) - .filter(db.Data.format.in_(KOBO_FORMATS)) - .filter(calibre_db.common_filters(allow_show_archived=True)) - .order_by(db.Books.id) - .order_by(ub.ArchivedBook.last_modified) - .join(ub.BookShelf, db.Books.id == ub.BookShelf.book_id) - .join(ub.Shelf) - .filter(ub.Shelf.user_id == current_user.id) - .filter(ub.Shelf.kobo_sync) - .distinct() - ) + .filter(ub.KoboSyncedBooks.user_id == current_user.id))) + .filter(ub.BookShelf.date_added > sync_token.books_last_modified) + .filter(db.Data.format.in_(KOBO_FORMATS)) + .filter(calibre_db.common_filters(allow_show_archived=True)) + .order_by(db.Books.id) + .order_by(ub.ArchivedBook.last_modified) + .join(ub.BookShelf, db.Books.id == ub.BookShelf.book_id) + .join(ub.Shelf) + .filter(ub.Shelf.user_id == current_user.id) + .filter(ub.Shelf.kobo_sync) + .distinct()) else: if sqlalchemy_version2: changed_entries = select(db.Books, ub.ArchivedBook.last_modified, ub.ArchivedBook.is_archived) @@ -196,16 +195,14 @@ def HandleSyncRequest(): ub.ArchivedBook.last_modified, ub.ArchivedBook.is_archived) changed_entries = (changed_entries - .join(db.Data).outerjoin(ub.ArchivedBook, and_(db.Books.id == ub.ArchivedBook.book_id, - ub.ArchivedBook.user_id == current_user.id)) - .filter(db.Books.id.notin_(calibre_db.session.query(ub.KoboSyncedBooks.book_id) - .filter(ub.KoboSyncedBooks.user_id == current_user.id))) - .filter(calibre_db.common_filters(allow_show_archived=True)) - .filter(db.Data.format.in_(KOBO_FORMATS)) - .order_by(db.Books.last_modified) - .order_by(db.Books.id) - ) - + .join(db.Data).outerjoin(ub.ArchivedBook, and_(db.Books.id == ub.ArchivedBook.book_id, + ub.ArchivedBook.user_id == current_user.id)) + .filter(db.Books.id.notin_(calibre_db.session.query(ub.KoboSyncedBooks.book_id) + .filter(ub.KoboSyncedBooks.user_id == current_user.id))) + .filter(calibre_db.common_filters(allow_show_archived=True)) + .filter(db.Data.format.in_(KOBO_FORMATS)) + .order_by(db.Books.last_modified) + .order_by(db.Books.id)) reading_states_in_new_entitlements = [] if sqlalchemy_version2: @@ -215,7 +212,7 @@ def HandleSyncRequest(): log.debug("Books to Sync: 
{}".format(len(books.all()))) for book in books: formats = [data.format for data in book.Books.data] - if not 'KEPUB' in formats and config.config_kepubifypath and 'EPUB' in formats: + if 'KEPUB' not in formats and config.config_kepubifypath and 'EPUB' in formats: helper.convert_book_format(book.Books.id, config.config_calibre_dir, 'EPUB', 'KEPUB', current_user.name) kobo_reading_state = get_or_create_reading_state(book.Books.id) @@ -262,7 +259,7 @@ def HandleSyncRequest(): .columns(db.Books).first() else: max_change = changed_entries.from_self().filter(ub.ArchivedBook.is_archived)\ - .filter(ub.ArchivedBook.user_id==current_user.id) \ + .filter(ub.ArchivedBook.user_id == current_user.id) \ .order_by(func.datetime(ub.ArchivedBook.last_modified).desc()).first() max_change = max_change.last_modified if max_change else new_archived_last_modified @@ -425,9 +422,9 @@ def get_author(book): author_list = [] autor_roles = [] for author in book.authors: - autor_roles.append({"Name":author.name}) #.encode('unicode-escape').decode('latin-1') + autor_roles.append({"Name": author.name}) author_list.append(author.name) - return {"ContributorRoles": autor_roles, "Contributors":author_list} + return {"ContributorRoles": autor_roles, "Contributors": author_list} def get_publisher(book): @@ -441,6 +438,7 @@ def get_series(book): return None return book.series[0].name + def get_seriesindex(book): return book.series_index or 1 @@ -485,7 +483,7 @@ def get_metadata(book): "Language": "en", "PhoneticPronunciations": {}, "PublicationDate": convert_to_kobo_timestamp_string(book.pubdate), - "Publisher": {"Imprint": "", "Name": get_publisher(book),}, + "Publisher": {"Imprint": "", "Name": get_publisher(book), }, "RevisionId": book_uuid, "Title": book.title, "WorkId": book_uuid, @@ -504,6 +502,7 @@ def get_metadata(book): return metadata + @csrf.exempt @kobo.route("/v1/library/tags", methods=["POST", "DELETE"]) @requires_kobo_auth @@ -718,7 +717,6 @@ def sync_shelves(sync_token, sync_results, only_kobo_shelves=False): *extra_filters ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc()) - for shelf in shelflist: if not shelf_lib.check_shelf_view_permissions(shelf): continue @@ -764,6 +762,7 @@ def create_kobo_tag(shelf): ) return {"Tag": tag} + @csrf.exempt @kobo.route("/v1/library//state", methods=["GET", "PUT"]) @requires_kobo_auth @@ -808,7 +807,7 @@ def HandleStateRequest(book_uuid): book_read = kobo_reading_state.book_read_link new_book_read_status = get_ub_read_status(request_status_info["Status"]) if new_book_read_status == ub.ReadBook.STATUS_IN_PROGRESS \ - and new_book_read_status != book_read.read_status: + and new_book_read_status != book_read.read_status: book_read.times_started_reading += 1 book_read.last_time_started_reading = datetime.datetime.utcnow() book_read.read_status = new_book_read_status @@ -848,7 +847,7 @@ def get_ub_read_status(kobo_read_status): def get_or_create_reading_state(book_id): book_read = ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id, - ub.ReadBook.user_id == int(current_user.id)).one_or_none() + ub.ReadBook.user_id == int(current_user.id)).one_or_none() if not book_read: book_read = ub.ReadBook(user_id=current_user.id, book_id=book_id) if not book_read.kobo_reading_state: @@ -912,13 +911,12 @@ def get_current_bookmark_response(current_bookmark): } return resp + @kobo.route("/////image.jpg", defaults={'Quality': ""}) @kobo.route("//////image.jpg") @requires_kobo_auth -def HandleCoverImageRequest(book_uuid, width, height,Quality, isGreyscale): - 
book_cover = helper.get_book_cover_with_uuid( - book_uuid, use_generic_cover_on_failure=False - ) +def HandleCoverImageRequest(book_uuid, width, height, Quality, isGreyscale): + book_cover = helper.get_book_cover_with_uuid(book_uuid, resolution=COVER_THUMBNAIL_SMALL) if not book_cover: if config.config_kobo_proxy: log.debug("Cover for unknown book: %s proxied to kobo" % book_uuid) @@ -991,8 +989,8 @@ def handle_getests(): if config.config_kobo_proxy: return redirect_or_proxy_request() else: - testkey = request.headers.get("X-Kobo-userkey","") - return make_response(jsonify({"Result": "Success", "TestKey":testkey, "Tests": {}})) + testkey = request.headers.get("X-Kobo-userkey", "") + return make_response(jsonify({"Result": "Success", "TestKey": testkey, "Tests": {}})) @csrf.exempt @@ -1022,7 +1020,7 @@ def make_calibre_web_auth_response(): content = request.get_json() AccessToken = base64.b64encode(os.urandom(24)).decode('utf-8') RefreshToken = base64.b64encode(os.urandom(24)).decode('utf-8') - return make_response( + return make_response( jsonify( { "AccessToken": AccessToken, @@ -1160,14 +1158,16 @@ def NATIVE_KOBO_RESOURCES(): "eula_page": "https://www.kobo.com/termsofuse?style=onestore", "exchange_auth": "https://storeapi.kobo.com/v1/auth/exchange", "external_book": "https://storeapi.kobo.com/v1/products/books/external/{Ids}", - "facebook_sso_page": "https://authorize.kobo.com/signin/provider/Facebook/login?returnUrl=http://store.kobobooks.com/", + "facebook_sso_page": + "https://authorize.kobo.com/signin/provider/Facebook/login?returnUrl=http://store.kobobooks.com/", "featured_list": "https://storeapi.kobo.com/v1/products/featured/{FeaturedListId}", "featured_lists": "https://storeapi.kobo.com/v1/products/featured", "free_books_page": { "EN": "https://www.kobo.com/{region}/{language}/p/free-ebooks", "FR": "https://www.kobo.com/{region}/{language}/p/livres-gratuits", "IT": "https://www.kobo.com/{region}/{language}/p/libri-gratuiti", - "NL": "https://www.kobo.com/{region}/{language}/List/bekijk-het-overzicht-van-gratis-ebooks/QpkkVWnUw8sxmgjSlCbJRg", + "NL": "https://www.kobo.com/{region}/{language}/" + "List/bekijk-het-overzicht-van-gratis-ebooks/QpkkVWnUw8sxmgjSlCbJRg", "PT": "https://www.kobo.com/{region}/{language}/p/livros-gratis", }, "fte_feedback": "https://storeapi.kobo.com/v1/products/ftefeedback", @@ -1192,7 +1192,8 @@ def NATIVE_KOBO_RESOURCES(): "library_stack": "https://storeapi.kobo.com/v1/user/library/stacks/{LibraryItemId}", "library_sync": "https://storeapi.kobo.com/v1/library/sync", "love_dashboard_page": "https://store.kobobooks.com/{culture}/kobosuperpoints", - "love_points_redemption_page": "https://store.kobobooks.com/{culture}/KoboSuperPointsRedemption?productId={ProductId}", + "love_points_redemption_page": + "https://store.kobobooks.com/{culture}/KoboSuperPointsRedemption?productId={ProductId}", "magazine_landing_page": "https://store.kobobooks.com/emagazines", "notifications_registration_issue": "https://storeapi.kobo.com/v1/notifications/registration", "oauth_host": "https://oauth.kobo.com", @@ -1208,7 +1209,8 @@ def NATIVE_KOBO_RESOURCES(): "product_recommendations": "https://storeapi.kobo.com/v1/products/{ProductId}/recommendations", "product_reviews": "https://storeapi.kobo.com/v1/products/{ProductIds}/reviews", "products": "https://storeapi.kobo.com/v1/products", - "provider_external_sign_in_page": "https://authorize.kobo.com/ExternalSignIn/{providerName}?returnUrl=http://store.kobobooks.com/", + "provider_external_sign_in_page": + 
"https://authorize.kobo.com/ExternalSignIn/{providerName}?returnUrl=http://store.kobobooks.com/", "purchase_buy": "https://www.kobo.com/checkout/createpurchase/", "purchase_buy_templated": "https://www.kobo.com/{culture}/checkout/createpurchase/{ProductId}", "quickbuy_checkout": "https://storeapi.kobo.com/v1/store/quickbuy/{PurchaseId}/checkout", diff --git a/cps/kobo_auth.py b/cps/kobo_auth.py index 5668e6da..9865b993 100644 --- a/cps/kobo_auth.py +++ b/cps/kobo_auth.py @@ -71,47 +71,8 @@ from flask_babel import gettext as _ from . import logger, config, calibre_db, db, helper, ub, lm from .render_template import render_title_template - log = logger.create() - -def register_url_value_preprocessor(kobo): - @kobo.url_value_preprocessor - # pylint: disable=unused-variable - def pop_auth_token(__, values): - g.auth_token = values.pop("auth_token") - - -def disable_failed_auth_redirect_for_blueprint(bp): - lm.blueprint_login_views[bp.name] = None - - -def get_auth_token(): - if "auth_token" in g: - return g.get("auth_token") - else: - return None - - -def requires_kobo_auth(f): - @wraps(f) - def inner(*args, **kwargs): - auth_token = get_auth_token() - if auth_token is not None: - user = ( - ub.session.query(ub.User) - .join(ub.RemoteAuthToken) - .filter(ub.RemoteAuthToken.auth_token == auth_token).filter(ub.RemoteAuthToken.token_type==1) - .first() - ) - if user is not None: - login_user(user) - return f(*args, **kwargs) - log.debug("Received Kobo request without a recognizable auth token.") - return abort(401) - return inner - - kobo_auth = Blueprint("kobo_auth", __name__, url_prefix="/kobo_auth") @@ -165,3 +126,40 @@ def delete_auth_token(user_id): .filter(ub.RemoteAuthToken.token_type==1).delete() return ub.session_commit() + + +def disable_failed_auth_redirect_for_blueprint(bp): + lm.blueprint_login_views[bp.name] = None + + +def get_auth_token(): + if "auth_token" in g: + return g.get("auth_token") + else: + return None + + +def register_url_value_preprocessor(kobo): + @kobo.url_value_preprocessor + # pylint: disable=unused-variable + def pop_auth_token(__, values): + g.auth_token = values.pop("auth_token") + + +def requires_kobo_auth(f): + @wraps(f) + def inner(*args, **kwargs): + auth_token = get_auth_token() + if auth_token is not None: + user = ( + ub.session.query(ub.User) + .join(ub.RemoteAuthToken) + .filter(ub.RemoteAuthToken.auth_token == auth_token).filter(ub.RemoteAuthToken.token_type==1) + .first() + ) + if user is not None: + login_user(user) + return f(*args, **kwargs) + log.debug("Received Kobo request without a recognizable auth token.") + return abort(401) + return inner diff --git a/cps/main.py b/cps/main.py new file mode 100644 index 00000000..d3591c06 --- /dev/null +++ b/cps/main.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- + +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2012-2022 OzzieIsaacs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +import sys + +from . import create_app +from .jinjia import jinjia +from .remotelogin import remotelogin + +def main(): + app = create_app() + + from .web import web + from .opds import opds + from .admin import admi + from .gdrive import gdrive + from .editbooks import editbook + from .about import about + from .search import search + from .search_metadata import meta + from .shelf import shelf + from .tasks_status import tasks + from .error_handler import init_errorhandler + try: + from .kobo import kobo, get_kobo_activated + from .kobo_auth import kobo_auth + kobo_available = get_kobo_activated() + except (ImportError, AttributeError): # Catch also error for not installed flask-WTF (missing csrf decorator) + kobo_available = False + + try: + from .oauth_bb import oauth + oauth_available = True + except ImportError: + oauth_available = False + + from . import web_server + init_errorhandler() + + app.register_blueprint(search) + app.register_blueprint(tasks) + app.register_blueprint(web) + app.register_blueprint(opds) + app.register_blueprint(jinjia) + app.register_blueprint(about) + app.register_blueprint(shelf) + app.register_blueprint(admi) + app.register_blueprint(remotelogin) + app.register_blueprint(meta) + app.register_blueprint(gdrive) + app.register_blueprint(editbook) + if kobo_available: + app.register_blueprint(kobo) + app.register_blueprint(kobo_auth) + if oauth_available: + app.register_blueprint(oauth) + success = web_server.start() + sys.exit(0 if success else 1) diff --git a/cps/oauth.py b/cps/oauth.py index f8e5c1fd..0caa61ec 100644 --- a/cps/oauth.py +++ b/cps/oauth.py @@ -19,18 +19,12 @@ from flask import session try: - from flask_dance.consumer.backend.sqla import SQLAlchemyBackend, first, _get_real_user + from flask_dance.consumer.storage.sqla import SQLAlchemyStorage as SQLAlchemyBackend + from flask_dance.consumer.storage.sqla import first, _get_real_user from sqlalchemy.orm.exc import NoResultFound - backend_resultcode = False # prevent storing values with this resultcode + backend_resultcode = True # prevent storing values with this resultcode except ImportError: - # fails on flask-dance >1.3, due to renaming - try: - from flask_dance.consumer.storage.sqla import SQLAlchemyStorage as SQLAlchemyBackend - from flask_dance.consumer.storage.sqla import first, _get_real_user - from sqlalchemy.orm.exc import NoResultFound - backend_resultcode = True # prevent storing values with this resultcode - except ImportError: - pass + pass class OAuthBackend(SQLAlchemyBackend): diff --git a/cps/opds.py b/cps/opds.py index cb8f397e..60dbd551 100644 --- a/cps/opds.py +++ b/cps/opds.py @@ -26,15 +26,18 @@ from functools import wraps from flask import Blueprint, request, render_template, Response, g, make_response, abort from flask_login import current_user +from flask_babel import get_locale from sqlalchemy.sql.expression import func, text, or_, and_, true from sqlalchemy.exc import InvalidRequestError, OperationalError from werkzeug.security import check_password_hash -from . import constants, logger, config, db, calibre_db, ub, services, get_locale, isoLanguages + +from . 
import constants, logger, config, db, calibre_db, ub, services, isoLanguages from .helper import get_download_link, get_book_cover from .pagination import Pagination from .web import render_read_books from .usermanagement import load_user_from_request from flask_babel import gettext as _ + opds = Blueprint('opds', __name__) log = logger.create() @@ -53,20 +56,6 @@ def requires_basic_auth_if_no_ano(f): return decorated -class FeedObject: - def __init__(self, rating_id, rating_name): - self.rating_id = rating_id - self.rating_name = rating_name - - @property - def id(self): - return self.rating_id - - @property - def name(self): - return self.rating_name - - @opds.route("/opds/") @opds.route("/opds") @requires_basic_auth_if_no_ano @@ -465,6 +454,20 @@ def feed_unread_books(): return render_xml_template('feed.xml', entries=result, pagination=pagination) +class FeedObject: + def __init__(self, rating_id, rating_name): + self.rating_id = rating_id + self.rating_name = rating_name + + @property + def id(self): + return self.rating_id + + @property + def name(self): + return self.rating_name + + def feed_search(term): if term: entries, __, ___ = calibre_db.get_search_results(term, config=config) diff --git a/cps/redirect.py b/cps/redirect.py index 8bd68109..9382a205 100644 --- a/cps/redirect.py +++ b/cps/redirect.py @@ -29,7 +29,6 @@ from urllib.parse import urlparse, urljoin - from flask import request, url_for, redirect diff --git a/cps/render_template.py b/cps/render_template.py index d2f40d6c..0750a9c4 100644 --- a/cps/render_template.py +++ b/cps/render_template.py @@ -16,9 +16,8 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from flask import render_template, request +from flask import render_template, g, abort, request from flask_babel import gettext as _ -from flask import g, abort from werkzeug.local import LocalProxy from flask_login import current_user diff --git a/cps/schedule.py b/cps/schedule.py new file mode 100644 index 00000000..faadfb7e --- /dev/null +++ b/cps/schedule.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- + +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2020 mmonkey +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import datetime + +from . 
import config, constants +from .services.background_scheduler import BackgroundScheduler, use_APScheduler +from .tasks.database import TaskReconnectDatabase +from .tasks.thumbnail import TaskGenerateCoverThumbnails, TaskGenerateSeriesThumbnails, TaskClearCoverThumbnailCache +from .services.worker import WorkerThread + + +def get_scheduled_tasks(reconnect=True): + tasks = list() + # config.schedule_reconnect or + # Reconnect Calibre database (metadata.db) + if reconnect: + tasks.append([lambda: TaskReconnectDatabase(), 'reconnect', False]) + + # Generate all missing book cover thumbnails + if config.schedule_generate_book_covers: + tasks.append([lambda: TaskClearCoverThumbnailCache(0), 'delete superfluous book covers', True]) + tasks.append([lambda: TaskGenerateCoverThumbnails(), 'generate book covers', False]) + + # Generate all missing series thumbnails + if config.schedule_generate_series_covers: + tasks.append([lambda: TaskGenerateSeriesThumbnails(), 'generate book covers', False]) + + return tasks + + +def end_scheduled_tasks(): + worker = WorkerThread.get_instance() + for __, __, __, task, __ in worker.tasks: + if task.scheduled and task.is_cancellable: + worker.end_task(task.id) + + +def register_scheduled_tasks(reconnect=True): + scheduler = BackgroundScheduler() + + if scheduler: + # Remove all existing jobs + scheduler.remove_all_jobs() + + start = config.schedule_start_time + duration = config.schedule_duration + + # Register scheduled tasks + scheduler.schedule_tasks(tasks=get_scheduled_tasks(reconnect), trigger='cron', hour=start) + end_time = calclulate_end_time(start, duration) + scheduler.schedule(func=end_scheduled_tasks, trigger='cron', name="end scheduled task", hour=end_time.hour, + minute=end_time.minute) + + # Kick-off tasks, if they should currently be running + if should_task_be_running(start, duration): + scheduler.schedule_tasks_immediately(tasks=get_scheduled_tasks(reconnect)) + + +def register_startup_tasks(): + scheduler = BackgroundScheduler() + + if scheduler: + start = config.schedule_start_time + duration = config.schedule_duration + + # Run scheduled tasks immediately for development and testing + # Ignore tasks that should currently be running, as these will be added when registering scheduled tasks + if constants.APP_MODE in ['development', 'test'] and not should_task_be_running(start, duration): + scheduler.schedule_tasks_immediately(tasks=get_scheduled_tasks(False)) + + +def should_task_be_running(start, duration): + now = datetime.datetime.now() + start_time = datetime.datetime.now().replace(hour=start, minute=0, second=0, microsecond=0) + end_time = start_time + datetime.timedelta(hours=duration // 60, minutes=duration % 60) + return start_time < now < end_time + +def calclulate_end_time(start, duration): + start_time = datetime.datetime.now().replace(hour=start, minute=0) + return start_time + datetime.timedelta(hours=duration // 60, minutes=duration % 60) + diff --git a/cps/search.py b/cps/search.py new file mode 100644 index 00000000..602881bf --- /dev/null +++ b/cps/search.py @@ -0,0 +1,418 @@ +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2022 OzzieIsaacs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
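# Illustrative sketch, not part of the patch: the scheduling-window arithmetic behind
# should_task_be_running() and calclulate_end_time() in cps/schedule.py above, with
# made-up values schedule_start_time=4 (4 AM) and schedule_duration=150 (minutes).
import datetime
start_time = datetime.datetime.now().replace(hour=4, minute=0, second=0, microsecond=0)
end_time = start_time + datetime.timedelta(hours=150 // 60, minutes=150 % 60)   # 06:30 the same day
is_window_open = start_time < datetime.datetime.now() < end_time   # scheduled tasks only run inside this window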
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import json +from datetime import datetime + +from flask import Blueprint, request, redirect, url_for, flash +from flask import session as flask_session +from flask_login import current_user +from flask_babel import format_date +from flask_babel import gettext as _ +from sqlalchemy.sql.expression import func, not_, and_, or_, text +from sqlalchemy.sql.functions import coalesce + +from . import logger, db, calibre_db, config, ub +from .usermanagement import login_required_if_no_ano +from .render_template import render_title_template +from .pagination import Pagination + +search = Blueprint('search', __name__) + +log = logger.create() + + +@search.route("/search", methods=["GET"]) +@login_required_if_no_ano +def simple_search(): + term = request.args.get("query") + if term: + return redirect(url_for('web.books_list', data="search", sort_param='stored', query=term.strip())) + else: + return render_title_template('search.html', + searchterm="", + result_count=0, + title=_(u"Search"), + page="search") + + +@search.route("/advsearch", methods=['POST']) +@login_required_if_no_ano +def advanced_search(): + values = dict(request.form) + params = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie', 'include_shelf', 'exclude_shelf', + 'include_language', 'exclude_language', 'include_extension', 'exclude_extension'] + for param in params: + values[param] = list(request.form.getlist(param)) + flask_session['query'] = json.dumps(values) + return redirect(url_for('web.books_list', data="advsearch", sort_param='stored', query="")) + + +@search.route("/advsearch", methods=['GET']) +@login_required_if_no_ano +def advanced_search_form(): + # Build custom columns names + cc = calibre_db.get_cc_columns(config, filter_config_custom_read=True) + return render_prepare_search_form(cc) + + +def adv_search_custom_columns(cc, term, q): + for c in cc: + if c.datatype == "datetime": + custom_start = term.get('custom_column_' + str(c.id) + '_start') + custom_end = term.get('custom_column_' + str(c.id) + '_end') + if custom_start: + q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( + func.datetime(db.cc_classes[c.id].value) >= func.datetime(custom_start))) + if custom_end: + q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( + func.datetime(db.cc_classes[c.id].value) <= func.datetime(custom_end))) + else: + custom_query = term.get('custom_column_' + str(c.id)) + if custom_query != '' and custom_query is not None: + if c.datatype == 'bool': + q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( + db.cc_classes[c.id].value == (custom_query == "True"))) + elif c.datatype == 'int' or c.datatype == 'float': + q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( + db.cc_classes[c.id].value == custom_query)) + elif c.datatype == 'rating': + q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( + db.cc_classes[c.id].value == int(float(custom_query) * 2))) + else: + q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( + func.lower(db.cc_classes[c.id].value).ilike("%" + custom_query + "%"))) + return q + + +def adv_search_language(q, include_languages_inputs, 
exclude_languages_inputs):
+    if current_user.filter_language() != "all":
+        q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
+    else:
+        for language in include_languages_inputs:
+            q = q.filter(db.Books.languages.any(db.Languages.id == language))
+        for language in exclude_languages_inputs:
+            q = q.filter(not_(db.Books.languages.any(db.Languages.id == language)))
+    return q
+
+
+def adv_search_ratings(q, rating_high, rating_low):
+    if rating_high:
+        rating_high = int(rating_high) * 2
+        q = q.filter(db.Books.ratings.any(db.Ratings.rating <= rating_high))
+    if rating_low:
+        rating_low = int(rating_low) * 2
+        q = q.filter(db.Books.ratings.any(db.Ratings.rating >= rating_low))
+    return q
+
+
+def adv_search_read_status(q, read_status):
+    if read_status:
+        if config.config_read_column:
+            try:
+                if read_status == "True":
+                    q = q.join(db.cc_classes[config.config_read_column], isouter=True) \
+                        .filter(db.cc_classes[config.config_read_column].value == True)
+                else:
+                    q = q.join(db.cc_classes[config.config_read_column], isouter=True) \
+                        .filter(coalesce(db.cc_classes[config.config_read_column].value, False) != True)
+            except (KeyError, AttributeError):
+                log.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
+                flash(_("Custom Column No.%(column)d is not existing in calibre database",
+                        column=config.config_read_column),
+                      category="error")
+                return q
+        else:
+            if read_status == "True":
+                q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
+                    .filter(ub.ReadBook.user_id == int(current_user.id),
+                            ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
+            else:
+                q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
+                    .filter(ub.ReadBook.user_id == int(current_user.id),
+                            coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED)
+    return q
+
+
+def adv_search_extension(q, include_extension_inputs, exclude_extension_inputs):
+    for extension in include_extension_inputs:
+        q = q.filter(db.Books.data.any(db.Data.format == extension))
+    for extension in exclude_extension_inputs:
+        q = q.filter(not_(db.Books.data.any(db.Data.format == extension)))
+    return q
+
+
+def adv_search_tag(q, include_tag_inputs, exclude_tag_inputs):
+    for tag in include_tag_inputs:
+        q = q.filter(db.Books.tags.any(db.Tags.id == tag))
+    for tag in exclude_tag_inputs:
+        q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
+    return q
+
+
+def adv_search_serie(q, include_series_inputs, exclude_series_inputs):
+    for serie in include_series_inputs:
+        q = q.filter(db.Books.series.any(db.Series.id == serie))
+    for serie in exclude_series_inputs:
+        q = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
+    return q
+
+
+def adv_search_shelf(q, include_shelf_inputs, exclude_shelf_inputs):
+    q = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)\
+        .filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(exclude_shelf_inputs)))
+    if len(include_shelf_inputs) > 0:
+        q = q.filter(ub.BookShelf.shelf.in_(include_shelf_inputs))
+    return q
+
+
+def extend_search_term(searchterm,
+                       author_name,
+                       book_title,
+                       publisher,
+                       pub_start,
+                       pub_end,
+                       tags,
+                       rating_high,
+                       rating_low,
+                       read_status,
+                       ):
+    searchterm.extend((author_name.replace('|', ','), book_title, publisher))
+    if pub_start:
+        try:
+            searchterm.extend([_(u"Published after ") +
+                               format_date(datetime.strptime(pub_start, "%Y-%m-%d"),
+                                           format='medium')])
+        except ValueError:
+            pub_start = u""
+    if pub_end:
+        try:
+
searchterm.extend([_(u"Published before ") + + format_date(datetime.strptime(pub_end, "%Y-%m-%d"), + format='medium')]) + except ValueError: + pub_end = u"" + elements = {'tag': db.Tags, 'serie':db.Series, 'shelf':ub.Shelf} + for key, db_element in elements.items(): + tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['include_' + key])).all() + searchterm.extend(tag.name for tag in tag_names) + tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['exclude_' + key])).all() + searchterm.extend(tag.name for tag in tag_names) + language_names = calibre_db.session.query(db.Languages). \ + filter(db.Languages.id.in_(tags['include_language'])).all() + if language_names: + language_names = calibre_db.speaking_language(language_names) + searchterm.extend(language.name for language in language_names) + language_names = calibre_db.session.query(db.Languages). \ + filter(db.Languages.id.in_(tags['exclude_language'])).all() + if language_names: + language_names = calibre_db.speaking_language(language_names) + searchterm.extend(language.name for language in language_names) + if rating_high: + searchterm.extend([_(u"Rating <= %(rating)s", rating=rating_high)]) + if rating_low: + searchterm.extend([_(u"Rating >= %(rating)s", rating=rating_low)]) + if read_status: + searchterm.extend([_(u"Read Status = %(status)s", status=read_status)]) + searchterm.extend(ext for ext in tags['include_extension']) + searchterm.extend(ext for ext in tags['exclude_extension']) + # handle custom columns + searchterm = " + ".join(filter(None, searchterm)) + return searchterm, pub_start, pub_end + + +def render_adv_search_results(term, offset=None, order=None, limit=None): + sort = order[0] if order else [db.Books.sort] + pagination = None + + cc = calibre_db.get_cc_columns(config, filter_config_custom_read=True) + calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase) + if not config.config_read_column: + query = (calibre_db.session.query(db.Books, ub.ArchivedBook.is_archived, ub.ReadBook).select_from(db.Books) + .outerjoin(ub.ReadBook, and_(db.Books.id == ub.ReadBook.book_id, + int(current_user.id) == ub.ReadBook.user_id))) + else: + try: + read_column = cc[config.config_read_column] + query = (calibre_db.session.query(db.Books, ub.ArchivedBook.is_archived, read_column.value) + .select_from(db.Books) + .outerjoin(read_column, read_column.book == db.Books.id)) + except (KeyError, AttributeError): + log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column) + # Skip linking read column + query = calibre_db.session.query(db.Books, ub.ArchivedBook.is_archived, None) + query = query.outerjoin(ub.ArchivedBook, and_(db.Books.id == ub.ArchivedBook.book_id, + int(current_user.id) == ub.ArchivedBook.user_id)) + + q = query.outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book)\ + .outerjoin(db.Series)\ + .filter(calibre_db.common_filters(True)) + + # parse multi selects to a complete dict + tags = dict() + elements = ['tag', 'serie', 'shelf', 'language', 'extension'] + for element in elements: + tags['include_' + element] = term.get('include_' + element) + tags['exclude_' + element] = term.get('exclude_' + element) + + author_name = term.get("author_name") + book_title = term.get("book_title") + publisher = term.get("publisher") + pub_start = term.get("publishstart") + pub_end = term.get("publishend") + rating_low = term.get("ratinghigh") + rating_high = term.get("ratinglow") + 
description = term.get("comment") + read_status = term.get("read_status") + if author_name: + author_name = author_name.strip().lower().replace(',', '|') + if book_title: + book_title = book_title.strip().lower() + if publisher: + publisher = publisher.strip().lower() + + search_term = [] + cc_present = False + for c in cc: + if c.datatype == "datetime": + column_start = term.get('custom_column_' + str(c.id) + '_start') + column_end = term.get('custom_column_' + str(c.id) + '_end') + if column_start: + search_term.extend([u"{} >= {}".format(c.name, + format_date(datetime.strptime(column_start, "%Y-%m-%d").date(), + format='medium') + )]) + cc_present = True + if column_end: + search_term.extend([u"{} <= {}".format(c.name, + format_date(datetime.strptime(column_end, "%Y-%m-%d").date(), + format='medium') + )]) + cc_present = True + elif term.get('custom_column_' + str(c.id)): + search_term.extend([(u"{}: {}".format(c.name, term.get('custom_column_' + str(c.id))))]) + cc_present = True + + if any(tags.values()) or author_name or book_title or publisher or pub_start or pub_end or rating_low \ + or rating_high or description or cc_present or read_status: + search_term, pub_start, pub_end = extend_search_term(search_term, + author_name, + book_title, + publisher, + pub_start, + pub_end, + tags, + rating_high, + rating_low, + read_status) + if author_name: + q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + author_name + "%"))) + if book_title: + q = q.filter(func.lower(db.Books.title).ilike("%" + book_title + "%")) + if pub_start: + q = q.filter(func.datetime(db.Books.pubdate) > func.datetime(pub_start)) + if pub_end: + q = q.filter(func.datetime(db.Books.pubdate) < func.datetime(pub_end)) + q = adv_search_read_status(q, read_status) + if publisher: + q = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + publisher + "%"))) + q = adv_search_tag(q, tags['include_tag'], tags['exclude_tag']) + q = adv_search_serie(q, tags['include_serie'], tags['exclude_serie']) + q = adv_search_shelf(q, tags['include_shelf'], tags['exclude_shelf']) + q = adv_search_extension(q, tags['include_extension'], tags['exclude_extension']) + q = adv_search_language(q, tags['include_language'], tags['exclude_language']) + q = adv_search_ratings(q, rating_high, rating_low) + + if description: + q = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike("%" + description + "%"))) + + # search custom columns + try: + q = adv_search_custom_columns(cc, term, q) + except AttributeError as ex: + log.debug_or_exception(ex) + flash(_("Error on search for custom columns, please restart Calibre-Web"), category="error") + + q = q.order_by(*sort).all() + flask_session['query'] = json.dumps(term) + ub.store_combo_ids(q) + result_count = len(q) + if offset is not None and limit is not None: + offset = int(offset) + limit_all = offset + int(limit) + pagination = Pagination((offset / (int(limit)) + 1), limit, result_count) + else: + offset = 0 + limit_all = result_count + entries = calibre_db.order_authors(q[offset:limit_all], list_return=True, combined=True) + return render_title_template('search.html', + adv_searchterm=search_term, + pagination=pagination, + entries=entries, + result_count=result_count, + title=_(u"Advanced Search"), page="advsearch", + order=order[1]) + + +def render_prepare_search_form(cc): + # prepare data for search-form + tags = calibre_db.session.query(db.Tags)\ + .join(db.books_tags_link)\ + .join(db.Books)\ + .filter(calibre_db.common_filters()) \ + 
.group_by(text('books_tags_link.tag'))\ + .order_by(db.Tags.name).all() + series = calibre_db.session.query(db.Series)\ + .join(db.books_series_link)\ + .join(db.Books)\ + .filter(calibre_db.common_filters()) \ + .group_by(text('books_series_link.series'))\ + .order_by(db.Series.name)\ + .filter(calibre_db.common_filters()).all() + shelves = ub.session.query(ub.Shelf)\ + .filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(current_user.id)))\ + .order_by(ub.Shelf.name).all() + extensions = calibre_db.session.query(db.Data)\ + .join(db.Books)\ + .filter(calibre_db.common_filters()) \ + .group_by(db.Data.format)\ + .order_by(db.Data.format).all() + if current_user.filter_language() == u"all": + languages = calibre_db.speaking_language() + else: + languages = None + return render_title_template('search_form.html', tags=tags, languages=languages, extensions=extensions, + series=series,shelves=shelves, title=_(u"Advanced Search"), cc=cc, page="advsearch") + + +def render_search_results(term, offset=None, order=None, limit=None): + join = db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series + entries, result_count, pagination = calibre_db.get_search_results(term, + config, + offset, + order, + limit, + *join) + return render_title_template('search.html', + searchterm=term, + pagination=pagination, + query=term, + adv_searchterm=term, + entries=entries, + result_count=result_count, + title=_(u"Search"), + page="search", + order=order[1]) + + diff --git a/cps/search_metadata.py b/cps/search_metadata.py index 0070e78f..e018da32 100644 --- a/cps/search_metadata.py +++ b/cps/search_metadata.py @@ -22,17 +22,16 @@ import inspect import json import os import sys -# from time import time - from flask import Blueprint, Response, request, url_for from flask_login import current_user from flask_login import login_required +from flask_babel import get_locale from sqlalchemy.exc import InvalidRequestError, OperationalError from sqlalchemy.orm.attributes import flag_modified from cps.services.Metadata import Metadata -from . import constants, get_locale, logger, ub, web_server +from . import constants, logger, ub, web_server # current_milli_time = lambda: int(round(time() * 1000)) @@ -57,9 +56,10 @@ for f in modules: try: importlib.import_module("cps.metadata_provider." + a) new_list.append(a) - except (ImportError, IndentationError, SyntaxError) as e: - log.error("Import error for metadata source: {} - {}".format(a, e)) - pass + except (IndentationError, SyntaxError) as e: + log.error("Syntax error for metadata source: {} - {}".format(a, e)) + except ImportError as e: + log.debug("Import error for metadata source: {} - {}".format(a, e)) def list_classes(provider_list): diff --git a/cps/services/__init__.py b/cps/services/__init__.py index 32a9d485..f93eca34 100644 --- a/cps/services/__init__.py +++ b/cps/services/__init__.py @@ -18,11 +18,10 @@ from .. import logger - log = logger.create() - -try: from . import goodreads_support +try: + from . 
import goodreads_support except ImportError as err: log.debug("Cannot import goodreads, showing authors-metadata will not work: %s", err) goodreads_support = None diff --git a/cps/services/background_scheduler.py b/cps/services/background_scheduler.py new file mode 100644 index 00000000..27285fd9 --- /dev/null +++ b/cps/services/background_scheduler.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- + +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2020 mmonkey +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import atexit + +from .. import logger +from .worker import WorkerThread + +try: + from apscheduler.schedulers.background import BackgroundScheduler as BScheduler + use_APScheduler = True +except (ImportError, RuntimeError) as e: + use_APScheduler = False + log = logger.create() + log.info('APScheduler not found. Unable to schedule tasks.') + + +class BackgroundScheduler: + _instance = None + + def __new__(cls): + if not use_APScheduler: + return False + + if cls._instance is None: + cls._instance = super(BackgroundScheduler, cls).__new__(cls) + cls.log = logger.create() + cls.scheduler = BScheduler() + cls.scheduler.start() + + atexit.register(lambda: cls.scheduler.shutdown()) + + return cls._instance + + def schedule(self, func, trigger, name=None, **trigger_args): + if use_APScheduler: + return self.scheduler.add_job(func=func, trigger=trigger, name=name, **trigger_args) + + # Expects a lambda expression for the task + def schedule_task(self, task, user=None, name=None, hidden=False, trigger='cron', **trigger_args): + if use_APScheduler: + def scheduled_task(): + worker_task = task() + worker_task.scheduled = True + WorkerThread.add(user, worker_task, hidden=hidden) + return self.schedule(func=scheduled_task, trigger=trigger, name=name, **trigger_args) + + # Expects a list of lambda expressions for the tasks + def schedule_tasks(self, tasks, user=None, trigger='cron', **trigger_args): + if use_APScheduler: + for task in tasks: + self.schedule_task(task[0], user=user, trigger=trigger, name=task[1], hidden=task[2], **trigger_args) + + # Expects a lambda expression for the task + def schedule_task_immediately(self, task, user=None, name=None, hidden=False): + if use_APScheduler: + def immediate_task(): + WorkerThread.add(user, task(), hidden) + return self.schedule(func=immediate_task, trigger='date', name=name) + + # Expects a list of lambda expressions for the tasks + def schedule_tasks_immediately(self, tasks, user=None): + if use_APScheduler: + for task in tasks: + self.schedule_task_immediately(task[0], user, name="immediately " + task[1], hidden=task[2]) + + # Remove all jobs + def remove_all_jobs(self): + self.scheduler.remove_all_jobs() diff --git a/cps/services/worker.py b/cps/services/worker.py index aea6d640..63d83bfb 100644 --- a/cps/services/worker.py +++ b/cps/services/worker.py @@ -37,11 +37,13 @@ STAT_WAITING = 0 STAT_FAIL = 1 STAT_STARTED = 2 STAT_FINISH_SUCCESS = 
3 +STAT_ENDED = 4 +STAT_CANCELLED = 5 # Only retain this many tasks in dequeued list TASK_CLEANUP_TRIGGER = 20 -QueuedTask = namedtuple('QueuedTask', 'num, user, added, task') +QueuedTask = namedtuple('QueuedTask', 'num, user, added, task, hidden') def _get_main_thread(): @@ -51,7 +53,6 @@ def _get_main_thread(): raise Exception("main thread not found?!") - class ImprovedQueue(queue.Queue): def to_list(self): """ @@ -61,12 +62,13 @@ class ImprovedQueue(queue.Queue): with self.mutex: return list(self.queue) + # Class for all worker tasks in the background class WorkerThread(threading.Thread): _instance = None @classmethod - def getInstance(cls): + def get_instance(cls): if cls._instance is None: cls._instance = WorkerThread() return cls._instance @@ -82,15 +84,17 @@ class WorkerThread(threading.Thread): self.start() @classmethod - def add(cls, user, task): - ins = cls.getInstance() + def add(cls, user, task, hidden=False): + ins = cls.get_instance() ins.num += 1 - log.debug("Add Task for user: {} - {}".format(user, task)) + username = user if user is not None else 'System' + log.debug("Add Task for user: {} - {}".format(username, task)) ins.queue.put(QueuedTask( num=ins.num, - user=user, + user=username, added=datetime.now(), task=task, + hidden=hidden )) @property @@ -111,10 +115,10 @@ class WorkerThread(threading.Thread): if delta > TASK_CLEANUP_TRIGGER: ret = alive else: - # otherwise, lop off the oldest dead tasks until we hit the target trigger - ret = sorted(dead, key=lambda x: x.task.end_time)[-TASK_CLEANUP_TRIGGER:] + alive + # otherwise, loop off the oldest dead tasks until we hit the target trigger + ret = sorted(dead, key=lambda y: y.task.end_time)[-TASK_CLEANUP_TRIGGER:] + alive - self.dequeued = sorted(ret, key=lambda x: x.num) + self.dequeued = sorted(ret, key=lambda y: y.num) # Main thread loop starting the different tasks def run(self): @@ -141,11 +145,21 @@ class WorkerThread(threading.Thread): # sometimes tasks (like Upload) don't actually have work to do and are created as already finished if item.task.stat is STAT_WAITING: - # CalibreTask.start() should wrap all exceptions in it's own error handling + # CalibreTask.start() should wrap all exceptions in its own error handling item.task.start(self) + # remove self_cleanup tasks and hidden "System Tasks" from list + if item.task.self_cleanup or item.hidden: + self.dequeued.remove(item) + self.queue.task_done() + def end_task(self, task_id): + ins = self.get_instance() + for __, __, __, task, __ in ins.tasks: + if str(task.id) == str(task_id) and task.is_cancellable: + task.stat = STAT_CANCELLED if task.stat == STAT_WAITING else STAT_ENDED + class CalibreTask: __metaclass__ = abc.ABCMeta @@ -158,10 +172,12 @@ class CalibreTask: self.end_time = None self.message = message self.id = uuid.uuid4() + self.self_cleanup = False + self._scheduled = False @abc.abstractmethod def run(self, worker_thread): - """Provides the caller some human-readable name for this class""" + """The main entry-point for this task""" raise NotImplementedError @abc.abstractmethod @@ -169,6 +185,11 @@ class CalibreTask: """Provides the caller some human-readable name for this class""" raise NotImplementedError + @abc.abstractmethod + def is_cancellable(self): + """Does this task gracefully handle being cancelled (STAT_ENDED, STAT_CANCELLED)?""" + raise NotImplementedError + def start(self, *args): self.start_time = datetime.now() self.stat = STAT_STARTED @@ -219,15 +240,23 @@ class CalibreTask: We have a separate dictating this because there may be certain 
tasks that want to override this """ # By default, we're good to clean a task if it's "Done" - return self.stat in (STAT_FINISH_SUCCESS, STAT_FAIL) + return self.stat in (STAT_FINISH_SUCCESS, STAT_FAIL, STAT_ENDED, STAT_CANCELLED) - '''@progress.setter - def progress(self, x): - if x > 1: - x = 1 - if x < 0: - x = 0 - self._progress = x''' + @property + def self_cleanup(self): + return self._self_cleanup + + @self_cleanup.setter + def self_cleanup(self, is_self_cleanup): + self._self_cleanup = is_self_cleanup + + @property + def scheduled(self): + return self._scheduled + + @scheduled.setter + def scheduled(self, is_scheduled): + self._scheduled = is_scheduled def _handleError(self, error_message): self.stat = STAT_FAIL diff --git a/cps/shelf.py b/cps/shelf.py index 35f2941d..49d9a633 100644 --- a/cps/shelf.py +++ b/cps/shelf.py @@ -33,27 +33,9 @@ from . import calibre_db, config, db, logger, ub from .render_template import render_title_template from .usermanagement import login_required_if_no_ano -shelf = Blueprint('shelf', __name__) log = logger.create() - -def check_shelf_edit_permissions(cur_shelf): - if not cur_shelf.is_public and not cur_shelf.user_id == int(current_user.id): - log.error("User {} not allowed to edit shelf: {}".format(current_user.id, cur_shelf.name)) - return False - if cur_shelf.is_public and not current_user.role_edit_shelfs(): - log.info("User {} not allowed to edit public shelves".format(current_user.id)) - return False - return True - - -def check_shelf_view_permissions(cur_shelf): - if cur_shelf.is_public: - return True - if current_user.is_anonymous or cur_shelf.user_id != current_user.id: - log.error("User is unauthorized to view non-public shelf: {}".format(cur_shelf.name)) - return False - return True +shelf = Blueprint('shelf', __name__) @shelf.route("/shelf/add//", methods=["POST"]) @@ -238,96 +220,6 @@ def edit_shelf(shelf_id): return create_edit_shelf(shelf, page_title=_(u"Edit a shelf"), page="shelfedit", shelf_id=shelf_id) -# if shelf ID is set, we are editing a shelf -def create_edit_shelf(shelf, page_title, page, shelf_id=False): - sync_only_selected_shelves = current_user.kobo_only_shelves_sync - # calibre_db.session.query(ub.Shelf).filter(ub.Shelf.user_id == current_user.id).filter(ub.Shelf.kobo_sync).count() - if request.method == "POST": - to_save = request.form.to_dict() - if not current_user.role_edit_shelfs() and to_save.get("is_public") == "on": - flash(_(u"Sorry you are not allowed to create a public shelf"), category="error") - return redirect(url_for('web.index')) - is_public = 1 if to_save.get("is_public") == "on" else 0 - if config.config_kobo_sync: - shelf.kobo_sync = True if to_save.get("kobo_sync") else False - if shelf.kobo_sync: - ub.session.query(ub.ShelfArchive).filter(ub.ShelfArchive.user_id == current_user.id).filter( - ub.ShelfArchive.uuid == shelf.uuid).delete() - ub.session_commit() - shelf_title = to_save.get("title", "") - if check_shelf_is_unique(shelf, shelf_title, is_public, shelf_id): - shelf.name = shelf_title - shelf.is_public = is_public - if not shelf_id: - shelf.user_id = int(current_user.id) - ub.session.add(shelf) - shelf_action = "created" - flash_text = _(u"Shelf %(title)s created", title=shelf_title) - else: - shelf_action = "changed" - flash_text = _(u"Shelf %(title)s changed", title=shelf_title) - try: - ub.session.commit() - log.info(u"Shelf {} {}".format(shelf_title, shelf_action)) - flash(flash_text, category="success") - return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id)) - except 
(OperationalError, InvalidRequestError) as ex: - ub.session.rollback() - log.error_or_exception(ex) - log.error_or_exception("Settings Database error: {}".format(ex)) - flash(_(u"Database error: %(error)s.", error=ex.orig), category="error") - except Exception as ex: - ub.session.rollback() - log.error_or_exception(ex) - flash(_(u"There was an error"), category="error") - return render_title_template('shelf_edit.html', - shelf=shelf, - title=page_title, - page=page, - kobo_sync_enabled=config.config_kobo_sync, - sync_only_selected_shelves=sync_only_selected_shelves) - - -def check_shelf_is_unique(shelf, title, is_public, shelf_id=False): - if shelf_id: - ident = ub.Shelf.id != shelf_id - else: - ident = true() - if is_public == 1: - is_shelf_name_unique = ub.session.query(ub.Shelf) \ - .filter((ub.Shelf.name == title) & (ub.Shelf.is_public == 1)) \ - .filter(ident) \ - .first() is None - - if not is_shelf_name_unique: - log.error("A public shelf with the name '{}' already exists.".format(title)) - flash(_(u"A public shelf with the name '%(title)s' already exists.", title=title), - category="error") - else: - is_shelf_name_unique = ub.session.query(ub.Shelf) \ - .filter((ub.Shelf.name == title) & (ub.Shelf.is_public == 0) & - (ub.Shelf.user_id == int(current_user.id))) \ - .filter(ident) \ - .first() is None - - if not is_shelf_name_unique: - log.error("A private shelf with the name '{}' already exists.".format(title)) - flash(_(u"A private shelf with the name '%(title)s' already exists.", title=title), - category="error") - return is_shelf_name_unique - - -def delete_shelf_helper(cur_shelf): - if not cur_shelf or not check_shelf_edit_permissions(cur_shelf): - return False - shelf_id = cur_shelf.id - ub.session.delete(cur_shelf) - ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete() - ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.user_id)) - ub.session_commit("successfully deleted Shelf {}".format(cur_shelf.name)) - return True - - @shelf.route("/shelf/delete/", methods=["POST"]) @login_required def delete_shelf(shelf_id): @@ -392,6 +284,115 @@ def order_shelf(shelf_id): abort(404) +def check_shelf_edit_permissions(cur_shelf): + if not cur_shelf.is_public and not cur_shelf.user_id == int(current_user.id): + log.error("User {} not allowed to edit shelf: {}".format(current_user.id, cur_shelf.name)) + return False + if cur_shelf.is_public and not current_user.role_edit_shelfs(): + log.info("User {} not allowed to edit public shelves".format(current_user.id)) + return False + return True + + +def check_shelf_view_permissions(cur_shelf): + if cur_shelf.is_public: + return True + if current_user.is_anonymous or cur_shelf.user_id != current_user.id: + log.error("User is unauthorized to view non-public shelf: {}".format(cur_shelf.name)) + return False + return True + + +# if shelf ID is set, we are editing a shelf +def create_edit_shelf(shelf, page_title, page, shelf_id=False): + sync_only_selected_shelves = current_user.kobo_only_shelves_sync + # calibre_db.session.query(ub.Shelf).filter(ub.Shelf.user_id == current_user.id).filter(ub.Shelf.kobo_sync).count() + if request.method == "POST": + to_save = request.form.to_dict() + if not current_user.role_edit_shelfs() and to_save.get("is_public") == "on": + flash(_(u"Sorry you are not allowed to create a public shelf"), category="error") + return redirect(url_for('web.index')) + is_public = 1 if to_save.get("is_public") == "on" else 0 + if config.config_kobo_sync: + shelf.kobo_sync = True if 
to_save.get("kobo_sync") else False + if shelf.kobo_sync: + ub.session.query(ub.ShelfArchive).filter(ub.ShelfArchive.user_id == current_user.id).filter( + ub.ShelfArchive.uuid == shelf.uuid).delete() + ub.session_commit() + shelf_title = to_save.get("title", "") + if check_shelf_is_unique(shelf_title, is_public, shelf_id): + shelf.name = shelf_title + shelf.is_public = is_public + if not shelf_id: + shelf.user_id = int(current_user.id) + ub.session.add(shelf) + shelf_action = "created" + flash_text = _(u"Shelf %(title)s created", title=shelf_title) + else: + shelf_action = "changed" + flash_text = _(u"Shelf %(title)s changed", title=shelf_title) + try: + ub.session.commit() + log.info(u"Shelf {} {}".format(shelf_title, shelf_action)) + flash(flash_text, category="success") + return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id)) + except (OperationalError, InvalidRequestError) as ex: + ub.session.rollback() + log.error_or_exception(ex) + log.error_or_exception("Settings Database error: {}".format(ex)) + flash(_(u"Database error: %(error)s.", error=ex.orig), category="error") + except Exception as ex: + ub.session.rollback() + log.error_or_exception(ex) + flash(_(u"There was an error"), category="error") + return render_title_template('shelf_edit.html', + shelf=shelf, + title=page_title, + page=page, + kobo_sync_enabled=config.config_kobo_sync, + sync_only_selected_shelves=sync_only_selected_shelves) + + +def check_shelf_is_unique(title, is_public, shelf_id=False): + if shelf_id: + ident = ub.Shelf.id != shelf_id + else: + ident = true() + if is_public == 1: + is_shelf_name_unique = ub.session.query(ub.Shelf) \ + .filter((ub.Shelf.name == title) & (ub.Shelf.is_public == 1)) \ + .filter(ident) \ + .first() is None + + if not is_shelf_name_unique: + log.error("A public shelf with the name '{}' already exists.".format(title)) + flash(_(u"A public shelf with the name '%(title)s' already exists.", title=title), + category="error") + else: + is_shelf_name_unique = ub.session.query(ub.Shelf) \ + .filter((ub.Shelf.name == title) & (ub.Shelf.is_public == 0) & + (ub.Shelf.user_id == int(current_user.id))) \ + .filter(ident) \ + .first() is None + + if not is_shelf_name_unique: + log.error("A private shelf with the name '{}' already exists.".format(title)) + flash(_(u"A private shelf with the name '%(title)s' already exists.", title=title), + category="error") + return is_shelf_name_unique + + +def delete_shelf_helper(cur_shelf): + if not cur_shelf or not check_shelf_edit_permissions(cur_shelf): + return False + shelf_id = cur_shelf.id + ub.session.delete(cur_shelf) + ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete() + ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.user_id)) + ub.session_commit("successfully deleted Shelf {}".format(cur_shelf.name)) + return True + + def change_shelf_order(shelf_id, order): result = calibre_db.session.query(db.Books).outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book)\ diff --git a/cps/static/css/caliBlur.css b/cps/static/css/caliBlur.css index 3a980180..b2b35423 100644 --- a/cps/static/css/caliBlur.css +++ b/cps/static/css/caliBlur.css @@ -5150,7 +5150,7 @@ body.login > div.navbar.navbar-default.navbar-static-top > div > div.navbar-head pointer-events: none } -#DeleteDomain:hover:before, #RestartDialog:hover:before, #ShutdownDialog:hover:before, #StatusDialog:hover:before, #deleteButton, #deleteModal:hover:before, body.mailset > div.container-fluid > div > div.col-sm-10 > 
div.discover td > a:hover { +#DeleteDomain:hover:before, #RestartDialog:hover:before, #ShutdownDialog:hover:before, #StatusDialog:hover:before, #deleteButton, #deleteModal:hover:before, #cancelTaskModal:hover:before, body.mailset > div.container-fluid > div > div.col-sm-10 > div.discover td > a:hover { cursor: pointer } @@ -5237,7 +5237,11 @@ body.admin > div.container-fluid > div > div.col-sm-10 > div.container-fluid > d margin-bottom: 20px } -body.admin:not(.modal-open) .btn-default { +body.admin > div.container-fluid div.scheduled_tasks_details { + margin-bottom: 20px +} + +body.admin .btn-default { margin-bottom: 10px } @@ -5468,7 +5472,7 @@ body.admin.modal-open .navbar { z-index: 0 !important } -#RestartDialog, #ShutdownDialog, #StatusDialog, #deleteModal { +#RestartDialog, #ShutdownDialog, #StatusDialog, #deleteModal, #cancelTaskModal { top: 0; overflow: hidden; padding-top: 70px; @@ -5478,7 +5482,7 @@ body.admin.modal-open .navbar { background: rgba(0, 0, 0, .5) } -#RestartDialog:before, #ShutdownDialog:before, #StatusDialog:before, #deleteModal:before { +#RestartDialog:before, #ShutdownDialog:before, #StatusDialog:before, #deleteModal:before, #cancelTaskModal:before { content: "\E208"; padding-right: 10px; display: block; @@ -5500,18 +5504,18 @@ body.admin.modal-open .navbar { z-index: 99 } -#RestartDialog.in:before, #ShutdownDialog.in:before, #StatusDialog.in:before, #deleteModal.in:before { +#RestartDialog.in:before, #ShutdownDialog.in:before, #StatusDialog.in:before, #deleteModal.in:before, #cancelTaskModal.in:before { -webkit-transform: translate(0, 0); -ms-transform: translate(0, 0); transform: translate(0, 0) } -#RestartDialog > .modal-dialog, #ShutdownDialog > .modal-dialog, #StatusDialog > .modal-dialog, #deleteModal > .modal-dialog { +#RestartDialog > .modal-dialog, #ShutdownDialog > .modal-dialog, #StatusDialog > .modal-dialog, #deleteModal > .modal-dialog, #cancelTaskModal > .modal-dialog { width: 450px; margin: auto } -#RestartDialog > .modal-dialog > .modal-content, #ShutdownDialog > .modal-dialog > .modal-content, #StatusDialog > .modal-dialog > .modal-content, #deleteModal > .modal-dialog > .modal-content { +#RestartDialog > .modal-dialog > .modal-content, #ShutdownDialog > .modal-dialog > .modal-content, #StatusDialog > .modal-dialog > .modal-content, #deleteModal > .modal-dialog > .modal-content, #cancelTaskModal > .modal-dialog > .modal-content { max-height: calc(100% - 90px); -webkit-box-shadow: 0 5px 15px rgba(0, 0, 0, .5); box-shadow: 0 5px 15px rgba(0, 0, 0, .5); @@ -5522,7 +5526,7 @@ body.admin.modal-open .navbar { width: 450px } -#RestartDialog > .modal-dialog > .modal-content > .modal-header, #ShutdownDialog > .modal-dialog > .modal-content > .modal-header, #StatusDialog > .modal-dialog > .modal-content > .modal-header, #deleteModal > .modal-dialog > .modal-content > .modal-header { +#RestartDialog > .modal-dialog > .modal-content > .modal-header, #ShutdownDialog > .modal-dialog > .modal-content > .modal-header, #StatusDialog > .modal-dialog > .modal-content > .modal-header, #deleteModal > .modal-dialog > .modal-content > .modal-header, #cancelTaskModal > .modal-dialog > .modal-content > .modal-header { padding: 15px 20px; border-radius: 3px 3px 0 0; line-height: 1.71428571; @@ -5535,7 +5539,7 @@ body.admin.modal-open .navbar { text-align: left } -#RestartDialog > .modal-dialog > .modal-content > .modal-header:before, #ShutdownDialog > .modal-dialog > .modal-content > .modal-header:before, #StatusDialog > .modal-dialog > .modal-content > 
.modal-header:before, #deleteModal > .modal-dialog > .modal-content > .modal-header:before { +#RestartDialog > .modal-dialog > .modal-content > .modal-header:before, #ShutdownDialog > .modal-dialog > .modal-content > .modal-header:before, #StatusDialog > .modal-dialog > .modal-content > .modal-header:before, #deleteModal > .modal-dialog > .modal-content > .modal-header:before, #cancelTaskModal > .modal-dialog > .modal-content > .modal-header:before { padding-right: 10px; font-size: 18px; color: #999; @@ -5564,6 +5568,11 @@ body.admin.modal-open .navbar { font-family: plex-icons-new, serif } +#cancelTaskModal > .modal-dialog > .modal-content > .modal-header:before { + content: "\EA6D"; + font-family: plex-icons-new, serif +} + #RestartDialog > .modal-dialog > .modal-content > .modal-header:after { content: "Restart Calibre-Web"; display: inline-block; @@ -5588,7 +5597,13 @@ body.admin.modal-open .navbar { font-size: 20px } -#StatusDialog > .modal-dialog > .modal-content > .modal-header > span, #deleteModal > .modal-dialog > .modal-content > .modal-header > span, #loader > center > img, .rating-mobile { +#cancelTaskModal > .modal-dialog > .modal-content > .modal-header:after { + content: "Delete Book"; + display: inline-block; + font-size: 20px +} + +#StatusDialog > .modal-dialog > .modal-content > .modal-header > span, #deleteModal > .modal-dialog > .modal-content > .modal-header > span, #cancelTaskModal > .modal-dialog > .modal-content > .modal-header > span, #loader > center > img, .rating-mobile { display: none } @@ -5602,7 +5617,7 @@ body.admin.modal-open .navbar { text-align: left } -#ShutdownDialog > .modal-dialog > .modal-content > .modal-body, #StatusDialog > .modal-dialog > .modal-content > .modal-body, #deleteModal > .modal-dialog > .modal-content > .modal-body { +#ShutdownDialog > .modal-dialog > .modal-content > .modal-body, #StatusDialog > .modal-dialog > .modal-content > .modal-body, #deleteModal > .modal-dialog > .modal-content > .modal-body, #cancelTaskModal > .modal-dialog > .modal-content > .modal-body { padding: 20px 20px 40px; font-size: 16px; line-height: 1.6em; @@ -5612,7 +5627,7 @@ body.admin.modal-open .navbar { text-align: left } -#RestartDialog > .modal-dialog > .modal-content > .modal-body > p, #ShutdownDialog > .modal-dialog > .modal-content > .modal-body > p, #StatusDialog > .modal-dialog > .modal-content > .modal-body > p, #deleteModal > .modal-dialog > .modal-content > .modal-body > p { +#RestartDialog > .modal-dialog > .modal-content > .modal-body > p, #ShutdownDialog > .modal-dialog > .modal-content > .modal-body > p, #StatusDialog > .modal-dialog > .modal-content > .modal-body > p, #deleteModal > .modal-dialog > .modal-content > .modal-body > p, #cancelTaskModal > .modal-dialog > .modal-content > .modal-body > p { padding: 20px 20px 0 0; font-size: 16px; line-height: 1.6em; @@ -5621,7 +5636,7 @@ body.admin.modal-open .navbar { background: #282828 } -#RestartDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#restart), #ShutdownDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#shutdown), #deleteModal > .modal-dialog > .modal-content > .modal-footer > .btn-default { +#RestartDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#restart), #ShutdownDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#shutdown), #deleteModal > .modal-dialog > .modal-content > .modal-footer > .btn-default, #cancelTaskModal > .modal-dialog > .modal-content > .modal-footer > .btn-default { float: 
right; z-index: 9; position: relative; @@ -5669,6 +5684,18 @@ body.admin.modal-open .navbar { border-radius: 3px } +#cancelTaskModal > .modal-dialog > .modal-content > .modal-footer > .btn-danger { + float: right; + z-index: 9; + position: relative; + margin: 0 0 0 10px; + min-width: 80px; + padding: 10px 18px; + font-size: 16px; + line-height: 1.33; + border-radius: 3px +} + #RestartDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#restart) { margin: 25px 0 0 10px } @@ -5681,7 +5708,11 @@ body.admin.modal-open .navbar { margin: 0 0 0 10px } -#RestartDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#restart):hover, #ShutdownDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#shutdown):hover, #deleteModal > .modal-dialog > .modal-content > .modal-footer > .btn-default:hover { +#cancelTaskModal > .modal-dialog > .modal-content > .modal-footer > .btn-default { + margin: 0 0 0 10px +} + +#RestartDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#restart):hover, #ShutdownDialog > .modal-dialog > .modal-content > .modal-body > .btn-default:not(#shutdown):hover, #deleteModal > .modal-dialog > .modal-content > .modal-footer > .btn-default:hover, #cancelTaskModal > .modal-dialog > .modal-content > .modal-footer > .btn-default:hover { background-color: hsla(0, 0%, 100%, .3) } @@ -7303,11 +7334,11 @@ body.edituser.admin > div.container-fluid > div.row-fluid > div.col-sm-10 > div. background-color: transparent !important } - #RestartDialog > .modal-dialog, #ShutdownDialog > .modal-dialog, #StatusDialog > .modal-dialog, #deleteModal > .modal-dialog { + #RestartDialog > .modal-dialog, #ShutdownDialog > .modal-dialog, #StatusDialog > .modal-dialog, #deleteModal > .modal-dialog, #cancelTaskModal > .modal-dialog { max-width: calc(100vw - 40px) } - #RestartDialog > .modal-dialog > .modal-content, #ShutdownDialog > .modal-dialog > .modal-content, #StatusDialog > .modal-dialog > .modal-content, #deleteModal > .modal-dialog > .modal-content { + #RestartDialog > .modal-dialog > .modal-content, #ShutdownDialog > .modal-dialog > .modal-content, #StatusDialog > .modal-dialog > .modal-content, #deleteModal > .modal-dialog > .modal-content, #cancelTaskModal > .modal-dialog > .modal-content { max-width: calc(100vw - 40px); left: 0 } @@ -7457,7 +7488,7 @@ body.edituser.admin > div.container-fluid > div.row-fluid > div.col-sm-10 > div. padding: 30px 15px } - #RestartDialog.in:before, #ShutdownDialog.in:before, #StatusDialog.in:before, #deleteModal.in:before { + #RestartDialog.in:before, #ShutdownDialog.in:before, #StatusDialog.in:before, #deleteModal.in:before, #cancelTaskModal.in:before { left: auto; right: 34px } diff --git a/cps/static/js/main.js b/cps/static/js/main.js index 91084f06..04d47d6b 100755 --- a/cps/static/js/main.js +++ b/cps/static/js/main.js @@ -474,6 +474,17 @@ $(function() { } }); }); + $("#admin_refresh_cover_cache").click(function() { + confirmDialog("admin_refresh_cover_cache", "GeneralChangeModal", 0, function () { + $.ajax({ + method:"post", + contentType: "application/json; charset=utf-8", + dataType: "json", + url: getPath() + "/ajax/updateThumbnails", + }); + }); + }); + $("#restart_database").click(function() { $("#DialogHeader").addClass("hidden"); $("#DialogFinished").addClass("hidden"); diff --git a/cps/static/js/table.js b/cps/static/js/table.js index 8af7592f..548ca8c4 100644 --- a/cps/static/js/table.js +++ b/cps/static/js/table.js @@ -15,7 +15,7 @@ * along with this program. If not, see . 
*/ -/* exported TableActions, RestrictionActions, EbookActions, responseHandler */ +/* exported TableActions, RestrictionActions, EbookActions, TaskActions, responseHandler */ /* global getPath, confirmDialog */ var selections = []; @@ -42,6 +42,24 @@ $(function() { }, 1000); } + $("#cancel_task_confirm").click(function() { + //get data-id attribute of the clicked element + var taskId = $(this).data("task-id"); + $.ajax({ + method: "post", + contentType: "application/json; charset=utf-8", + dataType: "json", + url: window.location.pathname + "/../ajax/canceltask", + data: JSON.stringify({"task_id": taskId}), + }); + }); + //triggered when modal is about to be shown + $("#cancelTaskModal").on("show.bs.modal", function(e) { + //get data-id attribute of the clicked element and store in button + var taskId = $(e.relatedTarget).data("task-id"); + $(e.currentTarget).find("#cancel_task_confirm").data("task-id", taskId); + }); + $("#books-table").on("check.bs.table check-all.bs.table uncheck.bs.table uncheck-all.bs.table", function (e, rowsAfter, rowsBefore) { var rows = rowsAfter; @@ -532,7 +550,7 @@ $(function() { $("#user-table").on("click-cell.bs.table", function (field, value, row, $element) { if (value === "denied_column_value") { - ConfirmDialog("btndeluser", "GeneralDeleteModal", $element.id, user_handle); + confirmDialog("btndeluser", "GeneralDeleteModal", $element.id, user_handle); } }); @@ -582,6 +600,7 @@ function handle_header_buttons () { $(".header_select").removeAttr("disabled"); } } + /* Function for deleting domain restrictions */ function TableActions (value, row) { return [ @@ -619,6 +638,19 @@ function UserActions (value, row) { ].join(""); } +/* Function for cancelling tasks */ +function TaskActions (value, row) { + var cancellableStats = [0, 1, 2]; + if (row.task_id && row.is_cancellable && cancellableStats.includes(row.stat)) { + return [ + "
", + "", + "
" + ].join(""); + } + return ''; +} + /* Function for keeping checked rows */ function responseHandler(res) { $.each(res.rows, function (i, row) { diff --git a/cps/tasks/convert.py b/cps/tasks/convert.py index 98cd7b48..3062850d 100644 --- a/cps/tasks/convert.py +++ b/cps/tasks/convert.py @@ -18,12 +18,12 @@ import os import re - from glob import glob from shutil import copyfile from markupsafe import escape from sqlalchemy.exc import SQLAlchemyError +from flask_babel import lazy_gettext as N_ from cps.services.worker import CalibreTask from cps import db @@ -41,10 +41,10 @@ log = logger.create() class TaskConvert(CalibreTask): - def __init__(self, file_path, bookid, taskMessage, settings, kindle_mail, user=None): - super(TaskConvert, self).__init__(taskMessage) + def __init__(self, file_path, book_id, task_message, settings, kindle_mail, user=None): + super(TaskConvert, self).__init__(task_message) self.file_path = file_path - self.bookid = bookid + self.book_id = book_id self.title = "" self.settings = settings self.kindle_mail = kindle_mail @@ -55,10 +55,10 @@ class TaskConvert(CalibreTask): def run(self, worker_thread): self.worker_thread = worker_thread if config.config_use_google_drive: - worker_db = db.CalibreDB(expire_on_commit=False) - cur_book = worker_db.get_book(self.bookid) + worker_db = db.CalibreDB(expire_on_commit=False, init=True) + cur_book = worker_db.get_book(self.book_id) self.title = cur_book.title - data = worker_db.get_book_format(self.bookid, self.settings['old_book_format']) + data = worker_db.get_book_format(self.book_id, self.settings['old_book_format']) df = gdriveutils.getFileFromEbooksFolder(cur_book.path, data.name + "." + self.settings['old_book_format'].lower()) if df: @@ -89,7 +89,7 @@ class TaskConvert(CalibreTask): # if we're sending to kindle after converting, create a one-off task and run it immediately # todo: figure out how to incorporate this into the progress try: - EmailText = _(u"%(book)s send to Kindle", book=escape(self.title)) + EmailText = N_(u"%(book)s send to Kindle", book=escape(self.title)) worker_thread.add(self.user, TaskEmail(self.settings['subject'], self.results["path"], filename, @@ -104,9 +104,9 @@ class TaskConvert(CalibreTask): def _convert_ebook_format(self): error_message = None - local_db = db.CalibreDB(expire_on_commit=False) + local_db = db.CalibreDB(expire_on_commit=False, init=True) file_path = self.file_path - book_id = self.bookid + book_id = self.book_id format_old_ext = u'.' + self.settings['old_book_format'].lower() format_new_ext = u'.' 
+ self.settings['new_book_format'].lower() @@ -114,7 +114,7 @@ class TaskConvert(CalibreTask): # if it does - mark the conversion task as complete and return a success # this will allow send to kindle workflow to continue to work if os.path.isfile(file_path + format_new_ext) or\ - local_db.get_book_format(self.bookid, self.settings['new_book_format']): + local_db.get_book_format(self.book_id, self.settings['new_book_format']): log.info("Book id %d already converted to %s", book_id, format_new_ext) cur_book = local_db.get_book(book_id) self.title = cur_book.title @@ -133,7 +133,7 @@ class TaskConvert(CalibreTask): local_db.session.rollback() log.error("Database error: %s", e) local_db.session.close() - self._handleError(error_message) + self._handleError(N_("Database error: %(error)s.", error=e)) return self._handleSuccess() local_db.session.close() @@ -150,8 +150,7 @@ class TaskConvert(CalibreTask): else: # check if calibre converter-executable is existing if not os.path.exists(config.config_converterpath): - # ToDo Text is not translated - self._handleError(_(u"Calibre ebook-convert %(tool)s not found", tool=config.config_converterpath)) + self._handleError(N_(u"Calibre ebook-convert %(tool)s not found", tool=config.config_converterpath)) return check, error_message = self._convert_calibre(file_path, format_old_ext, format_new_ext) @@ -184,11 +183,11 @@ class TaskConvert(CalibreTask): self._handleSuccess() return os.path.basename(file_path + format_new_ext) else: - error_message = _('%(format)s format not found on disk', format=format_new_ext.upper()) + error_message = N_('%(format)s format not found on disk', format=format_new_ext.upper()) local_db.session.close() log.info("ebook converter failed with error while converting book") if not error_message: - error_message = _('Ebook converter failed with unknown error') + error_message = N_('Ebook converter failed with unknown error') self._handleError(error_message) return @@ -198,7 +197,7 @@ class TaskConvert(CalibreTask): try: p = process_open(command, quotes) except OSError as e: - return 1, _(u"Kepubify-converter failed: %(error)s", error=e) + return 1, N_(u"Kepubify-converter failed: %(error)s", error=e) self.progress = 0.01 while True: nextline = p.stdout.readlines() @@ -219,7 +218,7 @@ class TaskConvert(CalibreTask): copyfile(converted_file[0], (file_path + format_new_ext)) os.unlink(converted_file[0]) else: - return 1, _(u"Converted file not found or more than one file in folder %(folder)s", + return 1, N_(u"Converted file not found or more than one file in folder %(folder)s", folder=os.path.dirname(file_path)) return check, None @@ -243,7 +242,7 @@ class TaskConvert(CalibreTask): p = process_open(command, quotes, newlines=False) except OSError as e: - return 1, _(u"Ebook-converter failed: %(error)s", error=e) + return 1, N_(u"Ebook-converter failed: %(error)s", error=e) while p.poll() is None: nextline = p.stdout.readline() @@ -266,12 +265,16 @@ class TaskConvert(CalibreTask): ele = ele.decode('utf-8', errors="ignore").strip('\n') log.debug(ele) if not ele.startswith('Traceback') and not ele.startswith(' File'): - error_message = _("Calibre failed with error: %(error)s", error=ele) + error_message = N_("Calibre failed with error: %(error)s", error=ele) return check, error_message @property def name(self): - return "Convert" + return N_("Convert") def __str__(self): - return "Convert {} {}".format(self.bookid, self.kindle_mail) + return "Convert {} {}".format(self.book_id, self.kindle_mail) + + @property + def is_cancellable(self): 
+ return False diff --git a/cps/tasks/database.py b/cps/tasks/database.py new file mode 100644 index 00000000..afc4db2c --- /dev/null +++ b/cps/tasks/database.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- + +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2020 mmonkey +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from urllib.request import urlopen + +from flask_babel import lazy_gettext as N_ + +from cps import config, logger +from cps.services.worker import CalibreTask + + +class TaskReconnectDatabase(CalibreTask): + def __init__(self, task_message=N_('Reconnecting Calibre database')): + super(TaskReconnectDatabase, self).__init__(task_message) + self.log = logger.create() + self.listen_address = config.get_config_ipaddress() + self.listen_port = config.config_port + + + def run(self, worker_thread): + address = self.listen_address if self.listen_address else 'localhost' + port = self.listen_port if self.listen_port else 8083 + + try: + urlopen('http://' + address + ':' + str(port) + '/reconnect') + self._handleSuccess() + except Exception as ex: + self._handleError('Unable to reconnect Calibre database: ' + str(ex)) + + @property + def name(self): + return "Reconnect Database" + + @property + def is_cancellable(self): + return False diff --git a/cps/tasks/mail.py b/cps/tasks/mail.py index 4954c2d6..be240c79 100755 --- a/cps/tasks/mail.py +++ b/cps/tasks/mail.py @@ -24,12 +24,10 @@ import mimetypes from io import StringIO from email.message import EmailMessage -from email.utils import parseaddr - - -from email import encoders -from email.utils import formatdate, make_msgid +from email.utils import formatdate, parseaddr from email.generator import Generator +from flask_babel import lazy_gettext as N_ +from email.utils import formatdate from cps.services.worker import CalibreTask from cps.services import gmail @@ -111,13 +109,13 @@ class EmailSSL(EmailBase, smtplib.SMTP_SSL): class TaskEmail(CalibreTask): - def __init__(self, subject, filepath, attachment, settings, recipient, taskMessage, text, internal=False): - super(TaskEmail, self).__init__(taskMessage) + def __init__(self, subject, filepath, attachment, settings, recipient, task_message, text, internal=False): + super(TaskEmail, self).__init__(task_message) self.subject = subject self.attachment = attachment self.settings = settings self.filepath = filepath - self.recipent = recipient + self.recipient = recipient self.text = text self.asyncSMTP = None self.results = dict() @@ -139,7 +137,7 @@ class TaskEmail(CalibreTask): message = EmailMessage() # message = MIMEMultipart() message['From'] = self.settings["mail_from"] - message['To'] = self.recipent + message['To'] = self.recipient message['Subject'] = self.subject message['Date'] = formatdate(localtime=True) message['Message-Id'] = "{}@{}".format(uuid.uuid4(), self.get_msgid_domain()) # f"<{uuid.uuid4()}@{get_msgid_domain(from_)}>" # make_msgid('calibre-web') @@ -212,7 
+210,7 @@ class TaskEmail(CalibreTask): gen = Generator(fp, mangle_from_=False) gen.flatten(msg) - self.asyncSMTP.sendmail(self.settings["mail_from"], self.recipent, fp.getvalue()) + self.asyncSMTP.sendmail(self.settings["mail_from"], self.recipient, fp.getvalue()) self.asyncSMTP.quit() self._handleSuccess() log.debug("E-mail send successfully") @@ -264,7 +262,11 @@ class TaskEmail(CalibreTask): @property def name(self): - return "E-mail" + return N_("E-mail") + + @property + def is_cancellable(self): + return False def __str__(self): return "E-mail {}, {}".format(self.name, self.subject) diff --git a/cps/tasks/thumbnail.py b/cps/tasks/thumbnail.py new file mode 100644 index 00000000..ace9cecc --- /dev/null +++ b/cps/tasks/thumbnail.py @@ -0,0 +1,514 @@ +# -*- coding: utf-8 -*- + +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2020 monkey +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import os +from urllib.request import urlopen + +from .. import constants +from cps import config, db, fs, gdriveutils, logger, ub +from cps.services.worker import CalibreTask, STAT_CANCELLED, STAT_ENDED +from datetime import datetime +from sqlalchemy import func, text, or_ +from flask_babel import lazy_gettext as N_ + +try: + from wand.image import Image + use_IM = True +except (ImportError, RuntimeError) as e: + use_IM = False + + +def get_resize_height(resolution): + return int(225 * resolution) + + +def get_resize_width(resolution, original_width, original_height): + height = get_resize_height(resolution) + percent = (height / float(original_height)) + width = int((float(original_width) * float(percent))) + return width if width % 2 == 0 else width + 1 + + +def get_best_fit(width, height, image_width, image_height): + resize_width = int(width / 2.0) + resize_height = int(height / 2.0) + aspect_ratio = image_width / image_height + + # If this image's aspect ratio is different from the first image, then resize this image + # to fill the width and height of the first image + if aspect_ratio < width / height: + resize_width = int(width / 2.0) + resize_height = image_height * int(width / 2.0) / image_width + + elif aspect_ratio > width / height: + resize_width = image_width * int(height / 2.0) / image_height + resize_height = int(height / 2.0) + + return {'width': resize_width, 'height': resize_height} + + +class TaskGenerateCoverThumbnails(CalibreTask): + def __init__(self, book_id=-1, task_message=''): + super(TaskGenerateCoverThumbnails, self).__init__(task_message) + self.log = logger.create() + self.book_id = book_id + self.app_db_session = ub.get_new_session_instance() + # self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True) + self.cache = fs.FileSystem() + self.resolutions = [ + constants.COVER_THUMBNAIL_SMALL, + constants.COVER_THUMBNAIL_MEDIUM + ] + + def run(self, worker_thread): + if use_IM and self.stat != STAT_CANCELLED and self.stat != STAT_ENDED: + self.message = 
'Scanning Books' + books_with_covers = self.get_books_with_covers(self.book_id) + count = len(books_with_covers) + + total_generated = 0 + for i, book in enumerate(books_with_covers): + + # Generate new thumbnails for missing covers + generated = self.create_book_cover_thumbnails(book) + + # Increment the progress + self.progress = (1.0 / count) * i + + if generated > 0: + total_generated += generated + self.message = N_(u'Generated %(count)s cover thumbnails', count=total_generated) + + # Check if job has been cancelled or ended + if self.stat == STAT_CANCELLED: + self.log.info(f'GenerateCoverThumbnails task has been cancelled.') + return + + if self.stat == STAT_ENDED: + self.log.info(f'GenerateCoverThumbnails task has been ended.') + return + + if total_generated == 0: + self.self_cleanup = True + + self._handleSuccess() + self.app_db_session.remove() + + def get_books_with_covers(self, book_id=-1): + filter_exp = (db.Books.id == book_id) if book_id != -1 else True + calibre_db = db.CalibreDB(expire_on_commit=False, init=True) + books_cover = calibre_db.session.query(db.Books).filter(db.Books.has_cover == 1).filter(filter_exp).all() + calibre_db.session.close() + return books_cover + + def get_book_cover_thumbnails(self, book_id): + return self.app_db_session \ + .query(ub.Thumbnail) \ + .filter(ub.Thumbnail.type == constants.THUMBNAIL_TYPE_COVER) \ + .filter(ub.Thumbnail.entity_id == book_id) \ + .filter(or_(ub.Thumbnail.expiration.is_(None), ub.Thumbnail.expiration > datetime.utcnow())) \ + .all() + + def create_book_cover_thumbnails(self, book): + generated = 0 + book_cover_thumbnails = self.get_book_cover_thumbnails(book.id) + + # Generate new thumbnails for missing covers + resolutions = list(map(lambda t: t.resolution, book_cover_thumbnails)) + missing_resolutions = list(set(self.resolutions).difference(resolutions)) + for resolution in missing_resolutions: + generated += 1 + self.create_book_cover_single_thumbnail(book, resolution) + + # Replace outdated or missing thumbnails + for thumbnail in book_cover_thumbnails: + if book.last_modified > thumbnail.generated_at: + generated += 1 + self.update_book_cover_thumbnail(book, thumbnail) + + elif not self.cache.get_cache_file_exists(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS): + generated += 1 + self.update_book_cover_thumbnail(book, thumbnail) + return generated + + def create_book_cover_single_thumbnail(self, book, resolution): + thumbnail = ub.Thumbnail() + thumbnail.type = constants.THUMBNAIL_TYPE_COVER + thumbnail.entity_id = book.id + thumbnail.format = 'jpeg' + thumbnail.resolution = resolution + + self.app_db_session.add(thumbnail) + try: + self.app_db_session.commit() + self.generate_book_thumbnail(book, thumbnail) + except Exception as ex: + self.log.debug('Error creating book thumbnail: ' + str(ex)) + self._handleError('Error creating book thumbnail: ' + str(ex)) + self.app_db_session.rollback() + + def update_book_cover_thumbnail(self, book, thumbnail): + thumbnail.generated_at = datetime.utcnow() + + try: + self.app_db_session.commit() + self.cache.delete_cache_file(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS) + self.generate_book_thumbnail(book, thumbnail) + except Exception as ex: + self.log.debug('Error updating book thumbnail: ' + str(ex)) + self._handleError('Error updating book thumbnail: ' + str(ex)) + self.app_db_session.rollback() + + def generate_book_thumbnail(self, book, thumbnail): + if book and thumbnail: + if config.config_use_google_drive: + if not gdriveutils.is_gdrive_ready(): + raise 
Exception('Google Drive is configured but not ready') + + web_content_link = gdriveutils.get_cover_via_gdrive(book.path) + if not web_content_link: + raise Exception('Google Drive cover url not found') + + stream = None + try: + stream = urlopen(web_content_link) + with Image(file=stream) as img: + height = get_resize_height(thumbnail.resolution) + if img.height > height: + width = get_resize_width(thumbnail.resolution, img.width, img.height) + img.resize(width=width, height=height, filter='lanczos') + img.format = thumbnail.format + filename = self.cache.get_cache_file_path(thumbnail.filename, + constants.CACHE_TYPE_THUMBNAILS) + img.save(filename=filename) + except Exception as ex: + # Bubble exception to calling function + self.log.debug('Error generating thumbnail file: ' + str(ex)) + raise ex + finally: + if stream is not None: + stream.close() + else: + book_cover_filepath = os.path.join(config.config_calibre_dir, book.path, 'cover.jpg') + if not os.path.isfile(book_cover_filepath): + raise Exception('Book cover file not found') + + with Image(filename=book_cover_filepath) as img: + height = get_resize_height(thumbnail.resolution) + if img.height > height: + width = get_resize_width(thumbnail.resolution, img.width, img.height) + img.resize(width=width, height=height, filter='lanczos') + img.format = thumbnail.format + filename = self.cache.get_cache_file_path(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS) + img.save(filename=filename) + + @property + def name(self): + return N_('Cover Thumbnails') + + def __str__(self): + if self.book_id > 0: + return "Add Cover Thumbnails for Book {}".format(self.book_id) + else: + return "Generate Cover Thumbnails" + + @property + def is_cancellable(self): + return True + + +class TaskGenerateSeriesThumbnails(CalibreTask): + def __init__(self, task_message=''): + super(TaskGenerateSeriesThumbnails, self).__init__(task_message) + self.log = logger.create() + self.app_db_session = ub.get_new_session_instance() + self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True) + self.cache = fs.FileSystem() + self.resolutions = [ + constants.COVER_THUMBNAIL_SMALL, + constants.COVER_THUMBNAIL_MEDIUM, + ] + + def run(self, worker_thread): + if self.calibre_db.session and use_IM and self.stat != STAT_CANCELLED and self.stat != STAT_ENDED: + self.message = 'Scanning Series' + all_series = self.get_series_with_four_plus_books() + count = len(all_series) + + total_generated = 0 + for i, series in enumerate(all_series): + generated = 0 + series_thumbnails = self.get_series_thumbnails(series.id) + series_books = self.get_series_books(series.id) + + # Generate new thumbnails for missing covers + resolutions = list(map(lambda t: t.resolution, series_thumbnails)) + missing_resolutions = list(set(self.resolutions).difference(resolutions)) + for resolution in missing_resolutions: + generated += 1 + self.create_series_thumbnail(series, series_books, resolution) + + # Replace outdated or missing thumbnails + for thumbnail in series_thumbnails: + if any(book.last_modified > thumbnail.generated_at for book in series_books): + generated += 1 + self.update_series_thumbnail(series_books, thumbnail) + + elif not self.cache.get_cache_file_exists(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS): + generated += 1 + self.update_series_thumbnail(series_books, thumbnail) + + # Increment the progress + self.progress = (1.0 / count) * i + + if generated > 0: + total_generated += generated + self.message = N_('Generated {0} series thumbnails').format(total_generated) + 
+ # Check if job has been cancelled or ended + if self.stat == STAT_CANCELLED: + self.log.info(f'GenerateSeriesThumbnails task has been cancelled.') + return + + if self.stat == STAT_ENDED: + self.log.info(f'GenerateSeriesThumbnails task has been ended.') + return + + if total_generated == 0: + self.self_cleanup = True + + self._handleSuccess() + self.app_db_session.remove() + + def get_series_with_four_plus_books(self): + return self.calibre_db.session \ + .query(db.Series) \ + .join(db.books_series_link) \ + .join(db.Books) \ + .filter(db.Books.has_cover == 1) \ + .group_by(text('books_series_link.series')) \ + .having(func.count('book_series_link') > 3) \ + .all() + + def get_series_books(self, series_id): + return self.calibre_db.session \ + .query(db.Books) \ + .join(db.books_series_link) \ + .join(db.Series) \ + .filter(db.Books.has_cover == 1) \ + .filter(db.Series.id == series_id) \ + .all() + + def get_series_thumbnails(self, series_id): + return self.app_db_session \ + .query(ub.Thumbnail) \ + .filter(ub.Thumbnail.type == constants.THUMBNAIL_TYPE_SERIES) \ + .filter(ub.Thumbnail.entity_id == series_id) \ + .filter(or_(ub.Thumbnail.expiration.is_(None), ub.Thumbnail.expiration > datetime.utcnow())) \ + .all() + + def create_series_thumbnail(self, series, series_books, resolution): + thumbnail = ub.Thumbnail() + thumbnail.type = constants.THUMBNAIL_TYPE_SERIES + thumbnail.entity_id = series.id + thumbnail.format = 'jpeg' + thumbnail.resolution = resolution + + self.app_db_session.add(thumbnail) + try: + self.app_db_session.commit() + self.generate_series_thumbnail(series_books, thumbnail) + except Exception as ex: + self.log.debug('Error creating book thumbnail: ' + str(ex)) + self._handleError('Error creating book thumbnail: ' + str(ex)) + self.app_db_session.rollback() + + def update_series_thumbnail(self, series_books, thumbnail): + thumbnail.generated_at = datetime.utcnow() + + try: + self.app_db_session.commit() + self.cache.delete_cache_file(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS) + self.generate_series_thumbnail(series_books, thumbnail) + except Exception as ex: + self.log.debug('Error updating book thumbnail: ' + str(ex)) + self._handleError('Error updating book thumbnail: ' + str(ex)) + self.app_db_session.rollback() + + def generate_series_thumbnail(self, series_books, thumbnail): + # Get the last four books in the series based on series_index + books = sorted(series_books, key=lambda b: float(b.series_index), reverse=True)[:4] + + top = 0 + left = 0 + width = 0 + height = 0 + with Image() as canvas: + for book in books: + if config.config_use_google_drive: + if not gdriveutils.is_gdrive_ready(): + raise Exception('Google Drive is configured but not ready') + + web_content_link = gdriveutils.get_cover_via_gdrive(book.path) + if not web_content_link: + raise Exception('Google Drive cover url not found') + + stream = None + try: + stream = urlopen(web_content_link) + with Image(file=stream) as img: + # Use the first image in this set to determine the width and height to scale the + # other images in this set + if width == 0 or height == 0: + width = get_resize_width(thumbnail.resolution, img.width, img.height) + height = get_resize_height(thumbnail.resolution) + canvas.blank(width, height) + + dimensions = get_best_fit(width, height, img.width, img.height) + + # resize and crop the image + img.resize(width=int(dimensions['width']), height=int(dimensions['height']), + filter='lanczos') + img.crop(width=int(width / 2.0), height=int(height / 2.0), 
gravity='center') + + # add the image to the canvas + canvas.composite(img, left, top) + + except Exception as ex: + self.log.debug('Error generating thumbnail file: ' + str(ex)) + raise ex + finally: + if stream is not None: + stream.close() + + book_cover_filepath = os.path.join(config.config_calibre_dir, book.path, 'cover.jpg') + if not os.path.isfile(book_cover_filepath): + raise Exception('Book cover file not found') + + with Image(filename=book_cover_filepath) as img: + # Use the first image in this set to determine the width and height to scale the + # other images in this set + if width == 0 or height == 0: + width = get_resize_width(thumbnail.resolution, img.width, img.height) + height = get_resize_height(thumbnail.resolution) + canvas.blank(width, height) + + dimensions = get_best_fit(width, height, img.width, img.height) + + # resize and crop the image + img.resize(width=int(dimensions['width']), height=int(dimensions['height']), filter='lanczos') + img.crop(width=int(width / 2.0), height=int(height / 2.0), gravity='center') + + # add the image to the canvas + canvas.composite(img, left, top) + + # set the coordinates for the next iteration + if left == 0 and top == 0: + left = int(width / 2.0) + elif left == int(width / 2.0) and top == 0: + left = 0 + top = int(height / 2.0) + else: + left = int(width / 2.0) + + canvas.format = thumbnail.format + filename = self.cache.get_cache_file_path(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS) + canvas.save(filename=filename) + + @property + def name(self): + return N_('Cover Thumbnails') + + def __str__(self): + return "GenerateSeriesThumbnails" + + @property + def is_cancellable(self): + return True + + +class TaskClearCoverThumbnailCache(CalibreTask): + def __init__(self, book_id, task_message=N_('Clearing cover thumbnail cache')): + super(TaskClearCoverThumbnailCache, self).__init__(task_message) + self.log = logger.create() + self.book_id = book_id + self.app_db_session = ub.get_new_session_instance() + self.cache = fs.FileSystem() + + def run(self, worker_thread): + if self.app_db_session: + if self.book_id == 0: # delete superfluous thumbnails + calibre_db = db.CalibreDB(expire_on_commit=False, init=True) + thumbnails = (calibre_db.session.query(ub.Thumbnail) + .join(db.Books, ub.Thumbnail.entity_id == db.Books.id, isouter=True) + .filter(db.Books.id == None) + .all()) + calibre_db.session.close() + elif self.book_id > 0: # make sure single book is selected + thumbnails = self.get_thumbnails_for_book(self.book_id) + if self.book_id < 0: + self.delete_all_thumbnails() + else: + for thumbnail in thumbnails: + self.delete_thumbnail(thumbnail) + self._handleSuccess() + self.app_db_session.remove() + + def get_thumbnails_for_book(self, book_id): + return self.app_db_session \ + .query(ub.Thumbnail) \ + .filter(ub.Thumbnail.type == constants.THUMBNAIL_TYPE_COVER) \ + .filter(ub.Thumbnail.entity_id == book_id) \ + .all() + + def delete_thumbnail(self, thumbnail): + try: + self.cache.delete_cache_file(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS) + self.app_db_session \ + .query(ub.Thumbnail) \ + .filter(ub.Thumbnail.type == constants.THUMBNAIL_TYPE_COVER) \ + .filter(ub.Thumbnail.entity_id == thumbnail.entity_id) \ + .delete() + self.app_db_session.commit() + except Exception as ex: + self.log.debug('Error deleting book thumbnail: ' + str(ex)) + self._handleError('Error deleting book thumbnail: ' + str(ex)) + + def delete_all_thumbnails(self): + try: + self.app_db_session.query(ub.Thumbnail).filter(ub.Thumbnail.type == 
constants.THUMBNAIL_TYPE_COVER).delete() + self.app_db_session.commit() + self.cache.delete_cache_dir(constants.CACHE_TYPE_THUMBNAILS) + except Exception as ex: + self.log.debug('Error deleting thumbnail directory: ' + str(ex)) + self._handleError('Error deleting thumbnail directory: ' + str(ex)) + + @property + def name(self): + return N_('Cover Thumbnails') + + # needed for logging + def __str__(self): + if self.book_id > 0: + return "Replace/Delete Cover Thumbnails for book " + str(self.book_id) + else: + return "Delete Thumbnail cache directory" + + @property + def is_cancellable(self): + return False diff --git a/cps/tasks/upload.py b/cps/tasks/upload.py index e0bb0094..bc8ba1e0 100644 --- a/cps/tasks/upload.py +++ b/cps/tasks/upload.py @@ -17,11 +17,14 @@ # along with this program. If not, see . from datetime import datetime + +from flask_babel import lazy_gettext as N_ + from cps.services.worker import CalibreTask, STAT_FINISH_SUCCESS class TaskUpload(CalibreTask): - def __init__(self, taskMessage, book_title): - super(TaskUpload, self).__init__(taskMessage) + def __init__(self, task_message, book_title): + super(TaskUpload, self).__init__(task_message) self.start_time = self.end_time = datetime.now() self.stat = STAT_FINISH_SUCCESS self.progress = 1 @@ -32,7 +35,11 @@ class TaskUpload(CalibreTask): @property def name(self): - return "Upload" + return N_("Upload") def __str__(self): return "Upload {}".format(self.book_title) + + @property + def is_cancellable(self): + return False diff --git a/cps/tasks_status.py b/cps/tasks_status.py new file mode 100644 index 00000000..e5f91975 --- /dev/null +++ b/cps/tasks_status.py @@ -0,0 +1,106 @@ +# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) +# Copyright (C) 2022 OzzieIsaacs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from markupsafe import escape + +from flask import Blueprint, jsonify +from flask_login import login_required, current_user +from flask_babel import gettext as _ +from flask_babel import format_datetime +from babel.units import format_unit + +from . 
import logger +from .render_template import render_title_template +from .services.worker import WorkerThread, STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS, STAT_ENDED, \ + STAT_CANCELLED + +tasks = Blueprint('tasks', __name__) + +log = logger.create() + + +@tasks.route("/ajax/emailstat") +@login_required +def get_email_status_json(): + tasks = WorkerThread.get_instance().tasks + return jsonify(render_task_status(tasks)) + + +@tasks.route("/tasks") +@login_required +def get_tasks_status(): + # if current user admin, show all email, otherwise only own emails + tasks = WorkerThread.get_instance().tasks + answer = render_task_status(tasks) + return render_title_template('tasks.html', entries=answer, title=_(u"Tasks"), page="tasks") + + +# helper function to apply localize status information in tasklist entries +def render_task_status(tasklist): + rendered_tasklist = list() + for __, user, __, task, __ in tasklist: + if user == current_user.name or current_user.role_admin(): + ret = {} + if task.start_time: + ret['starttime'] = format_datetime(task.start_time, format='short') + ret['runtime'] = format_runtime(task.runtime) + + # localize the task status + if isinstance(task.stat, int): + if task.stat == STAT_WAITING: + ret['status'] = _(u'Waiting') + elif task.stat == STAT_FAIL: + ret['status'] = _(u'Failed') + elif task.stat == STAT_STARTED: + ret['status'] = _(u'Started') + elif task.stat == STAT_FINISH_SUCCESS: + ret['status'] = _(u'Finished') + elif task.stat == STAT_ENDED: + ret['status'] = _(u'Ended') + elif task.stat == STAT_CANCELLED: + ret['status'] = _(u'Cancelled') + else: + ret['status'] = _(u'Unknown Status') + + ret['taskMessage'] = "{}: {}".format(task.name, task.message) if task.message else task.name + ret['progress'] = "{} %".format(int(task.progress * 100)) + ret['user'] = escape(user) # prevent xss + + # Hidden fields + ret['task_id'] = task.id + ret['stat'] = task.stat + ret['is_cancellable'] = task.is_cancellable + + rendered_tasklist.append(ret) + + return rendered_tasklist + + +# helper function for displaying the runtime of tasks +def format_runtime(runtime): + ret_val = "" + if runtime.days: + ret_val = format_unit(runtime.days, 'duration-day', length="long") + ', ' + minutes, seconds = divmod(runtime.seconds, 60) + hours, minutes = divmod(minutes, 60) + # ToDo: locale.number_symbols._data['timeSeparator'] -> localize time separator ? + if hours: + ret_val += '{:d}:{:02d}:{:02d}s'.format(hours, minutes, seconds) + elif minutes: + ret_val += '{:2d}:{:02d}s'.format(minutes, seconds) + else: + ret_val += '{:2d}s'.format(seconds) + return ret_val diff --git a/cps/templates/admin.html b/cps/templates/admin.html index c8c6eb82..f1c3749a 100644 --- a/cps/templates/admin.html +++ b/cps/templates/admin.html @@ -161,21 +161,56 @@ {{_('Edit UI Configuration')}} +{% if feature_support['scheduler'] %} +
+
+

{{_('Scheduled Tasks')}}

+
+
+
{{_('Time at which tasks start to run')}}
+
{{schedule_time}}
+
+
+
{{_('Maximum tasks duration')}}
+
{{schedule_duration}}
+
+
+
{{_('Generate book cover thumbnails')}}
+
{{ display_bool_setting(config.schedule_generate_book_covers) }}
+
+ +
+
{{_('Reconnect to Calibre Library')}}
+
{{ display_bool_setting(config.schedule_reconnect) }}
+
-
-

{{_('Administration')}}

- {{_('Download Debug Package')}} - {{_('View Logs')}} +
+ {{_('Edit Scheduled Tasks Settings')}} + {% if config.schedule_generate_book_covers %} + {{_('Refresh Thumbnail Cover Cache')}} + {% endif %}
-
-
{{_('Reconnect Calibre Database')}}
-
{{_('Restart')}}
-
{{_('Shutdown')}}
+
+{% endif %} +
+

{{_('Administration')}}

+ {{_('Download Debug Package')}} + {{_('View Logs')}} +
+
+
{{_('Reconnect Calibre Database')}}
+
+
+
{{_('Restart')}}
+
{{_('Shutdown')}}
-

{{_('Update')}}

+

{{_('Version Information')}}

@@ -252,3 +287,6 @@ {% endblock %} +{% block modal %} +{{ change_confirm_modal() }} +{% endblock %} diff --git a/cps/templates/author.html b/cps/templates/author.html index f7aeb3e1..b991e959 100644 --- a/cps/templates/author.html +++ b/cps/templates/author.html @@ -36,7 +36,7 @@
- + {{ image.book_cover(entry.Books, alt=author.name|safe) }} {% if entry[2] == True %}{% endif %} diff --git a/cps/templates/book_edit.html b/cps/templates/book_edit.html index 98866fd6..3ce1dbfa 100644 --- a/cps/templates/book_edit.html +++ b/cps/templates/book_edit.html @@ -3,7 +3,8 @@ {% if book %}
- {{ book.title }} + +
{% if g.user.role_delete_books() %}
diff --git a/cps/templates/detail.html b/cps/templates/detail.html index 3ced5854..3af205fb 100644 --- a/cps/templates/detail.html +++ b/cps/templates/detail.html @@ -4,7 +4,8 @@
- {{ entry.title }} + +
@@ -70,9 +71,9 @@ {% endif %}
{% endif %} - {% if entry.audioentries|length > 0 and g.user.role_viewer() %} + {% if entry.audio_entries|length > 0 and g.user.role_viewer() %}
- {% if entry.audioentries|length > 1 %} + {% if entry.audio_entries|length > 1 %}
{% endif %} diff --git a/cps/templates/fragment.html b/cps/templates/fragment.html index 1421ea6a..f2e94fb2 100644 --- a/cps/templates/fragment.html +++ b/cps/templates/fragment.html @@ -1,3 +1,4 @@ +{% import 'image.html' as image %}
{% block body %}{% endblock %}
diff --git a/cps/templates/grid.html b/cps/templates/grid.html index b9d40961..638b7245 100644 --- a/cps/templates/grid.html +++ b/cps/templates/grid.html @@ -1,3 +1,4 @@ +{% import 'image.html' as image %} {% extends "layout.html" %} {% block body %}

{{_(title)}}

@@ -27,7 +28,7 @@
- {{ entry[0].series[0].name }} + {{ image.series(entry[0].series[0], alt=entry[0].series[0].name|shortentitle) }} {{entry.count}} diff --git a/cps/templates/image.html b/cps/templates/image.html new file mode 100644 index 00000000..0bdba9a5 --- /dev/null +++ b/cps/templates/image.html @@ -0,0 +1,20 @@ +{% macro book_cover(book, alt=None) -%} + {%- set image_title = book.title if book.title else book.name -%} + {%- set image_alt = alt if alt else image_title -%} + {% set srcset = book|get_cover_srcset %} + {{ image_alt }} +{%- endmacro %} + +{% macro series(series, alt=None) -%} + {%- set image_alt = alt if alt else image_title -%} + {% set srcset = series|get_series_srcset %} + {{ book_title }} +{%- endmacro %} diff --git a/cps/templates/index.html b/cps/templates/index.html index 4cbf520e..0bb3da72 100644 --- a/cps/templates/index.html +++ b/cps/templates/index.html @@ -1,3 +1,4 @@ +{% import 'image.html' as image %} {% extends "layout.html" %} {% block body %} {% if g.user.show_detail_random() and page != "discover" %} @@ -9,7 +10,7 @@
- {{ entry.Books.title }} + {{ image.book_cover(entry.Books) }} {% if entry[2] == True %}{% endif %} @@ -92,7 +93,7 @@
- {{ entry.Books.title }} + {{ image.book_cover(entry.Books) }} {% if entry[2] == True %}{% endif %} diff --git a/cps/templates/layout.html b/cps/templates/layout.html index 46f03da1..7502514a 100644 --- a/cps/templates/layout.html +++ b/cps/templates/layout.html @@ -1,4 +1,5 @@ {% from 'modal_dialogs.html' import restrict_modal, delete_book, filechooser_modal, delete_confirm_modal, change_confirm_modal %} +{% import 'image.html' as image %} @@ -40,7 +41,7 @@ {{instance}}
{% if g.user.is_authenticated or g.allow_anonymous %} - +
@@ -53,7 +54,7 @@
- + {% endif %} {% endfor %} diff --git a/cps/templates/tasks.html b/cps/templates/tasks.html index 3ef50474..5cbc5f8b 100644 --- a/cps/templates/tasks.html +++ b/cps/templates/tasks.html @@ -5,7 +5,7 @@ {% block body %}

{{_('Tasks')}}

-
{{library}}{{_(version)}}{{version}}
+
{% if g.user.role_admin() %} @@ -16,6 +16,9 @@ + {% if g.user.role_admin() %} + + {% endif %} @@ -23,6 +26,30 @@
{{_('Progress')}} {{_('Run Time')}} {{_('Start Time')}}{{_('Actions')}}
{% endblock %} +{% block modal %} +{{ delete_book() }} +{% if g.user.role_admin() %} + +{% endif %} +{% endblock %} {% block js %} diff --git a/cps/ub.py b/cps/ub.py index 4bb66ff6..fd7a90ef 100644 --- a/cps/ub.py +++ b/cps/ub.py @@ -17,6 +17,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +import atexit import os import sys import datetime @@ -52,7 +53,7 @@ except ImportError: from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session from werkzeug.security import generate_password_hash -from . import constants, logger, cli +from . import constants, logger log = logger.create() @@ -512,6 +513,28 @@ class RemoteAuthToken(Base): return '' % self.id +def filename(context): + file_format = context.get_current_parameters()['format'] + if file_format == 'jpeg': + return context.get_current_parameters()['uuid'] + '.jpg' + else: + return context.get_current_parameters()['uuid'] + '.' + file_format + + +class Thumbnail(Base): + __tablename__ = 'thumbnail' + + id = Column(Integer, primary_key=True) + entity_id = Column(Integer) + uuid = Column(String, default=lambda: str(uuid.uuid4()), unique=True) + format = Column(String, default='jpeg') + type = Column(SmallInteger, default=constants.THUMBNAIL_TYPE_COVER) + resolution = Column(SmallInteger, default=constants.COVER_THUMBNAIL_SMALL) + filename = Column(String, default=filename) + generated_at = Column(DateTime, default=lambda: datetime.datetime.utcnow()) + expiration = Column(DateTime, nullable=True) + + # Add missing tables during migration of database def add_missing_tables(engine, _session): if not engine.dialect.has_table(engine.connect(), "book_read_link"): @@ -526,6 +549,8 @@ def add_missing_tables(engine, _session): KoboStatistics.__table__.create(bind=engine) if not engine.dialect.has_table(engine.connect(), "archived_book"): ArchivedBook.__table__.create(bind=engine) + if not engine.dialect.has_table(engine.connect(), "thumbnail"): + Thumbnail.__table__.create(bind=engine) if not engine.dialect.has_table(engine.connect(), "registration"): Registration.__table__.create(bind=engine) with engine.connect() as conn: @@ -791,7 +816,7 @@ def init_db_thread(): return Session() -def init_db(app_db_path): +def init_db(app_db_path, user_credentials=None): # Open session for database connection global session global app_DB_path @@ -812,8 +837,8 @@ def init_db(app_db_path): create_admin_user(session) create_anonymous_user(session) - if cli.user_credentials: - username, password = cli.user_credentials.split(':', 1) + if user_credentials: + username, password = user_credentials.split(':', 1) user = session.query(User).filter(func.lower(User.name) == username.lower()).first() if user: if not password: @@ -831,6 +856,16 @@ def init_db(app_db_path): sys.exit(3) +def get_new_session_instance(): + new_engine = create_engine(u'sqlite:///{0}'.format(app_DB_path), echo=False) + new_session = scoped_session(sessionmaker()) + new_session.configure(bind=new_engine) + + atexit.register(lambda: new_session.remove() if new_session else True) + + return new_session + + def dispose(): global session diff --git a/cps/updater.py b/cps/updater.py index f7341002..13d774bf 100644 --- a/cps/updater.py +++ b/cps/updater.py @@ -28,10 +28,10 @@ from io import BytesIO from tempfile import gettempdir import requests -from babel.dates import format_datetime +from flask_babel import format_datetime from flask_babel import gettext as _ -from . 
import constants, logger, config, web_server +from . import constants, logger # config, web_server log = logger.create() @@ -58,15 +58,19 @@ class Updater(threading.Thread): self.status = -1 self.updateIndex = None + def init_updater(self, config, web_server): + self.config = config + self.web_server = web_server + def get_current_version_info(self): - if config.config_updatechannel == constants.UPDATE_STABLE: + if self.config.config_updatechannel == constants.UPDATE_STABLE: return self._stable_version_info() return self._nightly_version_info() - def get_available_updates(self, request_method, locale): - if config.config_updatechannel == constants.UPDATE_STABLE: + def get_available_updates(self, request_method): + if self.config.config_updatechannel == constants.UPDATE_STABLE: return self._stable_available_updates(request_method) - return self._nightly_available_updates(request_method, locale) + return self._nightly_available_updates(request_method) def do_work(self): try: @@ -95,7 +99,7 @@ class Updater(threading.Thread): self.status = 6 log.debug(u'Preparing restart of server') time.sleep(2) - web_server.stop(True) + self.web_server.stop(True) self.status = 7 time.sleep(2) return True @@ -262,8 +266,9 @@ class Updater(threading.Thread): if additional_path: exclude.append(additional_path) exclude = tuple(exclude) - # check if we are in a package, rename cps.py to __init__.py + # check if we are in a package, rename cps.py to __init__.py and __main__.py if constants.HOME_CONFIG: + shutil.copy(os.path.join(source, 'cps.py'), os.path.join(source, '__main__.py')) shutil.move(os.path.join(source, 'cps.py'), os.path.join(source, '__init__.py')) for root, dirs, files in os.walk(destination, topdown=True): @@ -331,7 +336,7 @@ class Updater(threading.Thread): print("\n*** Finished ***") @staticmethod - def _populate_parent_commits(update_data, status, locale, tz, parents): + def _populate_parent_commits(update_data, status, tz, parents): try: parent_commit = update_data['parents'][0] # limit the maximum search depth @@ -356,7 +361,7 @@ class Updater(threading.Thread): parent_commit_date = datetime.datetime.strptime( parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz parent_commit_date = format_datetime( - parent_commit_date, format='short', locale=locale) + parent_commit_date, format='short') parents.append([parent_commit_date, parent_data['message'].replace('\r\n', '
<br>').replace('\n', '<br>
')]) @@ -398,7 +403,7 @@ class Updater(threading.Thread): os.sep + 'gdrive_credentials', os.sep + 'settings.yaml', os.sep + 'venv', os.sep + 'virtualenv', os.sep + 'access.log', os.sep + 'access.log1', os.sep + 'access.log2', os.sep + '.calibre-web.log.swp', os.sep + '_sqlite3.so', os.sep + 'cps' + os.sep + '.HOMEDIR', - os.sep + 'gmail.json', os.sep + 'exclude.txt' + os.sep + 'gmail.json', os.sep + 'exclude.txt', os.sep + 'cps' + os.sep + 'cache' ] try: with open(os.path.join(constants.BASE_DIR, "exclude.txt"), "r") as f: @@ -414,7 +419,7 @@ class Updater(threading.Thread): log_function("Excluded file list for updater not found, or not accessible") return excluded_files - def _nightly_available_updates(self, request_method, locale): + def _nightly_available_updates(self, request_method): tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone) if request_method == "GET": repository_url = _REPOSITORY_API_URL @@ -455,14 +460,14 @@ class Updater(threading.Thread): update_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz parents.append( [ - format_datetime(new_commit_date, format='short', locale=locale), + format_datetime(new_commit_date, format='short'), update_data['message'], update_data['sha'] ] ) # it only makes sense to analyze the parents if we know the current commit hash if status['current_commit_hash'] != '': - parents = self._populate_parent_commits(update_data, status, locale, tz, parents) + parents = self._populate_parent_commits(update_data, status, tz, parents) status['history'] = parents[::-1] except (IndexError, KeyError): status['success'] = False @@ -591,7 +596,7 @@ class Updater(threading.Thread): return json.dumps(status) def _get_request_path(self): - if config.config_updatechannel == constants.UPDATE_STABLE: + if self.config.config_updatechannel == constants.UPDATE_STABLE: return self.updateFile return _REPOSITORY_API_URL + '/zipball/master' @@ -619,7 +624,7 @@ class Updater(threading.Thread): status['message'] = _(u'HTTP Error') + ': ' + commit['message'] else: status['message'] = _(u'HTTP Error') + ': ' + str(e) - except requests.exceptions.ConnectionError: + except requests.exceptions.ConnectionError as e: status['message'] = _(u'Connection error') except requests.exceptions.Timeout: status['message'] = _(u'Timeout while establishing connection') diff --git a/cps/uploader.py b/cps/uploader.py index 276db326..5dbd1249 100644 --- a/cps/uploader.py +++ b/cps/uploader.py @@ -27,12 +27,6 @@ from .helper import split_authors log = logger.create() - -try: - from lxml.etree import LXML_VERSION as lxmlversion -except ImportError: - lxmlversion = None - try: from wand.image import Image, Color from wand import version as ImageVersion @@ -101,7 +95,7 @@ def default_meta(tmp_file_path, original_file_name, original_file_extension): extension=original_file_extension, title=original_file_name, author=_(u'Unknown'), - cover=None, #pdf_preview(tmp_file_path, original_file_name), + cover=None, description="", tags="", series="", @@ -237,29 +231,12 @@ def pdf_preview(tmp_file_path, tmp_dir): return None -def get_versions(all=True): +def get_versions(): ret = dict() if not use_generic_pdf_cover: ret['Image Magick'] = ImageVersion.MAGICK_VERSION else: ret['Image Magick'] = u'not installed' - if all: - if not use_generic_pdf_cover: - ret['Wand'] = ImageVersion.VERSION - else: - ret['Wand'] = u'not installed' - if use_pdf_meta: - ret['PyPdf'] = PyPdfVersion - else: - ret['PyPdf'] = u'not installed' - if lxmlversion: - ret['lxml'] = 
'.'.join(map(str, lxmlversion)) - else: - ret['lxml'] = u'not installed' - if comic.use_comic_meta: - ret['Comic_API'] = comic.comic_version or u'installed' - else: - ret['Comic_API'] = u'not installed' return ret diff --git a/cps/web.py b/cps/web.py index 8d8b9043..1aa4cc1b 100644 --- a/cps/web.py +++ b/cps/web.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web) # Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11, # andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh, @@ -21,40 +19,37 @@ # along with this program. If not, see . import os -from datetime import datetime import json import mimetypes import chardet # dependency of requests import copy -from functools import wraps -from babel.dates import format_date -from babel import Locale from flask import Blueprint, jsonify from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for from flask import session as flask_session from flask_babel import gettext as _ +from flask_babel import get_locale from flask_login import login_user, logout_user, login_required, current_user from sqlalchemy.exc import IntegrityError, InvalidRequestError, OperationalError -from sqlalchemy.sql.expression import text, func, false, not_, and_, or_ +from sqlalchemy.sql.expression import text, func, false, not_, and_ from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.sql.functions import coalesce -from .services.worker import WorkerThread - from werkzeug.datastructures import Headers from werkzeug.security import generate_password_hash, check_password_hash from . import constants, logger, isoLanguages, services -from . import babel, db, ub, config, get_locale, app +from . import db, ub, config, app from . 
import calibre_db, kobo_sync_status +from .search import render_search_results, render_adv_search_results from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download -from .helper import check_valid_domain, render_task_status, check_email, check_username, \ - get_book_cover, get_download_link, send_mail, generate_random_password, \ +from .helper import check_valid_domain, check_email, check_username, \ + get_book_cover, get_series_cover_thumbnail, get_download_link, send_mail, generate_random_password, \ send_registration_mail, check_send_to_kindle, check_read_formats, tags_filters, reset_password, valid_email, \ edit_book_read_status from .pagination import Pagination from .redirect import redirect_back +from .babel import get_available_locale from .usermanagement import login_required_if_no_ano from .kobo_sync_status import remove_synced_book from .render_template import render_title_template @@ -75,6 +70,8 @@ except ImportError: oauth_check = {} register_user_with_oauth = logout_oauth_user = get_oauth_status = None +from functools import wraps + try: from natsort import natsorted as sort except ImportError: @@ -102,6 +99,7 @@ def add_security_headers(resp): web = Blueprint('web', __name__) + log = logger.create() @@ -134,7 +132,7 @@ def viewer_required(f): @web.route("/ajax/emailstat") @login_required def get_email_status_json(): - tasks = WorkerThread.getInstance().tasks + tasks = WorkerThread.get_instance().tasks return jsonify(render_task_status(tasks)) @@ -770,57 +768,6 @@ def render_archived_books(page, sort_param): title=name, page=page_name, order=sort_param[1]) -def render_prepare_search_form(cc): - # prepare data for search-form - tags = calibre_db.session.query(db.Tags) \ - .join(db.books_tags_link) \ - .join(db.Books) \ - .filter(calibre_db.common_filters()) \ - .group_by(text('books_tags_link.tag')) \ - .order_by(db.Tags.name).all() - series = calibre_db.session.query(db.Series) \ - .join(db.books_series_link) \ - .join(db.Books) \ - .filter(calibre_db.common_filters()) \ - .group_by(text('books_series_link.series')) \ - .order_by(db.Series.name) \ - .filter(calibre_db.common_filters()).all() - shelves = ub.session.query(ub.Shelf) \ - .filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(current_user.id))) \ - .order_by(ub.Shelf.name).all() - extensions = calibre_db.session.query(db.Data) \ - .join(db.Books) \ - .filter(calibre_db.common_filters()) \ - .group_by(db.Data.format) \ - .order_by(db.Data.format).all() - if current_user.filter_language() == u"all": - languages = calibre_db.speaking_language() - else: - languages = None - return render_title_template('search_form.html', tags=tags, languages=languages, extensions=extensions, - series=series, shelves=shelves, title=_(u"Advanced Search"), cc=cc, page="advsearch") - - -def render_search_results(term, offset=None, order=None, limit=None): - join = db.books_series_link, db.books_series_link.c.book == db.Books.id, db.Series - entries, result_count, pagination = calibre_db.get_search_results(term, - config, - offset, - order, - limit, - *join) - return render_title_template('search.html', - searchterm=term, - pagination=pagination, - query=term, - adv_searchterm=term, - entries=entries, - result_count=result_count, - title=_(u"Search"), - page="search", - order=order[1]) - - # ################################### View Books list ################################################################## @@ -1013,7 +960,7 @@ def publisher_list(): .count()) if no_publisher_count: entries.append([db.Category(_("None"), 
"-1"), no_publisher_count]) - entries = sorted(entries, key=lambda x: x[0].name, reverse=not order_no) + entries = sorted(entries, key=lambda x: x[0].name.lower(), reverse=not order_no) char_list = generate_char_list(entries) return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=char_list, title=_(u"Publishers"), page="publisherlist", data="publisher", order=order_no) @@ -1043,7 +990,7 @@ def series_list(): .count()) if no_series_count: entries.append([db.Category(_("None"), "-1"), no_series_count]) - entries = sorted(entries, key=lambda x: x[0].name, reverse=not order_no) + entries = sorted(entries, key=lambda x: x[0].name.lower(), reverse=not order_no) return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=char_list, title=_(u"Series"), page="serieslist", data="series", order=order_no) else: @@ -1145,7 +1092,7 @@ def category_list(): .count()) if no_tag_count: entries.append([db.Category(_("None"), "-1"), no_tag_count]) - entries = sorted(entries, key=lambda x: x[0].name, reverse=not order_no) + entries = sorted(entries, key=lambda x: x[0].name.lower(), reverse=not order_no) char_list = generate_char_list(entries) return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=char_list, title=_(u"Categories"), page="catlist", data="category", order=order_no) @@ -1153,329 +1100,38 @@ def category_list(): abort(404) -# ################################### Task functions ################################################################ - - -@web.route("/tasks") -@login_required -def get_tasks_status(): - # if current user admin, show all email, otherwise only own emails - tasks = WorkerThread.getInstance().tasks - answer = render_task_status(tasks) - return render_title_template('tasks.html', entries=answer, title=_(u"Tasks"), page="tasks") - - -# ################################### Search functions ################################################################ - -@web.route("/search", methods=["GET"]) -@login_required_if_no_ano -def search(): - term = request.args.get("query") - if term: - return redirect(url_for('web.books_list', data="search", sort_param='stored', query=term.strip())) - else: - return render_title_template('search.html', - searchterm="", - result_count=0, - title=_(u"Search"), - page="search") - - -@web.route("/advsearch", methods=['POST']) -@login_required_if_no_ano -def advanced_search(): - values = dict(request.form) - params = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie', 'include_shelf', 'exclude_shelf', - 'include_language', 'exclude_language', 'include_extension', 'exclude_extension'] - for param in params: - values[param] = list(request.form.getlist(param)) - flask_session['query'] = json.dumps(values) - return redirect(url_for('web.books_list', data="advsearch", sort_param='stored', query="")) - - -def adv_search_custom_columns(cc, term, q): - for c in cc: - if c.datatype == "datetime": - custom_start = term.get('custom_column_' + str(c.id) + '_start') - custom_end = term.get('custom_column_' + str(c.id) + '_end') - if custom_start: - q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( - func.datetime(db.cc_classes[c.id].value) >= func.datetime(custom_start))) - if custom_end: - q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( - func.datetime(db.cc_classes[c.id].value) <= func.datetime(custom_end))) - else: - custom_query = term.get('custom_column_' + str(c.id)) - if custom_query != '' and custom_query is 
not None: - if c.datatype == 'bool': - q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( - db.cc_classes[c.id].value == (custom_query == "True"))) - elif c.datatype == 'int' or c.datatype == 'float': - q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( - db.cc_classes[c.id].value == custom_query)) - elif c.datatype == 'rating': - q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( - db.cc_classes[c.id].value == int(float(custom_query) * 2))) - else: - q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any( - func.lower(db.cc_classes[c.id].value).ilike("%" + custom_query + "%"))) - return q - - -def adv_search_read_status(q, read_status): - if read_status: - if config.config_read_column: - try: - if read_status == "True": - q = q.join(db.cc_classes[config.config_read_column], isouter=True) \ - .filter(db.cc_classes[config.config_read_column].value == True) - else: - q = q.join(db.cc_classes[config.config_read_column], isouter=True) \ - .filter(coalesce(db.cc_classes[config.config_read_column].value, False) != True) - except (KeyError, AttributeError, IndexError): - log.error( - "Custom Column No.{} is not existing in calibre database".format(config.config_read_column)) - flash(_("Custom Column No.%(column)d is not existing in calibre database", - column=config.config_read_column), - category="error") - return q - else: - if read_status == "True": - q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \ - .filter(ub.ReadBook.user_id == int(current_user.id), - ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED) - else: - q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \ - .filter(ub.ReadBook.user_id == int(current_user.id), - coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED) - return q - - -def adv_search_language(q, include_languages_inputs, exclude_languages_inputs): - if current_user.filter_language() != "all": - q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())) - else: - return adv_search_text(q, include_languages_inputs, exclude_languages_inputs, db.Languages.id) - return q - - -def adv_search_ratings(q, rating_high, rating_low): - if rating_high: - rating_high = int(rating_high) * 2 - q = q.filter(db.Books.ratings.any(db.Ratings.rating <= rating_high)) - if rating_low: - rating_low = int(rating_low) * 2 - q = q.filter(db.Books.ratings.any(db.Ratings.rating >= rating_low)) - return q - - -def adv_search_text(q, include_inputs, exclude_inputs, data_table): - for inp in include_inputs: - q = q.filter(getattr(db.Books, data_table.class_.__tablename__).any(data_table == inp)) - for excl in exclude_inputs: - q = q.filter(not_(getattr(db.Books, data_table.class_.__tablename__).any(data_table == excl))) - return q - - -def adv_search_shelf(q, include_shelf_inputs, exclude_shelf_inputs): - q = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id) \ - .filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(exclude_shelf_inputs))) - if len(include_shelf_inputs) > 0: - q = q.filter(ub.BookShelf.shelf.in_(include_shelf_inputs)) - return q - - -def extend_search_term(searchterm, - author_name, - book_title, - publisher, - pub_start, - pub_end, - tags, - rating_high, - rating_low, - read_status, - ): - searchterm.extend((author_name.replace('|', ','), book_title, publisher)) - if pub_start: - try: - searchterm.extend([_(u"Published after ") + - format_date(datetime.strptime(pub_start, "%Y-%m-%d"), - format='medium', 
locale=get_locale())])
-        except ValueError:
-            pub_start = u""
-    if pub_end:
-        try:
-            searchterm.extend([_(u"Published before ") +
-                               format_date(datetime.strptime(pub_end, "%Y-%m-%d"),
-                                           format='medium', locale=get_locale())])
-        except ValueError:
-            pub_end = u""
-    elements = {'tag': db.Tags, 'serie': db.Series, 'shelf': ub.Shelf}
-    for key, db_element in elements.items():
-        tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['include_' + key])).all()
-        searchterm.extend(tag.name for tag in tag_names)
-        tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['exclude_' + key])).all()
-        searchterm.extend(tag.name for tag in tag_names)
-    language_names = calibre_db.session.query(db.Languages). \
-        filter(db.Languages.id.in_(tags['include_language'])).all()
-    if language_names:
-        language_names = calibre_db.speaking_language(language_names)
-    searchterm.extend(language.name for language in language_names)
-    language_names = calibre_db.session.query(db.Languages). \
-        filter(db.Languages.id.in_(tags['exclude_language'])).all()
-    if language_names:
-        language_names = calibre_db.speaking_language(language_names)
-    searchterm.extend(language.name for language in language_names)
-    if rating_high:
-        searchterm.extend([_(u"Rating <= %(rating)s", rating=rating_high)])
-    if rating_low:
-        searchterm.extend([_(u"Rating >= %(rating)s", rating=rating_low)])
-    if read_status:
-        searchterm.extend([_(u"Read Status = %(status)s", status=read_status)])
-    searchterm.extend(ext for ext in tags['include_extension'])
-    searchterm.extend(ext for ext in tags['exclude_extension'])
-    # handle custom columns
-    searchterm = " + ".join(filter(None, searchterm))
-    return searchterm, pub_start, pub_end
-
-
-def render_adv_search_results(term, offset=None, order=None, limit=None):
-    sort_param = order[0] if order else [db.Books.sort]
-    pagination = None
-
-    cc = calibre_db.get_cc_columns(config, filter_config_custom_read=True)
-    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
-    query = calibre_db.generate_linked_query(config.config_read_column, db.Books)
-    q = query.outerjoin(db.books_series_link, db.books_series_link.c.book == db.Books.id) \
-        .outerjoin(db.Series) \
-        .filter(calibre_db.common_filters(True))
-
-    # parse multiselects to a complete dict
-    tags = dict()
-    elements = ['tag', 'serie', 'shelf', 'language', 'extension']
-    for element in elements:
-        tags['include_' + element] = term.get('include_' + element)
-        tags['exclude_' + element] = term.get('exclude_' + element)
-
-    author_name = term.get("author_name")
-    book_title = term.get("book_title")
-    publisher = term.get("publisher")
-    pub_start = term.get("publishstart")
-    pub_end = term.get("publishend")
-    rating_low = term.get("ratinghigh")
-    rating_high = term.get("ratinglow")
-    description = term.get("comment")
-    read_status = term.get("read_status")
-    if author_name:
-        author_name = author_name.strip().lower().replace(',', '|')
-    if book_title:
-        book_title = book_title.strip().lower()
-    if publisher:
-        publisher = publisher.strip().lower()
-
-    search_term = []
-    cc_present = False
-    for c in cc:
-        if c.datatype == "datetime":
-            column_start = term.get('custom_column_' + str(c.id) + '_start')
-            column_end = term.get('custom_column_' + str(c.id) + '_end')
-            if column_start:
-                search_term.extend([u"{} >= {}".format(c.name,
-                                                       format_date(datetime.strptime(column_start, "%Y-%m-%d").date(),
-                                                                   format='medium',
-                                                                   locale=get_locale())
-                                                       )])
-                cc_present = True
-            if column_end:
-                search_term.extend([u"{} <= {}".format(c.name,
-                                                       format_date(datetime.strptime(column_end, "%Y-%m-%d").date(),
-                                                                   format='medium',
-                                                                   locale=get_locale())
-                                                       )])
-                cc_present = True
-        elif term.get('custom_column_' + str(c.id)):
-            search_term.extend([(u"{}: {}".format(c.name, term.get('custom_column_' + str(c.id))))])
-            cc_present = True
-
-    if any(tags.values()) or author_name or book_title or \
-            publisher or pub_start or pub_end or rating_low or rating_high \
-            or description or cc_present or read_status:
-        search_term, pub_start, pub_end = extend_search_term(search_term,
-                                                             author_name,
-                                                             book_title,
-                                                             publisher,
-                                                             pub_start,
-                                                             pub_end,
-                                                             tags,
-                                                             rating_high,
-                                                             rating_low,
-                                                             read_status)
-        # q = q.filter()
-        if author_name:
-            q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + author_name + "%")))
-        if book_title:
-            q = q.filter(func.lower(db.Books.title).ilike("%" + book_title + "%"))
-        if pub_start:
-            q = q.filter(func.datetime(db.Books.pubdate) > func.datetime(pub_start))
-        if pub_end:
-            q = q.filter(func.datetime(db.Books.pubdate) < func.datetime(pub_end))
-        q = adv_search_read_status(q, read_status)
-        if publisher:
-            q = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + publisher + "%")))
-        q = adv_search_text(q, tags['include_tag'], tags['exclude_tag'], db.Tags.id)
-        q = adv_search_text(q, tags['include_serie'], tags['exclude_serie'], db.Series.id)
-        q = adv_search_text(q, tags['include_extension'], tags['exclude_extension'], db.Data.format)
-        q = adv_search_shelf(q, tags['include_shelf'], tags['exclude_shelf'])
-        q = adv_search_language(q, tags['include_language'], tags['exclude_language'])
-        q = adv_search_ratings(q, rating_high, rating_low, )
-
-        if description:
-            q = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike("%" + description + "%")))
-
-        # search custom columns
-        try:
-            q = adv_search_custom_columns(cc, term, q)
-        except AttributeError as ex:
-            log.error_or_exception(ex)
-            flash(_("Error on search for custom columns, please restart Calibre-Web"), category="error")
-
-    q = q.order_by(*sort_param).all()
-    flask_session['query'] = json.dumps(term)
-    ub.store_combo_ids(q)
-    result_count = len(q)
-    if offset is not None and limit is not None:
-        offset = int(offset)
-        limit_all = offset + int(limit)
-        pagination = Pagination((offset / (int(limit)) + 1), limit, result_count)
-    else:
-        offset = 0
-        limit_all = result_count
-    entries = calibre_db.order_authors(q[offset:limit_all], list_return=True, combined=True)
-    return render_title_template('search.html',
-                                 adv_searchterm=search_term,
-                                 pagination=pagination,
-                                 entries=entries,
-                                 result_count=result_count,
-                                 title=_(u"Advanced Search"), page="advsearch",
-                                 order=order[1])
-
-
-@web.route("/advsearch", methods=['GET'])
-@login_required_if_no_ano
-def advanced_search_form():
-    # Build custom columns names
-    cc = calibre_db.get_cc_columns(config, filter_config_custom_read=True)
-    return render_prepare_search_form(cc)
 
 # ################################### Download/Send ##################################################################
 
 @web.route("/cover/<int:book_id>")
+@web.route("/cover/<int:book_id>/<string:resolution>")
 @login_required_if_no_ano
-def get_cover(book_id):
-    return get_book_cover(book_id)
+def get_cover(book_id, resolution=None):
+    resolutions = {
+        'og': constants.COVER_THUMBNAIL_ORIGINAL,
+        'sm': constants.COVER_THUMBNAIL_SMALL,
+        'md': constants.COVER_THUMBNAIL_MEDIUM,
+        'lg': constants.COVER_THUMBNAIL_LARGE,
+    }
+    cover_resolution = resolutions.get(resolution, None)
+    return get_book_cover(book_id, cover_resolution)
+
+
+@web.route("/series_cover/<int:series_id>")
+@web.route("/series_cover/<int:series_id>/<string:resolution>")
+@login_required_if_no_ano
+def get_series_cover(series_id, resolution=None):
+    resolutions = {
+        'og': constants.COVER_THUMBNAIL_ORIGINAL,
+        'sm': constants.COVER_THUMBNAIL_SMALL,
+        'md': constants.COVER_THUMBNAIL_MEDIUM,
+        'lg': constants.COVER_THUMBNAIL_LARGE,
+    }
+    cover_resolution = resolutions.get(resolution, None)
+    return get_series_cover_thumbnail(series_id, cover_resolution)
+
 
 @web.route("/robots.txt")
@@ -1761,7 +1417,7 @@ def change_profile(kobo_support, local_oauth_check, oauth_status, translations,
 @login_required
 def profile():
     languages = calibre_db.speaking_language()
-    translations = babel.list_translations() + [Locale('en')]
+    translations = get_available_locale()
     kobo_support = feature_support['kobo'] and config.config_kobo_sync
     if feature_support['oauth'] and config.config_login_type == 2:
         oauth_status = get_oauth_status()
@@ -1868,10 +1524,10 @@ def show_book(book_id):
         entry.kindle_list = check_send_to_kindle(entry)
         entry.reader_list = check_read_formats(entry)
 
-        entry.audioentries = []
+        entry.audio_entries = []
         for media_format in entry.data:
             if media_format.format.lower() in constants.EXTENSIONS_AUDIO:
-                entry.audioentries.append(media_format.format.lower())
+                entry.audio_entries.append(media_format.format.lower())
 
         return render_title_template('detail.html',
                                      entry=entry,
diff --git a/optional-requirements.txt b/optional-requirements.txt
index fea410e5..4360d221 100644
--- a/optional-requirements.txt
+++ b/optional-requirements.txt
@@ -1,5 +1,5 @@
 # GDrive Integration
-google-api-python-client>=1.7.11,<2.44.0
+google-api-python-client>=1.7.11,<2.50.0
 gevent>20.6.0,<22.0.0
 greenlet>=0.4.17,<1.2.0
 httplib2>=0.9.2,<0.21.0
@@ -13,7 +13,7 @@ rsa>=3.4.2,<4.9.0
 
 # Gmail
 google-auth-oauthlib>=0.4.3,<0.6.0
-google-api-python-client>=1.7.11,<2.44.0
+google-api-python-client>=1.7.11,<2.50.0
 
 # goodreads
 goodreads>=0.3.2,<0.4.0
diff --git a/requirements.txt b/requirements.txt
index 81320aa3..7a30ae06 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
+APScheduler>=3.6.3,<3.10.0
 werkzeug<2.1.0
 Babel>=1.3,<3.0
 Flask-Babel>=0.11.1,<2.1.0
diff --git a/setup.cfg b/setup.cfg
index 251d9db0..aff22e9e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -38,6 +38,7 @@ console_scripts =
 [options]
 include_package_data = True
 install_requires =
+    APScheduler>=3.6.3,<3.10.0
     werkzeug<2.1.0
     Babel>=1.3,<3.0
     Flask-Babel>=0.11.1,<2.1.0
@@ -61,7 +62,7 @@ install_requires =
 
 [options.extras_require]
 gdrive =
-    google-api-python-client>=1.7.11,<2.44.0
+    google-api-python-client>=1.7.11,<2.50.0
     gevent>20.6.0,<22.0.0
     greenlet>=0.4.17,<1.2.0
     httplib2>=0.9.2,<0.21.0
@@ -74,7 +75,7 @@ gdrive =
     rsa>=3.4.2,<4.9.0
 gmail =
     google-auth-oauthlib>=0.4.3,<0.6.0
-    google-api-python-client>=1.7.11,<2.44.0
+    google-api-python-client>=1.7.11,<2.50.0
 goodreads =
     goodreads>=0.3.2,<0.4.0
     python-Levenshtein>=0.12.0,<0.13.0
diff --git a/test/Calibre-Web TestSummary_Linux.html b/test/Calibre-Web TestSummary_Linux.html
index 1c874a62..027aaa81 100644
--- a/test/Calibre-Web TestSummary_Linux.html
+++ b/test/Calibre-Web TestSummary_Linux.html
@@ -37,20 +37,20 @@
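The new cover routes above map an optional resolution suffix ('og', 'sm', 'md', 'lg') onto the COVER_THUMBNAIL_* constants before delegating to get_book_cover() / get_series_cover_thumbnail(). A minimal sketch of fetching a small thumbnail over HTTP; the host, port, and book id are placeholder assumptions, and anonymous browsing is assumed to be enabled (the routes are guarded by login_required_if_no_ano), otherwise a session cookie would be needed:

# Sketch only: download a book cover at the 'sm' (small) thumbnail resolution.
# http://localhost:8083 and book id 42 are hypothetical values, not part of this change.
import shutil
import urllib.request

book_id = 42
url = "http://localhost:8083/cover/{}/sm".format(book_id)  # omit the suffix to get the default cover

with urllib.request.urlopen(url) as response, open("cover_sm.jpg", "wb") as out:
    shutil.copyfileobj(response, out)  # write the returned image bytes to disk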
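requirements.txt and setup.cfg now also pull in APScheduler (>=3.6.3,<3.10.0). As a rough illustration of the kind of background scheduling that library provides, a sketch of a periodic job with APScheduler's BackgroundScheduler; the task function and the 04:00 cron trigger are invented placeholders, not the jobs Calibre-Web actually registers:

# Sketch only: a periodic background job using APScheduler's BackgroundScheduler.
from apscheduler.schedulers.background import BackgroundScheduler

def nightly_maintenance():
    # hypothetical task; Calibre-Web's real jobs are defined elsewhere
    print("running nightly maintenance ...")

scheduler = BackgroundScheduler()
scheduler.add_job(nightly_maintenance, trigger='cron', hour=4)  # run every day at 04:00
scheduler.start()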

-Start Time: 2022-04-19 20:47:12
+Start Time: 2022-04-30 21:05:13
-Stop Time: 2022-04-20 02:31:36
+Stop Time: 2022-05-01 02:54:00
-Duration: 4h 53 min
+Duration: 4h 55 min

@@ -236,13 +236,13 @@ TestCli - 9 - 9 + 10 + 10 0 0 0 - Detail + Detail @@ -321,6 +321,15 @@ + +
TestCli - test_no_database
+ + PASS + + + + +
TestCli - test_settingsdb_not_writeable
@@ -332,13 +341,13 @@ TestCliGdrivedb - 3 - 3 + 4 + 4 0 0 0 - Detail + Detail @@ -370,6 +379,15 @@ + + + +
TestCliGdrivedb - test_no_database
+ + PASS + + + @@ -579,15 +597,15 @@ - + TestEbookConvertCalibreGDrive + 7 6 - 6 - 0 + 1 0 0 - Detail + Detail @@ -646,6 +664,35 @@ + + + +
TestEbookConvertCalibreGDrive - test_thumbnail_cache
+ + +
+ FAIL +
+ + + + + + + @@ -1394,11 +1441,11 @@ - + TestEditBooksList 18 - 15 - 3 + 18 + 0 0 0 @@ -1417,31 +1464,11 @@ - +
TestEditBooksList - test_bookslist_edit_categories
- -
- FAIL -
- - - - + PASS @@ -1527,33 +1554,11 @@ AssertionError: 2 != 3 - +
TestEditBooksList - test_bookslist_edit_publisher
- -
- FAIL -
- - - - + PASS @@ -1603,33 +1608,11 @@ AssertionError: 'Pandöm,Ti|s@d' != 'None' - +
TestEditBooksList - test_search_books_list
- -
- FAIL -
- - - - + PASS @@ -2278,13 +2261,13 @@ AssertionError: 4 != 5 TestKoboSyncBig - 5 - 5 + 6 + 6 0 0 0 - Detail + Detail @@ -2292,7 +2275,7 @@ AssertionError: 4 != 5 -
TestKoboSyncBig - test_kobo_sync_multi_user
+
TestKoboSyncBig - test_download_cover
PASS @@ -2301,7 +2284,7 @@ AssertionError: 4 != 5 -
TestKoboSyncBig - test_kobo_sync_selected_shelfs
+
TestKoboSyncBig - test_kobo_sync_multi_user
PASS @@ -2310,7 +2293,7 @@ AssertionError: 4 != 5 -
TestKoboSyncBig - test_sync_changed_book
+
TestKoboSyncBig - test_kobo_sync_selected_shelves
PASS @@ -2319,7 +2302,7 @@ AssertionError: 4 != 5 -
TestKoboSyncBig - test_sync_reading_state
+
TestKoboSyncBig - test_sync_changed_book
PASS @@ -2327,6 +2310,15 @@ AssertionError: 4 != 5 + +
TestKoboSyncBig - test_sync_reading_state
+ + PASS + + + + +
TestKoboSyncBig - test_sync_shelf
@@ -3603,15 +3595,15 @@ AssertionError: 4 != 5 - - TestUpdater - 9 - 8 - 0 - 0 + + TestThumbnailsEnv 1 + 1 + 0 + 0 + 0 - Detail + Detail @@ -3619,96 +3611,7 @@ AssertionError: 4 != 5 -
TestUpdater - test_check_update_nightly_errors
- - PASS - - - - - - -
TestUpdater - test_check_update_nightly_request_errors
- - PASS - - - - - - -
TestUpdater - test_check_update_stable_errors
- - PASS - - - - - - -
TestUpdater - test_check_update_stable_versions
- - PASS - - - - - - -
TestUpdater - test_perform_update
- - PASS - - - - - - -
TestUpdater - test_perform_update_stable_errors
- - PASS - - - - - - -
TestUpdater - test_perform_update_timeout
- - -
- SKIP -
- - - - - - - - - - -
TestUpdater - test_reconnect_database
- - PASS - - - - - - -
TestUpdater - test_update_write_protect
+
TestThumbnailsEnv - test_cover_cache_env_on_database_change
PASS @@ -3717,14 +3620,14 @@ AssertionError: 4 != 5 - TestUploadEPubs - 4 - 3 + TestThumbnails + 8 + 5 + 2 + 0 1 - 0 - 0 - Detail + Detail @@ -3732,7 +3635,7 @@ AssertionError: 4 != 5 -
TestUploadEPubs - test_upload_epub_cover
+
TestThumbnails - test_cache_non_writable
PASS @@ -3741,7 +3644,7 @@ AssertionError: 4 != 5 -
TestUploadEPubs - test_upload_epub_cover_formats
+
TestThumbnails - test_cache_of_deleted_book
PASS @@ -3750,7 +3653,7 @@ AssertionError: 4 != 5 -
TestUploadEPubs - test_upload_epub_duplicate
+
TestThumbnails - test_cover_cache_on_database_change
PASS @@ -3759,7 +3662,7 @@ AssertionError: 4 != 5 -
TestUploadEPubs - test_upload_epub_lang
+
TestThumbnails - test_cover_change_on_upload_new_cover
@@ -3774,11 +3677,9 @@ AssertionError: 4 != 5
Traceback (most recent call last):
-  File "/home/ozzie/Development/calibre-web-test/test/test_upload_epubs.py", line 94, in test_upload_epub_lang
-    self.assertEqual("Lingua: Tedesco", self.driver.find_elements(By.TAG_NAME, "h2")[1].text)
-AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål'
-- Lingua: Tedesco
-+ Lingua: Norvegese bokmål
+ File "/home/ozzie/Development/calibre-web-test/test/test_thumbnails.py", line 135, in test_cover_change_on_upload_new_cover
+   self.assertGreaterEqual(diff(BytesIO(updated_cover), BytesIO(original_cover), delete_diff_file=True), 0.05)
+AssertionError: 0.038404302739168276 not greater than or equal to 0.05
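The failing assertion above compares two cover images and expects at least a 5% pixel difference after a new cover is uploaded. Assuming the diff() helper is the one from the diffimg package (an assumption based on the call signature seen in the traceback), a minimal sketch of the same kind of check; the file names are placeholders:

# Sketch only: compare two images and get a difference ratio between 0.0 and 1.0.
# 'old_cover.jpg' and 'new_cover.jpg' are placeholder file names.
from diffimg import diff

ratio = diff("old_cover.jpg", "new_cover.jpg", delete_diff_file=True)
print("covers differ by {:.1%}".format(ratio))
assert ratio >= 0.05, "expected the uploaded cover to change the image noticeably"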
@@ -3787,6 +3688,226 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' + + + +
TestThumbnails - test_cover_for_series
+ + SKIP + + + + + + +
TestThumbnails - test_cover_on_upload_book
+ + PASS + + + + + + +
TestThumbnails - test_remove_cover_from_cache
+ + PASS + + + + + + +
TestThumbnails - test_sideloaded_book
+ + +
+ FAIL +
+ + + + + + + + + + + TestUpdater + 9 + 8 + 0 + 0 + 1 + + Detail + + + + + + + +
TestUpdater - test_check_update_nightly_errors
+ + PASS + + + + + + +
TestUpdater - test_check_update_nightly_request_errors
+ + PASS + + + + + + +
TestUpdater - test_check_update_stable_errors
+ + PASS + + + + + + +
TestUpdater - test_check_update_stable_versions
+ + PASS + + + + + + +
TestUpdater - test_perform_update
+ + PASS + + + + + + +
TestUpdater - test_perform_update_stable_errors
+ + PASS + + + + + + +
TestUpdater - test_perform_update_timeout
+ + +
+ SKIP +
+ + + + + + + + + + +
TestUpdater - test_reconnect_database
+ + PASS + + + + + + +
TestUpdater - test_update_write_protect
+ + PASS + + + + + + + TestUploadEPubs + 4 + 4 + 0 + 0 + 0 + + Detail + + + + + + + +
TestUploadEPubs - test_upload_epub_cover
+ + PASS + + + + + + +
TestUploadEPubs - test_upload_epub_cover_formats
+ + PASS + + + + + + +
TestUploadEPubs - test_upload_epub_duplicate
+ + PASS + + + + + + +
TestUploadEPubs - test_upload_epub_lang
+ + PASS + + + @@ -3797,13 +3918,13 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' 0 0 - Detail + Detail - +
TestUserList - test_edit_user_email
@@ -3812,7 +3933,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_list_visibility
@@ -3821,7 +3942,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_admin_role
@@ -3830,7 +3951,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_check_sort
@@ -3839,7 +3960,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_denied_tags
@@ -3848,7 +3969,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_download_role
@@ -3857,7 +3978,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_edit_button
@@ -3866,7 +3987,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_edit_email
@@ -3875,7 +3996,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_edit_kindle
@@ -3884,7 +4005,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_edit_language
@@ -3893,7 +4014,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_edit_locale
@@ -3902,7 +4023,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_edit_name
@@ -3911,7 +4032,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_edit_visiblility
@@ -3920,7 +4041,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_guest_edit
@@ -3929,7 +4050,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_remove_admin
@@ -3938,7 +4059,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_requests
@@ -3947,7 +4068,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_search
@@ -3956,7 +4077,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserList - test_user_list_sort
@@ -3974,13 +4095,13 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' 0 0 - Detail + Detail - +
TestUserLoad - test_user_change_vis
@@ -3998,13 +4119,13 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' 0 0 - Detail + Detail - +
TestUserTemplate - test_allow_column_restriction
@@ -4013,7 +4134,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_allow_tag_restriction
@@ -4022,7 +4143,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_archived_format_template
@@ -4031,7 +4152,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_author_user_template
@@ -4040,7 +4161,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_best_user_template
@@ -4049,7 +4170,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_category_user_template
@@ -4058,7 +4179,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_deny_column_restriction
@@ -4067,7 +4188,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_deny_tag_restriction
@@ -4076,7 +4197,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_detail_random_user_template
@@ -4085,7 +4206,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_download_user_template
@@ -4094,7 +4215,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_format_user_template
@@ -4103,7 +4224,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_hot_user_template
@@ -4112,7 +4233,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_language_user_template
@@ -4121,7 +4242,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_limit_book_languages
@@ -4130,7 +4251,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_list_user_template
@@ -4139,7 +4260,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_publisher_user_template
@@ -4148,7 +4269,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_random_user_template
@@ -4157,7 +4278,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_read_user_template
@@ -4166,7 +4287,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_recent_user_template
@@ -4175,7 +4296,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_series_user_template
@@ -4184,7 +4305,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestUserTemplate - test_ui_language_settings
@@ -4202,13 +4323,13 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' 0 0 - Detail + Detail - +
TestCalibreWebVisibilitys - test_about
@@ -4217,7 +4338,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_SMTP_Settings
@@ -4226,7 +4347,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_add_user
@@ -4235,7 +4356,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_password
@@ -4244,7 +4365,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_archived
@@ -4253,7 +4374,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_authors
@@ -4262,7 +4383,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_category
@@ -4271,7 +4392,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_file_formats
@@ -4280,7 +4401,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_hot
@@ -4289,7 +4410,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_language
@@ -4298,7 +4419,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_publisher
@@ -4307,7 +4428,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_random
@@ -4316,7 +4437,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_rated
@@ -4325,7 +4446,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_rating
@@ -4334,7 +4455,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_read
@@ -4343,7 +4464,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_admin_change_visibility_series
@@ -4352,7 +4473,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_allow_columns
@@ -4361,7 +4482,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_allow_tags
@@ -4370,7 +4491,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_archive_books
@@ -4379,7 +4500,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_authors_max_settings
@@ -4388,7 +4509,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_change_title
@@ -4397,7 +4518,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_checked_logged_in
@@ -4406,7 +4527,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_hide_custom_column
@@ -4415,7 +4536,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_link_column_to_read_status
@@ -4424,7 +4545,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_random_books_available
@@ -4433,7 +4554,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_read_status_visible
@@ -4442,7 +4563,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_request_link_column_to_read_status
@@ -4451,7 +4572,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_restrict_columns
@@ -4460,7 +4581,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_restrict_tags
@@ -4469,7 +4590,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_save_views_recent
@@ -4478,7 +4599,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_search_functions
@@ -4487,7 +4608,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_search_order
@@ -4496,7 +4617,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_search_string
@@ -4505,7 +4626,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_user_email_available
@@ -4514,7 +4635,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreWebVisibilitys - test_user_visibility_sidebar
@@ -4532,13 +4653,13 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' 0 0 - Detail + Detail - +
TestCalibreHelper - test_author_sort
@@ -4547,7 +4668,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_author_sort_comma
@@ -4556,7 +4677,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_author_sort_junior
@@ -4565,7 +4686,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_author_sort_oneword
@@ -4574,7 +4695,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_author_sort_roman
@@ -4583,7 +4704,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_Limit_Length
@@ -4592,7 +4713,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_char_replacement
@@ -4601,7 +4722,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_chinese_Characters
@@ -4610,7 +4731,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_deg_eur_replacement
@@ -4619,7 +4740,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_doubleS
@@ -4628,7 +4749,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_finish_Dot
@@ -4637,7 +4758,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_high23
@@ -4646,7 +4767,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_check_umlauts
@@ -4655,7 +4776,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_random_password
@@ -4664,7 +4785,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_split_authors
@@ -4673,7 +4794,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmål' - +
TestCalibreHelper - test_whitespaces
@@ -4684,11 +4805,11 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' Total - 410 - 400 - 4 + 423 + 413 + 3 0 - 6 + 7   @@ -4716,7 +4837,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' Platform - Linux 5.13.0-39-generic #44~20.04.1-Ubuntu SMP Thu Mar 24 16:43:35 UTC 2022 x86_64 x86_64 + Linux 5.13.0-40-generic #45~20.04.1-Ubuntu SMP Mon Apr 4 09:38:31 UTC 2022 x86_64 x86_64 Basic @@ -4732,9 +4853,15 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' Basic + + APScheduler + 3.9.1 + Basic + + Babel - 2.9.1 + 2.10.1 Basic @@ -4794,7 +4921,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' Jinja2 - 3.1.1 + 3.1.2 Basic @@ -4830,7 +4957,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' SQLAlchemy - 1.4.35 + 1.4.36 Basic @@ -4860,7 +4987,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' google-api-python-client - 2.45.0 + 2.46.0 TestCliGdrivedb @@ -4890,7 +5017,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' google-api-python-client - 2.45.0 + 2.46.0 TestEbookConvertCalibreGDrive @@ -4920,7 +5047,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' google-api-python-client - 2.45.0 + 2.46.0 TestEbookConvertGDriveKepubify @@ -4962,7 +5089,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' google-api-python-client - 2.45.0 + 2.46.0 TestEditAuthorsGdrive @@ -4998,7 +5125,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' google-api-python-client - 2.45.0 + 2.46.0 TestEditBooksOnGdrive @@ -5040,7 +5167,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l' google-api-python-client - 2.45.0 + 2.46.0 TestSetupGdrive @@ -5130,7 +5257,7 @@ AssertionError: 'Lingua: Tedesco' != 'Lingua: Norvegese bokmÃ¥l'
diff --git a/test/Calibre-Web TestSummary_Windows.html b/test/Calibre-Web TestSummary_Windows.html
index 6668632d..21a41612 100644
--- a/test/Calibre-Web TestSummary_Windows.html
+++ b/test/Calibre-Web TestSummary_Windows.html
@@ -37,20 +37,20 @@
-Start Time: 2021-05-18 17:33:17
+Start Time: 2021-10-06 00:29:44
-Stop Time: 2021-05-18 20:37:38
+Stop Time: 2021-10-06 00:37:52
-Duration: 2h 35 min
+Duration: 7:28 min

@@ -102,15 +102,15 @@ - - TestAnonymous + + TestShelf + 24 + 1 + 9 13 - 13 - 0 - 0 - 0 + 1 - Detail + Detail @@ -118,1439 +118,33 @@ -
TestAnonymous - test_check_locale_guest
+
TestShelf - test_add_shelf_from_search
PASS - + -
TestAnonymous - test_guest_about
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_category
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_format
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_hot
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_language
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_publisher
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_rated
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_rating
- - PASS - - - - - - -
TestAnonymous - test_guest_change_visibility_series
- - PASS - - - - - - -
TestAnonymous - test_guest_random_books_available
- - PASS - - - - - - -
TestAnonymous - test_guest_restricted_settings_visibility
- - PASS - - - - - - -
TestAnonymous - test_guest_visibility_sidebar
- - PASS - - - - - - - TestCli - 8 - 8 - 0 - 0 - 0 - - Detail - - - - - - - -
TestCli - test_already_started
- - PASS - - - - - - -
TestCli - test_bind_to_single_interface
- - PASS - - - - - - -
TestCli - test_change_password
- - PASS - - - - - - -
TestCli - test_cli_SSL_files
- - PASS - - - - - - -
TestCli - test_cli_different_folder
- - PASS - - - - - - -
TestCli - test_cli_different_settings_database
- - PASS - - - - - - -
TestCli - test_environ_port_setting
- - PASS - - - - - - -
TestCli - test_settingsdb_not_writeable
- - PASS - - - - - - - TestCliGdrivedb - 2 - 2 - 0 - 0 - 0 - - Detail - - - - - - - -
TestCliGdrivedb - test_cli_gdrive_location
- - PASS - - - - - - -
TestCliGdrivedb - test_gdrive_db_nonwrite
- - PASS - - - - - - - TestCoverEditBooks - 1 - 1 - 0 - 0 - 0 - - Detail - - - - - - - -
TestCoverEditBooks - test_upload_jpg
- - PASS - - - - - - - TestDeleteDatabase - 1 - 1 - 0 - 0 - 0 - - Detail - - - - - - - -
TestDeleteDatabase - test_delete_books_in_database
- - PASS - - - - - - - TestEbookConvertCalibre - 11 - 0 - 0 - 0 - 11 - - Detail - - - - - - - -
TestEbookConvertCalibre - test_convert_deactivate
+
TestShelf - test_adv_search_shelf
- SKIP + ERROR
-