mirror of https://github.com/janeczku/calibre-web synced 2024-12-18 14:10:30 +00:00

Merge branch 'master' into Develop

# Conflicts:
#	cps/editbooks.py
#	requirements.txt
#	setup.cfg
#	test/Calibre-Web TestSummary_Linux.html
Commit b28a3635f8 by Ozzie Isaacs, 2024-06-24 14:42:00 +02:00
49 changed files with 293 additions and 172 deletions

View File

@@ -26,12 +26,13 @@ from flask import session, current_app
from flask_login.utils import decode_cookie
from flask_login.signals import user_loaded_from_cookie

class MyLoginManager(LoginManager):
    def _session_protection_failed(self):
        sess = session._get_current_object()
        ident = self._session_identifier_generator()
        if (sess and not (len(sess) == 1
                          and sess.get('csrf_token', None))) and ident != sess.get('_id', None):
            return super()._session_protection_failed()
        return False

View File

@@ -110,6 +110,7 @@ if limiter_present:
else:
    limiter = None

def create_app():
    if csrf:
        csrf.init_app(app)

View File

@@ -479,7 +479,7 @@ def edit_list_user(param):
elif param.endswith('role'):
    value = int(vals['field_index'])
    if user.name == "Guest" and value in \
            [constants.ROLE_ADMIN, constants.ROLE_PASSWD, constants.ROLE_EDIT_SHELFS]:
        raise Exception(_("Guest can't have this role"))
    # check for valid value, last on checks for power of 2 value
    if value > 0 and value <= constants.ROLE_VIEWER and (value & value - 1 == 0 or value == 1):
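
The last clause in the hunk above relies on the classic power-of-two bit trick: `value & (value - 1)` clears the lowest set bit, so the result is 0 exactly when a single bit is set. A minimal sketch (the concrete flag values are assumptions; only the bit arithmetic is taken from the hunk):

    def is_single_role_bit(value):
        # True when exactly one bit is set, i.e. value is a power of two
        return value > 0 and (value & (value - 1)) == 0

    assert is_single_role_bit(4)        # e.g. a single role flag such as 1 << 2
    assert not is_single_role_bit(6)    # 6 combines two role bits (2 | 4)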
@@ -945,7 +945,7 @@ def do_full_kobo_sync(userid):
def check_valid_read_column(column):
    if column != "0":
        if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \
                .filter(and_(db.CustomColumns.datatype == 'bool', db.CustomColumns.mark_for_delete == 0)).all():
            return False
    return True

@@ -953,7 +953,7 @@ def check_valid_read_column(column):
def check_valid_restricted_column(column):
    if column != "0":
        if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \
                .filter(and_(db.CustomColumns.datatype == 'text', db.CustomColumns.mark_for_delete == 0)).all():
            return False
    return True

@@ -999,10 +999,7 @@ def get_drives(current):
for d in string.ascii_uppercase:
    if os.path.exists('{}:'.format(d)) and current[0].lower() != d.lower():
        drive = "{}:\\".format(d)
-        data = {"name": drive, "fullpath": drive}
-        data["sort"] = "_" + data["fullpath"].lower()
-        data["type"] = "dir"
-        data["size"] = ""
+        data = {"name": drive, "fullpath": drive, "type": "dir", "size": "", "sort": "_" + drive.lower()}
        drive_letters.append(data)
return drive_letters

@@ -1142,12 +1139,12 @@ def _configuration_oauth_helper(to_save):
reboot_required = False
for element in oauthblueprints:
    if to_save["config_" + str(element['id']) + "_oauth_client_id"] != element['oauth_client_id'] \
            or to_save["config_" + str(element['id']) + "_oauth_client_secret"] != element['oauth_client_secret']:
        reboot_required = True
        element['oauth_client_id'] = to_save["config_" + str(element['id']) + "_oauth_client_id"]
        element['oauth_client_secret'] = to_save["config_" + str(element['id']) + "_oauth_client_secret"]
    if to_save["config_" + str(element['id']) + "_oauth_client_id"] \
            and to_save["config_" + str(element['id']) + "_oauth_client_secret"]:
        active_oauths += 1
        element["active"] = 1
    else:

@@ -1202,9 +1199,9 @@ def _configuration_ldap_helper(to_save):
config.save()
if not config.config_ldap_provider_url \
        or not config.config_ldap_port \
        or not config.config_ldap_dn \
        or not config.config_ldap_user_object:
    return reboot_required, _configuration_result(_('Please Enter a LDAP Provider, '
                                                     'Port, DN and User Object Identifier'))

@@ -1372,7 +1369,7 @@ def update_scheduledtasks():
error = False
to_save = request.form.to_dict()
if 0 <= int(to_save.get("schedule_start_time")) <= 23:
-    _config_int( to_save, "schedule_start_time")
+    _config_int(to_save, "schedule_start_time")
else:
    flash(_("Invalid start time for task specified"), category="error")
    error = True

@@ -1720,7 +1717,7 @@ def _db_configuration_update_helper():
        return _db_configuration_result('{}'.format(ex), gdrive_error)
if db_change or not db_valid or not config.db_configured \
        or config.config_calibre_dir != to_save["config_calibre_dir"]:
    if not os.path.exists(metadata_db) or not to_save['config_calibre_dir']:
        return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'), gdrive_error)
else:

View File

@@ -10,6 +10,7 @@ log = logger.create()
babel = Babel()

def get_locale():
    # if a user is logged in, use the locale from the user settings
    if current_user is not None and hasattr(current_user, "locale"):

View File

@@ -19,29 +19,24 @@
from . import logger
from lxml.etree import ParserError

+log = logger.create()

try:
    # at least bleach 6.0 is needed -> incomplatible change from list arguments to set arguments
-    from bleach import clean_text as clean_html
-    BLEACH = True
+    from bleach import clean as clean_html
+    from bleach.sanitizer import ALLOWED_TAGS
+    bleach = True
except ImportError:
-    try:
-        BLEACH = False
-        from nh3 import clean as clean_html
-    except ImportError:
-        try:
-            BLEACH = False
-            from lxml.html.clean import clean_html
-        except ImportError:
-            clean_html = None
-log = logger.create()
+    from nh3 import clean as clean_html
+    bleach = False

def clean_string(unsafe_text, book_id=0):
    try:
-        if BLEACH:
-            safe_text = clean_html(unsafe_text, tags=set(), attributes=set())
+        if bleach:
+            allowed_tags = list(ALLOWED_TAGS)
+            allowed_tags.extend(['p', 'span', 'div', 'pre'])
+            safe_text = clean_html(unsafe_text, tags=set(allowed_tags))
        else:
            safe_text = clean_html(unsafe_text)
    except ParserError as e:
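
For readers unfamiliar with the pattern, the hunk above prefers bleach when it is installed and otherwise falls back to nh3, recording which sanitizer was imported. A minimal, hedged sketch of the same idea (the helper names are illustrative, not the module's actual ones):

    try:
        from bleach import clean as clean_html          # bleach >= 6 expects set arguments
        from bleach.sanitizer import ALLOWED_TAGS
        HAS_BLEACH = True
    except ImportError:
        from nh3 import clean as clean_html             # nh3 as the fallback sanitizer
        HAS_BLEACH = False

    def sanitize(text):
        if HAS_BLEACH:
            tags = set(ALLOWED_TAGS) | {'p', 'span', 'div', 'pre'}
            return clean_html(text, tags=tags)
        return clean_html(text)                         # nh3 applies its own default allow-list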

View File

@@ -35,6 +35,19 @@ def version_info():
class CliParameter(object):

+    def __init__(self):
+        self.user_credentials = None
+        self.ip_address = None
+        self.allow_localhost = None
+        self.reconnect_enable = None
+        self.memory_backend = None
+        self.dry_run = None
+        self.certfilepath = None
+        self.keyfilepath = None
+        self.gd_path = None
+        self.settings_path = None
+        self.logpath = None

    def init(self):
        self.arg_parser()

@@ -44,22 +57,25 @@ class CliParameter(object):
                                 prog='cps.py')
parser.add_argument('-p', metavar='path', help='path and name to settings db, e.g. /opt/cw.db')
parser.add_argument('-g', metavar='path', help='path and name to gdrive db, e.g. /opt/gd.db')
-parser.add_argument('-c', metavar='path', help='path and name to SSL certfile, e.g. /opt/test.cert, '
-                    'works only in combination with keyfile')
+parser.add_argument('-c', metavar='path', help='path and name to SSL certfile, '
+                    'e.g. /opt/test.cert, works only in combination with keyfile')
parser.add_argument('-k', metavar='path', help='path and name to SSL keyfile, e.g. /opt/test.key, '
                    'works only in combination with certfile')
parser.add_argument('-o', metavar='path', help='path and name Calibre-Web logfile')
-parser.add_argument('-v', '--version', action='version', help='Shows version number and exits Calibre-Web',
+parser.add_argument('-v', '--version', action='version', help='Shows version number '
+                    'and exits Calibre-Web',
                    version=version_info())
parser.add_argument('-i', metavar='ip-address', help='Server IP-Address to listen')
-parser.add_argument('-m', action='store_true', help='Use Memory-backend as limiter backend, use this parameter in case of miss configured backend')
+parser.add_argument('-m', action='store_true',
+                    help='Use Memory-backend as limiter backend, use this parameter '
+                    'in case of miss configured backend')
parser.add_argument('-s', metavar='user:pass',
                    help='Sets specific username to new password and exits Calibre-Web')
-parser.add_argument('-f', action='store_true', help='Flag is depreciated and will be removed in next version')
parser.add_argument('-l', action='store_true', help='Allow loading covers from localhost')
-parser.add_argument('-d', action='store_true', help='Dry run of updater to check file permissions in advance '
-                    'and exits Calibre-Web')
-parser.add_argument('-r', action='store_true', help='Enable public database reconnect route under /reconnect')
+parser.add_argument('-d', action='store_true', help='Dry run of updater to check file permissions '
+                    'in advance and exits Calibre-Web')
+parser.add_argument('-r', action='store_true', help='Enable public database reconnect '
+                    'route under /reconnect')
args = parser.parse_args()
self.logpath = args.o or ""

@@ -130,6 +146,3 @@ class CliParameter(object):
if self.user_credentials and ":" not in self.user_credentials:
    print("No valid 'username:password' format")
    sys.exit(3)
-if args.f:
-    print("Warning: -f flag is depreciated and will be removed in next version")

View File

@@ -48,6 +48,7 @@ class _Flask_Settings(_Base):
flask_session_key = Column(BLOB, default=b"")

def __init__(self, key):
+    super().__init__()
    self.flask_session_key = key

@@ -82,7 +83,9 @@ class _Settings(_Base):
config_random_books = Column(Integer, default=4)
config_authors_max = Column(Integer, default=0)
config_read_column = Column(Integer, default=0)
-config_title_regex = Column(String, default=r'^(A|The|An|Der|Die|Das|Den|Ein|Eine|Einen|Dem|Des|Einem|Eines|Le|La|Les|L\'|Un|Une)\s+')
+config_title_regex = Column(String,
+                            default=r'^(A|The|An|Der|Die|Das|Den|Ein|Eine'
+                                    r'|Einen|Dem|Des|Einem|Eines|Le|La|Les|L\'|Un|Une)\s+')
config_theme = Column(Integer, default=0)
config_log_level = Column(SmallInteger, default=logger.DEFAULT_LOG_LEVEL)

@@ -179,6 +182,26 @@ class _Settings(_Base):
class ConfigSQL(object):
    # pylint: disable=no-member
    def __init__(self):
+        '''self.config_calibre_uuid = None
+        self.config_calibre_split_dir = None
+        self.dirty = None
+        self.config_logfile = None
+        self.config_upload_formats = None
+        self.mail_gmail_token = None
+        self.mail_server_type = None
+        self.mail_server = None
+        self.config_log_level = None
+        self.config_allowed_column_value = None
+        self.config_denied_column_value = None
+        self.config_allowed_tags = None
+        self.config_denied_tags = None
+        self.config_default_show = None
+        self.config_default_role = None
+        self.config_keyfile = None
+        self.config_certfile = None
+        self.config_rarfile_location = None
+        self.config_kepubifypath = None
+        self.config_binariesdir = None'''
        self.__dict__["dirty"] = list()

    def init_config(self, session, secret_key, cli):

@@ -192,16 +215,16 @@ class ConfigSQL(object):
change = False
-if self.config_binariesdir == None:  # pylint: disable=access-member-before-definition
+if self.config_binariesdir is None:
    change = True
    self.config_binariesdir = autodetect_calibre_binaries()
    self.config_converterpath = autodetect_converter_binary(self.config_binariesdir)
-if self.config_kepubifypath == None:  # pylint: disable=access-member-before-definition
+if self.config_kepubifypath is None:
    change = True
    self.config_kepubifypath = autodetect_kepubify_binary()
-if self.config_rarfile_location == None:  # pylint: disable=access-member-before-definition
+if self.config_rarfile_location is None:
    change = True
    self.config_rarfile_location = autodetect_unrar_binary()
if change:

@@ -430,8 +453,7 @@ def _encrypt_fields(session, secret_key):
        {_Settings.mail_password_e: crypter.encrypt(settings.mail_password.encode())})
if settings.config_ldap_serv_password:
    session.query(_Settings).update(
-        {_Settings.config_ldap_serv_password_e:
-             crypter.encrypt(settings.config_ldap_serv_password.encode())})
+        {_Settings.config_ldap_serv_password_e: crypter.encrypt(settings.config_ldap_serv_password.encode())})
session.commit()

@@ -547,7 +569,7 @@ def load_configuration(session, secret_key):
def get_flask_session_key(_session):
    flask_settings = _session.query(_Flask_Settings).one_or_none()
-    if flask_settings == None:
+    if flask_settings is None:
        flask_settings = _Flask_Settings(os.urandom(32))
        _session.add(flask_settings)
        _session.commit()

@@ -558,6 +580,7 @@ def get_encryption_key(key_path):
key_file = os.path.join(key_path, ".key")
generate = True
error = ""
+key = None
if os.path.exists(key_file) and os.path.getsize(key_file) > 32:
    with open(key_file, "rb") as f:
        key = f.read()

View File

@@ -159,12 +159,13 @@ EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'kepub', 'mobi', 'azw', 'azw3', 'cbr'
_extension = ""
if sys.platform == "win32":
    _extension = ".exe"
-SUPPORTED_CALIBRE_BINARIES = {binary:binary + _extension for binary in ["ebook-convert", "calibredb"]}
+SUPPORTED_CALIBRE_BINARIES = {binary: binary + _extension for binary in ["ebook-convert", "calibredb"]}

def has_flag(value, bit_flag):
    return bit_flag == (bit_flag & (value or 0))

def selected_roles(dictionary):
    return sum(v for k, v in ALL_ROLES.items() if k in dictionary)
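
As an aside, has_flag above tests whether every bit of bit_flag is set in value, with `value or 0` guarding against None. A small illustration (the two role constants are made-up placeholders; only the bit logic is taken from the hunk):

    ROLE_DOWNLOAD = 1 << 1   # hypothetical flag values
    ROLE_UPLOAD = 1 << 2

    def has_flag(value, bit_flag):
        return bit_flag == (bit_flag & (value or 0))

    user_roles = ROLE_DOWNLOAD | ROLE_UPLOAD
    assert has_flag(user_roles, ROLE_UPLOAD)    # the upload bit is set
    assert not has_flag(None, ROLE_UPLOAD)      # None is treated as 0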

View File

@@ -104,6 +104,7 @@ class Identifiers(Base):
book = Column(Integer, ForeignKey('books.id'), nullable=False)

def __init__(self, val, id_type, book):
+    super().__init__()
    self.val = val
    self.type = id_type
    self.book = book

@@ -178,7 +179,7 @@ class Identifiers(Base):
elif self.val.lower().startswith("javascript:"):
    return quote(self.val)
elif self.val.lower().startswith("data:"):
-    link , __, __ = str.partition(self.val, ",")
+    link, __, __ = str.partition(self.val, ",")
    return link
else:
    return "{0}".format(self.val)

@@ -192,6 +193,7 @@ class Comments(Base):
text = Column(String(collation='NOCASE'), nullable=False)

def __init__(self, comment, book):
+    super().__init__()
    self.text = comment
    self.book = book

@@ -209,6 +211,7 @@ class Tags(Base):
name = Column(String(collation='NOCASE'), unique=True, nullable=False)

def __init__(self, name):
+    super().__init__()
    self.name = name

def get(self):

@@ -230,6 +233,7 @@ class Authors(Base):
link = Column(String, nullable=False, default="")

def __init__(self, name, sort, link=""):
+    super().__init__()
    self.name = name
    self.sort = sort
    self.link = link

@@ -252,6 +256,7 @@ class Series(Base):
sort = Column(String(collation='NOCASE'))

def __init__(self, name, sort):
+    super().__init__()
    self.name = name
    self.sort = sort

@@ -272,6 +277,7 @@ class Ratings(Base):
rating = Column(Integer, CheckConstraint('rating>-1 AND rating<11'), unique=True)

def __init__(self, rating):
+    super().__init__()
    self.rating = rating

def get(self):

@@ -291,6 +297,7 @@ class Languages(Base):
lang_code = Column(String(collation='NOCASE'), nullable=False, unique=True)

def __init__(self, lang_code):
+    super().__init__()
    self.lang_code = lang_code

def get(self):

@@ -314,6 +321,7 @@ class Publishers(Base):
sort = Column(String(collation='NOCASE'))

def __init__(self, name, sort):
+    super().__init__()
    self.name = name
    self.sort = sort

@@ -338,6 +346,7 @@ class Data(Base):
name = Column(String, nullable=False)

def __init__(self, book, book_format, uncompressed_size, name):
+    super().__init__()
    self.book = book
    self.format = book_format
    self.uncompressed_size = uncompressed_size

@@ -357,6 +366,7 @@ class Metadata_Dirtied(Base):
book = Column(Integer, ForeignKey('books.id'), nullable=False, unique=True)

def __init__(self, book):
+    super().__init__()
    self.book = book
@@ -391,6 +401,7 @@ class Books(Base):
def __init__(self, title, sort, author_sort, timestamp, pubdate, series_index, last_modified, path, has_cover,
             authors, tags, languages=None):
+    super().__init__()
    self.title = title
    self.sort = sort
    self.author_sort = author_sort

@@ -399,12 +410,12 @@ class Books(Base):
    self.series_index = series_index
    self.last_modified = last_modified
    self.path = path
-    self.has_cover = (has_cover != None)
+    self.has_cover = (has_cover is not None)

def __repr__(self):
    return "<Books('{0},{1}{2}{3}{4}{5}{6}{7}{8}')>".format(self.title, self.sort, self.author_sort,
                                                            self.timestamp, self.pubdate, self.series_index,
                                                            self.last_modified, self.path, self.has_cover)

@property
def atom_timestamp(self):

@@ -448,11 +459,13 @@ class CustomColumns(Base):
content['is_editable'] = self.editable
content['rec_index'] = sequence + 22  # toDo why ??
if isinstance(value, datetime):
-    content['#value#'] = {"__class__": "datetime.datetime", "__value__": value.strftime("%Y-%m-%dT%H:%M:%S+00:00")}
+    content['#value#'] = {"__class__": "datetime.datetime",
+                          "__value__": value.strftime("%Y-%m-%dT%H:%M:%S+00:00")}
else:
    content['#value#'] = value
content['#extra#'] = extra
-content['is_multiple2'] = {} if not self.is_multiple else {"cache_to_list": "|", "ui_to_list": ",", "list_to_ui": ", "}
+content['is_multiple2'] = {} if not self.is_multiple else {"cache_to_list": "|", "ui_to_list": ",",
+                                                           "list_to_ui": ", "}
return json.dumps(content, ensure_ascii=False)
@@ -512,7 +525,6 @@ class CalibreDB:
if init:
    self.init_db(expire_on_commit)

def init_db(self, expire_on_commit=True):
    if self._init:
        self.init_session(expire_on_commit)

@@ -722,8 +734,8 @@ class CalibreDB:
def common_filters(self, allow_show_archived=False, return_all_languages=False):
    if not allow_show_archived:
        archived_books = (ub.session.query(ub.ArchivedBook)
-                          .filter(ub.ArchivedBook.user_id == int(current_user.id))
-                          .filter(ub.ArchivedBook.is_archived == True)
+                          .filter(ub.ArchivedBook.user_id==int(current_user.id))
+                          .filter(ub.ArchivedBook.is_archived==True)
                          .all())
        archived_book_ids = [archived_book.book_id for archived_book in archived_books]
        archived_filter = Books.id.notin_(archived_book_ids)

@@ -959,7 +971,7 @@ class CalibreDB:
pagination = None
result = self.search_query(term, config, *join).order_by(*order).all()
result_count = len(result)
-if offset != None and limit != None:
+if offset is not None and limit is not None:
    offset = int(offset)
    limit_all = offset + int(limit)
    pagination = Pagination((offset / (int(limit)) + 1), limit, result_count)

@@ -989,7 +1001,7 @@ class CalibreDB:
if not return_all_languages:
    no_lang_count = (self.session.query(Books)
                     .outerjoin(books_languages_link).outerjoin(Languages)
-                     .filter(Languages.lang_code == None)
+                     .filter(Languages.lang_code==None)
                     .filter(self.common_filters())
                     .count())
    if no_lang_count:
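
A note on the last two hunks: plain Python attribute checks in this commit move to `is None` / `is not None`, while filters such as `Languages.lang_code == None` stay as equality, because SQLAlchemy overloads `==` on column objects to build SQL (it renders as `IS NULL`); a literal `is None` would compare the column object itself and always be False. A standalone sketch with a minimal stand-in model:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Languages(Base):              # stand-in for the real model
        __tablename__ = 'languages'
        id = Column(Integer, primary_key=True)
        lang_code = Column(String)

    # Both spellings render the same SQL predicate:
    print(Languages.lang_code == None)      # languages.lang_code IS NULL
    print(Languages.lang_code.is_(None))    # explicit alternative, same SQL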
@@ -1087,9 +1099,3 @@ class Category:
self.id = cat_id
self.rating = rating
self.count = 1
-'''class Count:
-    count = None
-
-    def __init__(self, count):
-        self.count = count'''

View File

@@ -33,6 +33,7 @@ from .about import collect_stats
log = logger.create()

class lazyEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, LazyString):

@@ -40,6 +41,7 @@ class lazyEncoder(json.JSONEncoder):
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)

def assemble_logfiles(file_name):
    log_list = sorted(glob.glob(file_name + '*'), reverse=True)
    wfd = BytesIO()

View File

@@ -58,6 +58,8 @@ def load_dependencies(optional=False):
def dependency_check(optional=False):
    d = list()
+    dep_version_int = None
+    low_check = None
    deps = load_dependencies(optional)
    for dep in deps:
        try:

View File

@@ -28,7 +28,6 @@ from uuid import uuid4
from markupsafe import escape, Markup  # dependency of flask
from functools import wraps
from flask import Blueprint, request, flash, redirect, url_for, abort, Response
from flask_babel import gettext as _
from flask_babel import lazy_gettext as N_

@@ -1000,17 +999,6 @@ def edit_book_comments(comments, book):
modify_date = False
if comments:
    comments = clean_string(comments, book.id)
-    #try:
-    #    if BLEACH:
-    #        comments = clean_html(comments, tags=set(), attributes=set())
-    #    else:
-    #        comments = clean_html(comments)
-    #except ParserError as e:
-    #    log.error("Comments of book {} are corrupted: {}".format(book.id, e))
-    #    comments = ""
-    #except TypeError as e:
-    #    log.error("Comments can't be parsed, maybe 'lxml' is too new, try installing 'bleach': {}".format(e))
-    #    comments = ""
if len(book.comments):
    if book.comments[0].text != comments:
        book.comments[0].text = comments

@@ -1069,18 +1057,6 @@ def edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string):
to_save[cc_string] = Markup(to_save[cc_string]).unescape()
if to_save[cc_string]:
    to_save[cc_string] = clean_string(to_save[cc_string], book_id)
-    #try:
-    #    if BLEACH:
-    #        to_save[cc_string] = clean_html(to_save[cc_string], tags=set(), attributes=set())
-    #    else:
-    #        to_save[cc_string] = clean_html(to_save[cc_string])
-    #except ParserError as e:
-    #    log.error("Customs Comments of book {} are corrupted: {}".format(book_id, e))
-    #    to_save[cc_string] = ""
-    #except TypeError as e:
-    #    to_save[cc_string] = ""
-    #    log.error("Customs Comments can't be parsed, maybe 'lxml' is too new, "
-    #              "try installing 'bleach': {}".format(e))
elif c.datatype == 'datetime':
    try:
        to_save[cc_string] = datetime.strptime(to_save[cc_string], "%Y-%m-%d")

@@ -1313,8 +1289,6 @@ def search_objects_remove(db_book_object, db_type, input_elements):
del_elements = []
for c_elements in db_book_object:
    found = False
-    #if db_type == 'languages':
-    #    type_elements = c_elements.lang_code
    if db_type == 'custom':
        type_elements = c_elements.value
    else:

View File

@@ -45,6 +45,7 @@ def _extract_cover(zip_file, cover_file, cover_path, tmp_file_name):
        cf = zip_file.read(zip_cover_path)
    return cover.cover_processing(tmp_file_name, cf, extension)

def get_epub_layout(book, book_data):
    file_path = os.path.normpath(os.path.join(config.get_book_path(),
                                              book.path, book_data.name + "." + book_data.format.lower()))

View File

@@ -43,7 +43,7 @@ def updateEpub(src, dest, filename, data, ):
    # create a temp copy of the archive without filename
    with zipfile.ZipFile(src, 'r') as zin:
        with zipfile.ZipFile(dest, 'w') as zout:
            zout.comment = zin.comment  # preserve the comment
            for item in zin.infolist():
                if item.filename != filename:
                    zout.writestr(item, zin.read(item.filename))

@@ -53,7 +53,9 @@ def updateEpub(src, dest, filename, data, ):
        zf.writestr(filename, data)

-def get_content_opf(file_path, ns=default_ns):
+def get_content_opf(file_path, ns=None):
+    if ns is None:
+        ns = default_ns
    epubZip = zipfile.ZipFile(file_path)
    txt = epubZip.read('META-INF/container.xml')
    tree = etree.fromstring(txt)
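
The second hunk above replaces a mutable default argument (`ns=default_ns`, a namespace mapping) with the usual None-sentinel idiom: linters flag mutable defaults because they are evaluated once when the function is defined, while the sentinel form defers the lookup of default_ns to each call. A self-contained sketch (the mapping value is an assumed example, not the module's real one):

    DEFAULT_NS = {"pkg": "http://www.idpf.org/2007/opf"}   # assumed example mapping

    def get_content_opf(file_path, ns=None):
        if ns is None:
            ns = DEFAULT_NS    # resolved at call time, not when the function is defined
        return file_path, sorted(ns)

    print(get_content_opf("content.opf"))          # uses the module-level default
    print(get_content_opf("content.opf", ns={}))   # caller-supplied namespaces win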
@@ -154,13 +156,14 @@ def create_new_metadata_backup(book, custom_columns, export_language, translate
    return package

def replace_metadata(tree, package):
    rep_element = tree.xpath('/pkg:package/pkg:metadata', namespaces=default_ns)[0]
    new_element = package.xpath('//metadata', namespaces=default_ns)[0]
    tree.replace(rep_element, new_element)
    return etree.tostring(tree,
                          xml_declaration=True,
                          encoding='utf-8',
                          pretty_print=True).decode('utf-8')

View File

@@ -31,6 +31,7 @@ from . import config, app, logger, services
log = logger.create()

# custom error page
def error_http(error):
    return render_template('http_error.html',
                           error_code="Error {0}".format(error.code),

@@ -52,6 +53,7 @@ def internal_error(error):
                           instance=config.config_calibre_web_title
                           ), 500

def init_errorhandler():
    # http error handling
    for ex in default_exceptions:

@@ -60,7 +62,6 @@ def init_errorhandler():
        elif ex == 500:
            app.register_error_handler(ex, internal_error)

    if services.ldap:
        # Only way of catching the LDAPException upon logging in with LDAP server down
        @app.errorhandler(services.ldap.LDAPException)

View File

@@ -32,6 +32,7 @@ from . import logger
log = logger.create()

def get_temp_dir():
    tmp_dir = os.path.join(gettempdir(), 'calibre_web')
    if not os.path.isdir(tmp_dir):

View File

@@ -45,7 +45,7 @@ except ImportError as err:
current_milli_time = lambda: int(round(time() * 1000))

-gdrive_watch_callback_token = 'target=calibreweb-watch_files' #nosec
+gdrive_watch_callback_token = 'target=calibreweb-watch_files'  # nosec

@gdrive.route("/authenticate")

@@ -86,11 +86,12 @@ def watch_gdrive():
notification_id = str(uuid4())
try:
    result = gdriveutils.watchChange(gdriveutils.Gdrive.Instance().drive, notification_id,
                                     'web_hook', address, gdrive_watch_callback_token, current_milli_time() + 604800*1000)
    config.config_google_drive_watch_changes_response = result
    config.save()
except HttpError as e:
-    reason=json.loads(e.content)['error']['errors'][0]
+    reason = json.loads(e.content)['error']['errors'][0]
    if reason['reason'] == 'push.webhookUrlUnauthorized':
        flash(_('Callback domain is not verified, '
                'please follow steps to verify domain in google developer console'), category="error")

@@ -115,6 +116,7 @@ def revoke_watch_gdrive():
    config.save()
    return redirect(url_for('admin.db_configuration'))

try:
    @csrf.exempt
    @gdrive.route("/watch/callback", methods=['GET', 'POST'])

@@ -138,7 +140,7 @@ try:
if response:
    dbpath = os.path.join(config.config_calibre_dir, "metadata.db").encode()
    if not response['deleted'] and response['file']['title'] == 'metadata.db' \
            and response['file']['md5Checksum'] != hashlib.md5(dbpath):  # nosec
        tmp_dir = get_temp_dir()
        log.info('Database file updated')

View File

@@ -207,6 +207,7 @@ def getDrive(drive=None, gauth=None):
        log.error("Google Drive error: {}".format(e))
    return drive

def listRootFolders():
    try:
        drive = getDrive(Gdrive.Instance().drive)

@@ -224,7 +225,7 @@ def getEbooksFolder(drive):
def getFolderInFolder(parentId, folderName, drive):
    # drive = getDrive(drive)
-    query=""
+    query = ""
    if folderName:
        query = "title = '%s' and " % folderName.replace("'", r"\'")
    folder = query + "'%s' in parents and mimeType = 'application/vnd.google-apps.folder'" \

@@ -235,6 +236,7 @@ def getFolderInFolder(parentId, folderName, drive):
    else:
        return fileList[0]

# Search for id of root folder in gdrive database, if not found request from gdrive and store in internal database
def getEbooksFolderId(drive=None):
    storedPathName = session.query(GdriveId).filter(GdriveId.path == '/').first()

@@ -369,20 +371,20 @@ def moveGdriveFolderRemote(origin_file, target_folder):
def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
                ignoreFiles=None,
                parent=None, prevDir=''):
    ignoreFiles = ignoreFiles or []
    drive = getDrive(drive)
    isInitial = not bool(parent)
    if not parent:
        parent = getEbooksFolder(drive)
-    if os.path.isdir(os.path.join(prevDir,uploadFile)):
+    if os.path.isdir(os.path.join(prevDir, uploadFile)):
        existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
                                        (os.path.basename(uploadFile).replace("'", r"\'"), parent['id'])}).GetList()
        if len(existingFolder) == 0 and (not isInitial or createRoot):
            parent = drive.CreateFile({'title': os.path.basename(uploadFile),
                                       'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                       "mimeType": "application/vnd.google-apps.folder"})
            parent.Upload()
        else:
            if (not isInitial or createRoot) and len(existingFolder) > 0:
@@ -398,7 +400,7 @@ def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
    driveFile = existingFiles[0]
else:
    driveFile = drive.CreateFile({'title': os.path.basename(uploadFile).replace("'", r"\'"),
-                                  'parents': [{"kind":"drive#fileLink", 'id': parent['id']}], })
+                                  'parents': [{"kind": "drive#fileLink", 'id': parent['id']}], })
driveFile.SetContentFile(os.path.join(prevDir, uploadFile))
driveFile.Upload()

@@ -410,7 +412,7 @@ def uploadFileToEbooksFolder(destFile, f, string=False):
for i, x in enumerate(splitDir):
    if i == len(splitDir)-1:
        existing_Files = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
                                        (x.replace("'", r"\'"), parent['id'])}).GetList()
        if len(existing_Files) > 0:
            driveFile = existing_Files[0]
        else:

@@ -423,17 +425,17 @@ def uploadFileToEbooksFolder(destFile, f, string=False):
        driveFile.Upload()
    else:
        existing_Folder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
                                         (x.replace("'", r"\'"), parent['id'])}).GetList()
        if len(existing_Folder) == 0:
            parent = drive.CreateFile({'title': x, 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                       "mimeType": "application/vnd.google-apps.folder"})
            parent.Upload()
        else:
            parent = existing_Folder[0]

def watchChange(drive, channel_id, channel_type, channel_address,
                channel_token=None, expiration=None):
    # Watch for all changes to a user's Drive.
    # Args:
    #   service: Drive API service instance.

@@ -504,7 +506,7 @@ def stopChannel(drive, channel_id, resource_id):
    return drive.auth.service.channels().stop(body=body).execute()

-def getChangeById (drive, change_id):
+def getChangeById(drive, change_id):
    # Print a single Change resource information.
    #
    # Args:

@@ -538,8 +540,9 @@ def updateGdriveCalibreFromLocal():
        if os.path.isdir(os.path.join(config.config_calibre_dir, x)):
            shutil.rmtree(os.path.join(config.config_calibre_dir, x))

# update gdrive.db on edit of books title
-def updateDatabaseOnEdit(ID,newPath):
+def updateDatabaseOnEdit(ID, newPath):
    sqlCheckPath = newPath if newPath[-1] == '/' else newPath + '/'
    storedPathName = session.query(GdriveId).filter(GdriveId.gdrive_id == ID).first()
    if storedPathName:

@@ -585,6 +588,7 @@ def get_cover_via_gdrive(cover_path):
    else:
        return None

# Gets cover file from gdrive
def get_metadata_backup_via_gdrive(metadata_path):
    df = getFileFromEbooksFolder(metadata_path, 'metadata.opf')

@@ -608,6 +612,7 @@ def get_metadata_backup_via_gdrive(metadata_path):
    else:
        return None

# Creates chunks for downloading big files
def partial(total_byte_len, part_size_limit):
    s = []

@@ -616,6 +621,7 @@ def partial(total_byte_len, part_size_limit):
        s.append([p, last])
    return s

# downloads files in chunks from gdrive
def do_gdrive_download(df, headers, convert_encoding=False):
    total_size = int(df.metadata.get('fileSize'))

@@ -655,6 +661,7 @@ oauth_scope:
  - https://www.googleapis.com/auth/drive
"""

def update_settings(client_id, client_secret, redirect_uri):
    if redirect_uri.endswith('/'):
        redirect_uri = redirect_uri[:-1]

View File

@@ -19,6 +19,7 @@
from gevent.pywsgi import WSGIHandler

class MyWSGIHandler(WSGIHandler):
    def get_environ(self):
        env = super().get_environ()

View File

@@ -227,7 +227,7 @@ def send_mail(book_id, book_format, convert, ereader_mail, calibrepath, user_id)
        email_text = N_("%(book)s send to eReader", book=link)
        WorkerThread.add(user_id, TaskEmail(_("Send to eReader"), book.path, converted_file_name,
                                            config.get_mail_settings(), ereader_mail,
-                                            email_text, _('This Email has been sent via Calibre-Web.'),book.id))
+                                            email_text, _('This Email has been sent via Calibre-Web.'), book.id))
        return
    return _("The requested file could not be read. Maybe wrong permissions?")

@@ -441,9 +441,9 @@ def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=
            gd.moveGdriveFolderRemote(g_file, new_author_rename_dir)
    else:
        if os.path.isdir(os.path.join(calibre_path, old_author_dir)):
+            old_author_path = os.path.join(calibre_path, old_author_dir)
+            new_author_path = os.path.join(calibre_path, new_author_rename_dir)
            try:
-                old_author_path = os.path.join(calibre_path, old_author_dir)
-                new_author_path = os.path.join(calibre_path, new_author_rename_dir)
                shutil.move(os.path.normcase(old_author_path), os.path.normcase(new_author_path))
            except OSError as ex:
                log.error("Rename author from: %s to %s: %s", old_author_path, new_author_path, ex)
@@ -505,7 +505,6 @@ def upload_new_file_gdrive(book_id, first_author, renamed_author, title, title_d
    return rename_files_on_change(first_author, renamed_author, local_book=book, gdrive=True)

def update_dir_structure_gdrive(book_id, first_author, renamed_author):
    book = calibre_db.get_book(book_id)

@@ -623,6 +622,7 @@ def reset_password(user_id):
        ub.session.rollback()
        return 0, None

def generate_random_password(min_length):
    min_length = max(8, min_length) - 4
    random_source = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"

@@ -690,6 +690,7 @@ def valid_email(email):
        raise Exception(_("Invalid Email address format"))
    return email

def valid_password(check_password):
    if config.config_password_policy:
        verify = ""

@@ -731,7 +732,7 @@ def update_dir_structure(book_id,
def delete_book(book, calibrepath, book_format):
    if not book_format:
-        clear_cover_thumbnail_cache(book.id) ## here it breaks
+        clear_cover_thumbnail_cache(book.id)  # here it breaks
        calibre_db.delete_dirty_metadata(book.id)
    if config.config_use_google_drive:
        return delete_book_gdrive(book, book_format)

@@ -943,13 +944,14 @@ def save_cover(img, book_path):
def do_download_file(book, book_format, client, data, headers):
    book_name = data.name
+    download_name = filename = None
    if config.config_use_google_drive:
        # startTime = time.time()
        df = gd.getFileFromEbooksFolder(book.path, book_name + "." + book_format)
        # log.debug('%s', time.time() - startTime)
        if df:
            if config.config_embed_metadata and (
-                    (book_format == "kepub" and config.config_kepubifypath ) or
+                    (book_format == "kepub" and config.config_kepubifypath) or
                    (book_format != "kepub" and config.config_binariesdir)):
                output_path = os.path.join(config.config_calibre_dir, book.path)
                if not os.path.exists(output_path):

@@ -977,7 +979,7 @@ def do_download_file(book, book_format, client, data, headers):
                filename, download_name = do_kepubify_metadata_replace(book, os.path.join(filename,
                                                                       book_name + "." + book_format))
            elif book_format != "kepub" and config.config_binariesdir and config.config_embed_metadata:
                filename, download_name = do_calibre_export(book.id, book_format)
            else:
                download_name = book_name

@@ -1052,11 +1054,11 @@ def check_calibre(calibre_location):
        return _('Calibre binaries not viable')
    else:
        ret_val = []
-        missing_binaries=[path for path, available in
-                          zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_available) if not available]
-        missing_perms=[path for path, available in
-                       zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_executable) if not available]
+        missing_binaries = [path for path, available in
+                            zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_available) if not available]
+        missing_perms = [path for path, available in
+                         zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_executable) if not available]
        if missing_binaries:
            ret_val.append(_('Missing calibre binaries: %(missing)s', missing=", ".join(missing_binaries)))
        if missing_perms:

View File

@@ -82,7 +82,6 @@ def get_language_codes(locale, language_names, remainder=None):
    return lang

def get_valid_language_codes(locale, language_names, remainder=None):
    lang = list()
    if "" in language_names:

View File

@@ -48,7 +48,7 @@ import requests
from . import config, logger, kobo_auth, db, calibre_db, helper, shelf as shelf_lib, ub, csrf, kobo_sync_status
from . import isoLanguages
from .epub import get_epub_layout
-from .constants import COVER_THUMBNAIL_SMALL #, sqlalchemy_version2
+from .constants import COVER_THUMBNAIL_SMALL
from .helper import get_download_link
from .services import SyncToken as SyncToken
from .web import download_required

@@ -145,7 +145,7 @@ def HandleSyncRequest():
sync_token = SyncToken.SyncToken.from_headers(request.headers)
log.info("Kobo library sync request received")
log.debug("SyncToken: {}".format(sync_token))
-log.debug("Download link format {}".format(get_download_url_for_book('[bookid]','[bookformat]')))
+log.debug("Download link format {}".format(get_download_url_for_book('[bookid]', '[bookformat]')))
if not current_app.wsgi_app.is_proxied:
    log.debug('Kobo: Received unproxied request, changed request port to external server port')

@@ -212,7 +212,7 @@ def HandleSyncRequest():
kobo_reading_state = get_or_create_reading_state(book.Books.id)
entitlement = {
-    "BookEntitlement": create_book_entitlement(book.Books, archived=(book.is_archived == True)),
+    "BookEntitlement": create_book_entitlement(book.Books, archived=(book.is_archived==True)),
    "BookMetadata": get_metadata(book.Books),
}

@@ -921,8 +921,8 @@ def HandleCoverImageRequest(book_uuid, width, height, Quality, isGreyscale):
log.debug("Redirecting request for cover image of unknown book %s to Kobo" % book_uuid)
return redirect(KOBO_IMAGEHOST_URL +
                "/{book_uuid}/{width}/{height}/false/image.jpg".format(book_uuid=book_uuid,
                                                                       width=width,
                                                                       height=height), 307)

@kobo.route("")

@@ -951,7 +951,8 @@ def HandleBookDeletionRequest(book_uuid):
@csrf.exempt
@kobo.route("/v1/library/<dummy>", methods=["DELETE", "GET"])
def HandleUnimplementedRequest(dummy=None):
-    log.debug("Unimplemented Library Request received: %s (request is forwarded to kobo if configured)", request.base_url)
+    log.debug("Unimplemented Library Request received: %s (request is forwarded to kobo if configured)",
+              request.base_url)
    return redirect_or_proxy_request()

@@ -1004,7 +1005,8 @@ def handle_getests():
@kobo.route("/v1/affiliate", methods=["GET", "POST"])
@kobo.route("/v1/deals", methods=["GET", "POST"])
def HandleProductsRequest(dummy=None):
-    log.debug("Unimplemented Products Request received: %s (request is forwarded to kobo if configured)", request.base_url)
+    log.debug("Unimplemented Products Request received: %s (request is forwarded to kobo if configured)",
+              request.base_url)
    return redirect_or_proxy_request()

@@ -1021,7 +1023,7 @@ def make_calibre_web_auth_response():
        "RefreshToken": RefreshToken,
        "TokenType": "Bearer",
        "TrackingId": str(uuid.uuid4()),
-        "UserKey": content.get('UserKey',""),
+        "UserKey": content.get('UserKey', ""),
    }
    )
)

View File

@@ -115,7 +115,7 @@ def generate_auth_token(user_id):
    "generate_kobo_auth_url.html",
    title=_("Kobo Setup"),
    auth_token=auth_token.auth_token,
-    warning = warning
+    warning=warning
)

View File

@@ -23,6 +23,7 @@ import datetime
from sqlalchemy.sql.expression import or_, and_, true
from sqlalchemy import exc
# Add the current book id to kobo_synced_books table for current user, if entry is already present,
# do nothing (safety precaution)
def add_synced_books(book_id):
@@ -50,7 +51,6 @@ def remove_synced_book(book_id, all=False, session=None):
ub.session_commit(_session=session)
def change_archived_books(book_id, state=None, message=None):
archived_book = ub.session.query(ub.ArchivedBook).filter(and_(ub.ArchivedBook.user_id == int(current_user.id),
ub.ArchivedBook.book_id == book_id)).first()
@@ -71,7 +71,7 @@ def update_on_sync_shelfs(user_id):
books_to_archive = (ub.session.query(ub.KoboSyncedBooks)
.join(ub.BookShelf, ub.KoboSyncedBooks.book_id == ub.BookShelf.book_id, isouter=True)
.join(ub.Shelf, ub.Shelf.user_id == user_id, isouter=True)
- .filter(or_(ub.Shelf.kobo_sync == 0, ub.Shelf.kobo_sync == None))
+ .filter(or_(ub.Shelf.kobo_sync == 0, ub.Shelf.kobo_sync==None))
.filter(ub.KoboSyncedBooks.user_id == user_id).all())
for b in books_to_archive:
change_archived_books(b.book_id, True)
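A note on the `kobo_sync == None` comparison in the hunk above: with SQLAlchemy column expressions, comparing against None with `==` is deliberate, because it is compiled to SQL `IS NULL`; Python's `is None` would not build a SQL clause. A minimal, self-contained sketch with a hypothetical `Shelf` table (not Calibre-Web's real model):

    # Minimal sketch: SQLAlchemy renders `== None` as IS NULL in the emitted SQL.
    from sqlalchemy import Column, Integer, or_, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Shelf(Base):
        __tablename__ = "shelf"            # hypothetical table, for illustration
        id = Column(Integer, primary_key=True)
        kobo_sync = Column(Integer)

    stmt = select(Shelf).where(or_(Shelf.kobo_sync == 0, Shelf.kobo_sync == None))  # noqa: E711
    print(stmt)  # ... WHERE shelf.kobo_sync = :kobo_sync_1 OR shelf.kobo_sync IS NULL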


@@ -27,6 +27,7 @@ from flask import request
def request_username():
return request.authorization.username
def main():
app = create_app()
@@ -48,12 +49,14 @@ def main():
kobo_available = get_kobo_activated()
except (ImportError, AttributeError): # Catch also error for not installed flask-WTF (missing csrf decorator)
kobo_available = False
+ kobo = kobo_auth = get_remote_address = None
try:
from .oauth_bb import oauth
oauth_available = True
except ImportError:
oauth_available = False
+ oauth = None
from . import web_server
init_errorhandler()
@@ -62,7 +65,7 @@ def main():
app.register_blueprint(tasks)
app.register_blueprint(web)
app.register_blueprint(opds)
- limiter.limit("3/minute",key_func=request_username)(opds)
+ limiter.limit("3/minute", key_func=request_username)(opds)
app.register_blueprint(jinjia)
app.register_blueprint(about)
app.register_blueprint(shelf)
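For context on the `limiter.limit("3/minute", key_func=request_username)(opds)` line above: Flask-Limiter can rate-limit a whole blueprint, keyed here by the Basic-Auth username so each OPDS client gets its own quota. A minimal sketch of the same wiring; the simplified app and route names are assumptions, not Calibre-Web's actual setup:

    from flask import Blueprint, Flask, request
    from flask_limiter import Limiter

    app = Flask(__name__)
    opds = Blueprint("opds", __name__)

    def request_username():
        # Key requests by the Basic-Auth username; fall back to the client address.
        auth = request.authorization
        return auth.username if auth and auth.username else request.remote_addr

    @opds.route("/opds")
    def feed():
        return "ok"

    limiter = Limiter(key_func=request_username)
    limiter.init_app(app)
    limiter.limit("3/minute", key_func=request_username)(opds)  # limits every route in the blueprint
    app.register_blueprint(opds)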


@@ -25,7 +25,7 @@ try:
import cchardet #optional for better speed
except ImportError:
pass
- from cps import logger
from cps.services.Metadata import MetaRecord, MetaSourceInfo, Metadata
import cps.logger as logger
@@ -33,8 +33,6 @@ import cps.logger as logger
from operator import itemgetter
log = logger.create()
- log = logger.create()
class Amazon(Metadata):
__name__ = "Amazon"


@@ -217,7 +217,8 @@ class Douban(Metadata):
return match
- def _clean_date(self, date: str) -> str:
+ @staticmethod
+ def _clean_date(date: str) -> str:
"""
Clean up the date string to be in the format YYYY-MM-DD
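The change above (repeated in later hunks for `_make_gevent_socket_activated`, `LDAPLogger.write` and `get_books_with_covers`) converts methods that never touch `self` into static methods. A small illustrative sketch of the pattern, not the provider's real cleanup logic:

    class DateCleaner:
        # A method that never uses `self` can become a @staticmethod: callers no
        # longer need an instance and linters stop flagging the unused argument.
        @staticmethod
        def clean_date(date: str) -> str:
            # Hypothetical and much simpler than the real _clean_date().
            return date.replace("/", "-")[:10]

    print(DateCleaner.clean_date("2024/06/24"))  # '2024-06-24', no instance required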


@@ -103,7 +103,7 @@ class LubimyCzytac(Metadata):
PUBLISH_DATE = "//dt[contains(@title,'Data pierwszego wydania"
FIRST_PUBLISH_DATE = f"{DETAILS}{PUBLISH_DATE} oryginalnego')]{SIBLINGS}[1]/text()"
FIRST_PUBLISH_DATE_PL = f"{DETAILS}{PUBLISH_DATE} polskiego')]{SIBLINGS}[1]/text()"
TAGS = "//a[contains(@href,'/ksiazki/t/')]/text()" # "//nav[@aria-label='breadcrumbs']//a[contains(@href,'/ksiazki/k/')]/span/text()"
RATING = "//meta[@property='books:rating:value']/@content"


@@ -135,7 +135,7 @@ def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider
if oauth_entry.user:
login_user(oauth_entry.user)
log.debug("You are now logged in as: '%s'", oauth_entry.user.name)
- flash(_("Success! You are now logged in as: %(nickname)s", nickname= oauth_entry.user.name),
+ flash(_("Success! You are now logged in as: %(nickname)s", nickname=oauth_entry.user.name),
category="success")
return redirect(url_for('web.index'))
else:
@@ -205,6 +205,7 @@ def unlink_oauth(provider):
flash(_("Not Linked to %(oauth)s", oauth=provider), category="error")
return redirect(url_for('web.profile'))
def generate_oauth_blueprints():
if not ub.session.query(ub.OAuthProvider).count():
for provider in ("github", "google"):
@@ -291,6 +292,7 @@ if ub.oauth_support:
return oauth_update_token(str(oauthblueprints[1]['id']), token, google_user_id)
# notify on OAuth provider error
@oauth_error.connect_via(oauthblueprints[0]['blueprint'])
def github_error(blueprint, error, error_description=None, error_uri=None):


@@ -394,6 +394,7 @@ def feed_shelf(book_id):
and_(ub.Shelf.is_public == 1,
ub.Shelf.id == book_id))).first()
result = list()
+ pagination = list()
# user is allowed to access shelf
if shelf:
result, __, pagination = calibre_db.fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),


@@ -97,7 +97,8 @@ class WebServer(object):
log.warning('Cert path: %s', certfile_path)
log.warning('Key path: %s', keyfile_path)
- def _make_gevent_socket_activated(self):
+ @staticmethod
+ def _make_gevent_socket_activated():
# Reuse an already open socket on fd=SD_LISTEN_FDS_START
SD_LISTEN_FDS_START = 3
return GeventSocket(fileno=SD_LISTEN_FDS_START)
@@ -139,8 +140,8 @@ class WebServer(object):
return ((self.listen_address, self.listen_port),
_readable_listen_address(self.listen_address, self.listen_port))
+ address = ('::', self.listen_port)
try:
- address = ('::', self.listen_port)
sock = WSGIServer.get_listener(address, family=socket.AF_INET6)
except socket.error as ex:
log.error('%s', ex)
@@ -301,7 +302,6 @@ class WebServer(object):
log.info("Performing restart of Calibre-Web")
args = self._get_args_for_reloading()
os.execv(args[0].lstrip('"').rstrip('"'), args)
- return True
@staticmethod
def shutdown_scheduler():
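Background for the `_make_gevent_socket_activated()` hunk: under systemd socket activation, the already-bound listening socket is handed to the process as file descriptor 3 (`SD_LISTEN_FDS_START`), so the server wraps that fd instead of binding a port itself. A minimal sketch under that assumption; it only works when a systemd .socket unit actually passed a socket:

    from gevent import socket as gevent_socket
    from gevent.pywsgi import WSGIServer

    SD_LISTEN_FDS_START = 3  # first fd passed by systemd socket activation

    def app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"hello\n"]

    # Wrap the inherited, already-listening fd instead of calling bind()/listen().
    listener = gevent_socket.socket(fileno=SD_LISTEN_FDS_START)
    WSGIServer(listener, app).serve_forever()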


@@ -173,8 +173,8 @@ class SyncToken:
def __str__(self):
return "{},{},{},{},{},{}".format(self.books_last_created,
self.books_last_modified,
self.archive_last_modified,
self.reading_state_last_modified,
self.tags_last_modified,
self.raw_kobo_store_token)


@@ -36,6 +36,7 @@ SCOPES = ['openid', 'https://www.googleapis.com/auth/gmail.send', 'https://www.g
def setup_gmail(token):
# If there are no (valid) credentials available, let the user log in.
creds = None
+ user_info = None
if "token" in token:
creds = Credentials(
token=token['token'],


@@ -32,6 +32,7 @@ except ImportError:
from .. import logger
from ..clean_html import clean_string
class my_GoodreadsClient(GoodreadsClient):
def request(self, *args, **kwargs):
@@ -39,6 +40,7 @@ class my_GoodreadsClient(GoodreadsClient):
req = my_GoodreadsRequest(self, *args, **kwargs)
return req.request()
class GoodreadsRequestException(Exception):
def __init__(self, error_msg, url):
self.error_msg = error_msg
@@ -52,8 +54,8 @@ class my_GoodreadsRequest(GoodreadsRequest):
def request(self):
resp = requests.get(self.host+self.path, params=self.params,
- headers={"User-Agent":"Mozilla/5.0 (X11; Linux x86_64; rv:125.0) "
+ headers={"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:125.0) "
"Gecko/20100101 Firefox/125.0"})
if resp.status_code != 200:
raise GoodreadsRequestException(resp.reason, self.path)
if self.req_format == 'xml':
@@ -125,7 +127,8 @@ def get_other_books(author_info, library_books=None):
identifiers = []
library_titles = []
if library_books:
- identifiers = list(reduce(lambda acc, book: acc + [i.val for i in book.identifiers if i.val], library_books, []))
+ identifiers = list(
+ reduce(lambda acc, book: acc + [i.val for i in book.identifiers if i.val], library_books, []))
library_titles = [book.title for book in library_books]
for book in author_info.books:


@@ -30,9 +30,11 @@ except ImportError:
log = logger.create()
class LDAPLogger(object):
- def write(self, message):
+ @staticmethod
+ def write(message):
try:
log.debug(message.strip("\n").replace("\n", ""))
except Exception:
@@ -71,6 +73,7 @@ class mySimpleLDap(LDAP):
_ldap = mySimpleLDap()
def init_app(app, config):
if config.config_login_type != constants.LOGIN_LDAP:
return
@@ -124,7 +127,7 @@ def init_app(app, config):
log.error(e)
- def get_object_details(user=None,query_filter=None):
+ def get_object_details(user=None, query_filter=None):
return _ldap.get_object_details(user, query_filter=query_filter)


@@ -44,9 +44,11 @@ log = logger.create()
current_milli_time = lambda: int(round(time() * 1000))
class TaskConvert(CalibreTask):
def __init__(self, file_path, book_id, task_message, settings, ereader_mail, user=None):
super(TaskConvert, self).__init__(task_message)
+ self.worker_thread = None
self.file_path = file_path
self.book_id = book_id
self.title = ""
@@ -67,12 +69,13 @@ class TaskConvert(CalibreTask):
data.name + "." + self.settings['old_book_format'].lower())
df_cover = gdriveutils.getFileFromEbooksFolder(cur_book.path, "cover.jpg")
if df:
+ datafile_cover = None
datafile = os.path.join(config.get_book_path(),
cur_book.path,
data.name + "." + self.settings['old_book_format'].lower())
if df_cover:
datafile_cover = os.path.join(config.get_book_path(),
cur_book.path, "cover.jpg")
if not os.path.exists(os.path.join(config.get_book_path(), cur_book.path)):
os.makedirs(os.path.join(config.get_book_path(), cur_book.path))
df.GetContentFile(datafile)
@@ -85,7 +88,7 @@ class TaskConvert(CalibreTask):
format=self.settings['old_book_format'],
fn=data.name + "." + self.settings['old_book_format'].lower())
worker_db.session.close()
- return self._handleError(self, error_message)
+ return self._handleError(error_message)
filename = self._convert_ebook_format()
if config.config_use_google_drive:
@@ -242,10 +245,11 @@ class TaskConvert(CalibreTask):
os.unlink(converted_file[0])
else:
return 1, N_("Converted file not found or more than one file in folder %(folder)s",
folder=os.path.dirname(file_path))
return check, None
def _convert_calibre(self, file_path, format_old_ext, format_new_ext, has_cover):
+ path_tmp_opf = None
try:
# path_tmp_opf = self._embed_metadata()
if config.config_embed_metadata:
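The `_handleError` change above removes an extra `self` that was being passed explicitly on top of the implicit one, which makes the call fail with a TypeError. A stripped-down sketch of the mistake and the fix:

    class Task:
        def _handle_error(self, message):
            print("error:", message)

        def run_buggy(self):
            # Passes the instance twice -> TypeError: _handle_error() takes 2
            # positional arguments but 3 were given.
            return self._handle_error(self, "conversion failed")

        def run_fixed(self):
            return self._handle_error("conversion failed")

    Task().run_fixed()  # prints: error: conversion failed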


@@ -31,7 +31,6 @@ class TaskReconnectDatabase(CalibreTask):
self.listen_address = config.get_config_ipaddress()
self.listen_port = config.config_port
def run(self, worker_thread):
address = self.listen_address if self.listen_address else 'localhost'
port = self.listen_port if self.listen_port else 8083


@@ -59,7 +59,7 @@ class EmailBase:
if hasattr(self, 'sock') and self.sock:
try:
if self.transferSize:
- lock=threading.Lock()
+ lock = threading.Lock()
lock.acquire()
self.transferSize = len(strg)
lock.release()


@@ -25,6 +25,7 @@ from flask_babel import lazy_gettext as N_
from ..epub_helper import create_new_metadata_backup
class TaskBackupMetadata(CalibreTask):
def __init__(self, export_language="en",


@@ -110,7 +110,8 @@ class TaskGenerateCoverThumbnails(CalibreTask):
self._handleSuccess()
self.app_db_session.remove()
- def get_books_with_covers(self, book_id=-1):
+ @staticmethod
+ def get_books_with_covers(book_id=-1):
filter_exp = (db.Books.id == book_id) if book_id != -1 else True
calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
books_cover = calibre_db.session.query(db.Books).filter(db.Books.has_cover == 1).filter(filter_exp).all()
@@ -464,7 +465,7 @@ class TaskClearCoverThumbnailCache(CalibreTask):
calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
thumbnails = (calibre_db.session.query(ub.Thumbnail)
.join(db.Books, ub.Thumbnail.entity_id == db.Books.id, isouter=True)
- .filter(db.Books.id == None)
+ .filter(db.Books.id==None)
.all())
calibre_db.session.close()
elif self.book_id > 0: # make sure single book is selected


@@ -22,6 +22,7 @@ from flask_babel import lazy_gettext as N_
from cps.services.worker import CalibreTask, STAT_FINISH_SUCCESS
class TaskUpload(CalibreTask):
def __init__(self, task_message, book_title):
super(TaskUpload, self).__init__(task_message)


@@ -198,6 +198,15 @@ See https://github.com/adobe-type-tools/cmap-resources
<div id="secondaryToolbar" class="secondaryToolbar hidden doorHangerRight">
<div id="secondaryToolbarButtonContainer">
+ {% if current_user.role_download() %}
+ <button id="secondaryPrint" class="secondaryToolbarButton visibleMediumView" title="Print" tabindex="52" data-l10n-id="pdfjs-print-button">
+ <span data-l10n-id="pdfjs-print-button-label">Print</span>
+ </button>
+ <button id="secondaryDownload" class="secondaryToolbarButton visibleMediumView" title="Save" tabindex="53" data-l10n-id="pdfjs-save-button">
+ <span data-l10n-id="pdfjs-save-button-label">Save</span>
+ </button>
+ {% endif %}
<div class="horizontalToolbarSeparator"></div>
<button id="presentationMode" class="secondaryToolbarButton" title="Switch to Presentation Mode" tabindex="54" data-l10n-id="pdfjs-presentation-mode-button">
@@ -316,9 +325,17 @@ See https://github.com/adobe-type-tools/cmap-resources
<span data-l10n-id="pdfjs-editor-stamp-button-label">Add or edit images</span>
</button>
</div>
+ {% if current_user.role_download() %}
<div id="editorModeSeparator" class="verticalToolbarSeparator"></div>
+ <button id="print" class="toolbarButton hiddenMediumView" title="Print" tabindex="41" data-l10n-id="pdfjs-print-button">
+ <span data-l10n-id="pdfjs-print-button-label">Print</span>
+ </button>
+ <button id="download" class="toolbarButton hiddenMediumView" title="Save" tabindex="42" data-l10n-id="pdfjs-save-button">
+ <span data-l10n-id="pdfjs-save-button-label">Save</span>
+ </button>
+ {% endif %}
<div class="verticalToolbarSeparator hiddenMediumView"></div>
<button id="secondaryToolbarToggle" class="toolbarButton" title="Tools" tabindex="43" data-l10n-id="pdfjs-tools-button" aria-expanded="false" aria-controls="secondaryToolbar">


@@ -268,6 +268,18 @@ class OAuthProvider(Base):
# anonymous user
class Anonymous(AnonymousUserMixin, UserBase):
def __init__(self):
+ self.kobo_only_shelves_sync = None
+ self.view_settings = None
+ self.allowed_column_value = None
+ self.allowed_tags = None
+ self.denied_tags = None
+ self.kindle_mail = None
+ self.locale = None
+ self.default_language = None
+ self.sidebar_view = None
+ self.id = None
+ self.role = None
+ self.name = None
self.loadSettings()
def loadSettings(self):
@@ -325,6 +337,7 @@ class User_Sessions(Base):
session_key = Column(String, default="")
def __init__(self, user_id, session_key):
+ super().__init__()
self.user_id = user_id
self.session_key = session_key
@@ -507,6 +520,7 @@ class RemoteAuthToken(Base):
token_type = Column(Integer, default=0)
def __init__(self):
+ super().__init__()
self.auth_token = (hexlify(os.urandom(4))).decode('utf-8')
self.expiration = datetime.datetime.now() + datetime.timedelta(minutes=10) # 10 min from now
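The `Anonymous.__init__` hunk above pre-declares every attribute before `loadSettings()` fills them in, and the `super().__init__()` additions make the base-class initializer run as well. A tiny sketch of why declaring attributes up front helps; the class and values are hypothetical, not the real model:

    class GuestSettings:
        def __init__(self):
            # Declare everything the loader may set, so static analysis knows the
            # attributes exist and a half-initialized object fails predictably.
            self.locale = None
            self.sidebar_view = None
            self.load()

        def load(self):
            # Stand-in for loadSettings(); the real code reads these from the database.
            self.locale = "en"
            self.sidebar_view = 1

    print(GuestSettings().locale)  # 'en'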


@@ -52,6 +52,8 @@ class Updater(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
+ self.web_server = None
+ self.config = None
self.paused = False
self.can_run = threading.Event()
self.pause()


@@ -45,4 +45,4 @@ comicapi>=2.2.0,<3.3.0
jsonschema>=3.2.0,<4.23.0
# Hide console Window on Windows
- pywin32>=220,<310
+ pywin32>=220,<310 ; sys_platform == 'win32'
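The `; sys_platform == 'win32'` suffix added above is a PEP 508 environment marker: pip evaluates it at install time and skips pywin32 everywhere except Windows. A small sketch of how such a marker is evaluated, using the `packaging` library (an extra dependency assumed here purely for illustration; pip performs this check internally):

    from packaging.markers import Marker

    marker = Marker("sys_platform == 'win32'")
    print(marker.evaluate())                            # True only on Windows
    print(marker.evaluate({"sys_platform": "win32"}))   # force a specific environment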

qodana.yaml Normal file

@@ -0,0 +1,33 @@
#-------------------------------------------------------------------------------#
# Qodana analysis is configured by qodana.yaml file #
# https://www.jetbrains.com/help/qodana/qodana-yaml.html #
#-------------------------------------------------------------------------------#
version: "1.0"
#Specify inspection profile for code analysis
profile:
  name: qodana.starter
#Enable inspections
#include:
# - name: <SomeEnabledInspectionId>
#Disable inspections
#exclude:
# - name: <SomeDisabledInspectionId>
# paths:
# - <path/where/not/run/inspection>
#Execute shell command before Qodana execution (Applied in CI/CD pipeline)
#bootstrap: sh ./prepare-qodana.sh
#Install IDE plugins before Qodana execution (Applied in CI/CD pipeline)
#plugins:
# - id: <plugin.id> #(plugin id can be found at https://plugins.jetbrains.com)
#Specify Qodana linter for analysis (Applied in CI/CD pipeline)
linter: jetbrains/qodana-python:latest
exclude:
  - name: All
    paths:
      - cps/static/js/libs


@@ -6,17 +6,18 @@ Flask-Login>=0.3.2,<0.6.4
Flask-Principal>=0.3.2,<0.5.1
Flask>=1.0.2,<3.1.0
iso-639>=0.4.5,<0.5.0
- PyPDF>=3.15.6,<4.1.0
+ PyPDF>=3.15.6,<4.3.0
pytz>=2016.10
requests>=2.28.0,<2.32.0
SQLAlchemy>=1.3.0,<2.1.0
tornado>=6.3,<6.5
Wand>=0.4.4,<0.7.0
unidecode>=0.04.19,<1.4.0
- lxml>=3.8.0,<5.2.0
+ lxml>=4.9.1,<5.3.0
flask-wtf>=0.14.2,<1.3.0
chardet>=3.0.0,<4.1.0
advocate>=1.0.0,<1.1.0
Flask-Limiter>=2.3.0,<3.6.0
- regex>=2022.3.2,<2024.2.25
+ regex>=2022.3.2,<2024.6.25
+ bleach>=6.0.0,<6.2.0
python-magic>=0.4.27,<0.5.0


@@ -53,12 +53,13 @@ install_requires =
tornado>=6.3,<6.5
Wand>=0.4.4,<0.7.0
unidecode>=0.04.19,<1.4.0
- lxml>=3.8.0,<5.2.0
+ lxml>=4.9.1,<5.2.0
flask-wtf>=0.14.2,<1.3.0
chardet>=3.0.0,<4.1.0
advocate>=1.0.0,<1.1.0
Flask-Limiter>=2.3.0,<3.6.0
regex>=2022.3.2,<2024.2.25
+ bleach>=6.0.0,<6.2.0
python-magic>=0.4.27,<0.5.0