Mirror of https://github.com/janeczku/calibre-web

Merge remote-tracking branch 'origin/Develop' into Develop
Commit 897188ff18
@@ -56,23 +56,26 @@ except ImportError:
 mimetypes.init()
 mimetypes.add_type('application/xhtml+xml', '.xhtml')
 mimetypes.add_type('application/epub+zip', '.epub')
-mimetypes.add_type('application/fb2+zip', '.fb2')
-mimetypes.add_type('application/x-mobipocket-ebook', '.mobi')
-mimetypes.add_type('application/x-mobipocket-ebook', '.prc')
+mimetypes.add_type('application/epub+zip', '.kepub')
+mimetypes.add_type('text/xml', '.fb2')
+mimetypes.add_type('application/octet-stream', '.mobi')
+mimetypes.add_type('application/octet-stream', '.prc')
 mimetypes.add_type('application/vnd.amazon.ebook', '.azw')
 mimetypes.add_type('application/x-mobi8-ebook', '.azw3')
-mimetypes.add_type('application/x-cbr', '.cbr')
-mimetypes.add_type('application/x-cbz', '.cbz')
-mimetypes.add_type('application/x-cbt', '.cbt')
-mimetypes.add_type('application/x-cb7', '.cb7')
+mimetypes.add_type('application/x-rar', '.cbr')
+mimetypes.add_type('application/zip', '.cbz')
+mimetypes.add_type('application/x-tar', '.cbt')
+mimetypes.add_type('application/x-7z-compressed', '.cb7')
 mimetypes.add_type('image/vnd.djv', '.djv')
+mimetypes.add_type('image/vnd.djv', '.djvu')
 mimetypes.add_type('application/mpeg', '.mpeg')
-mimetypes.add_type('application/mpeg', '.mp3')
+mimetypes.add_type('audio/mpeg', '.mp3')
 mimetypes.add_type('application/mp4', '.m4a')
 mimetypes.add_type('application/mp4', '.m4b')
-mimetypes.add_type('application/ogg', '.ogg')
+mimetypes.add_type('audio/ogg', '.ogg')
 mimetypes.add_type('application/ogg', '.oga')
 mimetypes.add_type('text/css', '.css')
+mimetypes.add_type('application/x-ms-reader', '.lit')
 mimetypes.add_type('text/javascript; charset=UTF-8', '.js')
 
 log = logger.create()
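Note: the changed registrations above only affect what Python's mimetypes module reports for these extensions inside the application. A quick interpreter check of the new mappings (illustration only, not part of the commit):

    import mimetypes

    mimetypes.init()
    mimetypes.add_type('application/epub+zip', '.kepub')
    mimetypes.add_type('audio/mpeg', '.mp3')
    print(mimetypes.guess_type('book.kepub'))  # ('application/epub+zip', None)
    print(mimetypes.guess_type('track.mp3'))   # ('audio/mpeg', None)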
@@ -1777,7 +1777,7 @@ def _configuration_update_helper():
         to_save["config_upload_formats"] = ','.join(
             helper.uniq([x.lstrip().rstrip().lower() for x in to_save["config_upload_formats"].split(',')]))
         _config_string(to_save, "config_upload_formats")
-        constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')
+        # constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')
 
     _config_string(to_save, "config_calibre")
     _config_string(to_save, "config_binariesdir")
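Note: before saving, the upload-format list is lowercased, stripped and de-duplicated via helper.uniq. A minimal sketch of that normalization, assuming uniq simply keeps the first occurrence of each entry (illustration, not the project's exact code):

    def uniq(inpt):
        output = []
        for x in inpt:
            if x not in output:
                output.append(x)
        return output

    raw = "EPUB, epub, Mobi ,pdf"
    print(','.join(uniq([x.lstrip().rstrip().lower() for x in raw.split(',')])))  # epub,mobi,pdf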
@@ -1827,6 +1827,7 @@ def _configuration_update_helper():
     reboot_required |= reboot
 
     # security configuration
+    _config_checkbox(to_save, "config_check_extensions")
     _config_checkbox(to_save, "config_password_policy")
     _config_checkbox(to_save, "config_password_number")
     _config_checkbox(to_save, "config_password_lower")
@@ -172,6 +172,7 @@ class _Settings(_Base):
     config_ratelimiter = Column(Boolean, default=True)
     config_limiter_uri = Column(String, default="")
     config_limiter_options = Column(String, default="")
+    config_check_extensions = Column(Boolean, default=True)
 
     def __repr__(self):
         return self.__class__.__name__
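Note: the Boolean default only applies to rows inserted after the column exists; how an existing settings row picks up the new column is left to the application's own schema-update logic. A self-contained sketch of such a column (hypothetical model, not calibre-web's schema):

    from sqlalchemy import Boolean, Column, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Settings(Base):
        __tablename__ = 'settings'
        id = Column(Integer, primary_key=True)
        config_check_extensions = Column(Boolean, default=True)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(Settings())
        session.commit()
        print(session.query(Settings).first().config_check_extensions)  # True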
@@ -371,7 +372,7 @@ class ConfigSQL(object):
         db_file = os.path.join(self.config_calibre_dir, 'metadata.db')
         have_metadata_db = os.path.isfile(db_file)
         self.db_configured = have_metadata_db
-        constants.EXTENSIONS_UPLOAD = [x.lstrip().rstrip().lower() for x in self.config_upload_formats.split(',')]
+        # constants.EXTENSIONS_UPLOAD = [x.lstrip().rstrip().lower() for x in self.config_upload_formats.split(',')]
         from . import cli_param
         if os.environ.get('FLASK_DEBUG'):
             logfile = logger.setup(logger.LOG_TO_STDOUT, logger.logging.DEBUG)
cps/editbooks.py (106 lines changed)
@@ -23,7 +23,7 @@
 import os
 from datetime import datetime
 import json
-from shutil import copyfile
+from shutil import copyfile, move
 from uuid import uuid4
 from markupsafe import escape, Markup  # dependency of flask
 from functools import wraps
@@ -46,7 +46,7 @@ from .render_template import render_title_template
 from .usermanagement import login_required_if_no_ano
 from .kobo_sync_status import change_archived_books
 from .redirect import get_redirect_location
+from .file_helper import validate_mime_type
 
 editbook = Blueprint('edit-book', __name__)
 log = logger.create()
@@ -118,14 +118,13 @@ def edit_book(book_id):
         # handle book title change
         title_change = handle_title_on_edit(book, to_save["book_title"])
         # handle book author change
-        input_authors, author_change, renamed = handle_author_on_edit(book, to_save["author_name"])
+        input_authors, author_change = handle_author_on_edit(book, to_save["author_name"])
         if author_change or title_change:
             edited_books_id = book.id
             modify_date = True
             title_author_error = helper.update_dir_structure(edited_books_id,
                                                              config.get_book_path(),
-                                                             input_authors[0],
-                                                             renamed_author=renamed)
+                                                             input_authors[0])
             if title_author_error:
                 flash(title_author_error, category="error")
                 calibre_db.session.rollback()
@@ -251,7 +250,7 @@ def upload():
             if error:
                 return error
 
-            db_book, input_authors, title_dir, renamed_authors = create_book_on_upload(modify_date, meta)
+            db_book, input_authors, title_dir = create_book_on_upload(modify_date, meta)
 
             # Comments need book id therefore only possible after flush
             modify_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
@@ -261,7 +260,6 @@ def upload():
             if config.config_use_google_drive:
                 helper.upload_new_file_gdrive(book_id,
                                               input_authors[0],
-                                              renamed_authors,
                                               title,
                                               title_dir,
                                               meta.file_path,
@@ -271,8 +269,7 @@ def upload():
                                                  config.get_book_path(),
                                                  input_authors[0],
                                                  meta.file_path,
-                                                 title_dir + meta.extension.lower(),
-                                                 renamed_author=renamed_authors)
+                                                 title_dir + meta.extension.lower())
 
             move_coverfile(meta, db_book)
 
@@ -405,9 +402,8 @@ def edit_list_book(param):
             ret = Response(json.dumps({'success': True, 'newValue': book.comments[0].text}),
                            mimetype='application/json')
     elif param == 'authors':
-        input_authors, __, renamed = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
-        rename_error = helper.update_dir_structure(book.id, config.get_book_path(), input_authors[0],
-                                                   renamed_author=renamed)
+        input_authors, __ = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
+        rename_error = helper.update_dir_structure(book.id, config.get_book_path(), input_authors[0])
         if not rename_error:
             ret = Response(json.dumps({
                 'success': True,
@@ -543,7 +539,7 @@ def table_xchange_author_title():
             author_names.append(authr.name.replace('|', ','))
 
         title_change = handle_title_on_edit(book, " ".join(author_names))
-        input_authors, author_change, renamed = handle_author_on_edit(book, authors)
+        input_authors, author_change = handle_author_on_edit(book, authors)
         if author_change or title_change:
             edited_books_id = book.id
             modify_date = True
@@ -553,8 +549,7 @@ def table_xchange_author_title():
 
         if edited_books_id:
             # toDo: Handle error
-            edit_error = helper.update_dir_structure(edited_books_id, config.get_book_path(), input_authors[0],
-                                                     renamed_author=renamed)
+            edit_error = helper.update_dir_structure(edited_books_id, config.get_book_path(), input_authors[0])
         if modify_date:
             book.last_modified = datetime.utcnow()
             calibre_db.set_metadata_dirty(book.id)
@@ -602,7 +597,9 @@ def identifier_list(to_save, book):
     return result
 
 
-def prepare_authors(authr):
+def prepare_authors(authr, calibre_path, gdrive=False):
+    if gdrive:
+        calibre_path = ""
     # handle authors
     input_authors = authr.split('&')
     # handle_authors(input_authors)
@@ -614,18 +611,44 @@ def prepare_authors(authr):
     if input_authors == ['']:
         input_authors = [_('Unknown')]  # prevent empty Author
 
-    renamed = list()
     for in_aut in input_authors:
-        renamed_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == in_aut).first()
+        renamed_author = calibre_db.session.query(db.Authors).filter(func.lower(db.Authors.name).ilike(in_aut)).first()
         if renamed_author and in_aut != renamed_author.name:
-            renamed.append(renamed_author.name)
+            old_author_name = renamed_author.name
+            # rename author in Database
+            create_objects_for_addition(renamed_author, in_aut, "author")
+            # rename all Books with this author as first author:
+            # rename all book author_sort strings with the new author name
             all_books = calibre_db.session.query(db.Books) \
                 .filter(db.Books.authors.any(db.Authors.name == renamed_author.name)).all()
-            sorted_renamed_author = helper.get_sorted_author(renamed_author.name)
-            sorted_old_author = helper.get_sorted_author(in_aut)
             for one_book in all_books:
-                one_book.author_sort = one_book.author_sort.replace(sorted_renamed_author, sorted_old_author)
-    return input_authors, renamed
+                # ToDo: check
+                sorted_old_author = helper.get_sorted_author(old_author_name)
+                sorted_renamed_author = helper.get_sorted_author(in_aut)
+                # change author sort path
+                try:
+                    author_index = one_book.author_sort.index(sorted_old_author)
+                    one_book.author_sort = one_book.author_sort.replace(sorted_old_author, sorted_renamed_author)
+                except ValueError:
+                    log.error("Sorted author {} not found in database".format(sorted_old_author))
+                    author_index = -1
+                # change book path if changed author is first author -> match on first position
+                if author_index == 0:
+                    one_titledir = one_book.path.split('/')[1]
+                    one_old_authordir = one_book.path.split('/')[0]
+                    # rename author path only once per renamed author -> search all books with author name in book.path
+                    # this has to happen only once, but must be checked per book; otherwise two folders that differ
+                    # only in upper/lower case might end up existing during the move
+                    new_author_dir = helper.rename_author_path(in_aut, one_old_authordir, renamed_author.name, calibre_path, gdrive)
+                    one_book.path = os.path.join(new_author_dir, one_titledir).replace('\\', '/')
+                    # rename all books in book data with the new author name and move corresponding files to new locations
+                    # old_path = os.path.join(calibre_path, new_author_dir, one_titledir)
+                    new_path = os.path.join(calibre_path, new_author_dir, one_titledir)
+                    all_new_name = helper.get_valid_filename(one_book.title, chars=42) + ' - ' \
+                        + helper.get_valid_filename(renamed_author.name, chars=42)
+                    # change location in database to new author/title path
+                    helper.rename_all_files_on_change(one_book, new_path, new_path, all_new_name, gdrive)
+
+    return input_authors
 
 
 def prepare_authors_on_upload(title, authr):
@@ -636,12 +659,13 @@ def prepare_authors_on_upload(title, authr):
             flash(_("Uploaded book probably exists in the library, consider to change before upload new: ")
                   + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
 
-    input_authors, renamed = prepare_authors(authr)
+    input_authors = prepare_authors(authr, config.get_book_path(), config.config_use_google_drive)
 
     sort_authors_list = list()
     db_author = None
     for inp in input_authors:
-        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
+        # stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
+        stored_author = calibre_db.session.query(db.Authors).filter(func.lower(db.Authors.name).ilike(inp)).first()
         if not stored_author:
             if not db_author:
                 db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
@@ -654,13 +678,13 @@ def prepare_authors_on_upload(title, authr):
             sort_author = stored_author.sort
         sort_authors_list.append(sort_author)
     sort_authors = ' & '.join(sort_authors_list)
-    return sort_authors, input_authors, db_author, renamed
+    return sort_authors, input_authors, db_author
 
 
 def create_book_on_upload(modify_date, meta):
     title = meta.title
     authr = meta.author
-    sort_authors, input_authors, db_author, renamed_authors = prepare_authors_on_upload(title, authr)
+    sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)
 
     title_dir = helper.get_valid_filename(title, chars=96)
     author_dir = helper.get_valid_filename(db_author.name, chars=96)
@@ -717,14 +741,20 @@ def create_book_on_upload(modify_date, meta):
                 flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
             modify_date |= modification
 
-    return db_book, input_authors, title_dir, renamed_authors
+    return db_book, input_authors, title_dir
 
 
 def file_handling_on_upload(requested_file):
     # check if file extension is correct
+    allowed_extensions = config.config_upload_formats.split(',')
+    if requested_file:
+        if config.config_check_extensions and allowed_extensions != ['']:
+            if not validate_mime_type(requested_file, allowed_extensions):
+                flash(_("File type isn't allowed to be uploaded to this server"), category="error")
+                return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
     if '.' in requested_file.filename:
         file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
-        if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
+        if file_ext not in allowed_extensions and '' not in allowed_extensions:
             flash(
                 _("File extension '%(ext)s' is not allowed to be uploaded to this server",
                   ext=file_ext), category="error")
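Note: both upload paths now read the allowlist from config.config_upload_formats at call time instead of the constants module, and, as the condition reads, an empty entry in the list effectively allows any extension. A small sketch of the extension check with made-up values:

    allowed_extensions = "epub,mobi,pdf".split(',')
    filename = "My Book.EPUB"
    file_ext = filename.rsplit('.', 1)[-1].lower()
    rejected = file_ext not in allowed_extensions and '' not in allowed_extensions
    print(rejected)  # False: 'epub' is in the allowlist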
@@ -1152,7 +1182,12 @@ def edit_cc_data(book_id, book, to_save, cc):
 def upload_single_file(file_request, book, book_id):
     # Check and handle Uploaded file
     requested_file = file_request.files.get('btn-upload-format', None)
+    allowed_extensions = config.config_upload_formats.split(',')
     if requested_file:
+        if config.config_check_extensions and allowed_extensions != ['']:
+            if not validate_mime_type(requested_file, allowed_extensions):
+                flash(_("File type isn't allowed to be uploaded to this server"), category="error")
+                return False
         # check for empty request
         if requested_file.filename != '':
             if not current_user.role_upload():
@@ -1160,7 +1195,7 @@ def upload_single_file(file_request, book, book_id):
                 return False
             if '.' in requested_file.filename:
                 file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
-                if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
+                if file_ext not in allowed_extensions and '' not in allowed_extensions:
                     flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
                           category="error")
                     return False
@@ -1177,7 +1212,8 @@ def upload_single_file(file_request, book, book_id):
                 try:
                     os.makedirs(filepath)
                 except OSError:
-                    flash(_("Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
+                    flash(_("Failed to create path %(path)s (Permission denied).", path=filepath),
+                          category="error")
                     return False
             try:
                 requested_file.save(saved_filename)
@@ -1247,7 +1283,7 @@ def handle_title_on_edit(book, book_title):
 def handle_author_on_edit(book, author_name, update_stored=True):
     change = False
     # handle author(s)
-    input_authors, renamed = prepare_authors(author_name)
+    input_authors = prepare_authors(author_name, config.get_book_path(), config.config_use_google_drive)
 
     # change |= modify_database_object(input_authors, book.authors, db.Authors, calibre_db.session, 'author')
     # Search for each author if author is in database, if not, author name and sorted author name is generated new
@@ -1267,7 +1303,7 @@ def handle_author_on_edit(book, author_name, update_stored=True):
 
     change |= modify_database_object(input_authors, book.authors, db.Authors, calibre_db.session, 'author')
 
-    return input_authors, change, renamed
+    return input_authors, change
 
 
 def search_objects_remove(db_book_object, db_type, input_elements):
@@ -1351,8 +1387,8 @@ def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
             if db_no_case:
                 # check for new case of element
                 db_element = create_objects_for_addition(db_element, add_element, db_type)
-            else:
-                db_element = create_objects_for_addition(db_element, add_element, db_type)
+            #else:
+            #    db_element = create_objects_for_addition(db_element, add_element, db_type)
             # add element to book
             db_book_object.append(db_element)
 
@@ -19,6 +19,18 @@
 from tempfile import gettempdir
 import os
 import shutil
+import zipfile
+import mimetypes
+import copy
+from io import BytesIO
+try:
+    import magic
+except ImportError:
+    pass
 
+from . import logger
+
+log = logger.create()
 
 
 def get_temp_dir():
@@ -31,3 +43,29 @@ def get_temp_dir():
 def del_temp_dir():
     tmp_dir = os.path.join(gettempdir(), 'calibre_web')
     shutil.rmtree(tmp_dir)
+
+
+def validate_mime_type(file_buffer, allowed_extensions):
+    mime = magic.Magic(mime=True)
+    allowed_mimetypes = list()
+    for x in allowed_extensions:
+        try:
+            allowed_mimetypes.append(mimetypes.types_map["." + x])
+        except KeyError as e:
+            log.error("Unknown mimetype for Extension: {}".format(x))
+    tmp_mime_type = mime.from_buffer(file_buffer.read())
+    file_buffer.seek(0)
+    if any(mime_type in tmp_mime_type for mime_type in allowed_mimetypes):
+        return True
+    # Some epubs show up as zip mimetypes
+    elif "zip" in tmp_mime_type:
+        try:
+            with zipfile.ZipFile(BytesIO(file_buffer.read()), 'r') as epub:
+                file_buffer.seek(0)
+                if "mimetype" in epub.namelist():
+                    return True
+        except:
+            file_buffer.seek(0)
+            pass
+
+    return False
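Note: the new validate_mime_type() helper depends on the python-magic package (added to the requirements below) and inspects the uploaded file's content rather than trusting its name. A rough stand-alone equivalent for a file on disk, assuming libmagic is available (hypothetical helper, not the project's code):

    import mimetypes
    import magic  # python-magic, wraps libmagic

    def sniff_is_allowed(path, allowed_extensions):
        mimetypes.add_type('application/epub+zip', '.epub')  # mirror the registrations above
        allowed = [mimetypes.types_map['.' + ext]
                   for ext in allowed_extensions if '.' + ext in mimetypes.types_map]
        detected = magic.Magic(mime=True).from_file(path)
        return any(mime_type in detected for mime_type in allowed)

    # sniff_is_allowed('/tmp/book.epub', ['epub', 'pdf'])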
@@ -581,7 +581,7 @@ def get_cover_via_gdrive(cover_path):
         session.add(permissionAdded)
         try:
             session.commit()
-        except OperationalError as ex:
+        except (OperationalError, IntegrityError) as ex:
             log.error_or_exception('Database error: {}'.format(ex))
             session.rollback()
     return df.metadata.get('webContentLink')
cps/helper.py (158 lines changed)
@@ -388,42 +388,27 @@ def delete_book_file(book, calibrepath, book_format=None):
                                          id=book.id,
                                          path=book.path)
 
 
-def clean_author_database(renamed_author, calibre_path="", local_book=None, gdrive=None):
-    valid_filename_authors = [get_valid_filename(r, chars=96) for r in renamed_author]
-    for r in renamed_author:
-        if local_book:
-            all_books = [local_book]
-        else:
-            all_books = calibre_db.session.query(db.Books) \
-                .filter(db.Books.authors.any(db.Authors.name == r)).all()
-        for book in all_books:
-            book_author_path = book.path.split('/')[0]
-            if book_author_path in valid_filename_authors or local_book:
-                new_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == r).first()
-                all_new_authordir = get_valid_filename(new_author.name, chars=96)
-                all_titledir = book.path.split('/')[1]
-                all_new_path = os.path.join(calibre_path, all_new_authordir, all_titledir)
-                all_new_name = get_valid_filename(book.title, chars=42) + ' - ' \
-                               + get_valid_filename(new_author.name, chars=42)
-                # change location in database to new author/title path
-                book.path = os.path.join(all_new_authordir, all_titledir).replace('\\', '/')
-                for file_format in book.data:
-                    if not gdrive:
-                        shutil.move(os.path.normcase(os.path.join(all_new_path,
-                                                                  file_format.name + '.' + file_format.format.lower())),
-                                    os.path.normcase(os.path.join(all_new_path,
-                                                                  all_new_name + '.' + file_format.format.lower())))
-                    else:
-                        g_file = gd.getFileFromEbooksFolder(all_new_path,
-                                                            file_format.name + '.' + file_format.format.lower())
-                        if g_file:
-                            gd.moveGdriveFileRemote(g_file, all_new_name + '.' + file_format.format.lower())
-                            gd.updateDatabaseOnEdit(g_file['id'], all_new_name + '.' + file_format.format.lower())
-                        else:
-                            log.error("File {} not found on gdrive"
-                                      .format(all_new_path, file_format.name + '.' + file_format.format.lower()))
-                    file_format.name = all_new_name
+def rename_all_files_on_change(one_book, new_path, old_path, all_new_name, gdrive=False):
+    for file_format in one_book.data:
+        if not gdrive:
+            if not os.path.exists(new_path):
+                os.makedirs(new_path)
+            shutil.move(os.path.normcase(
+                os.path.join(old_path, file_format.name + '.' + file_format.format.lower())),
+                os.path.normcase(
+                    os.path.join(new_path, all_new_name + '.' + file_format.format.lower())))
+        else:
+            g_file = gd.getFileFromEbooksFolder(old_path,
+                                                file_format.name + '.' + file_format.format.lower())
+            if g_file:
+                gd.moveGdriveFileRemote(g_file, all_new_name + '.' + file_format.format.lower())
+                gd.updateDatabaseOnEdit(g_file['id'], all_new_name + '.' + file_format.format.lower())
+            else:
+                log.error("File {} not found on gdrive"
+                          .format(old_path, file_format.name + '.' + file_format.format.lower()))
+
+        # change name in Database
+        file_format.name = all_new_name
 
 
 def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=None, gdrive=False):
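Note: on local storage the new rename_all_files_on_change() creates the destination folder if needed and then moves every format file of the book to its new name. A self-contained sketch of that move pattern using a temporary directory (illustrative names only, not the project's code):

    import os
    import shutil
    import tempfile

    root = tempfile.mkdtemp()
    old_dir = os.path.join(root, 'Old Author', 'Some Book (1)')
    new_dir = os.path.join(root, 'New Author', 'Some Book (1)')
    os.makedirs(old_dir)
    open(os.path.join(old_dir, 'Some Book - Old Author.epub'), 'w').close()

    if not os.path.exists(new_dir):
        os.makedirs(new_dir)
    shutil.move(os.path.join(old_dir, 'Some Book - Old Author.epub'),
                os.path.join(new_dir, 'Some Book - New Author.epub'))
    print(os.listdir(new_dir))  # ['Some Book - New Author.epub']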
@ -455,8 +440,32 @@ def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=
|
|||||||
return new_authordir
|
return new_authordir
|
||||||
|
|
||||||
|
|
||||||
|
def rename_author_path(first_author, old_author_dir, renamed_author, calibre_path="", gdrive=False):
|
||||||
|
# Create new_author_dir from parameter or from database
|
||||||
|
# Create new title_dir from database and add id
|
||||||
|
new_authordir = get_valid_filename(first_author, chars=96)
|
||||||
|
# new_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == renamed_author).first()
|
||||||
|
# old_author_dir = get_valid_filename(old_author_name, chars=96)
|
||||||
|
new_author_rename_dir = get_valid_filename(renamed_author, chars=96)
|
||||||
|
if gdrive:
|
||||||
|
g_file = gd.getFileFromEbooksFolder(None, old_author_dir)
|
||||||
|
if g_file:
|
||||||
|
gd.moveGdriveFolderRemote(g_file, new_author_rename_dir)
|
||||||
|
else:
|
||||||
|
if os.path.isdir(os.path.join(calibre_path, old_author_dir)):
|
||||||
|
old_author_path = os.path.join(calibre_path, old_author_dir)
|
||||||
|
new_author_path = os.path.join(calibre_path, new_author_rename_dir)
|
||||||
|
try:
|
||||||
|
shutil.move(os.path.normcase(old_author_path), os.path.normcase(new_author_path))
|
||||||
|
except OSError as ex:
|
||||||
|
log.error("Rename author from: %s to %s: %s", old_author_path, new_author_path, ex)
|
||||||
|
log.debug(ex, exc_info=True)
|
||||||
|
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||||
|
src=old_author_path, dest=new_author_path, error=str(ex))
|
||||||
|
return new_authordir
|
||||||
|
|
||||||
# Moves files in file storage during author/title rename, or from temp dir to file storage
|
# Moves files in file storage during author/title rename, or from temp dir to file storage
|
||||||
def update_dir_structure_file(book_id, calibre_path, first_author, original_filepath, db_filename, renamed_author):
|
def update_dir_structure_file(book_id, calibre_path, original_filepath, db_filename):
|
||||||
# get book database entry from id, if original path overwrite source with original_filepath
|
# get book database entry from id, if original path overwrite source with original_filepath
|
||||||
local_book = calibre_db.get_book(book_id)
|
local_book = calibre_db.get_book(book_id)
|
||||||
if original_filepath:
|
if original_filepath:
|
||||||
@ -468,49 +477,47 @@ def update_dir_structure_file(book_id, calibre_path, first_author, original_file
|
|||||||
author_dir = local_book.path.split('/')[0]
|
author_dir = local_book.path.split('/')[0]
|
||||||
title_dir = local_book.path.split('/')[1]
|
title_dir = local_book.path.split('/')[1]
|
||||||
|
|
||||||
# Create new_author_dir from parameter or from database
|
|
||||||
# Create new title_dir from database and add id
|
|
||||||
new_author_dir = rename_all_authors(first_author, renamed_author, calibre_path, local_book)
|
|
||||||
if first_author:
|
|
||||||
if first_author.lower() in [r.lower() for r in renamed_author]:
|
|
||||||
if os.path.isdir(os.path.join(calibre_path, new_author_dir)):
|
|
||||||
path = os.path.join(calibre_path, new_author_dir, title_dir)
|
|
||||||
|
|
||||||
new_title_dir = get_valid_filename(local_book.title, chars=96) + " (" + str(book_id) + ")"
|
new_title_dir = get_valid_filename(local_book.title, chars=96) + " (" + str(book_id) + ")"
|
||||||
|
|
||||||
if title_dir != new_title_dir or author_dir != new_author_dir or original_filepath:
|
if title_dir != new_title_dir or original_filepath:
|
||||||
error = move_files_on_change(calibre_path,
|
error = move_files_on_change(calibre_path,
|
||||||
new_author_dir,
|
author_dir,
|
||||||
new_title_dir,
|
new_title_dir,
|
||||||
local_book,
|
local_book,
|
||||||
db_filename,
|
db_filename,
|
||||||
original_filepath,
|
original_filepath,
|
||||||
path)
|
path)
|
||||||
|
new_path = os.path.join(calibre_path, author_dir, new_title_dir).replace('\\', '/')
|
||||||
|
all_new_name = get_valid_filename(local_book.title, chars=42) + ' - ' \
|
||||||
|
+ get_valid_filename(author_dir, chars=42)
|
||||||
|
# Book folder already moved, only files need to be renamed
|
||||||
|
rename_all_files_on_change(local_book, new_path, new_path, all_new_name)
|
||||||
|
|
||||||
if error:
|
if error:
|
||||||
return error
|
return error
|
||||||
|
|
||||||
# Rename all files from old names to new names
|
# Rename all files from old names to new names
|
||||||
return rename_files_on_change(first_author, renamed_author, local_book, original_filepath, path, calibre_path)
|
return False
|
||||||
|
|
||||||
|
|
||||||
def upload_new_file_gdrive(book_id, first_author, renamed_author, title, title_dir, original_filepath, filename_ext):
|
def upload_new_file_gdrive(book_id, first_author, title, title_dir, original_filepath, filename_ext):
|
||||||
book = calibre_db.get_book(book_id)
|
book = calibre_db.get_book(book_id)
|
||||||
file_name = get_valid_filename(title, chars=42) + ' - ' + \
|
file_name = get_valid_filename(title, chars=42) + ' - ' + \
|
||||||
get_valid_filename(first_author, chars=42) + filename_ext
|
get_valid_filename(first_author, chars=42) + filename_ext
|
||||||
rename_all_authors(first_author, renamed_author, gdrive=True)
|
|
||||||
gdrive_path = os.path.join(get_valid_filename(first_author, chars=96),
|
gdrive_path = os.path.join(get_valid_filename(first_author, chars=96),
|
||||||
title_dir + " (" + str(book_id) + ")")
|
title_dir + " (" + str(book_id) + ")")
|
||||||
book.path = gdrive_path.replace("\\", "/")
|
book.path = gdrive_path.replace("\\", "/")
|
||||||
gd.uploadFileToEbooksFolder(os.path.join(gdrive_path, file_name).replace("\\", "/"), original_filepath)
|
gd.uploadFileToEbooksFolder(os.path.join(gdrive_path, file_name).replace("\\", "/"), original_filepath)
|
||||||
return rename_files_on_change(first_author, renamed_author, local_book=book, gdrive=True)
|
return False # rename_files_on_change(first_author, renamed_author, local_book=book, gdrive=True)
|
||||||
|
|
||||||
|
|
||||||
def update_dir_structure_gdrive(book_id, first_author, renamed_author):
|
def update_dir_structure_gdrive(book_id):
|
||||||
book = calibre_db.get_book(book_id)
|
book = calibre_db.get_book(book_id)
|
||||||
|
|
||||||
authordir = book.path.split('/')[0]
|
authordir = book.path.split('/')[0]
|
||||||
titledir = book.path.split('/')[1]
|
titledir = book.path.split('/')[1]
|
||||||
new_authordir = rename_all_authors(first_author, renamed_author, gdrive=True)
|
# new_authordir = rename_all_authors(first_author, renamed_author, gdrive=True)
|
||||||
|
# new_authordir = get_valid_filename(book.title, chars=96)
|
||||||
new_titledir = get_valid_filename(book.title, chars=96) + " (" + str(book_id) + ")"
|
new_titledir = get_valid_filename(book.title, chars=96) + " (" + str(book_id) + ")"
|
||||||
|
|
||||||
if titledir != new_titledir:
|
if titledir != new_titledir:
|
||||||
@@ -522,21 +529,25 @@ def update_dir_structure_gdrive(book_id, first_author, renamed_author):
     else:
         return _('File %(file)s not found on Google Drive', file=book.path)  # file not found
 
-    if authordir != new_authordir and authordir not in renamed_author:
+    '''if authordir != new_authordir:
         g_file = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
         if g_file:
             gd.moveGdriveFolderRemote(g_file, new_authordir)
             book.path = new_authordir + '/' + book.path.split('/')[1]
             gd.updateDatabaseOnEdit(g_file['id'], book.path)
         else:
-            return _('File %(file)s not found on Google Drive', file=authordir)  # file not found
+            return _('File %(file)s not found on Google Drive', file=authordir)  # file not found'''
+    if titledir != new_titledir:
+        all_new_name = get_valid_filename(book.title, chars=42) + ' - ' \
+            + get_valid_filename(authordir, chars=42)
+        rename_all_files_on_change(book, book.path, book.path, all_new_name, gdrive=True)  # todo: Move filenames on gdrive
     # change location in database to new author/title path
-    book.path = os.path.join(new_authordir, new_titledir).replace('\\', '/')
-    return rename_files_on_change(first_author, renamed_author, book, gdrive=True)
+    # book.path = os.path.join(authordir, new_titledir).replace('\\', '/')
+    return False
 
 
 def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, db_filename, original_filepath, path):
+    new_authordir = get_valid_filename(new_authordir, chars=96)
     new_path = os.path.join(calibre_path, new_authordir, new_titledir)
     new_name = get_valid_filename(localbook.title, chars=96) + ' - ' + new_authordir
     try:
@@ -575,15 +586,15 @@ def rename_files_on_change(first_author,
                            calibre_path="",
                            gdrive=False):
     # Rename all files from old names to new names
-    try:
-        clean_author_database(renamed_author, calibre_path, gdrive=gdrive)
-        if first_author and first_author not in renamed_author:
-            clean_author_database([first_author], calibre_path, local_book, gdrive)
-        if not gdrive and not renamed_author and not original_filepath and len(os.listdir(os.path.dirname(path))) == 0:
-            shutil.rmtree(os.path.dirname(path))
-    except (OSError, FileNotFoundError) as ex:
-        log.error_or_exception("Error in rename file in path {}".format(ex))
-        return _("Error in rename file in path: {}".format(str(ex)))
+    #try:
+    #    clean_author_database(renamed_author, calibre_path, gdrive=gdrive)
+    #    if first_author and first_author not in renamed_author:
+    #        clean_author_database([first_author], calibre_path, local_book, gdrive)
+    #    if not gdrive and not renamed_author and not original_filepath and len(os.listdir(os.path.dirname(path))) == 0:
+    #        shutil.rmtree(os.path.dirname(path))
+    #except (OSError, FileNotFoundError) as ex:
+    #    log.error_or_exception("Error in rename file in path {}".format(ex))
+    #    return _("Error in rename file in path: {}".format(str(ex)))
     return False
 
 
@@ -648,12 +659,6 @@ def generate_random_password(min_length):
     return ''.join(password)
 
 
-'''def generate_random_password(min_length):
-    s = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"
-    passlen = min_length
-    return "".join(s[c % len(s)] for c in os.urandom(passlen))'''
-
-
 def uniq(inpt):
     output = []
     inpt = [" ".join(inp.split()) for inp in inpt]
@@ -717,17 +722,14 @@ def update_dir_structure(book_id,
                          calibre_path,
                          first_author=None,  # change author of book to this author
                          original_filepath=None,
-                         db_filename=None,
-                         renamed_author=None):
-    renamed_author = renamed_author or []
+                         db_filename=None):
     if config.config_use_google_drive:
-        return update_dir_structure_gdrive(book_id, first_author, renamed_author)
+        return update_dir_structure_gdrive(book_id, first_author)
     else:
         return update_dir_structure_file(book_id,
                                          calibre_path,
-                                         first_author,
                                          original_filepath,
-                                         db_filename, renamed_author)
+                                         db_filename)
 
 
 def delete_book(book, calibrepath, book_format):
@@ -112,7 +112,7 @@ def render_title_template(*args, **kwargs):
     sidebar, simple = get_sidebar_config(kwargs)
     try:
         return render_template(instance=config.config_calibre_web_title, sidebar=sidebar, simple=simple,
-                               accept=constants.EXTENSIONS_UPLOAD,
+                               accept=config.config_upload_formats.split(','),
                                *args, **kwargs)
     except PermissionError:
         log.error("No permission to access {} file.".format(args[0]))
@@ -373,10 +373,14 @@
                 <input type="text" class="form-control" id="config_limiter_uri" name="config_limiter_uri" value="{% if config.config_limiter_uri != None %}{{ config.config_limiter_uri }}{% endif %}" autocomplete="off">
               </div>
               <div class="form-group" style="margin-left:10px;">
-                <label for="config_calibre">{{_('Options for Limiter')}}</label>
+                <label for="config_calibre">{{_('Options for Limiter Backend')}}</label>
                 <input type="text" class="form-control" id="config_limiter_options" name="config_limiter_options" value="{% if config.config_limiter_options != None %}{{ config.config_limiter_options }}{% endif %}" autocomplete="off">
               </div>
             </div>
+            <div class="form-group">
+              <input type="checkbox" id="config_check_extensions" name="config_check_extensions" {% if config.config_check_extensions %}checked{% endif %}>
+              <label for="config_check_extensions">{{_('Check if file extensions matches file content on upload')}}</label>
+            </div>
             <div class="form-group">
               <label for="config_session">{{_('Session protection')}}</label>
               <select name="config_session" id="config_session" class="form-control">
@@ -251,7 +251,7 @@
             <input id="have_read_cb" data-checked="{{ _('Mark As Unread') }}"
                    data-unchecked="{{ _('Mark As Read') }}" type="checkbox"
                    {% if entry.read_status %}checked{% endif %}>
-            <span>{{ _('Read') }}</span>
+            <span data-toggle="tooltip" title="{{_('Mark Book as Read or Unread')}}">{{ _('Read') }}</span>
           </label>
         </form>
       </p>
@@ -264,7 +264,7 @@
             <input id="archived_cb" data-checked="{{ _('Restore from archive') }}"
                    data-unchecked="{{ _('Add to archive') }}" type="checkbox"
                    {% if entry.is_archived %}checked{% endif %}>
-            <span>{{ _('Archived') }}</span>
+            <span data-toggle="tooltip" title="{{_('Mark Book as archived or not, to hide it in Calibre-Web and delete it from Kobo Reader')}}">{{ _('Archive') }}</span>
          </label>
        </form>
      </p>
@@ -23,7 +23,7 @@ from flask_babel import gettext as _
 from . import logger, comic, isoLanguages
 from .constants import BookMeta
 from .helper import split_authors
-from .file_helper import get_temp_dir
+from .file_helper import get_temp_dir, validate_mime_type
 
 log = logger.create()
 
@@ -1582,7 +1582,8 @@ def read_book(book_id, book_format):
         return render_title_template('readtxt.html', txtfile=book_id, title=book.title)
     elif book_format.lower() in ["djvu", "djv"]:
         log.debug("Start djvu reader for %d", book_id)
-        return render_title_template('readdjvu.html', djvufile=book_id, title=book.title, extension=book_format.lower())
+        return render_title_template('readdjvu.html', djvufile=book_id, title=book.title,
+                                      extension=book_format.lower())
     else:
         for fileExt in constants.EXTENSIONS_AUDIO:
             if book_format.lower() == fileExt:
@@ -20,3 +20,4 @@ advocate>=1.0.0,<1.1.0
 Flask-Limiter>=2.3.0,<3.6.0
 regex>=2022.3.2,<2024.6.25
 bleach>=6.0.0,<6.2.0
+python-magic>=0.4.27,<0.5.0
@@ -60,6 +60,7 @@ install_requires =
     Flask-Limiter>=2.3.0,<3.6.0
     regex>=2022.3.2,<2024.2.25
     bleach>=6.0.0,<6.2.0
+    python-magic>=0.4.27,<0.5.0
 
 
 [options.packages.find]
(One file's diff is not shown here because it is too large.)