Repository: https://github.com/janeczku/calibre-web
Connection to metadata.db is now on a per-request basis; this should solve upload issues with multiple files at once.
Commit: 87b200cc13
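
The idea behind the change: instead of keeping one long-lived session to the Calibre metadata.db, a session is created lazily for each Flask application context (i.e. per request) and closed again in a teardown handler. The sketch below is a minimal, generic illustration of that pattern, not the project's actual code; the real wiring is in the cps/db.py hunks further down, whose "lib_sql" key on flask.g it borrows.

    # Sketch: one database session per Flask app/request context.
    from flask import Flask, g
    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    app = Flask(__name__)
    engine = create_engine("sqlite:///metadata.db")            # illustrative path
    session_factory = scoped_session(sessionmaker(bind=engine))

    def get_session():
        # Create the session lazily, at most once per app context.
        if "lib_sql" not in g:
            g.lib_sql = session_factory()
        return g.lib_sql

    @app.teardown_appcontext
    def close_session(exception=None):
        # Runs whenever the request / app context ends.
        session = g.pop("lib_sql", None)
        if session is not None:
            session.close()

Each request then works with its own session and no connection outlives the request, which is the behaviour the commit message relies on.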

@@ -102,7 +102,7 @@ if wtf_present:
 else:
     csrf = None

-calibre_db = db.CalibreDB()
+calibre_db = db.CalibreDB(app)

 web_server = WebServer()

@@ -146,9 +146,7 @@ def create_app():
     lm.anonymous_user = ub.Anonymous
     lm.session_protection = 'strong' if config.config_session == 1 else "basic"

-    db.CalibreDB.update_config(config)
-    db.CalibreDB.setup_db(config.config_calibre_dir, cli_param.settings_path)
-    calibre_db.init_db()
+    db.CalibreDB.update_config(config, config.config_calibre_dir, cli_param.settings_path)

     updater_thread.init_updater(config, web_server)
     # Perform dry run of updater and exit afterward

cps/admin.py (16)

@@ -118,7 +118,7 @@ def before_request():
     g.allow_upload = config.config_uploading
     g.current_theme = config.config_theme
     g.config_authors_max = config.config_authors_max
-    if '/static/' not in request.path and not config.db_configured and \
+    if ('/static/' not in request.path and not config.db_configured and
             request.endpoint not in ('admin.ajax_db_config',
                                      'admin.simulatedbchange',
                                      'admin.db_configuration',
@@ -126,7 +126,7 @@ def before_request():
                                      'web.login_post',
                                      'web.logout',
                                      'admin.load_dialogtexts',
-                                     'admin.ajax_pathchooser'):
+                                     'admin.ajax_pathchooser')):
         return redirect(url_for('admin.db_configuration'))


@@ -144,7 +144,6 @@ def shutdown():
     show_text = {}
     if task in (0, 1):  # valid commandos received
         # close all database connections
-        calibre_db.dispose()
         ub.dispose()

         if task == 0:
@@ -1737,14 +1736,12 @@ def _db_configuration_update_helper():
            return _db_configuration_result(_("Books path not valid"), gdrive_error)
        else:
            _config_string(to_save, "config_calibre_split_dir")
-    if db_change or not db_valid or not config.db_configured \
-            or config.config_calibre_dir != to_save["config_calibre_dir"]:
+    if (db_change or not db_valid or not config.db_configured
+            or config.config_calibre_dir != to_save["config_calibre_dir"]):
         if not os.path.exists(metadata_db) or not to_save['config_calibre_dir']:
             return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'), gdrive_error)
         else:
             calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path)
-            config.store_calibre_uuid(calibre_db, db.Library_Id)
         # if db changed -> delete shelfs, delete download books, delete read books, kobo sync...
         if db_change:
             log.info("Calibre Database changed, all Calibre-Web info related to old Database gets deleted")
@@ -1767,10 +1764,11 @@ def _db_configuration_update_helper():
             config.config_allowed_column_value = ""
             config.config_read_column = 0
         _config_string(to_save, "config_calibre_dir")
-        calibre_db.update_config(config)
+        calibre_db.update_config(config, config.config_calibre_dir, ub.app_DB_path)
+        config.store_calibre_uuid(calibre_db, db.Library_Id)
     if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
         flash(_("DB is not Writeable"), category="warning")
-    calibre_db.update_config(config)
+    calibre_db.update_config(config, config.config_calibre_dir, ub.app_DB_path)
     config.save()
     return _db_configuration_result(None, gdrive_error)

cps/audio.py (18)

@@ -16,8 +16,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import os
-
 import mutagen
 import base64
 from . import cover, logger
@@ -51,13 +49,12 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         if not pubdate:
             pubdate = str(audio_file.tags.get('TDOR').text[0]) if "TDOR" in audio_file.tags else None
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
             cover_info = cover_data[0]
             for dat in cover_data:
                 if dat.type == mutagen.id3.PictureType.COVER_FRONT:
                     cover_info = dat
                     break
-            cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
+            tmp_cover_name = cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
     elif original_file_extension in [".ogg", ".flac", ".opus", ".ogv"]:
         title = audio_file.tags.get('TITLE')[0] if "TITLE" in audio_file else None
         author = audio_file.tags.get('ARTIST')[0] if "ARTIST" in audio_file else None
@@ -70,17 +67,15 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         cover_data = audio_file.tags.get('METADATA_BLOCK_PICTURE')
         if not no_cover_processing:
             if cover_data:
-                tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
                 cover_info = mutagen.flac.Picture(base64.b64decode(cover_data[0]))
-                cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
+                tmp_cover_name = cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
             if hasattr(audio_file, "pictures"):
                 cover_info = audio_file.pictures[0]
                 for dat in audio_file.pictures:
                     if dat.type == mutagen.id3.PictureType.COVER_FRONT:
                         cover_info = dat
                         break
-                tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
-                cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
+                tmp_cover_name = cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
     elif original_file_extension in [".aac"]:
         title = audio_file.tags.get('Title').value if "Title" in audio_file else None
         author = audio_file.tags.get('Artist').value if "Artist" in audio_file else None
@@ -92,7 +87,7 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         pubdate = audio_file.tags.get('Year').value if "Year" in audio_file else None
         cover_data = audio_file.tags['Cover Art (Front)']
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
+            tmp_cover_name = tmp_file_path + '.jpg'
             with open(tmp_cover_name, "wb") as cover_file:
                 cover_file.write(cover_data.value.split(b"\x00",1)[1])
     elif original_file_extension in [".asf"]:
@@ -106,7 +101,7 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         pubdate = audio_file.tags.get('Year')[0].value if "Year" in audio_file else None
         cover_data = audio_file.tags.get('WM/Picture', None)
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
+            tmp_cover_name = tmp_file_path + '.jpg'
             with open(tmp_cover_name, "wb") as cover_file:
                 cover_file.write(cover_data[0].value)
     elif original_file_extension in [".mp4", ".m4a", ".m4b"]:
@@ -120,7 +115,6 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         pubdate = audio_file.tags.get('©day')[0] if "©day" in audio_file.tags else None
         cover_data = audio_file.tags.get('covr', None)
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
             cover_type = None
             for c in cover_data:
                 if c.imageformat == mutagen.mp4.AtomDataType.JPEG:
@@ -132,7 +126,7 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
                     cover_bin = c
                     break
             if cover_type:
-                cover.cover_processing(tmp_file_path, cover_bin, cover_type)
+                tmp_cover_name = cover.cover_processing(tmp_file_path, cover_bin, cover_type)
             else:
                 logger.error("Unknown covertype in file {} ".format(original_file_name))

cps/comic.py (12)

@@ -90,7 +90,7 @@ def _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_exec
                 if len(ext) > 1:
                     extension = ext[1].lower()
                     if extension in cover.COVER_EXTENSIONS:
-                        cover_data = cf.read([name])
+                        cover_data = cf.read(name)
                         break
         except Exception as ex:
             log.error('Rarfile failed with error: {}'.format(ex))
@@ -109,13 +109,13 @@ def _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_exec
     return cover_data, extension


-def _extract_cover(tmp_file_name, original_file_extension, rar_executable):
+def _extract_cover(tmp_file_path, original_file_extension, rar_executable):
     cover_data = extension = None
     if use_comic_meta:
         try:
-            archive = ComicArchive(tmp_file_name, rar_exe_path=rar_executable)
+            archive = ComicArchive(tmp_file_path, rar_exe_path=rar_executable)
         except TypeError:
-            archive = ComicArchive(tmp_file_name)
+            archive = ComicArchive(tmp_file_path)
         name_list = archive.getPageNameList if hasattr(archive, "getPageNameList") else archive.get_page_name_list
         for index, name in enumerate(name_list()):
             ext = os.path.splitext(name)
@@ -126,8 +126,8 @@ def _extract_cover(tmp_file_name, original_file_extension, rar_executable):
                 cover_data = get_page(index)
                 break
     else:
-        cover_data, extension = _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_executable)
-    return cover.cover_processing(tmp_file_name, cover_data, extension)
+        cover_data, extension = _extract_cover_from_archive(original_file_extension, tmp_file_path, rar_executable)
+    return cover.cover_processing(tmp_file_path, cover_data, extension)


 def get_comic_info(tmp_file_path, original_file_name, original_file_extension, rar_executable, no_cover_processing):

@@ -405,11 +405,13 @@ class ConfigSQL(object):
         return self.config_calibre_split_dir if self.config_calibre_split_dir else self.config_calibre_dir

     def store_calibre_uuid(self, calibre_db, Library_table):
+        from . import app
         try:
-            calibre_uuid = calibre_db.session.query(Library_table).one_or_none()
-            if self.config_calibre_uuid != calibre_uuid.uuid:
-                self.config_calibre_uuid = calibre_uuid.uuid
-                self.save()
+            with app.app_context():
+                calibre_uuid = calibre_db.session.query(Library_table).one_or_none()
+                if self.config_calibre_uuid != calibre_uuid.uuid:
+                    self.config_calibre_uuid = calibre_uuid.uuid
+                    self.save()
         except AttributeError:
             pass

@@ -29,8 +29,9 @@ NO_JPEG_EXTENSIONS = ['.png', '.webp', '.bmp']
 COVER_EXTENSIONS = ['.png', '.webp', '.bmp', '.jpg', '.jpeg']


-def cover_processing(tmp_file_name, img, extension):
-    tmp_cover_name = os.path.join(os.path.dirname(tmp_file_name), 'cover.jpg')
+def cover_processing(tmp_file_path, img, extension):
+    # tmp_cover_name = os.path.join(os.path.dirname(tmp_file_name), 'cover.jpg')
+    tmp_cover_name = tmp_file_path + '.jpg'
     if extension in NO_JPEG_EXTENSIONS:
         if use_IM:
             with Image(blob=img) as imgc:

cps/db.py (112)

@@ -23,7 +23,7 @@ import json
 from datetime import datetime, timezone
 from urllib.parse import quote
 import unidecode
-from weakref import WeakSet
+# from weakref import WeakSet
 from uuid import uuid4

 from sqlite3 import OperationalError as sqliteOperationalError
@@ -45,7 +45,7 @@ from sqlalchemy.ext.associationproxy import association_proxy
 from .cw_login import current_user
 from flask_babel import gettext as _
 from flask_babel import get_locale
-from flask import flash
+from flask import flash, g, Flask

 from . import logger, ub, isoLanguages
 from .pagination import Pagination
@@ -528,34 +528,25 @@ class AlchemyEncoder(json.JSONEncoder):


 class CalibreDB:
-    _init = False
-    engine = None
     config = None
-    session_factory = None
-    # This is a WeakSet so that references here don't keep other CalibreDB
-    # instances alive once they reach the end of their respective scopes
-    instances = WeakSet()
+    config_calibre_dir = None
+    app_db_path = None

-    def __init__(self, expire_on_commit=True, init=False):
+    def __init__(self, _app: Flask=None):  # , expire_on_commit=True, init=False):
         """ Initialize a new CalibreDB session
         """
-        self.session = None
-        if init:
-            self.init_db(expire_on_commit)
+        self.Session = None
+        #if init:
+        # self.init_db(expire_on_commit)
+        if _app is not None and not _app._got_first_request:
+            self.init_app(_app)

-    def init_db(self, expire_on_commit=True):
-        if self._init:
-            self.init_session(expire_on_commit)
-
-        self.instances.add(self)
-
-    def init_session(self, expire_on_commit=True):
-        self.session = self.session_factory()
-        self.session.expire_on_commit = expire_on_commit
-        self.create_functions(self.config)
+    def init_app(self, _app):
+        _app.teardown_appcontext(self.teardown)

     @classmethod
     def setup_db_cc_classes(cls, cc):
+        global cc_classes
         cc_ids = []
         books_custom_column_links = {}
         for row in cc:
@@ -623,8 +614,6 @@ class CalibreDB:
                                            secondary=books_custom_column_links[cc_id[0]],
                                            backref='books'))

-        return cc_classes
-
     @classmethod
     def check_valid_db(cls, config_calibre_dir, app_db_path, config_calibre_uuid):
         if not config_calibre_dir:
@@ -644,7 +633,6 @@
             local_session = scoped_session(sessionmaker())
             local_session.configure(bind=connection)
             database_uuid = local_session().query(Library_Id).one_or_none()
-            # local_session.dispose()

         check_engine.connect()
         db_change = config_calibre_uuid != database_uuid.uuid
@@ -652,13 +640,30 @@
             return False, False
         return True, db_change

+    def teardown(self, exception):
+        ctx = g.get("lib_sql")
+        if ctx:
+            ctx.close()
+
+    @property
+    def session(self):
+        # connect or get active connection
+        if not g.get("lib_sql"):
+            g.lib_sql = self.connect()
+        return g.lib_sql

     @classmethod
-    def update_config(cls, config):
+    def update_config(cls, config, config_calibre_dir, app_db_path):
         cls.config = config
+        cls.config_calibre_dir = config_calibre_dir
+        cls.app_db_path = app_db_path
+
+    def connect(self):
+        return self.setup_db(self.config_calibre_dir, self.app_db_path)

     @classmethod
     def setup_db(cls, config_calibre_dir, app_db_path):
-        cls.dispose()
-
         if not config_calibre_dir:
             cls.config.invalidate()
@@ -670,17 +675,17 @@
             return None

         try:
-            cls.engine = create_engine('sqlite://',
+            engine = create_engine('sqlite://',
                                    echo=False,
                                    isolation_level="SERIALIZABLE",
                                    connect_args={'check_same_thread': False},
                                    poolclass=StaticPool)
-            with cls.engine.begin() as connection:
+            with engine.begin() as connection:
                 connection.execute(text('PRAGMA cache_size = 10000;'))
                 connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
                 connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))

-            conn = cls.engine.connect()
+            conn = engine.connect()
             # conn.text_factory = lambda b: b.decode(errors = 'ignore') possible fix for #1302
         except Exception as ex:
             cls.config.invalidate(ex)
@@ -696,13 +701,10 @@
             log.error_or_exception(e)
             return None

-        cls.session_factory = scoped_session(sessionmaker(autocommit=False,
-                                                          autoflush=True,
-                                                          bind=cls.engine, future=True))
-        for inst in cls.instances:
-            inst.init_session()
-
-        cls._init = True
+        return scoped_session(sessionmaker(autocommit=False,
+                                           autoflush=False,
+                                           bind=engine, future=True))

     def get_book(self, book_id):
         return self.session.query(Books).filter(Books.id == book_id).first()
@@ -1066,43 +1068,11 @@
         except sqliteOperationalError:
             pass

-    @classmethod
-    def dispose(cls):
-        # global session
-
-        for inst in cls.instances:
-            old_session = inst.session
-            inst.session = None
-            if old_session:
-                try:
-                    old_session.close()
-                except Exception:
-                    pass
-                if old_session.bind:
-                    try:
-                        old_session.bind.dispose()
-                    except Exception:
-                        pass
-
-        for attr in list(Books.__dict__.keys()):
-            if attr.startswith("custom_column_"):
-                setattr(Books, attr, None)
-
-        for db_class in cc_classes.values():
-            Base.metadata.remove(db_class.__table__)
-        cc_classes.clear()
-
-        for table in reversed(Base.metadata.sorted_tables):
-            name = table.key
-            if name.startswith("custom_column_") or name.startswith("books_custom_column_"):
-                if table is not None:
-                    Base.metadata.remove(table)
-
     def reconnect_db(self, config, app_db_path):
-        self.dispose()
-        self.engine.dispose()
+        # self.dispose()
+        # self.engine.dispose()
         self.setup_db(config.config_calibre_dir, app_db_path)
-        self.update_config(config)
+        self.update_config(config, config.config_calibre_dir, app_db_path)


 def lcase(s):

@@ -215,6 +215,7 @@ def table_get_custom_enum(c_id):
 def edit_list_book(param):
     vals = request.form.to_dict()
     book = calibre_db.get_book(vals['pk'])
+    calibre_db.create_functions(config)
     sort_param = ""
     ret = ""
     try:

@@ -726,20 +726,20 @@ $(function() {
         url: getPath() + "/ajax/simulatedbchange",
         data: {config_calibre_dir: $("#config_calibre_dir").val(), csrf_token: $("input[name='csrf_token']").val()},
         success: function success(data) {
-            if ( data.change ) {
-                if ( data.valid ) {
+            if ( !data.valid ) {
+                $("#InvalidDialog").modal('show');
+            }
+            else{
+                if ( data.change ) {
                     confirmDialog(
                         "db_submit",
                         "GeneralChangeModal",
                         0,
                         changeDbSettings
                     );
-                }
-                else {
-                    $("#InvalidDialog").modal('show');
-                }
-            } else {
+                } else {
                     changeDbSettings();
+                }
             }
         }
     });

@@ -28,7 +28,7 @@ from sqlalchemy.exc import SQLAlchemyError
 from flask_babel import lazy_gettext as N_

 from cps.services.worker import CalibreTask
-from cps import db
+from cps import db, app
 from cps import logger, config
 from cps.subproc_wrapper import process_open
 from flask_babel import gettext as _
@@ -60,36 +60,39 @@ class TaskConvert(CalibreTask):
         self.results = dict()

     def run(self, worker_thread):
+        df_cover = None
+        cur_book = None
         self.worker_thread = worker_thread
         if config.config_use_google_drive:
-            worker_db = db.CalibreDB(expire_on_commit=False, init=True)
-            cur_book = worker_db.get_book(self.book_id)
-            self.title = cur_book.title
-            data = worker_db.get_book_format(self.book_id, self.settings['old_book_format'])
-            df = gdriveutils.getFileFromEbooksFolder(cur_book.path,
-                                                     data.name + "." + self.settings['old_book_format'].lower())
-            df_cover = gdriveutils.getFileFromEbooksFolder(cur_book.path, "cover.jpg")
-            if df:
-                datafile_cover = None
-                datafile = os.path.join(config.get_book_path(),
-                                        cur_book.path,
-                                        data.name + "." + self.settings['old_book_format'].lower())
-                if df_cover:
-                    datafile_cover = os.path.join(config.get_book_path(),
-                                                  cur_book.path, "cover.jpg")
-                if not os.path.exists(os.path.join(config.get_book_path(), cur_book.path)):
-                    os.makedirs(os.path.join(config.get_book_path(), cur_book.path))
-                df.GetContentFile(datafile)
-                if df_cover:
-                    df_cover.GetContentFile(datafile_cover)
-                worker_db.session.close()
-            else:
-                # ToDo Include cover in error handling
-                error_message = _("%(format)s not found on Google Drive: %(fn)s",
-                                  format=self.settings['old_book_format'],
-                                  fn=data.name + "." + self.settings['old_book_format'].lower())
-                worker_db.session.close()
-                return self._handleError(error_message)
+            with app.app_context():
+                worker_db = db.CalibreDB(app)
+                cur_book = worker_db.get_book(self.book_id)
+                self.title = cur_book.title
+                data = worker_db.get_book_format(self.book_id, self.settings['old_book_format'])
+                df = gdriveutils.getFileFromEbooksFolder(cur_book.path,
+                                                         data.name + "." + self.settings['old_book_format'].lower())
+                df_cover = gdriveutils.getFileFromEbooksFolder(cur_book.path, "cover.jpg")
+                if df:
+                    datafile_cover = None
+                    datafile = os.path.join(config.get_book_path(),
+                                            cur_book.path,
+                                            data.name + "." + self.settings['old_book_format'].lower())
+                    if df_cover:
+                        datafile_cover = os.path.join(config.get_book_path(),
+                                                      cur_book.path, "cover.jpg")
+                    if not os.path.exists(os.path.join(config.get_book_path(), cur_book.path)):
+                        os.makedirs(os.path.join(config.get_book_path(), cur_book.path))
+                    df.GetContentFile(datafile)
+                    if df_cover:
+                        df_cover.GetContentFile(datafile_cover)
+                    # worker_db.session.close()
+                else:
+                    # ToDo Include cover in error handling
+                    error_message = _("%(format)s not found on Google Drive: %(fn)s",
+                                      format=self.settings['old_book_format'],
+                                      fn=data.name + "." + self.settings['old_book_format'].lower())
+                    # worker_db.session.close()
+                    return self._handleError(error_message)

         filename = self._convert_ebook_format()
         if config.config_use_google_drive:
@@ -106,7 +109,7 @@ class TaskConvert(CalibreTask):
         # if we're sending to E-Reader after converting, create a one-off task and run it immediately
         # todo: figure out how to incorporate this into the progress
         try:
             EmailText = N_(u"%(book)s send to E-Reader", book=escape(self.title))
             for email in self.ereader_mail.split(','):
                 email = strip_whitespaces(email)
                 worker_thread.add(self.user, TaskEmail(self.settings['subject'],
@@ -124,95 +127,96 @@ class TaskConvert(CalibreTask):

     def _convert_ebook_format(self):
         error_message = None
-        local_db = db.CalibreDB(expire_on_commit=False, init=True)
-        file_path = self.file_path
-        book_id = self.book_id
-        format_old_ext = '.' + self.settings['old_book_format'].lower()
-        format_new_ext = '.' + self.settings['new_book_format'].lower()
+        with app.app_context():
+            local_db = db.CalibreDB(app)
+            file_path = self.file_path
+            book_id = self.book_id
+            format_old_ext = '.' + self.settings['old_book_format'].lower()
+            format_new_ext = '.' + self.settings['new_book_format'].lower()

             # check to see if destination format already exists - or if book is in database
             # if it does - mark the conversion task as complete and return a success
             # this will allow to send to E-Reader workflow to continue to work
             if os.path.isfile(file_path + format_new_ext) or\
                     local_db.get_book_format(self.book_id, self.settings['new_book_format']):
                 log.info("Book id %d already converted to %s", book_id, format_new_ext)
                 cur_book = local_db.get_book(book_id)
                 self.title = cur_book.title
                 self.results['path'] = cur_book.path
                 self.results['title'] = self.title
                 new_format = local_db.session.query(db.Data).filter(db.Data.book == book_id)\
-            .filter(db.Data.format == self.settings['new_book_format'].upper()).one_or_none()
-        if not new_format:
-            new_format = db.Data(name=os.path.basename(file_path),
-                                 book_format=self.settings['new_book_format'].upper(),
-                                 book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
-            try:
-                local_db.session.merge(new_format)
-                local_db.session.commit()
-            except SQLAlchemyError as e:
-                local_db.session.rollback()
-                log.error("Database error: %s", e)
-                local_db.session.close()
-                self._handleError(N_("Oops! Database Error: %(error)s.", error=e))
-                return
-        self._handleSuccess()
-        local_db.session.close()
-        return os.path.basename(file_path + format_new_ext)
-    else:
-        log.info("Book id %d - target format of %s does not exist. Moving forward with convert.",
-                 book_id,
-                 format_new_ext)
-
-    if config.config_kepubifypath and format_old_ext == '.epub' and format_new_ext == '.kepub':
-        check, error_message = self._convert_kepubify(file_path,
-                                                      format_old_ext,
-                                                      format_new_ext)
-    else:
-        # check if calibre converter-executable is existing
-        if not os.path.exists(config.config_converterpath):
-            self._handleError(N_("Calibre ebook-convert %(tool)s not found", tool=config.config_converterpath))
-            return
-        has_cover = local_db.get_book(book_id).has_cover
-        check, error_message = self._convert_calibre(file_path, format_old_ext, format_new_ext, has_cover)
-
-    if check == 0:
-        cur_book = local_db.get_book(book_id)
-        if os.path.isfile(file_path + format_new_ext):
-            new_format = local_db.session.query(db.Data).filter(db.Data.book == book_id) \
                     .filter(db.Data.format == self.settings['new_book_format'].upper()).one_or_none()
                 if not new_format:
-                    new_format = db.Data(name=cur_book.data[0].name,
+                    new_format = db.Data(name=os.path.basename(file_path),
                                          book_format=self.settings['new_book_format'].upper(),
                                          book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
                     try:
                         local_db.session.merge(new_format)
                         local_db.session.commit()
-                        if self.settings['new_book_format'].upper() in ['KEPUB', 'EPUB', 'EPUB3']:
-                            ub_session = init_db_thread()
-                            remove_synced_book(book_id, True, ub_session)
-                            ub_session.close()
                     except SQLAlchemyError as e:
                         local_db.session.rollback()
                         log.error("Database error: %s", e)
                         local_db.session.close()
-                        self._handleError(error_message)
+                        self._handleError(N_("Oops! Database Error: %(error)s.", error=e))
                         return
-                self.results['path'] = cur_book.path
-                self.title = cur_book.title
-                self.results['title'] = self.title
-                if not config.config_use_google_drive:
                 self._handleSuccess()
-                return os.path.basename(file_path + format_new_ext)
+                local_db.session.close()
+                return os.path.basename(file_path + format_new_ext)
             else:
-                error_message = N_('%(format)s format not found on disk', format=format_new_ext.upper())
-        local_db.session.close()
-        log.info("ebook converter failed with error while converting book")
-        if not error_message:
-            error_message = N_('Ebook converter failed with unknown error')
-        else:
-            log.error(error_message)
-        self._handleError(error_message)
-        return
+                log.info("Book id %d - target format of %s does not exist. Moving forward with convert.",
+                         book_id,
+                         format_new_ext)
+
+            if config.config_kepubifypath and format_old_ext == '.epub' and format_new_ext == '.kepub':
+                check, error_message = self._convert_kepubify(file_path,
+                                                              format_old_ext,
+                                                              format_new_ext)
+            else:
+                # check if calibre converter-executable is existing
+                if not os.path.exists(config.config_converterpath):
+                    self._handleError(N_("Calibre ebook-convert %(tool)s not found", tool=config.config_converterpath))
+                    return
+                has_cover = local_db.get_book(book_id).has_cover
+                check, error_message = self._convert_calibre(file_path, format_old_ext, format_new_ext, has_cover)
+
+            if check == 0:
+                cur_book = local_db.get_book(book_id)
+                if os.path.isfile(file_path + format_new_ext):
+                    new_format = local_db.session.query(db.Data).filter(db.Data.book == book_id) \
+                        .filter(db.Data.format == self.settings['new_book_format'].upper()).one_or_none()
+                    if not new_format:
+                        new_format = db.Data(name=cur_book.data[0].name,
+                                             book_format=self.settings['new_book_format'].upper(),
+                                             book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
+                        try:
+                            local_db.session.merge(new_format)
+                            local_db.session.commit()
+                            if self.settings['new_book_format'].upper() in ['KEPUB', 'EPUB', 'EPUB3']:
+                                ub_session = init_db_thread()
+                                remove_synced_book(book_id, True, ub_session)
+                                ub_session.close()
+                        except SQLAlchemyError as e:
+                            local_db.session.rollback()
+                            log.error("Database error: %s", e)
+                            local_db.session.close()
+                            self._handleError(error_message)
+                            return
+                    self.results['path'] = cur_book.path
+                    self.title = cur_book.title
+                    self.results['title'] = self.title
+                    if not config.config_use_google_drive:
+                        self._handleSuccess()
+                    return os.path.basename(file_path + format_new_ext)
+                else:
+                    error_message = N_('%(format)s format not found on disk', format=format_new_ext.upper())
+                local_db.session.close()
+                log.info("ebook converter failed with error while converting book")
+                if not error_message:
+                    error_message = N_('Ebook converter failed with unknown error')
+                else:
+                    log.error(error_message)
+                self._handleError(error_message)
+                return

     def _convert_kepubify(self, file_path, format_old_ext, format_new_ext):
         if config.config_embed_metadata and config.config_binariesdir:
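
Because flask.g only exists inside an application context, code that runs outside a request, such as the worker tasks changed in this commit, has to push a context explicitly before touching the database. Condensed, the pattern used in these task changes looks roughly like the following sketch; the function name and its parameter are illustrative, only the cps imports and the CalibreDB calls mirror the diff.

    from cps import app, db

    def run_in_worker(book_id):
        # Push an application context so flask.g (and with it the
        # per-context CalibreDB session) is available in this thread.
        with app.app_context():
            worker_db = db.CalibreDB(app)
            book = worker_db.get_book(book_id)
            # No explicit session.close() is needed: CalibreDB.teardown()
            # runs when the context exits and closes the session stored on g.
            return book.title if book else None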

@@ -18,7 +18,7 @@

 from flask_babel import lazy_gettext as N_

-from cps import config, logger, db, ub
+from cps import config, logger, db, ub, app
 from cps.services.worker import CalibreTask


@@ -26,11 +26,13 @@ class TaskReconnectDatabase(CalibreTask):
     def __init__(self, task_message=N_('Reconnecting Calibre database')):
         super(TaskReconnectDatabase, self).__init__(task_message)
         self.log = logger.create()
-        self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
+        # self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)

     def run(self, worker_thread):
-        self.calibre_db.reconnect_db(config, ub.app_DB_path)
-        self.calibre_db.session.close()
+        with app.app_context():
+            calibre_db = db.CalibreDB(app)
+            calibre_db.reconnect_db(config, ub.app_DB_path)
+            # self.calibre_db.session.close()
         self._handleSuccess()

     @property

@@ -19,7 +19,7 @@
 import os
 from lxml import etree

-from cps import config, db, gdriveutils, logger
+from cps import config, db, gdriveutils, logger, app
 from cps.services.worker import CalibreTask
 from flask_babel import lazy_gettext as N_

@@ -34,7 +34,7 @@ class TaskBackupMetadata(CalibreTask):
                  task_message=N_('Backing up Metadata')):
         super(TaskBackupMetadata, self).__init__(task_message)
         self.log = logger.create()
-        self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
+        # self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
         self.export_language = export_language
         self.translated_title = translated_title
         self.set_dirty = set_dirty
@@ -46,47 +46,51 @@ class TaskBackupMetadata(CalibreTask):
             self.backup_metadata()

     def set_all_books_dirty(self):
-        try:
-            books = self.calibre_db.session.query(db.Books).all()
-            for book in books:
-                self.calibre_db.set_metadata_dirty(book.id)
-            self.calibre_db.session.commit()
-            self._handleSuccess()
-        except Exception as ex:
-            self.log.debug('Error adding book for backup: ' + str(ex))
-            self._handleError('Error adding book for backup: ' + str(ex))
-            self.calibre_db.session.rollback()
-            self.calibre_db.session.close()
+        with app.app_context():
+            calibre_dbb = db.CalibreDB(app)
+            try:
+                books = calibre_dbb.session.query(db.Books).all()
+                for book in books:
+                    calibre_dbb.set_metadata_dirty(book.id)
+                calibre_dbb.session.commit()
+                self._handleSuccess()
+            except Exception as ex:
+                self.log.debug('Error adding book for backup: ' + str(ex))
+                self._handleError('Error adding book for backup: ' + str(ex))
+                calibre_dbb.session.rollback()
+                # self.calibre_db.session.close()

     def backup_metadata(self):
-        try:
-            metadata_backup = self.calibre_db.session.query(db.Metadata_Dirtied).all()
-            custom_columns = (self.calibre_db.session.query(db.CustomColumns)
-                              .filter(db.CustomColumns.mark_for_delete == 0)
-                              .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions))
-                              .order_by(db.CustomColumns.label).all())
-            count = len(metadata_backup)
-            i = 0
-            for backup in metadata_backup:
-                book = self.calibre_db.session.query(db.Books).filter(db.Books.id == backup.book).one_or_none()
-                self.calibre_db.session.query(db.Metadata_Dirtied).filter(
-                    db.Metadata_Dirtied.book == backup.book).delete()
-                self.calibre_db.session.commit()
-                if book:
-                    self.open_metadata(book, custom_columns)
-                else:
-                    self.log.error("Book {} not found in database".format(backup.book))
-                i += 1
-                self.progress = (1.0 / count) * i
-            self._handleSuccess()
-            self.calibre_db.session.close()
+        with app.app_context():
+            try:
+                calibre_dbb = db.CalibreDB(app)
+                metadata_backup = calibre_dbb.session.query(db.Metadata_Dirtied).all()
+                custom_columns = (calibre_dbb.session.query(db.CustomColumns)
+                                  .filter(db.CustomColumns.mark_for_delete == 0)
+                                  .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions))
+                                  .order_by(db.CustomColumns.label).all())
+                count = len(metadata_backup)
+                i = 0
+                for backup in metadata_backup:
+                    book = calibre_dbb.session.query(db.Books).filter(db.Books.id == backup.book).one_or_none()
+                    calibre_dbb.session.query(db.Metadata_Dirtied).filter(
+                        db.Metadata_Dirtied.book == backup.book).delete()
+                    calibre_dbb.session.commit()
+                    if book:
+                        self.open_metadata(book, custom_columns)
+                    else:
+                        self.log.error("Book {} not found in database".format(backup.book))
+                    i += 1
+                    self.progress = (1.0 / count) * i
+                self._handleSuccess()
+                # self.calibre_db.session.close()

             except Exception as ex:
                 b = "NaN" if not hasattr(book, 'id') else book.id
                 self.log.debug('Error creating metadata backup for book {}: '.format(b) + str(ex))
                 self._handleError('Error creating metadata backup: ' + str(ex))
-            self.calibre_db.session.rollback()
-            self.calibre_db.session.close()
+                calibre_dbb.session.rollback()
+                # self.calibre_db.session.close()

     def open_metadata(self, book, custom_columns):
         # package = self.create_new_metadata_backup(book, custom_columns)

@@ -23,10 +23,11 @@ from io import BytesIO
 from datetime import datetime, timezone

 from .. import constants
-from cps import config, db, fs, gdriveutils, logger, ub
+from cps import config, db, fs, gdriveutils, logger, ub, app
 from cps.services.worker import CalibreTask, STAT_CANCELLED, STAT_ENDED
 from sqlalchemy import func, text, or_
 from flask_babel import lazy_gettext as N_

 try:
     from wand.image import Image
     use_IM = True
@@ -113,9 +114,10 @@ class TaskGenerateCoverThumbnails(CalibreTask):
     @staticmethod
     def get_books_with_covers(book_id=-1):
         filter_exp = (db.Books.id == book_id) if book_id != -1 else True
-        calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
-        books_cover = calibre_db.session.query(db.Books).filter(db.Books.has_cover == 1).filter(filter_exp).all()
-        calibre_db.session.close()
+        with app.app_context():
+            calibre_db = db.CalibreDB(app) #, expire_on_commit=False, init=True)
+            books_cover = calibre_db.session.query(db.Books).filter(db.Books.has_cover == 1).filter(filter_exp).all()
+            # calibre_db.session.close()
         return books_cover

     def get_book_cover_thumbnails(self, book_id):
@@ -246,7 +248,7 @@ class TaskGenerateSeriesThumbnails(CalibreTask):
         super(TaskGenerateSeriesThumbnails, self).__init__(task_message)
         self.log = logger.create()
         self.app_db_session = ub.get_new_session_instance()
-        self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
+        # self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
         self.cache = fs.FileSystem()
         self.resolutions = [
             constants.COVER_THUMBNAIL_SMALL,
@@ -254,58 +256,60 @@ class TaskGenerateSeriesThumbnails(CalibreTask):
         ]

     def run(self, worker_thread):
-        if self.calibre_db.session and use_IM and self.stat != STAT_CANCELLED and self.stat != STAT_ENDED:
-            self.message = 'Scanning Series'
-            all_series = self.get_series_with_four_plus_books()
-            count = len(all_series)
+        with app.app_context():
+            calibre_db = db.CalibreDB(app)
+            if calibre_db.session and use_IM and self.stat != STAT_CANCELLED and self.stat != STAT_ENDED:
+                self.message = 'Scanning Series'
+                all_series = self.get_series_with_four_plus_books(calibre_db)
+                count = len(all_series)

                 total_generated = 0
                 for i, series in enumerate(all_series):
                     generated = 0
                     series_thumbnails = self.get_series_thumbnails(series.id)
-                    series_books = self.get_series_books(series.id)
+                    series_books = self.get_series_books(series.id, calibre_db)

                     # Generate new thumbnails for missing covers
                     resolutions = list(map(lambda t: t.resolution, series_thumbnails))
                     missing_resolutions = list(set(self.resolutions).difference(resolutions))
                     for resolution in missing_resolutions:
-                        generated += 1
-                        self.create_series_thumbnail(series, series_books, resolution)

-                    # Replace outdated or missing thumbnails
-                    for thumbnail in series_thumbnails:
-                        if any(book.last_modified > thumbnail.generated_at for book in series_books):
                         generated += 1
-                        self.update_series_thumbnail(series_books, thumbnail)
+                        self.create_series_thumbnail(series, series_books, resolution)

-                    elif not self.cache.get_cache_file_exists(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS):
-                        generated += 1
-                        self.update_series_thumbnail(series_books, thumbnail)
+                    # Replace outdated or missing thumbnails
+                    for thumbnail in series_thumbnails:
+                        if any(book.last_modified > thumbnail.generated_at for book in series_books):
+                            generated += 1
+                            self.update_series_thumbnail(series_books, thumbnail)

-                    # Increment the progress
-                    self.progress = (1.0 / count) * i
+                        elif not self.cache.get_cache_file_exists(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS):
+                            generated += 1
+                            self.update_series_thumbnail(series_books, thumbnail)

-                    if generated > 0:
-                        total_generated += generated
-                        self.message = N_('Generated {0} series thumbnails').format(total_generated)
+                    # Increment the progress
+                    self.progress = (1.0 / count) * i

-                # Check if job has been cancelled or ended
-                if self.stat == STAT_CANCELLED:
-                    self.log.info(f'GenerateSeriesThumbnails task has been cancelled.')
-                    return
+                    if generated > 0:
+                        total_generated += generated
+                        self.message = N_('Generated {0} series thumbnails').format(total_generated)

-                if self.stat == STAT_ENDED:
-                    self.log.info(f'GenerateSeriesThumbnails task has been ended.')
-                    return
+                # Check if job has been cancelled or ended
+                if self.stat == STAT_CANCELLED:
+                    self.log.info(f'GenerateSeriesThumbnails task has been cancelled.')
+                    return

-                if total_generated == 0:
-                    self.self_cleanup = True
+                if self.stat == STAT_ENDED:
+                    self.log.info(f'GenerateSeriesThumbnails task has been ended.')
+                    return

-        self._handleSuccess()
-        self.app_db_session.remove()
+                if total_generated == 0:
+                    self.self_cleanup = True

-    def get_series_with_four_plus_books(self):
-        return self.calibre_db.session \
+            self._handleSuccess()
+            self.app_db_session.remove()

+    def get_series_with_four_plus_books(self, calibre_db):
+        return calibre_db.session \
             .query(db.Series) \
             .join(db.books_series_link) \
             .join(db.Books) \
@@ -314,8 +318,8 @@ class TaskGenerateSeriesThumbnails(CalibreTask):
             .having(func.count('book_series_link') > 3) \
             .all()

-    def get_series_books(self, series_id):
-        return self.calibre_db.session \
+    def get_series_books(self, series_id, calibre_db):
+        return calibre_db.session \
             .query(db.Books) \
             .join(db.books_series_link) \
             .join(db.Series) \
@@ -461,13 +465,15 @@ class TaskClearCoverThumbnailCache(CalibreTask):

     def run(self, worker_thread):
         if self.app_db_session:
-            if self.book_id == 0:  # delete superfluous thumbnails
-                calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
-                thumbnails = (calibre_db.session.query(ub.Thumbnail)
-                              .join(db.Books, ub.Thumbnail.entity_id == db.Books.id, isouter=True)
-                              .filter(db.Books.id==None)
-                              .all())
-                calibre_db.session.close()
+            # delete superfluous thumbnails
+            if self.book_id == 0:
+                with app.app_context():
+                    calibre_db = db.CalibreDB(app)
+                    thumbnails = (calibre_db.session.query(ub.Thumbnail)
+                                  .join(db.Books, ub.Thumbnail.entity_id == db.Books.id, isouter=True)
+                                  .filter(db.Books.id==None)
+                                  .all())
+                    # calibre_db.session.close()
             elif self.book_id > 0:  # make sure single book is selected
                 thumbnails = self.get_thumbnails_for_book(self.book_id)
             if self.book_id < 0:

@@ -237,7 +237,7 @@ def pdf_preview(tmp_file_path, tmp_dir):
     if use_generic_pdf_cover:
         return None
     try:
-        cover_file_name = os.path.join(os.path.dirname(tmp_file_path), "cover.jpg")
+        cover_file_name = tmp_file_path + ".jpg"
        with Image() as img:
            img.options["pdf:use-cropbox"] = "true"
            img.read(filename=tmp_file_path + '[0]', resolution=150)
@@ -245,7 +245,7 @@ def pdf_preview(tmp_file_path, tmp_dir):
        if img.alpha_channel:
            img.alpha_channel = 'remove'
            img.background_color = Color('white')
-        img.save(filename=os.path.join(tmp_dir, cover_file_name))
+        img.save(filename=cover_file_name)
         return cover_file_name
     except PolicyError as ex:
         log.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex)