Mirror of https://github.com/janeczku/calibre-web
Connection to metadata.db is now on a per-request basis; this should solve upload issues with multiple files at once.
This commit is contained in: commit 87b200cc13
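The idea behind the change is the usual Flask pattern: instead of one long-lived Calibre database session shared by every request, a session is opened lazily per application/request context and closed again when that context is torn down. A minimal, self-contained sketch of that pattern follows (the names library.db, books and get_session are illustrative placeholders, not code from this commit):

from flask import Flask, g
from sqlalchemy import create_engine, text
from sqlalchemy.orm import scoped_session, sessionmaker

app = Flask(__name__)
# check_same_thread=False mirrors how the Calibre library is opened for worker threads
engine = create_engine("sqlite:///library.db", connect_args={"check_same_thread": False})
Session = scoped_session(sessionmaker(bind=engine))


def get_session():
    # One session per application/request context, cached on flask.g
    if g.get("lib_sql") is None:
        g.lib_sql = Session()
    return g.lib_sql


@app.teardown_appcontext
def close_session(exception):
    # Runs after every request and after every "with app.app_context():" block
    session = g.pop("lib_sql", None)
    if session is not None:
        session.close()


@app.route("/book_count")
def book_count():
    count = get_session().execute(text("SELECT count(*) FROM books")).scalar()
    return {"books": count}

Because each request (or explicitly pushed app context) gets its own session, two uploads running at the same time no longer share a single connection.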
@@ -102,7 +102,7 @@ if wtf_present:
 else:
     csrf = None
 
-calibre_db = db.CalibreDB()
+calibre_db = db.CalibreDB(app)
 
 web_server = WebServer()
 
@@ -146,9 +146,7 @@ def create_app():
     lm.anonymous_user = ub.Anonymous
     lm.session_protection = 'strong' if config.config_session == 1 else "basic"
 
-    db.CalibreDB.update_config(config)
-    db.CalibreDB.setup_db(config.config_calibre_dir, cli_param.settings_path)
-    calibre_db.init_db()
+    db.CalibreDB.update_config(config, config.config_calibre_dir, cli_param.settings_path)
 
     updater_thread.init_updater(config, web_server)
    # Perform dry run of updater and exit afterward
cps/admin.py
@@ -118,7 +118,7 @@ def before_request():
     g.allow_upload = config.config_uploading
     g.current_theme = config.config_theme
     g.config_authors_max = config.config_authors_max
-    if '/static/' not in request.path and not config.db_configured and \
+    if ('/static/' not in request.path and not config.db_configured and
             request.endpoint not in ('admin.ajax_db_config',
                                      'admin.simulatedbchange',
                                      'admin.db_configuration',
@@ -126,7 +126,7 @@ def before_request():
                                      'web.login_post',
                                      'web.logout',
                                      'admin.load_dialogtexts',
-                                     'admin.ajax_pathchooser'):
+                                     'admin.ajax_pathchooser')):
         return redirect(url_for('admin.db_configuration'))
 
 
@@ -144,7 +144,6 @@ def shutdown():
     show_text = {}
     if task in (0, 1):  # valid commandos received
         # close all database connections
-        calibre_db.dispose()
         ub.dispose()
 
         if task == 0:
@@ -1737,14 +1736,12 @@ def _db_configuration_update_helper():
             return _db_configuration_result(_("Books path not valid"), gdrive_error)
         else:
             _config_string(to_save, "config_calibre_split_dir")
 
-    if db_change or not db_valid or not config.db_configured \
-            or config.config_calibre_dir != to_save["config_calibre_dir"]:
+    if (db_change or not db_valid or not config.db_configured
+            or config.config_calibre_dir != to_save["config_calibre_dir"]):
         if not os.path.exists(metadata_db) or not to_save['config_calibre_dir']:
             return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'), gdrive_error)
         else:
-            calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path)
-            config.store_calibre_uuid(calibre_db, db.Library_Id)
         # if db changed -> delete shelfs, delete download books, delete read books, kobo sync...
         if db_change:
             log.info("Calibre Database changed, all Calibre-Web info related to old Database gets deleted")
@@ -1767,10 +1764,11 @@ def _db_configuration_update_helper():
         config.config_allowed_column_value = ""
         config.config_read_column = 0
     _config_string(to_save, "config_calibre_dir")
-    calibre_db.update_config(config)
+    calibre_db.update_config(config, config.config_calibre_dir, ub.app_DB_path)
+    config.store_calibre_uuid(calibre_db, db.Library_Id)
     if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
         flash(_("DB is not Writeable"), category="warning")
-    calibre_db.update_config(config)
+    calibre_db.update_config(config, config.config_calibre_dir, ub.app_DB_path)
     config.save()
     return _db_configuration_result(None, gdrive_error)
 
cps/audio.py
@@ -16,8 +16,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import os
-
 import mutagen
 import base64
 from . import cover, logger
@@ -51,13 +49,12 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         if not pubdate:
             pubdate = str(audio_file.tags.get('TDOR').text[0]) if "TDOR" in audio_file.tags else None
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
             cover_info = cover_data[0]
             for dat in cover_data:
                 if dat.type == mutagen.id3.PictureType.COVER_FRONT:
                     cover_info = dat
                     break
-            cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
+            tmp_cover_name = cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
     elif original_file_extension in [".ogg", ".flac", ".opus", ".ogv"]:
         title = audio_file.tags.get('TITLE')[0] if "TITLE" in audio_file else None
         author = audio_file.tags.get('ARTIST')[0] if "ARTIST" in audio_file else None
@@ -70,17 +67,15 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         cover_data = audio_file.tags.get('METADATA_BLOCK_PICTURE')
         if not no_cover_processing:
             if cover_data:
-                tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
                 cover_info = mutagen.flac.Picture(base64.b64decode(cover_data[0]))
-                cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
+                tmp_cover_name = cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
             if hasattr(audio_file, "pictures"):
                 cover_info = audio_file.pictures[0]
                 for dat in audio_file.pictures:
                     if dat.type == mutagen.id3.PictureType.COVER_FRONT:
                         cover_info = dat
                         break
-                tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
-                cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
+                tmp_cover_name = cover.cover_processing(tmp_file_path, cover_info.data, "." + cover_info.mime[-3:])
     elif original_file_extension in [".aac"]:
         title = audio_file.tags.get('Title').value if "Title" in audio_file else None
         author = audio_file.tags.get('Artist').value if "Artist" in audio_file else None
@@ -92,7 +87,7 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         pubdate = audio_file.tags.get('Year').value if "Year" in audio_file else None
         cover_data = audio_file.tags['Cover Art (Front)']
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
+            tmp_cover_name = tmp_file_path + '.jpg'
             with open(tmp_cover_name, "wb") as cover_file:
                 cover_file.write(cover_data.value.split(b"\x00",1)[1])
     elif original_file_extension in [".asf"]:
@@ -106,7 +101,7 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         pubdate = audio_file.tags.get('Year')[0].value if "Year" in audio_file else None
         cover_data = audio_file.tags.get('WM/Picture', None)
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
+            tmp_cover_name = tmp_file_path + '.jpg'
             with open(tmp_cover_name, "wb") as cover_file:
                 cover_file.write(cover_data[0].value)
     elif original_file_extension in [".mp4", ".m4a", ".m4b"]:
@@ -120,7 +115,6 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
         pubdate = audio_file.tags.get('©day')[0] if "©day" in audio_file.tags else None
         cover_data = audio_file.tags.get('covr', None)
         if cover_data and not no_cover_processing:
-            tmp_cover_name = os.path.join(os.path.dirname(tmp_file_path), 'cover.jpg')
             cover_type = None
             for c in cover_data:
                 if c.imageformat == mutagen.mp4.AtomDataType.JPEG:
@@ -132,7 +126,7 @@ def get_audio_file_info(tmp_file_path, original_file_extension, original_file_na
                     cover_bin = c
                     break
             if cover_type:
-                cover.cover_processing(tmp_file_path, cover_bin, cover_type)
+                tmp_cover_name = cover.cover_processing(tmp_file_path, cover_bin, cover_type)
             else:
                 logger.error("Unknown covertype in file {} ".format(original_file_name))
 
cps/comic.py
@@ -90,7 +90,7 @@ def _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_exec
             if len(ext) > 1:
                 extension = ext[1].lower()
                 if extension in cover.COVER_EXTENSIONS:
-                    cover_data = cf.read([name])
+                    cover_data = cf.read(name)
                     break
     except Exception as ex:
         log.error('Rarfile failed with error: {}'.format(ex))
@@ -109,13 +109,13 @@ def _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_exec
     return cover_data, extension
 
 
-def _extract_cover(tmp_file_name, original_file_extension, rar_executable):
+def _extract_cover(tmp_file_path, original_file_extension, rar_executable):
     cover_data = extension = None
     if use_comic_meta:
         try:
-            archive = ComicArchive(tmp_file_name, rar_exe_path=rar_executable)
+            archive = ComicArchive(tmp_file_path, rar_exe_path=rar_executable)
         except TypeError:
-            archive = ComicArchive(tmp_file_name)
+            archive = ComicArchive(tmp_file_path)
         name_list = archive.getPageNameList if hasattr(archive, "getPageNameList") else archive.get_page_name_list
         for index, name in enumerate(name_list()):
             ext = os.path.splitext(name)
@@ -126,8 +126,8 @@ def _extract_cover(tmp_file_name, original_file_extension, rar_executable):
                 cover_data = get_page(index)
                 break
     else:
-        cover_data, extension = _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_executable)
-    return cover.cover_processing(tmp_file_name, cover_data, extension)
+        cover_data, extension = _extract_cover_from_archive(original_file_extension, tmp_file_path, rar_executable)
+    return cover.cover_processing(tmp_file_path, cover_data, extension)
 
 
 def get_comic_info(tmp_file_path, original_file_name, original_file_extension, rar_executable, no_cover_processing):
@@ -405,11 +405,13 @@ class ConfigSQL(object):
         return self.config_calibre_split_dir if self.config_calibre_split_dir else self.config_calibre_dir
 
     def store_calibre_uuid(self, calibre_db, Library_table):
+        from . import app
         try:
-            calibre_uuid = calibre_db.session.query(Library_table).one_or_none()
-            if self.config_calibre_uuid != calibre_uuid.uuid:
-                self.config_calibre_uuid = calibre_uuid.uuid
-                self.save()
+            with app.app_context():
+                calibre_uuid = calibre_db.session.query(Library_table).one_or_none()
+                if self.config_calibre_uuid != calibre_uuid.uuid:
+                    self.config_calibre_uuid = calibre_uuid.uuid
+                    self.save()
         except AttributeError:
             pass
 
@@ -29,8 +29,9 @@ NO_JPEG_EXTENSIONS = ['.png', '.webp', '.bmp']
 COVER_EXTENSIONS = ['.png', '.webp', '.bmp', '.jpg', '.jpeg']
 
 
-def cover_processing(tmp_file_name, img, extension):
-    tmp_cover_name = os.path.join(os.path.dirname(tmp_file_name), 'cover.jpg')
+def cover_processing(tmp_file_path, img, extension):
+    # tmp_cover_name = os.path.join(os.path.dirname(tmp_file_name), 'cover.jpg')
+    tmp_cover_name = tmp_file_path + '.jpg'
     if extension in NO_JPEG_EXTENSIONS:
         if use_IM:
             with Image(blob=img) as imgc:
cps/db.py
@@ -23,7 +23,7 @@ import json
 from datetime import datetime, timezone
 from urllib.parse import quote
 import unidecode
-from weakref import WeakSet
+# from weakref import WeakSet
 from uuid import uuid4
 
 from sqlite3 import OperationalError as sqliteOperationalError
@@ -45,7 +45,7 @@ from sqlalchemy.ext.associationproxy import association_proxy
 from .cw_login import current_user
 from flask_babel import gettext as _
 from flask_babel import get_locale
-from flask import flash
+from flask import flash, g, Flask
 
 from . import logger, ub, isoLanguages
 from .pagination import Pagination
@@ -528,34 +528,25 @@ class AlchemyEncoder(json.JSONEncoder):
 
 
 class CalibreDB:
-    _init = False
-    engine = None
     config = None
-    session_factory = None
-    # This is a WeakSet so that references here don't keep other CalibreDB
-    # instances alive once they reach the end of their respective scopes
-    instances = WeakSet()
+    config_calibre_dir = None
+    app_db_path = None
 
-    def __init__(self, expire_on_commit=True, init=False):
+    def __init__(self, _app: Flask=None):  # , expire_on_commit=True, init=False):
         """ Initialize a new CalibreDB session
         """
-        self.session = None
-        if init:
-            self.init_db(expire_on_commit)
+        self.Session = None
+        # if init:
+        #     self.init_db(expire_on_commit)
+        if _app is not None and not _app._got_first_request:
+            self.init_app(_app)
 
-    def init_db(self, expire_on_commit=True):
-        if self._init:
-            self.init_session(expire_on_commit)
-
-        self.instances.add(self)
-
-    def init_session(self, expire_on_commit=True):
-        self.session = self.session_factory()
-        self.session.expire_on_commit = expire_on_commit
-        self.create_functions(self.config)
+    def init_app(self, _app):
+        _app.teardown_appcontext(self.teardown)
 
     @classmethod
     def setup_db_cc_classes(cls, cc):
         global cc_classes
         cc_ids = []
         books_custom_column_links = {}
         for row in cc:
@@ -623,8 +614,6 @@ class CalibreDB:
                                      secondary=books_custom_column_links[cc_id[0]],
                                      backref='books'))
 
-        return cc_classes
-
     @classmethod
     def check_valid_db(cls, config_calibre_dir, app_db_path, config_calibre_uuid):
         if not config_calibre_dir:
@@ -644,7 +633,6 @@ class CalibreDB:
             local_session = scoped_session(sessionmaker())
             local_session.configure(bind=connection)
             database_uuid = local_session().query(Library_Id).one_or_none()
-            # local_session.dispose()
 
         check_engine.connect()
         db_change = config_calibre_uuid != database_uuid.uuid
@@ -652,13 +640,30 @@ class CalibreDB:
             return False, False
         return True, db_change
 
+    def teardown(self, exception):
+        ctx = g.get("lib_sql")
+        if ctx:
+            ctx.close()
+
+    @property
+    def session(self):
+        # connect or get active connection
+        if not g.get("lib_sql"):
+            g.lib_sql = self.connect()
+        return g.lib_sql
+
     @classmethod
-    def update_config(cls, config):
+    def update_config(cls, config, config_calibre_dir, app_db_path):
         cls.config = config
+        cls.config_calibre_dir = config_calibre_dir
+        cls.app_db_path = app_db_path
+
+    def connect(self):
+        return self.setup_db(self.config_calibre_dir, self.app_db_path)
 
     @classmethod
     def setup_db(cls, config_calibre_dir, app_db_path):
-        cls.dispose()
 
         if not config_calibre_dir:
             cls.config.invalidate()
@@ -670,17 +675,17 @@ class CalibreDB:
             return None
 
         try:
-            cls.engine = create_engine('sqlite://',
+            engine = create_engine('sqlite://',
                                    echo=False,
                                    isolation_level="SERIALIZABLE",
                                    connect_args={'check_same_thread': False},
                                    poolclass=StaticPool)
-            with cls.engine.begin() as connection:
+            with engine.begin() as connection:
                 connection.execute(text('PRAGMA cache_size = 10000;'))
                 connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
                 connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))
 
-            conn = cls.engine.connect()
+            conn = engine.connect()
             # conn.text_factory = lambda b: b.decode(errors = 'ignore') possible fix for #1302
         except Exception as ex:
             cls.config.invalidate(ex)
@@ -696,13 +701,10 @@ class CalibreDB:
             log.error_or_exception(e)
             return None
 
-        cls.session_factory = scoped_session(sessionmaker(autocommit=False,
-                                                          autoflush=True,
-                                                          bind=cls.engine, future=True))
-        for inst in cls.instances:
-            inst.init_session()
-
-        cls._init = True
+        return scoped_session(sessionmaker(autocommit=False,
+                                           autoflush=False,
+                                           bind=engine, future=True))
 
     def get_book(self, book_id):
         return self.session.query(Books).filter(Books.id == book_id).first()
@@ -1066,43 +1068,11 @@ class CalibreDB:
         except sqliteOperationalError:
             pass
 
-    @classmethod
-    def dispose(cls):
-        # global session
-
-        for inst in cls.instances:
-            old_session = inst.session
-            inst.session = None
-            if old_session:
-                try:
-                    old_session.close()
-                except Exception:
-                    pass
-                if old_session.bind:
-                    try:
-                        old_session.bind.dispose()
-                    except Exception:
-                        pass
-
-        for attr in list(Books.__dict__.keys()):
-            if attr.startswith("custom_column_"):
-                setattr(Books, attr, None)
-
-        for db_class in cc_classes.values():
-            Base.metadata.remove(db_class.__table__)
-        cc_classes.clear()
-
-        for table in reversed(Base.metadata.sorted_tables):
-            name = table.key
-            if name.startswith("custom_column_") or name.startswith("books_custom_column_"):
-                if table is not None:
-                    Base.metadata.remove(table)
-
     def reconnect_db(self, config, app_db_path):
-        self.dispose()
-        self.engine.dispose()
+        # self.dispose()
+        # self.engine.dispose()
         self.setup_db(config.config_calibre_dir, app_db_path)
-        self.update_config(config)
+        self.update_config(config, config.config_calibre_dir, app_db_path)
 
 
 def lcase(s):
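In practice the reworked class is used the way the background tasks further down use it: update_config() records the library location on the class, and any code that needs the library pushes an application context and reads calibre_db.session, which lazily attaches a scoped session to flask.g and is closed again by teardown(). A hedged usage sketch, assuming an already configured library and the cps application object (not a line-for-line copy of this commit):

from cps import app, config, db, ub

# class-level configuration, done once at startup
db.CalibreDB.update_config(config, config.config_calibre_dir, ub.app_DB_path)

with app.app_context():
    calibre_db = db.CalibreDB(app)                       # registers teardown_appcontext
    book = calibre_db.session.query(db.Books).first()    # opens g.lib_sql on first use
    print(book.title if book else "library is empty")
# leaving the block pops the app context and closes the per-context session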
@@ -215,6 +215,7 @@ def table_get_custom_enum(c_id):
 def edit_list_book(param):
     vals = request.form.to_dict()
     book = calibre_db.get_book(vals['pk'])
+    calibre_db.create_functions(config)
     sort_param = ""
     ret = ""
     try:
@@ -726,20 +726,20 @@ $(function() {
         url: getPath() + "/ajax/simulatedbchange",
         data: {config_calibre_dir: $("#config_calibre_dir").val(), csrf_token: $("input[name='csrf_token']").val()},
         success: function success(data) {
-            if ( data.change ) {
-                if ( data.valid ) {
+            if ( !data.valid ) {
+                $("#InvalidDialog").modal('show');
+            }
+            else{
+                if ( data.change ) {
                     confirmDialog(
                         "db_submit",
                         "GeneralChangeModal",
                         0,
                         changeDbSettings
                     );
                 }
-                else {
-                    $("#InvalidDialog").modal('show');
-                }
-            } else {
+                else {
                     changeDbSettings();
                 }
             }
         }
     });
@@ -28,7 +28,7 @@ from sqlalchemy.exc import SQLAlchemyError
 from flask_babel import lazy_gettext as N_
 
 from cps.services.worker import CalibreTask
-from cps import db
+from cps import db, app
 from cps import logger, config
 from cps.subproc_wrapper import process_open
 from flask_babel import gettext as _
@@ -60,36 +60,39 @@ class TaskConvert(CalibreTask):
         self.results = dict()
 
     def run(self, worker_thread):
         df_cover = None
         cur_book = None
         self.worker_thread = worker_thread
         if config.config_use_google_drive:
-            worker_db = db.CalibreDB(expire_on_commit=False, init=True)
-            cur_book = worker_db.get_book(self.book_id)
-            self.title = cur_book.title
-            data = worker_db.get_book_format(self.book_id, self.settings['old_book_format'])
-            df = gdriveutils.getFileFromEbooksFolder(cur_book.path,
-                                                     data.name + "." + self.settings['old_book_format'].lower())
-            df_cover = gdriveutils.getFileFromEbooksFolder(cur_book.path, "cover.jpg")
-            if df:
-                datafile_cover = None
-                datafile = os.path.join(config.get_book_path(),
-                                        cur_book.path,
-                                        data.name + "." + self.settings['old_book_format'].lower())
-                if df_cover:
-                    datafile_cover = os.path.join(config.get_book_path(),
-                                                  cur_book.path, "cover.jpg")
-                if not os.path.exists(os.path.join(config.get_book_path(), cur_book.path)):
-                    os.makedirs(os.path.join(config.get_book_path(), cur_book.path))
-                df.GetContentFile(datafile)
-                if df_cover:
-                    df_cover.GetContentFile(datafile_cover)
-                worker_db.session.close()
-            else:
-                # ToDo Include cover in error handling
-                error_message = _("%(format)s not found on Google Drive: %(fn)s",
-                                  format=self.settings['old_book_format'],
-                                  fn=data.name + "." + self.settings['old_book_format'].lower())
-                worker_db.session.close()
-                return self._handleError(error_message)
+            with app.app_context():
+                worker_db = db.CalibreDB(app)
+                cur_book = worker_db.get_book(self.book_id)
+                self.title = cur_book.title
+                data = worker_db.get_book_format(self.book_id, self.settings['old_book_format'])
+                df = gdriveutils.getFileFromEbooksFolder(cur_book.path,
+                                                         data.name + "." + self.settings['old_book_format'].lower())
+                df_cover = gdriveutils.getFileFromEbooksFolder(cur_book.path, "cover.jpg")
+                if df:
+                    datafile_cover = None
+                    datafile = os.path.join(config.get_book_path(),
+                                            cur_book.path,
+                                            data.name + "." + self.settings['old_book_format'].lower())
+                    if df_cover:
+                        datafile_cover = os.path.join(config.get_book_path(),
+                                                      cur_book.path, "cover.jpg")
+                    if not os.path.exists(os.path.join(config.get_book_path(), cur_book.path)):
+                        os.makedirs(os.path.join(config.get_book_path(), cur_book.path))
+                    df.GetContentFile(datafile)
+                    if df_cover:
+                        df_cover.GetContentFile(datafile_cover)
+                    # worker_db.session.close()
+                else:
+                    # ToDo Include cover in error handling
+                    error_message = _("%(format)s not found on Google Drive: %(fn)s",
+                                      format=self.settings['old_book_format'],
+                                      fn=data.name + "." + self.settings['old_book_format'].lower())
+                    # worker_db.session.close()
+                    return self._handleError(error_message)
 
         filename = self._convert_ebook_format()
         if config.config_use_google_drive:
@@ -106,7 +109,7 @@ class TaskConvert(CalibreTask):
             # if we're sending to E-Reader after converting, create a one-off task and run it immediately
             # todo: figure out how to incorporate this into the progress
             try:
-                EmailText = N_(u"%(book)s send to E-Reader", book=escape(self.title))
+                EmailText = N_(u"%(book)s send to E-Reader", book=escape(self.title))
                 for email in self.ereader_mail.split(','):
                     email = strip_whitespaces(email)
                     worker_thread.add(self.user, TaskEmail(self.settings['subject'],
@@ -124,95 +127,96 @@ class TaskConvert(CalibreTask):
 
     def _convert_ebook_format(self):
         error_message = None
-        local_db = db.CalibreDB(expire_on_commit=False, init=True)
-        file_path = self.file_path
-        book_id = self.book_id
-        format_old_ext = '.' + self.settings['old_book_format'].lower()
-        format_new_ext = '.' + self.settings['new_book_format'].lower()
-
-        # check to see if destination format already exists - or if book is in database
-        # if it does - mark the conversion task as complete and return a success
-        # this will allow to send to E-Reader workflow to continue to work
-        if os.path.isfile(file_path + format_new_ext) or\
-                local_db.get_book_format(self.book_id, self.settings['new_book_format']):
-            log.info("Book id %d already converted to %s", book_id, format_new_ext)
-            cur_book = local_db.get_book(book_id)
-            self.title = cur_book.title
-            self.results['path'] = cur_book.path
-            self.results['title'] = self.title
-            new_format = local_db.session.query(db.Data).filter(db.Data.book == book_id)\
-                .filter(db.Data.format == self.settings['new_book_format'].upper()).one_or_none()
-            if not new_format:
-                new_format = db.Data(name=os.path.basename(file_path),
-                                     book_format=self.settings['new_book_format'].upper(),
-                                     book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
-                try:
-                    local_db.session.merge(new_format)
-                    local_db.session.commit()
-                except SQLAlchemyError as e:
-                    local_db.session.rollback()
-                    log.error("Database error: %s", e)
-                    local_db.session.close()
-                    self._handleError(N_("Oops! Database Error: %(error)s.", error=e))
-                    return
-            self._handleSuccess()
-            local_db.session.close()
-            return os.path.basename(file_path + format_new_ext)
-        else:
-            log.info("Book id %d - target format of %s does not exist. Moving forward with convert.",
-                     book_id,
-                     format_new_ext)
-
-        if config.config_kepubifypath and format_old_ext == '.epub' and format_new_ext == '.kepub':
-            check, error_message = self._convert_kepubify(file_path,
-                                                          format_old_ext,
-                                                          format_new_ext)
-        else:
-            # check if calibre converter-executable is existing
-            if not os.path.exists(config.config_converterpath):
-                self._handleError(N_("Calibre ebook-convert %(tool)s not found", tool=config.config_converterpath))
-                return
-            has_cover = local_db.get_book(book_id).has_cover
-            check, error_message = self._convert_calibre(file_path, format_old_ext, format_new_ext, has_cover)
-
-        if check == 0:
-            cur_book = local_db.get_book(book_id)
-            if os.path.isfile(file_path + format_new_ext):
-                new_format = local_db.session.query(db.Data).filter(db.Data.book == book_id) \
-                    .filter(db.Data.format == self.settings['new_book_format'].upper()).one_or_none()
-                if not new_format:
-                    new_format = db.Data(name=cur_book.data[0].name,
-                                         book_format=self.settings['new_book_format'].upper(),
-                                         book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
-                try:
-                    local_db.session.merge(new_format)
-                    local_db.session.commit()
-                    if self.settings['new_book_format'].upper() in ['KEPUB', 'EPUB', 'EPUB3']:
-                        ub_session = init_db_thread()
-                        remove_synced_book(book_id, True, ub_session)
-                        ub_session.close()
-                except SQLAlchemyError as e:
-                    local_db.session.rollback()
-                    log.error("Database error: %s", e)
-                    local_db.session.close()
-                    self._handleError(error_message)
-                    return
-                self.results['path'] = cur_book.path
-                self.title = cur_book.title
-                self.results['title'] = self.title
-                if not config.config_use_google_drive:
-                    self._handleSuccess()
-                return os.path.basename(file_path + format_new_ext)
-            else:
-                error_message = N_('%(format)s format not found on disk', format=format_new_ext.upper())
-        local_db.session.close()
-        log.info("ebook converter failed with error while converting book")
-        if not error_message:
-            error_message = N_('Ebook converter failed with unknown error')
-        else:
-            log.error(error_message)
-        self._handleError(error_message)
-        return
+        with app.app_context():
+            local_db = db.CalibreDB(app)
+            file_path = self.file_path
+            book_id = self.book_id
+            format_old_ext = '.' + self.settings['old_book_format'].lower()
+            format_new_ext = '.' + self.settings['new_book_format'].lower()
+
+            # check to see if destination format already exists - or if book is in database
+            # if it does - mark the conversion task as complete and return a success
+            # this will allow to send to E-Reader workflow to continue to work
+            if os.path.isfile(file_path + format_new_ext) or\
+                    local_db.get_book_format(self.book_id, self.settings['new_book_format']):
+                log.info("Book id %d already converted to %s", book_id, format_new_ext)
+                cur_book = local_db.get_book(book_id)
+                self.title = cur_book.title
+                self.results['path'] = cur_book.path
+                self.results['title'] = self.title
+                new_format = local_db.session.query(db.Data).filter(db.Data.book == book_id)\
+                    .filter(db.Data.format == self.settings['new_book_format'].upper()).one_or_none()
+                if not new_format:
+                    new_format = db.Data(name=os.path.basename(file_path),
+                                         book_format=self.settings['new_book_format'].upper(),
+                                         book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
+                    try:
+                        local_db.session.merge(new_format)
+                        local_db.session.commit()
+                    except SQLAlchemyError as e:
+                        local_db.session.rollback()
+                        log.error("Database error: %s", e)
+                        local_db.session.close()
+                        self._handleError(N_("Oops! Database Error: %(error)s.", error=e))
+                        return
+                self._handleSuccess()
+                local_db.session.close()
+                return os.path.basename(file_path + format_new_ext)
+            else:
+                log.info("Book id %d - target format of %s does not exist. Moving forward with convert.",
+                         book_id,
+                         format_new_ext)
+
+            if config.config_kepubifypath and format_old_ext == '.epub' and format_new_ext == '.kepub':
+                check, error_message = self._convert_kepubify(file_path,
+                                                              format_old_ext,
+                                                              format_new_ext)
+            else:
+                # check if calibre converter-executable is existing
+                if not os.path.exists(config.config_converterpath):
+                    self._handleError(N_("Calibre ebook-convert %(tool)s not found", tool=config.config_converterpath))
+                    return
+                has_cover = local_db.get_book(book_id).has_cover
+                check, error_message = self._convert_calibre(file_path, format_old_ext, format_new_ext, has_cover)
+
+            if check == 0:
+                cur_book = local_db.get_book(book_id)
+                if os.path.isfile(file_path + format_new_ext):
+                    new_format = local_db.session.query(db.Data).filter(db.Data.book == book_id) \
+                        .filter(db.Data.format == self.settings['new_book_format'].upper()).one_or_none()
+                    if not new_format:
+                        new_format = db.Data(name=cur_book.data[0].name,
+                                             book_format=self.settings['new_book_format'].upper(),
+                                             book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
+                    try:
+                        local_db.session.merge(new_format)
+                        local_db.session.commit()
+                        if self.settings['new_book_format'].upper() in ['KEPUB', 'EPUB', 'EPUB3']:
+                            ub_session = init_db_thread()
+                            remove_synced_book(book_id, True, ub_session)
+                            ub_session.close()
+                    except SQLAlchemyError as e:
+                        local_db.session.rollback()
+                        log.error("Database error: %s", e)
+                        local_db.session.close()
+                        self._handleError(error_message)
+                        return
+                    self.results['path'] = cur_book.path
+                    self.title = cur_book.title
+                    self.results['title'] = self.title
+                    if not config.config_use_google_drive:
+                        self._handleSuccess()
+                        return os.path.basename(file_path + format_new_ext)
+                    local_db.session.close()
+                    return os.path.basename(file_path + format_new_ext)
+                else:
+                    error_message = N_('%(format)s format not found on disk', format=format_new_ext.upper())
+            local_db.session.close()
+            log.info("ebook converter failed with error while converting book")
+            if not error_message:
+                error_message = N_('Ebook converter failed with unknown error')
+            else:
+                log.error(error_message)
+            self._handleError(error_message)
+            return
 
     def _convert_kepubify(self, file_path, format_old_ext, format_new_ext):
         if config.config_embed_metadata and config.config_binariesdir:
@@ -18,7 +18,7 @@
 
 from flask_babel import lazy_gettext as N_
 
-from cps import config, logger, db, ub
+from cps import config, logger, db, ub, app
 from cps.services.worker import CalibreTask
 
 
@@ -26,11 +26,13 @@ class TaskReconnectDatabase(CalibreTask):
     def __init__(self, task_message=N_('Reconnecting Calibre database')):
         super(TaskReconnectDatabase, self).__init__(task_message)
         self.log = logger.create()
-        self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
+        # self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
 
     def run(self, worker_thread):
-        self.calibre_db.reconnect_db(config, ub.app_DB_path)
-        self.calibre_db.session.close()
+        with app.app_context():
+            calibre_db = db.CalibreDB(app)
+            calibre_db.reconnect_db(config, ub.app_DB_path)
+            # self.calibre_db.session.close()
         self._handleSuccess()
 
     @property
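The reason the background tasks now run their database work inside "with app.app_context():" is that flask.g, where the per-context session is cached, only exists while an application or request context is active, and a worker thread has neither by default. A minimal illustration of the mechanism (generic Flask code, not taken from this commit):

import threading
from flask import Flask, g

app = Flask(__name__)

def worker():
    # Background threads have no request context, so push an app context explicitly.
    with app.app_context():
        g.job_state = "running"   # safe here: flask.g is bound to this context
        # ... open the per-context database session and do the work ...
    # on exit the context is popped; g.job_state and any cached session are gone

threading.Thread(target=worker).start()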
@@ -19,7 +19,7 @@
 import os
 from lxml import etree
 
-from cps import config, db, gdriveutils, logger
+from cps import config, db, gdriveutils, logger, app
 from cps.services.worker import CalibreTask
 from flask_babel import lazy_gettext as N_
 
@@ -34,7 +34,7 @@ class TaskBackupMetadata(CalibreTask):
                  task_message=N_('Backing up Metadata')):
         super(TaskBackupMetadata, self).__init__(task_message)
         self.log = logger.create()
-        self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
+        # self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
         self.export_language = export_language
         self.translated_title = translated_title
         self.set_dirty = set_dirty
@@ -46,47 +46,51 @@ class TaskBackupMetadata(CalibreTask):
             self.backup_metadata()
 
     def set_all_books_dirty(self):
-        try:
-            books = self.calibre_db.session.query(db.Books).all()
-            for book in books:
-                self.calibre_db.set_metadata_dirty(book.id)
-            self.calibre_db.session.commit()
-            self._handleSuccess()
-        except Exception as ex:
-            self.log.debug('Error adding book for backup: ' + str(ex))
-            self._handleError('Error adding book for backup: ' + str(ex))
-            self.calibre_db.session.rollback()
-        self.calibre_db.session.close()
+        with app.app_context():
+            calibre_dbb = db.CalibreDB(app)
+            try:
+                books = calibre_dbb.session.query(db.Books).all()
+                for book in books:
+                    calibre_dbb.set_metadata_dirty(book.id)
+                calibre_dbb.session.commit()
+                self._handleSuccess()
+            except Exception as ex:
+                self.log.debug('Error adding book for backup: ' + str(ex))
+                self._handleError('Error adding book for backup: ' + str(ex))
+                calibre_dbb.session.rollback()
+            # self.calibre_db.session.close()
 
     def backup_metadata(self):
-        try:
-            metadata_backup = self.calibre_db.session.query(db.Metadata_Dirtied).all()
-            custom_columns = (self.calibre_db.session.query(db.CustomColumns)
-                              .filter(db.CustomColumns.mark_for_delete == 0)
-                              .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions))
-                              .order_by(db.CustomColumns.label).all())
-            count = len(metadata_backup)
-            i = 0
-            for backup in metadata_backup:
-                book = self.calibre_db.session.query(db.Books).filter(db.Books.id == backup.book).one_or_none()
-                self.calibre_db.session.query(db.Metadata_Dirtied).filter(
-                    db.Metadata_Dirtied.book == backup.book).delete()
-                self.calibre_db.session.commit()
-                if book:
-                    self.open_metadata(book, custom_columns)
-                else:
-                    self.log.error("Book {} not found in database".format(backup.book))
-                i += 1
-                self.progress = (1.0 / count) * i
-            self._handleSuccess()
-            self.calibre_db.session.close()
-        except Exception as ex:
-            b = "NaN" if not hasattr(book, 'id') else book.id
-            self.log.debug('Error creating metadata backup for book {}: '.format(b) + str(ex))
-            self._handleError('Error creating metadata backup: ' + str(ex))
-            self.calibre_db.session.rollback()
-            self.calibre_db.session.close()
+        with app.app_context():
+            try:
+                calibre_dbb = db.CalibreDB(app)
+                metadata_backup = calibre_dbb.session.query(db.Metadata_Dirtied).all()
+                custom_columns = (calibre_dbb.session.query(db.CustomColumns)
+                                  .filter(db.CustomColumns.mark_for_delete == 0)
+                                  .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions))
+                                  .order_by(db.CustomColumns.label).all())
+                count = len(metadata_backup)
+                i = 0
+                for backup in metadata_backup:
+                    book = calibre_dbb.session.query(db.Books).filter(db.Books.id == backup.book).one_or_none()
+                    calibre_dbb.session.query(db.Metadata_Dirtied).filter(
+                        db.Metadata_Dirtied.book == backup.book).delete()
+                    calibre_dbb.session.commit()
+                    if book:
+                        self.open_metadata(book, custom_columns)
+                    else:
+                        self.log.error("Book {} not found in database".format(backup.book))
+                    i += 1
+                    self.progress = (1.0 / count) * i
+                self._handleSuccess()
+                # self.calibre_db.session.close()
+
+            except Exception as ex:
+                b = "NaN" if not hasattr(book, 'id') else book.id
+                self.log.debug('Error creating metadata backup for book {}: '.format(b) + str(ex))
+                self._handleError('Error creating metadata backup: ' + str(ex))
+                calibre_dbb.session.rollback()
+                # self.calibre_db.session.close()
 
     def open_metadata(self, book, custom_columns):
         # package = self.create_new_metadata_backup(book, custom_columns)
@@ -23,10 +23,11 @@ from io import BytesIO
 from datetime import datetime, timezone
 
 from .. import constants
-from cps import config, db, fs, gdriveutils, logger, ub
+from cps import config, db, fs, gdriveutils, logger, ub, app
 from cps.services.worker import CalibreTask, STAT_CANCELLED, STAT_ENDED
 from sqlalchemy import func, text, or_
 from flask_babel import lazy_gettext as N_
 
 try:
     from wand.image import Image
     use_IM = True
@@ -113,9 +114,10 @@ class TaskGenerateCoverThumbnails(CalibreTask):
     @staticmethod
     def get_books_with_covers(book_id=-1):
         filter_exp = (db.Books.id == book_id) if book_id != -1 else True
-        calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
-        books_cover = calibre_db.session.query(db.Books).filter(db.Books.has_cover == 1).filter(filter_exp).all()
-        calibre_db.session.close()
+        with app.app_context():
+            calibre_db = db.CalibreDB(app)  # , expire_on_commit=False, init=True)
+            books_cover = calibre_db.session.query(db.Books).filter(db.Books.has_cover == 1).filter(filter_exp).all()
+            # calibre_db.session.close()
         return books_cover
 
     def get_book_cover_thumbnails(self, book_id):
@@ -246,7 +248,7 @@ class TaskGenerateSeriesThumbnails(CalibreTask):
         super(TaskGenerateSeriesThumbnails, self).__init__(task_message)
         self.log = logger.create()
         self.app_db_session = ub.get_new_session_instance()
-        self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
+        # self.calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
         self.cache = fs.FileSystem()
         self.resolutions = [
             constants.COVER_THUMBNAIL_SMALL,
@@ -254,58 +256,60 @@ class TaskGenerateSeriesThumbnails(CalibreTask):
         ]
 
     def run(self, worker_thread):
-        if self.calibre_db.session and use_IM and self.stat != STAT_CANCELLED and self.stat != STAT_ENDED:
-            self.message = 'Scanning Series'
-            all_series = self.get_series_with_four_plus_books()
-            count = len(all_series)
-
-            total_generated = 0
-            for i, series in enumerate(all_series):
-                generated = 0
-                series_thumbnails = self.get_series_thumbnails(series.id)
-                series_books = self.get_series_books(series.id)
-
-                # Generate new thumbnails for missing covers
-                resolutions = list(map(lambda t: t.resolution, series_thumbnails))
-                missing_resolutions = list(set(self.resolutions).difference(resolutions))
-                for resolution in missing_resolutions:
-                    generated += 1
-                    self.create_series_thumbnail(series, series_books, resolution)
-
-                # Replace outdated or missing thumbnails
-                for thumbnail in series_thumbnails:
-                    if any(book.last_modified > thumbnail.generated_at for book in series_books):
-                        generated += 1
-                        self.update_series_thumbnail(series_books, thumbnail)
-
-                    elif not self.cache.get_cache_file_exists(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS):
-                        generated += 1
-                        self.update_series_thumbnail(series_books, thumbnail)
-
-                # Increment the progress
-                self.progress = (1.0 / count) * i
-
-                if generated > 0:
-                    total_generated += generated
-                    self.message = N_('Generated {0} series thumbnails').format(total_generated)
-
-                # Check if job has been cancelled or ended
-                if self.stat == STAT_CANCELLED:
-                    self.log.info(f'GenerateSeriesThumbnails task has been cancelled.')
-                    return
-
-                if self.stat == STAT_ENDED:
-                    self.log.info(f'GenerateSeriesThumbnails task has been ended.')
-                    return
-
-            if total_generated == 0:
-                self.self_cleanup = True
-
-        self._handleSuccess()
-        self.app_db_session.remove()
-
-    def get_series_with_four_plus_books(self):
-        return self.calibre_db.session \
+        with app.app_context():
+            calibre_db = db.CalibreDB(app)
+            if calibre_db.session and use_IM and self.stat != STAT_CANCELLED and self.stat != STAT_ENDED:
+                self.message = 'Scanning Series'
+                all_series = self.get_series_with_four_plus_books(calibre_db)
+                count = len(all_series)
+
+                total_generated = 0
+                for i, series in enumerate(all_series):
+                    generated = 0
+                    series_thumbnails = self.get_series_thumbnails(series.id)
+                    series_books = self.get_series_books(series.id, calibre_db)
+
+                    # Generate new thumbnails for missing covers
+                    resolutions = list(map(lambda t: t.resolution, series_thumbnails))
+                    missing_resolutions = list(set(self.resolutions).difference(resolutions))
+                    for resolution in missing_resolutions:
+                        generated += 1
+                        self.create_series_thumbnail(series, series_books, resolution)
+
+                    # Replace outdated or missing thumbnails
+                    for thumbnail in series_thumbnails:
+                        if any(book.last_modified > thumbnail.generated_at for book in series_books):
+                            generated += 1
+                            self.update_series_thumbnail(series_books, thumbnail)
+
+                        elif not self.cache.get_cache_file_exists(thumbnail.filename, constants.CACHE_TYPE_THUMBNAILS):
+                            generated += 1
+                            self.update_series_thumbnail(series_books, thumbnail)
+
+                    # Increment the progress
+                    self.progress = (1.0 / count) * i
+
+                    if generated > 0:
+                        total_generated += generated
+                        self.message = N_('Generated {0} series thumbnails').format(total_generated)
+
+                    # Check if job has been cancelled or ended
+                    if self.stat == STAT_CANCELLED:
+                        self.log.info(f'GenerateSeriesThumbnails task has been cancelled.')
+                        return
+
+                    if self.stat == STAT_ENDED:
+                        self.log.info(f'GenerateSeriesThumbnails task has been ended.')
+                        return
+
+                if total_generated == 0:
+                    self.self_cleanup = True
+
+        self._handleSuccess()
+        self.app_db_session.remove()
+
+    def get_series_with_four_plus_books(self, calibre_db):
+        return calibre_db.session \
             .query(db.Series) \
             .join(db.books_series_link) \
             .join(db.Books) \
@@ -314,8 +318,8 @@ class TaskGenerateSeriesThumbnails(CalibreTask):
             .having(func.count('book_series_link') > 3) \
             .all()
 
-    def get_series_books(self, series_id):
-        return self.calibre_db.session \
+    def get_series_books(self, series_id, calibre_db):
+        return calibre_db.session \
             .query(db.Books) \
             .join(db.books_series_link) \
             .join(db.Series) \
@@ -461,13 +465,15 @@ class TaskClearCoverThumbnailCache(CalibreTask):
 
     def run(self, worker_thread):
         if self.app_db_session:
-            if self.book_id == 0:  # delete superfluous thumbnails
-                calibre_db = db.CalibreDB(expire_on_commit=False, init=True)
-                thumbnails = (calibre_db.session.query(ub.Thumbnail)
-                              .join(db.Books, ub.Thumbnail.entity_id == db.Books.id, isouter=True)
-                              .filter(db.Books.id==None)
-                              .all())
-                calibre_db.session.close()
+            # delete superfluous thumbnails
+            if self.book_id == 0:
+                with app.app_context():
+                    calibre_db = db.CalibreDB(app)
+                    thumbnails = (calibre_db.session.query(ub.Thumbnail)
+                                  .join(db.Books, ub.Thumbnail.entity_id == db.Books.id, isouter=True)
+                                  .filter(db.Books.id==None)
+                                  .all())
+                    # calibre_db.session.close()
             elif self.book_id > 0:  # make sure single book is selected
                 thumbnails = self.get_thumbnails_for_book(self.book_id)
             if self.book_id < 0:
@@ -237,7 +237,7 @@ def pdf_preview(tmp_file_path, tmp_dir):
     if use_generic_pdf_cover:
         return None
     try:
-        cover_file_name = os.path.join(os.path.dirname(tmp_file_path), "cover.jpg")
+        cover_file_name = tmp_file_path + ".jpg"
         with Image() as img:
             img.options["pdf:use-cropbox"] = "true"
             img.read(filename=tmp_file_path + '[0]', resolution=150)
@@ -245,7 +245,7 @@ def pdf_preview(tmp_file_path, tmp_dir):
             if img.alpha_channel:
                 img.alpha_channel = 'remove'
                 img.background_color = Color('white')
-            img.save(filename=os.path.join(tmp_dir, cover_file_name))
+            img.save(filename=cover_file_name)
             return cover_file_name
     except PolicyError as ex:
         log.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex)