mirror of https://github.com/janeczku/calibre-web (synced 2024-12-20 15:10:31 +00:00)

Merge branch 'database_fix' into Develop

commit fe82583813

cps/db.py (53 lines changed)
@@ -48,6 +48,7 @@ try:
 except ImportError:
     use_unidecode = False
 
+Session = None
 
 cc_exceptions = ['datetime', 'comments', 'composite', 'series']
 cc_classes = {}
@@ -102,41 +103,49 @@ class Identifiers(Base):
         # return {self.type: self.val}
 
     def formatType(self):
-        if self.type == "amazon":
+        format_type = self.type.lower()
+        if format_type == 'amazon':
             return u"Amazon"
-        elif self.type == "isbn":
+        elif format_type.startswith("amazon_"):
+            return u"Amazon.{0}".format(format_type[7:])
+        elif format_type == "isbn":
             return u"ISBN"
-        elif self.type == "doi":
+        elif format_type == "doi":
             return u"DOI"
-        elif self.type == "goodreads":
+        elif format_type == "douban":
+            return u"Douban"
+        elif format_type == "goodreads":
             return u"Goodreads"
-        elif self.type == "google":
+        elif format_type == "google":
             return u"Google Books"
-        elif self.type == "kobo":
+        elif format_type == "kobo":
             return u"Kobo"
-        if self.type == "lubimyczytac":
+        if format_type == "lubimyczytac":
             return u"Lubimyczytac"
         else:
             return self.type
 
     def __repr__(self):
-        if self.type == "amazon" or self.type == "asin":
+        format_type = self.type.lower()
+        if format_type == "amazon" or format_type == "asin":
             return u"https://amzn.com/{0}".format(self.val)
-        elif self.type == "isbn":
+        elif format_type.startswith('amazon_'):
+            return u"https://amazon.{0}/{1}".format(format_type[7:], self.val)
+        elif format_type == "isbn":
             return u"https://www.worldcat.org/isbn/{0}".format(self.val)
-        elif self.type == "doi":
+        elif format_type == "doi":
             return u"https://dx.doi.org/{0}".format(self.val)
-        elif self.type == "goodreads":
+        elif format_type == "goodreads":
             return u"https://www.goodreads.com/book/show/{0}".format(self.val)
-        elif self.type == "douban":
+        elif format_type == "douban":
             return u"https://book.douban.com/subject/{0}".format(self.val)
-        elif self.type == "google":
+        elif format_type == "google":
             return u"https://books.google.com/books?id={0}".format(self.val)
-        elif self.type == "kobo":
+        elif format_type == "kobo":
             return u"https://www.kobo.com/ebook/{0}".format(self.val)
-        elif self.type == "lubimyczytac":
+        elif format_type == "lubimyczytac":
             return u" https://lubimyczytac.pl/ksiazka/{0}".format(self.val)
-        elif self.type == "url":
+        elif format_type == "url":
             return u"{0}".format(self.val)
         else:
             return u""
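For orientation, a minimal standalone sketch of the label logic introduced above. format_identifier_label is a hypothetical helper, not part of the commit; it only mirrors the lower-cased startswith("amazon_") handling for region-specific Amazon identifiers.

# Hypothetical helper mirroring the formatType() branches above (not in the commit).
def format_identifier_label(id_type):
    format_type = id_type.lower()
    if format_type == "amazon":
        return u"Amazon"
    elif format_type.startswith("amazon_"):
        # e.g. "amazon_de" -> "Amazon.de"
        return u"Amazon.{0}".format(format_type[7:])
    elif format_type == "isbn":
        return u"ISBN"
    return id_type

print(format_identifier_label("Amazon_DE"))  # Amazon.de
print(format_identifier_label("ISBN"))       # ISBN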
@@ -409,6 +418,7 @@ class CalibreDB():
     def setup_db(self, config, app_db_path):
         self.config = config
         self.dispose()
+        global Session
 
         if not config.config_calibre_dir:
             config.invalidate()
@@ -506,7 +516,7 @@ class CalibreDB():
                                backref='books'))
 
         Session = scoped_session(sessionmaker(autocommit=False,
-                                              autoflush=False,
+                                              autoflush=True,
                                               bind=self.engine))
         self.session = Session()
         return True
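As context for the autoflush change above, a minimal sketch of the scoped-session pattern, assuming an in-memory SQLite engine rather than the project's Calibre database (the autocommit flag is omitted here so the sketch also runs on newer SQLAlchemy versions):

# Minimal sketch of the scoped-session construction above; the SQLite URL is a placeholder.
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite://")
Session = scoped_session(sessionmaker(autoflush=True,   # pending changes are flushed before each query
                                      bind=engine))
session = Session()  # returns the thread-local session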
@@ -591,13 +601,16 @@ class CalibreDB():
         sort_authors = entry.author_sort.split('&')
         authors_ordered = list()
         error = False
+        ids = [a.id for a in entry.authors]
         for auth in sort_authors:
+            results = self.session.query(Authors).filter(Authors.sort == auth.lstrip().strip()).all()
             # ToDo: How to handle not found authorname
-            result = self.session.query(Authors).filter(Authors.sort == auth.lstrip().strip()).first()
-            if not result:
+            if not len(results):
                 error = True
                 break
-            authors_ordered.append(result)
+            for r in results:
+                if r.id in ids:
+                    authors_ordered.append(r)
         if not error:
             entry.authors = authors_ordered
         return entry
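A hedged, self-contained rendering of the author-ordering change above; plain dicts stand in for the Authors query and this is not the project's code:

# Standalone sketch of the new ordering logic: keep every matching author whose id
# is already linked to the book, in author_sort order; leave the list alone on any miss.
def order_authors(sort_string, book_authors):
    ids = [a["id"] for a in book_authors]
    ordered, error = [], False
    for name in (n.strip() for n in sort_string.split('&')):
        results = [a for a in book_authors if a["sort"] == name]  # stands in for the Authors query
        if not results:
            error = True
            break
        ordered.extend(r for r in results if r["id"] in ids)
    return ordered if not error else list(book_authors)

book_authors = [{"id": 1, "sort": "Doe, John"}, {"id": 2, "sort": "Roe, Jane"}]
print(order_authors("Roe, Jane & Doe, John", book_authors))  # Jane first, then John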
@@ -154,8 +154,11 @@ def modify_identifiers(input_identifiers, db_identifiers, db_session):
     input_identifiers is a list of read-to-persist Identifiers objects.
     db_identifiers is a list of already persisted list of Identifiers objects."""
     changed = False
-    input_dict = dict([ (identifier.type.lower(), identifier) for identifier in input_identifiers ])
-    db_dict = dict([ (identifier.type.lower(), identifier) for identifier in db_identifiers ])
+    error = False
+    input_dict = dict([(identifier.type.lower(), identifier) for identifier in input_identifiers])
+    if len(input_identifiers) != len(input_dict):
+        error = True
+    db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers ])
     # delete db identifiers not present in input or modify them with input val
     for identifier_type, identifier in db_dict.items():
         if identifier_type not in input_dict.keys():
@@ -170,7 +173,7 @@ def modify_identifiers(input_identifiers, db_identifiers, db_session):
         if identifier_type not in db_dict.keys():
             db_session.add(identifier)
             changed = True
-    return changed
+    return changed, error
 
 
 @editbook.route("/ajax/delete/<int:book_id>")
 @login_required
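A small illustration of why modify_identifiers now reports an error flag alongside the changed flag. The sample data is assumed, not from the commit: lower-casing the type collapses case-variants, so a length mismatch signals that one identifier silently overwrote another.

# Assumed sample data; the dict comprehension mirrors the input_dict built above.
input_identifiers = [("Amazon", "B000111"), ("amazon", "B000222"), ("isbn", "9780000000000")]
input_dict = dict((id_type.lower(), val) for id_type, val in input_identifiers)
error = len(input_identifiers) != len(input_dict)
print(error)       # True: "Amazon" and "amazon" collided
print(input_dict)  # the later value wins, hence the warning flashed in edit_book()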
@@ -654,8 +657,10 @@ def edit_book(book_id):
 
         # Handle identifiers
         input_identifiers = identifier_list(to_save, book)
-        modif_date |= modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
-
+        modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
+        if warning:
+            flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
+        modif_date |= modification
         # Handle book tags
         modif_date |= edit_book_tags(to_save['tags'], book)
 
@@ -684,6 +689,7 @@ def edit_book(book_id):
 
         if modif_date:
             book.last_modified = datetime.utcnow()
+            calibre_db.session.merge(book)
         calibre_db.session.commit()
         if config.config_use_google_drive:
             gdriveutils.updateGdriveCalibreFromLocal()
@@ -727,7 +733,7 @@ def identifier_list(to_save, book):
         val_key = id_val_prefix + type_key[len(id_type_prefix):]
         if val_key not in to_save.keys():
             continue
-        result.append( db.Identifiers(to_save[val_key], type_value, book.id) )
+        result.append(db.Identifiers(to_save[val_key], type_value, book.id))
     return result
 
 
 @editbook.route("/upload", methods=["GET", "POST"])
@@ -64,7 +64,7 @@ from . import gdriveutils as gd
 from .constants import STATIC_DIR as _STATIC_DIR
 from .subproc_wrapper import process_wait
 from .services.worker import WorkerThread, STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS
-from .tasks.email import TaskEmail
+from .tasks.mail import TaskEmail
 
 
 log = logger.create()
@@ -194,7 +194,7 @@ class WebServer(object):
             os.execv(sys.executable, arguments)
         return True
 
-    def _killServer(self, ignored_signum, ignored_frame):
+    def _killServer(self, __, ___):
         self.stop()
 
     def stop(self, restart=False):
@@ -3,15 +3,15 @@ from __future__ import division, print_function, unicode_literals
 import threading
 import abc
 import uuid
+import time
 
 try:
     import queue
 except ImportError:
     import Queue as queue
-from datetime import datetime, timedelta
+from datetime import datetime
 from collections import namedtuple
 
 from cps import calibre_db
 from cps import logger
 
 log = logger.create()
@@ -82,16 +82,8 @@ class WorkerThread(threading.Thread):
         tasks = self.queue.to_list() + self.dequeued
         return sorted(tasks, key=lambda x: x.num)
 
-    # Main thread loop starting the different tasks
-    def run(self):
-        main_thread = _get_main_thread()
-        while main_thread.is_alive():
-            item = self.queue.get()
-
     def cleanup_tasks(self):
         with self.doLock:
             # once we hit our trigger, start cleaning up dead tasks
             if len(self.dequeued) > TASK_CLEANUP_TRIGGER:
                 dead = []
                 alive = []
                 for x in self.dequeued:
@@ -106,9 +98,30 @@ class WorkerThread(threading.Thread):
                 ret = sorted(dead, key=lambda x: x.task.end_time)[-TASK_CLEANUP_TRIGGER:] + alive
 
             self.dequeued = sorted(ret, key=lambda x: x.num)
 
+    # Main thread loop starting the different tasks
+    def run(self):
+        main_thread = _get_main_thread()
+        while main_thread.is_alive():
+            try:
+                # this blocks until something is available. This can cause issues when the main thread dies - this
+                # thread will remain alive. We implement a timeout to unblock every second which allows us to check if
+                # the main thread is still alive.
+                # We don't use a daemon here because we don't want the tasks to just be abruptly halted, leading to
+                # possible file / database corruption
+                item = self.queue.get(timeout=1)
+            except queue.Empty as ex:
+                time.sleep(1)
+                continue
+
             with self.doLock:
                 # add to list so that in-progress tasks show up
                 self.dequeued.append(item)
 
+            # once we hit our trigger, start cleaning up dead tasks
+            if len(self.dequeued) > TASK_CLEANUP_TRIGGER:
+                self.cleanup_tasks()
+
             # sometimes tasks (like Upload) don't actually have work to do and are created as already finished
             if item.task.stat is STAT_WAITING:
                 # CalibreTask.start() should wrap all exceptions in it's own error handling
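A Python 3 sketch of the timed-get loop added above, reduced to its essentials. The names and the demo task are assumptions, not the project's WorkerThread:

# Demo of the queue.get(timeout=1) pattern: the worker notices the main thread
# exiting instead of blocking forever, without having to be a daemon thread.
import queue
import threading
import time

def run(q, main_thread):
    while main_thread.is_alive():
        try:
            item = q.get(timeout=1)   # unblock every second to re-check liveness
        except queue.Empty:
            time.sleep(1)
            continue
        print("processing", item)

q = queue.Queue()
worker = threading.Thread(target=run, args=(q, threading.main_thread()))
worker.start()
q.put("task")   # the worker stops on its own shortly after the main thread finishes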
@@ -30,7 +30,7 @@ from sqlalchemy.sql.expression import func
 from sqlalchemy.exc import OperationalError, InvalidRequestError
 
 from . import logger, ub, searched_ids, calibre_db
-from .web import render_title_template
+from .web import login_required_if_no_ano, render_title_template
 
 
 shelf = Blueprint('shelf', __name__)
@@ -341,7 +341,7 @@ def delete_shelf(shelf_id):
 
 @shelf.route("/shelf/<int:shelf_id>", defaults={'shelf_type': 1})
 @shelf.route("/shelf/<int:shelf_id>/<int:shelf_type>")
-@login_required
+@login_required_if_no_ano
 def show_shelf(shelf_type, shelf_id):
     shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
 
@@ -74,8 +74,8 @@ $(function () {
         $("#meta-info").html("<ul id=\"book-list\" class=\"media-list\"></ul>");
     }
     if ((ggDone === 3 || (ggDone === 1 && ggResults.length === 0)) &&
-        (dbDone === 3 || (ggDone === 1 && dbResults.length === 0)) &&
-        (cvDone === 3 || (ggDone === 1 && cvResults.length === 0))) {
+        (dbDone === 3 || (dbDone === 1 && dbResults.length === 0)) &&
+        (cvDone === 3 || (cvDone === 1 && cvResults.length === 0))) {
         $("#meta-info").html("<p class=\"text-danger\">" + msg.no_result + "</p>");
         return;
     }
@@ -14,7 +14,7 @@ from cps import logger, config
 from cps.subproc_wrapper import process_open
 from flask_babel import gettext as _
 
-from cps.tasks.email import TaskEmail
+from cps.tasks.mail import TaskEmail
 from cps import gdriveutils
 log = logger.create()
 
@@ -53,6 +53,7 @@ class TaskConvert(CalibreTask):
 
     def _convert_ebook_format(self):
         error_message = None
+        local_session = db.Session()
         file_path = self.file_path
         book_id = self.bookid
         format_old_ext = u'.' + self.settings['old_book_format'].lower()
@@ -92,17 +93,13 @@ class TaskConvert(CalibreTask):
             new_format = db.Data(name=cur_book.data[0].name,
                                  book_format=self.settings['new_book_format'].upper(),
                                  book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
-
-            cur_book.data.append(new_format)
-
             try:
-                # db.session.merge(cur_book)
-                calibre_db.session.commit()
+                local_session.merge(new_format)
+                local_session.commit()
             except SQLAlchemyError as e:
-                calibre_db.session.rollback()
+                local_session.rollback()
                 log.error("Database error: %s", e)
                 return
-
             self.results['path'] = cur_book.path
             self.results['title'] = cur_book.title
             if config.config_use_google_drive:
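A hedged sketch of the session handling above: the conversion task now works through its own Session() instead of the shared calibre_db session. Session, new_format and log are stand-ins here, not the project's objects.

# Sketch only: persist a new row through a task-local session and roll back on failure.
from sqlalchemy.exc import SQLAlchemyError

def persist_new_format(Session, new_format, log):
    local_session = Session()
    try:
        local_session.merge(new_format)
        local_session.commit()
    except SQLAlchemyError as e:
        local_session.rollback()
        log.error("Database error: %s", e)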
@@ -193,7 +190,7 @@ class TaskConvert(CalibreTask):
                 ele = ele.decode('utf-8')
             log.debug(ele.strip('\n'))
             if not ele.startswith('Traceback') and not ele.startswith('  File'):
-                error_message = _("Calibre failed with error: %(error)s", ele.strip('\n'))
+                error_message = _("Calibre failed with error: %(error)s", error=ele.strip('\n'))
         return check, error_message
 
     @property
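A plain-Python illustration (not flask_babel itself) of the keyword fix above: %(error)s placeholders are filled from named values, which is what the error=... keyword provides.

# The %(error)s placeholder is substituted from a mapping; a bare positional
# value, as before the fix, cannot reach it.
template = "Calibre failed with error: %(error)s"
print(template % {"error": "conversion failed"})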
@@ -175,6 +175,8 @@ class TaskEmail(CalibreTask):
             text = e.smtp_error.decode('utf-8').replace("\n", '. ')
         elif hasattr(e, "message"):
             text = e.message
+        elif hasattr(e, "args"):
+            text = '\n'.join(e.args)
         else:
             log.exception(e)
             text = ''
@@ -37,7 +37,7 @@
         </div>
         <label for="mail_size">{{_('Attachment Size Limit')}}</label>
         <div class="form-group input-group">
-          <input type="number" min="1" max="600" step="1" class="form-control" name="mail_size" id="mail_size" value="{% if content.mail_size != None %}{{ (content.mail_size / 1024 / 1024)|int }}{% endif %}">
+          <input type="number" min="1" max="600" step="1" class="form-control" name="mail_size" id="mail_size" value="{% if content.mail_size != None %}{{ (content.mail_size / 1024 / 1024)|int }}{% endif %}" required>
           <span class="input-group-btn">
             <button type="button" id="attachement_size" class="btn btn-default" disabled>MB</button>
           </span>
(16 binary files not shown; 16 file diffs suppressed because they are too large.)
messages.pot (534 lines changed): file diff suppressed because it is too large.
@@ -36,12 +36,16 @@
     <div class="col-xs-12 col-sm-6">
       <div class="row">
         <div class="col-xs-6 col-md-6 col-sm-offset-3" style="margin-top:50px;">
-          <p class='text-justify attribute'><strong>Start Time: </strong>2020-08-29 11:15:36</p>
+          <p class='text-justify attribute'><strong>Start Time: </strong>2020-08-30 15:47:09</p>
         </div>
       </div>
       <div class="row">
         <div class="col-xs-6 col-md-6 col-sm-offset-3">
-          <p class='text-justify attribute'><strong>Stop Time: </strong>2020-08-29 12:34:46</p>
+          <p class='text-justify attribute'><strong>Stop Time: </strong>2020-08-30 17:06:27</p>
         </div>
       </div>
       <div class="row">
@@ -356,7 +360,7 @@
       </div>
       <div class="text-left pull-left">
         <pre class="text-left">Traceback (most recent call last):
-  File "/home/matthias/Entwicklung/calibre-web-test/test/test_cover_edit_books.py", line 89, in test_upload_jpg
+  File "/home/matthias/Entwicklung/calibre-web-test/test/test_cover_edit_books.py", line 91, in test_upload_jpg
     self.assertTrue(self.check_element_on_page((By.ID, 'flash_alert')))
 AssertionError: False is not true</pre>
       </div>
@@ -920,7 +924,7 @@ AssertionError: False is not true</pre>
       </div>
       <div class="text-left pull-left">
         <pre class="text-left">Traceback (most recent call last):
-  File "/home/matthias/Entwicklung/calibre-web-test/test/test_edit_books.py", line 734, in test_upload_cover_hdd
+  File "/home/matthias/Entwicklung/calibre-web-test/test/test_edit_books.py", line 735, in test_upload_cover_hdd
     self.assertTrue(False, "Browser-Cache Problem: Old Cover is displayed instead of New Cover")
 AssertionError: False is not true : Browser-Cache Problem: Old Cover is displayed instead of New Cover</pre>
       </div>