Mirror of https://github.com/janeczku/calibre-web, synced 2024-12-20 15:10:31 +00:00
Merge branch 'database_fix' into Develop

Commit fe82583813

cps/db.py (53 lines changed)
@@ -48,6 +48,7 @@ try:
 except ImportError:
     use_unidecode = False
 
+Session = None
 
 cc_exceptions = ['datetime', 'comments', 'composite', 'series']
 cc_classes = {}
@@ -102,41 +103,49 @@ class Identifiers(Base):
         # return {self.type: self.val}
 
     def formatType(self):
-        if self.type == "amazon":
+        format_type = self.type.lower()
+        if format_type == 'amazon':
             return u"Amazon"
-        elif self.type == "isbn":
+        elif format_type.startswith("amazon_"):
+            return u"Amazon.{0}".format(format_type[7:])
+        elif format_type == "isbn":
             return u"ISBN"
-        elif self.type == "doi":
+        elif format_type == "doi":
             return u"DOI"
-        elif self.type == "goodreads":
+        elif format_type == "douban":
+            return u"Douban"
+        elif format_type == "goodreads":
             return u"Goodreads"
-        elif self.type == "google":
+        elif format_type == "google":
             return u"Google Books"
-        elif self.type == "kobo":
+        elif format_type == "kobo":
             return u"Kobo"
-        if self.type == "lubimyczytac":
+        if format_type == "lubimyczytac":
             return u"Lubimyczytac"
         else:
             return self.type
 
     def __repr__(self):
-        if self.type == "amazon" or self.type == "asin":
+        format_type = self.type.lower()
+        if format_type == "amazon" or format_type == "asin":
             return u"https://amzn.com/{0}".format(self.val)
-        elif self.type == "isbn":
+        elif format_type.startswith('amazon_'):
+            return u"https://amazon.{0}/{1}".format(format_type[7:], self.val)
+        elif format_type == "isbn":
             return u"https://www.worldcat.org/isbn/{0}".format(self.val)
-        elif self.type == "doi":
+        elif format_type == "doi":
             return u"https://dx.doi.org/{0}".format(self.val)
-        elif self.type == "goodreads":
+        elif format_type == "goodreads":
             return u"https://www.goodreads.com/book/show/{0}".format(self.val)
-        elif self.type == "douban":
+        elif format_type == "douban":
             return u"https://book.douban.com/subject/{0}".format(self.val)
-        elif self.type == "google":
+        elif format_type == "google":
             return u"https://books.google.com/books?id={0}".format(self.val)
-        elif self.type == "kobo":
+        elif format_type == "kobo":
             return u"https://www.kobo.com/ebook/{0}".format(self.val)
-        elif self.type == "lubimyczytac":
+        elif format_type == "lubimyczytac":
             return u" https://lubimyczytac.pl/ksiazka/{0}".format(self.val)
-        elif self.type == "url":
+        elif format_type == "url":
             return u"{0}".format(self.val)
         else:
             return u""
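Aside: a minimal, self-contained sketch of the case-insensitive identifier handling that the hunk above introduces. The helper name and the tuple return value are illustrative only and not part of cps/db.py; only a few identifier types are shown.

# Sketch: resolve a Calibre identifier type to a display label and a URL,
# comparing case-insensitively the way the patched formatType()/__repr__() do.
def format_identifier(id_type, value):
    t = id_type.lower()
    if t == "amazon":
        return u"Amazon", u"https://amzn.com/{0}".format(value)
    if t.startswith("amazon_"):
        # e.g. "Amazon_DE" -> label "Amazon.de", link on amazon.de
        return u"Amazon.{0}".format(t[7:]), u"https://amazon.{0}/{1}".format(t[7:], value)
    if t == "isbn":
        return u"ISBN", u"https://www.worldcat.org/isbn/{0}".format(value)
    if t == "douban":
        return u"Douban", u"https://book.douban.com/subject/{0}".format(value)
    # Unknown types fall back to the raw type string and no link.
    return id_type, u""

print(format_identifier("Amazon_DE", "3836276844"))
# -> ('Amazon.de', 'https://amazon.de/3836276844')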
@@ -409,6 +418,7 @@ class CalibreDB():
     def setup_db(self, config, app_db_path):
         self.config = config
         self.dispose()
+        global Session
 
         if not config.config_calibre_dir:
             config.invalidate()
@@ -506,7 +516,7 @@ class CalibreDB():
                                backref='books'))
 
         Session = scoped_session(sessionmaker(autocommit=False,
-                                              autoflush=False,
+                                              autoflush=True,
                                               bind=self.engine))
         self.session = Session()
         return True
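Aside: a small runnable sketch of the session setup that the hunk above switches to autoflush=True, assuming SQLAlchemy 1.4+ and using an in-memory SQLite database as a stand-in for the Calibre metadata.db.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker

Base = declarative_base()

class Book(Base):
    __tablename__ = "books"
    id = Column(Integer, primary_key=True)
    title = Column(String)

engine = create_engine("sqlite://")   # stand-in for the Calibre metadata.db
Base.metadata.create_all(engine)

# Same shape as the patched setup_db(): a module-level scoped_session factory.
# autoflush=True means pending objects are flushed before every query.
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
session = Session()

session.add(Book(title="Example"))
# No explicit flush or commit yet, but the query still sees the new row
# because autoflush pushes the pending INSERT first.
print(session.query(Book).filter(Book.title == "Example").count())  # -> 1
session.rollback()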
@@ -591,13 +601,16 @@ class CalibreDB():
         sort_authors = entry.author_sort.split('&')
         authors_ordered = list()
         error = False
+        ids = [a.id for a in entry.authors]
         for auth in sort_authors:
+            results = self.session.query(Authors).filter(Authors.sort == auth.lstrip().strip()).all()
             # ToDo: How to handle not found authorname
-            result = self.session.query(Authors).filter(Authors.sort == auth.lstrip().strip()).first()
-            if not result:
+            if not len(results):
                 error = True
                 break
-            authors_ordered.append(result)
+            for r in results:
+                if r.id in ids:
+                    authors_ordered.append(r)
         if not error:
             entry.authors = authors_ordered
         return entry
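Aside: a plain-Python sketch of the author re-ordering fix above. Instead of taking the first row whose sort name matches (which can belong to an unrelated duplicate author record), only matching rows whose id is already linked to the book are kept. The Author namedtuple and the sample data are illustrative, not the ORM classes.

from collections import namedtuple

Author = namedtuple("Author", ["id", "sort"])

# Rows as they might exist in the authors table, including a duplicate sort name.
authors_table = [Author(1, "Doe, John"), Author(7, "Doe, John"), Author(2, "Roe, Jane")]

# Ids already linked to the book, and the desired order taken from author_sort.
book_author_ids = {7, 2}
sort_authors = ["Doe, John ", " Roe, Jane"]

authors_ordered = []
for name in sort_authors:
    matches = [a for a in authors_table if a.sort == name.strip()]  # like .all()
    # Keep only the matching rows that already belong to this book.
    authors_ordered.extend(a for a in matches if a.id in book_author_ids)

print(authors_ordered)
# [Author(id=7, sort='Doe, John'), Author(id=2, sort='Roe, Jane')]; id 1 is skipped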
@@ -154,8 +154,11 @@ def modify_identifiers(input_identifiers, db_identifiers, db_session):
     input_identifiers is a list of read-to-persist Identifiers objects.
     db_identifiers is a list of already persisted list of Identifiers objects."""
     changed = False
-    input_dict = dict([ (identifier.type.lower(), identifier) for identifier in input_identifiers ])
-    db_dict = dict([ (identifier.type.lower(), identifier) for identifier in db_identifiers ])
+    error = False
+    input_dict = dict([(identifier.type.lower(), identifier) for identifier in input_identifiers])
+    if len(input_identifiers) != len(input_dict):
+        error = True
+    db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers ])
     # delete db identifiers not present in input or modify them with input val
     for identifier_type, identifier in db_dict.items():
         if identifier_type not in input_dict.keys():
@@ -170,7 +173,7 @@ def modify_identifiers(input_identifiers, db_identifiers, db_session):
         if identifier_type not in db_dict.keys():
             db_session.add(identifier)
             changed = True
-    return changed
+    return changed, error
 
 
 @editbook.route("/ajax/delete/<int:book_id>")
 @login_required
@@ -652,10 +655,12 @@ def edit_book(book_id):
         # Handle book comments/description
         modif_date |= edit_book_comments(to_save["description"], book)
 
         # Handle identifiers
         input_identifiers = identifier_list(to_save, book)
-        modif_date |= modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
+        modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
+        if warning:
+            flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
+        modif_date |= modification
         # Handle book tags
         modif_date |= edit_book_tags(to_save['tags'], book)
 
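Aside: a short sketch of why comparing len(input_identifiers) with len(input_dict) detects the case collision that edit_book() now reports with a warning flash. Plain tuples stand in for the Identifiers objects.

# Two submitted identifiers whose types differ only by case ...
input_identifiers = [("ISBN", "111"), ("isbn", "222")]

# ... collapse to a single key once the types are lowered, exactly as in
# modify_identifiers(); the later value silently overwrites the earlier one.
input_dict = dict((id_type.lower(), val) for id_type, val in input_identifiers)

error = len(input_identifiers) != len(input_dict)
print(input_dict)  # {'isbn': '222'}
print(error)       # True -> edit_book() flashes the case-sensitivity warning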
@@ -684,6 +689,7 @@ def edit_book(book_id):
 
         if modif_date:
             book.last_modified = datetime.utcnow()
+            calibre_db.session.merge(book)
         calibre_db.session.commit()
         if config.config_use_google_drive:
             gdriveutils.updateGdriveCalibreFromLocal()
@@ -727,7 +733,7 @@ def identifier_list(to_save, book):
         val_key = id_val_prefix + type_key[len(id_type_prefix):]
         if val_key not in to_save.keys():
             continue
-        result.append( db.Identifiers(to_save[val_key], type_value, book.id) )
+        result.append(db.Identifiers(to_save[val_key], type_value, book.id))
     return result
 
 
 @editbook.route("/upload", methods=["GET", "POST"])
@@ -64,7 +64,7 @@ from . import gdriveutils as gd
 from .constants import STATIC_DIR as _STATIC_DIR
 from .subproc_wrapper import process_wait
 from .services.worker import WorkerThread, STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS
-from .tasks.email import TaskEmail
+from .tasks.mail import TaskEmail
 
 
 log = logger.create()
@@ -194,7 +194,7 @@ class WebServer(object):
         os.execv(sys.executable, arguments)
         return True
 
-    def _killServer(self, ignored_signum, ignored_frame):
+    def _killServer(self, __, ___):
         self.stop()
 
     def stop(self, restart=False):
@@ -3,15 +3,15 @@ from __future__ import division, print_function, unicode_literals
 import threading
 import abc
 import uuid
+import time
 
 try:
     import queue
 except ImportError:
     import Queue as queue
-from datetime import datetime, timedelta
+from datetime import datetime
 from collections import namedtuple
 
-from cps import calibre_db
 from cps import logger
 
 log = logger.create()
@@ -82,33 +82,46 @@ class WorkerThread(threading.Thread):
         tasks = self.queue.to_list() + self.dequeued
         return sorted(tasks, key=lambda x: x.num)
 
+    def cleanup_tasks(self):
+        with self.doLock:
+            dead = []
+            alive = []
+            for x in self.dequeued:
+                (dead if x.task.dead else alive).append(x)
+
+            # if the ones that we need to keep are within the trigger, do nothing else
+            delta = len(self.dequeued) - len(dead)
+            if delta > TASK_CLEANUP_TRIGGER:
+                ret = alive
+            else:
+                # otherwise, lop off the oldest dead tasks until we hit the target trigger
+                ret = sorted(dead, key=lambda x: x.task.end_time)[-TASK_CLEANUP_TRIGGER:] + alive
+
+            self.dequeued = sorted(ret, key=lambda x: x.num)
+
     # Main thread loop starting the different tasks
     def run(self):
         main_thread = _get_main_thread()
         while main_thread.is_alive():
-            item = self.queue.get()
+            try:
+                # this blocks until something is available. This can cause issues when the main thread dies - this
+                # thread will remain alive. We implement a timeout to unblock every second which allows us to check if
+                # the main thread is still alive.
+                # We don't use a daemon here because we don't want the tasks to just be abruptly halted, leading to
+                # possible file / database corruption
+                item = self.queue.get(timeout=1)
+            except queue.Empty as ex:
+                time.sleep(1)
+                continue
 
             with self.doLock:
-                # once we hit our trigger, start cleaning up dead tasks
-                if len(self.dequeued) > TASK_CLEANUP_TRIGGER:
-                    dead = []
-                    alive = []
-                    for x in self.dequeued:
-                        (dead if x.task.dead else alive).append(x)
-
-                    # if the ones that we need to keep are within the trigger, do nothing else
-                    delta = len(self.dequeued) - len(dead)
-                    if delta > TASK_CLEANUP_TRIGGER:
-                        ret = alive
-                    else:
-                        # otherwise, lop off the oldest dead tasks until we hit the target trigger
-                        ret = sorted(dead, key=lambda x: x.task.end_time)[-TASK_CLEANUP_TRIGGER:] + alive
-
-                    self.dequeued = sorted(ret, key=lambda x: x.num)
-
                 # add to list so that in-progress tasks show up
                 self.dequeued.append(item)
 
+            # once we hit our trigger, start cleaning up dead tasks
+            if len(self.dequeued) > TASK_CLEANUP_TRIGGER:
+                self.cleanup_tasks()
+
             # sometimes tasks (like Upload) don't actually have work to do and are created as already finished
             if item.task.stat is STAT_WAITING:
                 # CalibreTask.start() should wrap all exceptions in it's own error handling
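Aside: a runnable sketch of the polling pattern the reworked run() loop uses. Blocking on the queue with a one-second timeout lets the worker notice that the main thread has exited instead of hanging forever in queue.get(), without making the thread a daemon. The names below are illustrative, not the WorkerThread API.

import queue
import threading
import time

task_queue = queue.Queue()

def worker(main_thread):
    while main_thread.is_alive():
        try:
            # Wake up at least once per second so the is_alive() check above
            # runs even when no task ever arrives.
            item = task_queue.get(timeout=1)
        except queue.Empty:
            continue
        print("processing", item)

main = threading.main_thread()
threading.Thread(target=worker, args=(main,)).start()
task_queue.put("convert book 42")
time.sleep(2)
# When the main thread finishes, the worker's next timeout lets it exit on its
# own, so a non-daemon thread never gets killed in the middle of a task.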
@@ -30,7 +30,7 @@ from sqlalchemy.sql.expression import func
 from sqlalchemy.exc import OperationalError, InvalidRequestError
 
 from . import logger, ub, searched_ids, calibre_db
-from .web import render_title_template
+from .web import login_required_if_no_ano, render_title_template
 
 
 shelf = Blueprint('shelf', __name__)
@@ -341,7 +341,7 @@ def delete_shelf(shelf_id):
 
 @shelf.route("/shelf/<int:shelf_id>", defaults={'shelf_type': 1})
 @shelf.route("/shelf/<int:shelf_id>/<int:shelf_type>")
-@login_required
+@login_required_if_no_ano
 def show_shelf(shelf_type, shelf_id):
     shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
 
@@ -74,8 +74,8 @@ $(function () {
             $("#meta-info").html("<ul id=\"book-list\" class=\"media-list\"></ul>");
         }
         if ((ggDone === 3 || (ggDone === 1 && ggResults.length === 0)) &&
-            (dbDone === 3 || (ggDone === 1 && dbResults.length === 0)) &&
-            (cvDone === 3 || (ggDone === 1 && cvResults.length === 0))) {
+            (dbDone === 3 || (dbDone === 1 && dbResults.length === 0)) &&
+            (cvDone === 3 || (cvDone === 1 && cvResults.length === 0))) {
             $("#meta-info").html("<p class=\"text-danger\">" + msg.no_result + "</p>");
             return;
         }
@@ -14,7 +14,7 @@ from cps import logger, config
 from cps.subproc_wrapper import process_open
 from flask_babel import gettext as _
 
-from cps.tasks.email import TaskEmail
+from cps.tasks.mail import TaskEmail
 from cps import gdriveutils
 log = logger.create()
 
@@ -53,6 +53,7 @@ class TaskConvert(CalibreTask):
 
     def _convert_ebook_format(self):
         error_message = None
+        local_session = db.Session()
         file_path = self.file_path
         book_id = self.bookid
         format_old_ext = u'.' + self.settings['old_book_format'].lower()
@@ -92,17 +93,13 @@ class TaskConvert(CalibreTask):
             new_format = db.Data(name=cur_book.data[0].name,
                                  book_format=self.settings['new_book_format'].upper(),
                                  book=book_id, uncompressed_size=os.path.getsize(file_path + format_new_ext))
-
-            cur_book.data.append(new_format)
-
             try:
-                # db.session.merge(cur_book)
-                calibre_db.session.commit()
+                local_session.merge(new_format)
+                local_session.commit()
             except SQLAlchemyError as e:
-                calibre_db.session.rollback()
+                local_session.rollback()
                 log.error("Database error: %s", e)
                 return
-
             self.results['path'] = cur_book.path
             self.results['title'] = cur_book.title
             if config.config_use_google_drive:
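Aside: a sketch of why the convert task switches to its own local_session = db.Session(). With a scoped_session factory, each thread that calls the factory gets its own session to commit or roll back, so the background worker no longer shares calibre_db.session with the web threads. An in-memory SQLite engine stands in for the real database.

import threading

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite://")                  # stand-in database
Session = scoped_session(sessionmaker(bind=engine))  # stand-in for db.Session

def show_session(label):
    # The scoped factory hands every thread its own Session object.
    local_session = Session()
    print(label, id(local_session))
    Session.remove()  # drop this thread's session when the work is done

show_session("main thread:")
worker = threading.Thread(target=show_session, args=("worker thread:",))
worker.start()
worker.join()  # prints a different session id than the main thread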
@@ -193,7 +190,7 @@
                 ele = ele.decode('utf-8')
             log.debug(ele.strip('\n'))
             if not ele.startswith('Traceback') and not ele.startswith(' File'):
-                error_message = _("Calibre failed with error: %(error)s", ele.strip('\n'))
+                error_message = _("Calibre failed with error: %(error)s", error=ele.strip('\n'))
         return check, error_message
 
     @property
@@ -175,6 +175,8 @@ class TaskEmail(CalibreTask):
             text = e.smtp_error.decode('utf-8').replace("\n", '. ')
         elif hasattr(e, "message"):
             text = e.message
+        elif hasattr(e, "args"):
+            text = '\n'.join(e.args)
         else:
             log.exception(e)
             text = ''
@@ -65,7 +65,7 @@
     <table class="table" id="identifier-table">
       {% for identifier in book.identifiers %}
       <tr>
         <td><input type="text" class="form-control" name="identifier-type-{{identifier.type}}" value="{{identifier.type}}" required="required" placeholder="{{_('Identifier Type')}}"></td>
         <td><input type="text" class="form-control" name="identifier-val-{{identifier.type}}" value="{{identifier.val}}" required="required" placeholder="{{_('Identifier Value')}}"></td>
         <td><a class="btn btn-default" onclick="removeIdentifierLine(this)">{{_('Remove')}}</a></td>
       </tr>
@@ -37,7 +37,7 @@
       </div>
       <label for="mail_size">{{_('Attachment Size Limit')}}</label>
       <div class="form-group input-group">
-        <input type="number" min="1" max="600" step="1" class="form-control" name="mail_size" id="mail_size" value="{% if content.mail_size != None %}{{ (content.mail_size / 1024 / 1024)|int }}{% endif %}">
+        <input type="number" min="1" max="600" step="1" class="form-control" name="mail_size" id="mail_size" value="{% if content.mail_size != None %}{{ (content.mail_size / 1024 / 1024)|int }}{% endif %}" required>
         <span class="input-group-btn">
           <button type="button" id="attachement_size" class="btn btn-default" disabled>MB</button>
         </span>
messages.pot (534 lines changed): file diff suppressed because it is too large.
@@ -36,12 +36,16 @@
     <div class="col-xs-12 col-sm-6">
       <div class="row">
         <div class="col-xs-6 col-md-6 col-sm-offset-3" style="margin-top:50px;">
-          <p class='text-justify attribute'><strong>Start Time: </strong>2020-08-29 11:15:36</p>
+
+          <p class='text-justify attribute'><strong>Start Time: </strong>2020-08-30 15:47:09</p>
+
         </div>
       </div>
       <div class="row">
         <div class="col-xs-6 col-md-6 col-sm-offset-3">
-          <p class='text-justify attribute'><strong>Stop Time: </strong>2020-08-29 12:34:46</p>
+
+          <p class='text-justify attribute'><strong>Stop Time: </strong>2020-08-30 17:06:27</p>
+
         </div>
       </div>
       <div class="row">
@@ -356,7 +360,7 @@
             </div>
             <div class="text-left pull-left">
               <pre class="text-left">Traceback (most recent call last):
-  File "/home/matthias/Entwicklung/calibre-web-test/test/test_cover_edit_books.py", line 89, in test_upload_jpg
+  File "/home/matthias/Entwicklung/calibre-web-test/test/test_cover_edit_books.py", line 91, in test_upload_jpg
     self.assertTrue(self.check_element_on_page((By.ID, 'flash_alert')))
 AssertionError: False is not true</pre>
             </div>
@@ -920,7 +924,7 @@ AssertionError: False is not true</pre>
             </div>
             <div class="text-left pull-left">
               <pre class="text-left">Traceback (most recent call last):
-  File "/home/matthias/Entwicklung/calibre-web-test/test/test_edit_books.py", line 734, in test_upload_cover_hdd
+  File "/home/matthias/Entwicklung/calibre-web-test/test/test_edit_books.py", line 735, in test_upload_cover_hdd
     self.assertTrue(False, "Browser-Cache Problem: Old Cover is displayed instead of New Cover")
 AssertionError: False is not true : Browser-Cache Problem: Old Cover is displayed instead of New Cover</pre>
             </div>