Mirror of https://github.com/janeczku/calibre-web (synced 2024-12-25 09:30:31 +00:00)

Commit 24c743d23d: Code cosmetics
Parent commit: 7bb5afa585
@@ -34,7 +34,7 @@ def version_info():
 parser = argparse.ArgumentParser(description='Calibre Web is a web app'
 ' providing a interface for browsing, reading and downloading eBooks\n', prog='cps.py')
 parser.add_argument('-p', metavar='path', help='path and name to settings db, e.g. /opt/cw.db')
 parser.add_argument('-g', metavar='path', help='path and name to gdrive db, e.g. /opt/gd.db')
 parser.add_argument('-c', metavar='path',
@@ -25,7 +25,7 @@ import ast
 from sqlalchemy import create_engine
 from sqlalchemy import Table, Column, ForeignKey
-from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float
+from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float, DateTime
 from sqlalchemy.orm import relationship, sessionmaker, scoped_session
 from sqlalchemy.ext.declarative import declarative_base
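The only substantive-looking edit in this hunk is the added DateTime import. The column that uses it is not part of this excerpt, so here is only a hedged sketch of the kind of definition that needs it (hypothetical table, not the project's model):

    # Minimal sketch: the kind of column definition that makes a module extend an
    # existing "from sqlalchemy import ..." line with DateTime.
    from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table

    metadata = MetaData()

    example_shelf = Table(                    # hypothetical table, for illustration only
        'example_shelf', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String),
        Column('last_modified', DateTime),    # a column like this is presumably why the import grew
    )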
cps/helper.py (136 changed lines)

@@ -96,7 +96,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
 # read settings and append converter task to queue
 if kindle_mail:
 settings = config.get_mail_settings()
 settings['subject'] = _('Send to Kindle') # pretranslate Subject for e-mail
 settings['body'] = _(u'This e-mail has been sent via Calibre-Web.')
 # text = _(u"%(format)s: %(book)s", format=new_book_format, book=book.title)
 else:

@@ -108,7 +108,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
 return None
 else:
 error_message = _(u"%(format)s not found: %(fn)s",
 format=old_book_format, fn=data.name + "." + old_book_format.lower())
 return error_message
@@ -141,36 +141,52 @@ def check_send_to_kindle(entry):
 returns all available book formats for sending to Kindle
 """
 if len(entry.data):
-bookformats=list()
+bookformats = list()
 if config.config_ebookconverter == 0:
 # no converter - only for mobi and pdf formats
 for ele in iter(entry.data):
 if 'MOBI' in ele.format:
-bookformats.append({'format':'Mobi','convert':0,'text':_('Send %(format)s to Kindle',format='Mobi')})
+bookformats.append({'format': 'Mobi',
+                    'convert': 0,
+                    'text': _('Send %(format)s to Kindle', format='Mobi')})
 if 'PDF' in ele.format:
-bookformats.append({'format':'Pdf','convert':0,'text':_('Send %(format)s to Kindle',format='Pdf')})
+bookformats.append({'format': 'Pdf',
+                    'convert': 0,
+                    'text': _('Send %(format)s to Kindle', format='Pdf')})
 if 'AZW' in ele.format:
-bookformats.append({'format':'Azw','convert':0,'text':_('Send %(format)s to Kindle',format='Azw')})
-'''if 'AZW3' in ele.format:
-bookformats.append({'format':'Azw3','convert':0,'text':_('Send %(format)s to Kindle',format='Azw3')})'''
+bookformats.append({'format': 'Azw',
+                    'convert': 0,
+                    'text': _('Send %(format)s to Kindle', format='Azw')})
 else:
 formats = list()
 for ele in iter(entry.data):
 formats.append(ele.format)
 if 'MOBI' in formats:
-bookformats.append({'format': 'Mobi','convert':0,'text':_('Send %(format)s to Kindle',format='Mobi')})
+bookformats.append({'format': 'Mobi',
+                    'convert': 0,
+                    'text': _('Send %(format)s to Kindle', format='Mobi')})
 if 'AZW' in formats:
-bookformats.append({'format': 'Azw','convert':0,'text':_('Send %(format)s to Kindle',format='Azw')})
+bookformats.append({'format': 'Azw',
+                    'convert': 0,
+                    'text': _('Send %(format)s to Kindle', format='Azw')})
 if 'PDF' in formats:
-bookformats.append({'format': 'Pdf','convert':0,'text':_('Send %(format)s to Kindle',format='Pdf')})
+bookformats.append({'format': 'Pdf',
+                    'convert': 0,
+                    'text': _('Send %(format)s to Kindle', format='Pdf')})
 if config.config_ebookconverter >= 1:
 if 'EPUB' in formats and not 'MOBI' in formats:
-bookformats.append({'format': 'Mobi','convert':1,
-'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Mobi')})
+bookformats.append({'format': 'Mobi',
+                    'convert':1,
+                    'text': _('Convert %(orig)s to %(format)s and send to Kindle',
+                              orig='Epub',
+                              format='Mobi')})
 if config.config_ebookconverter == 2:
 if 'AZW3' in formats and not 'MOBI' in formats:
-bookformats.append({'format': 'Mobi','convert':2,
-'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Azw3',format='Mobi')})
+bookformats.append({'format': 'Mobi',
+                    'convert': 2,
+                    'text': _('Convert %(orig)s to %(format)s and send to Kindle',
+                              orig='Azw3',
+                              format='Mobi')})
 return bookformats
 else:
 log.error(u'Cannot find book entry %d', entry.id)
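None of the append calls above change behaviour; each entry keeps the same three keys ('format', 'convert', 'text') and is only reflowed onto multiple lines. As a hedged illustration of that shape (not code from the commit):

    # Illustration only: the entries being reflowed above all share one three-key shape,
    # which a small helper could build.
    def kindle_format_entry(fmt, convert=0, text=None):
        return {'format': fmt,
                'convert': convert,
                'text': text or 'Send %s to Kindle' % fmt}

    assert kindle_format_entry('Mobi') == {'format': 'Mobi',
                                           'convert': 0,
                                           'text': 'Send Mobi to Kindle'}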
@@ -204,7 +220,6 @@ def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id):
 # returns None if success, otherwise errormessage
 return convert_book_format(book_id, calibrepath, u'azw3', book_format.lower(), user_id, kindle_mail)

 for entry in iter(book.data):
 if entry.format.upper() == book_format.upper():
 converted_file_name = entry.name + '.' + book_format.lower()

@@ -369,7 +384,7 @@ def update_dir_structure_gdrive(book_id, first_author):
 path = book.path
 gd.updateDatabaseOnEdit(gFile['id'], book.path) # only child folder affected
 else:
 error = _(u'File %(file)s not found on Google Drive', file=book.path) # file not found
 if authordir != new_authordir:
 gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)

@@ -379,7 +394,7 @@ def update_dir_structure_gdrive(book_id, first_author):
 path = book.path
 gd.updateDatabaseOnEdit(gFile['id'], book.path)
 else:
 error = _(u'File %(file)s not found on Google Drive', file=authordir) # file not found
 # Rename all files from old names to new names
 if authordir != new_authordir or titledir != new_titledir:

@@ -395,7 +410,7 @@ def update_dir_structure_gdrive(book_id, first_author):
 def delete_book_gdrive(book, book_format):
-error= False
+error = False
 if book_format:
 name = ''
 for entry in book.data:

@@ -403,12 +418,12 @@ def delete_book_gdrive(book, book_format):
 name = entry.name + '.' + book_format
 gFile = gd.getFileFromEbooksFolder(book.path, name)
 else:
-gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path),book.path.split('/')[1])
+gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
 if gFile:
 gd.deleteDatabaseEntry(gFile['id'])
 gFile.Trash()
 else:
-error =_(u'Book path %(path)s not found on Google Drive', path=book.path) # file not found
+error = _(u'Book path %(path)s not found on Google Drive', path=book.path) # file not found
 return error
@@ -417,24 +432,25 @@ def reset_password(user_id):
 password = generate_random_password()
 existing_user.password = generate_password_hash(password)
 if not config.get_mail_server_configured():
-return (2, None)
+return 2, None
 try:
 ub.session.commit()
 send_registration_mail(existing_user.email, existing_user.nickname, password, True)
-return (1, existing_user.nickname)
+return 1, existing_user.nickname
 except Exception:
 ub.session.rollback()
-return (0, None)
+return 0, None

 def generate_random_password():
 s = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"
 passlen = 8
-return "".join(random.sample(s,passlen ))
+return "".join(random.sample(s, passlen))

 ################################## External interface

-def update_dir_stucture(book_id, calibrepath, first_author = None):
+def update_dir_stucture(book_id, calibrepath, first_author=None):
 if config.config_use_google_drive:
 return update_dir_structure_gdrive(book_id, first_author)
 else:
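Aside: the return-statement changes in reset_password are purely cosmetic, since the comma, not the parentheses, is what builds the tuple. A minimal check (illustration only):

    # The parentheses around a returned tuple are optional; both forms are identical.
    def with_parens():
        return (2, None)

    def without_parens():
        return 2, None

    assert with_parens() == without_parens() == (2, None)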
@@ -454,23 +470,26 @@ def get_cover_on_failure(use_generic_cover):
 else:
 return None

 def get_book_cover(book_id):
 book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
 return get_book_cover_internal(book, use_generic_cover_on_failure=True)

 def get_book_cover_with_uuid(book_uuid,
 use_generic_cover_on_failure=True):
 book = db.session.query(db.Books).filter(db.Books.uuid == book_uuid).first()
 return get_book_cover_internal(book, use_generic_cover_on_failure)

 def get_book_cover_internal(book,
 use_generic_cover_on_failure):
 if book and book.has_cover:
 if config.config_use_google_drive:
 try:
 if not gd.is_gdrive_ready():
 return get_cover_on_failure(use_generic_cover_on_failure)
-path=gd.get_cover_via_gdrive(book.path)
+path = gd.get_cover_via_gdrive(book.path)
 if path:
 return redirect(path)
 else:

@@ -530,7 +549,7 @@ def save_cover(img, book_path):
 return False, _("Only jpg/jpeg/png/webp files are supported as coverfile")
 # convert to jpg because calibre only supports jpg
 if content_type in ('image/png', 'image/webp'):
-if hasattr(img,'stream'):
+if hasattr(img, 'stream'):
 imgc = PILImage.open(img.stream)
 else:
 imgc = PILImage.open(io.BytesIO(img.content))

@@ -539,7 +558,7 @@ def save_cover(img, book_path):
 im.save(tmp_bytesio, format='JPEG')
 img._content = tmp_bytesio.getvalue()
 else:
-if content_type not in ('image/jpeg'):
+if content_type not in 'image/jpeg':
 log.error("Only jpg/jpeg files are supported as coverfile")
 return False, _("Only jpg/jpeg files are supported as coverfile")
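The last change above only drops redundant parentheses: `('image/jpeg')` is a parenthesized string, not a one-element tuple, so both the old and the new line perform a substring membership test. A small reminder (illustration only):

    # ('image/jpeg') is a plain string; a one-element tuple needs a trailing comma.
    assert ('image/jpeg') == 'image/jpeg'
    assert 'image/jpeg' in ('image/jpeg',)      # tuple membership
    assert 'jpeg' in 'image/jpeg'               # substring membership, what both lines above do
    assert 'image/jpeg' not in 'image/png'      # same test, just without the parentheses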
@@ -557,7 +576,6 @@ def save_cover(img, book_path):
 return save_cover_from_filestorage(os.path.join(config.config_calibre_dir, book_path), "cover.jpg", img)

 def do_download_file(book, book_format, data, headers):
 if config.config_use_google_drive:
 startTime = time.time()

@@ -579,7 +597,6 @@ def do_download_file(book, book_format, data, headers):
 ##################################

 def check_unrar(unrarLocation):
 if not unrarLocation:
 return

@@ -601,13 +618,12 @@ def check_unrar(unrarLocation):
 return 'Error excecuting UnRar'

 def json_serial(obj):
 """JSON serializer for objects not serializable by default json code"""
-if isinstance(obj, (datetime)):
+if isinstance(obj, datetime):
 return obj.isoformat()
-if isinstance(obj, (timedelta)):
+if isinstance(obj, timedelta):
 return {
 '__type__': 'timedelta',
 'days': obj.days,

@@ -615,7 +631,7 @@ def json_serial(obj):
 'microseconds': obj.microseconds,
 }
 # return obj.isoformat()
-raise TypeError ("Type %s not serializable" % type(obj))
+raise TypeError("Type %s not serializable" % type(obj))

 # helper function for displaying the runtime of tasks
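json_serial has the usual shape of a fallback hook for json.dumps; the edits here only remove redundant parentheses around the single types. A hedged usage sketch (not necessarily how calibre-web wires it up, and the middle of the timedelta dict is not shown in this excerpt):

    import json
    from datetime import datetime, timedelta

    def json_serial(obj):
        """Fallback serializer in the spirit of the helper above."""
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, timedelta):
            return {'__type__': 'timedelta',
                    'days': obj.days,
                    'seconds': obj.seconds,
                    'microseconds': obj.microseconds}
        raise TypeError("Type %s not serializable" % type(obj))

    # json.dumps calls the 'default' hook for objects it cannot serialize itself.
    print(json.dumps({'started': datetime(2020, 1, 1), 'took': timedelta(minutes=5)},
                     default=json_serial))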
@@ -637,7 +653,7 @@ def format_runtime(runtime):
 # helper function to apply localize status information in tasklist entries
 def render_task_status(tasklist):
-renderedtasklist=list()
+renderedtasklist = list()
 for task in tasklist:
 if task['user'] == current_user.nickname or current_user.role_admin():
 if task['formStarttime']:

@@ -653,7 +669,7 @@ def render_task_status(tasklist):
 task['runtime'] = format_runtime(task['formRuntime'])

 # localize the task status
-if isinstance( task['stat'], int ):
+if isinstance( task['stat'], int):
 if task['stat'] == STAT_WAITING:
 task['status'] = _(u'Waiting')
 elif task['stat'] == STAT_FAIL:

@@ -666,14 +682,14 @@ def render_task_status(tasklist):
 task['status'] = _(u'Unknown Status')

 # localize the task type
-if isinstance( task['taskType'], int ):
+if isinstance( task['taskType'], int):
 if task['taskType'] == TASK_EMAIL:
 task['taskMessage'] = _(u'E-mail: ') + task['taskMess']
 elif task['taskType'] == TASK_CONVERT:
 task['taskMessage'] = _(u'Convert: ') + task['taskMess']
 elif task['taskType'] == TASK_UPLOAD:
 task['taskMessage'] = _(u'Upload: ') + task['taskMess']
 elif task['taskType'] == TASK_CONVERT_ANY:
 task['taskMessage'] = _(u'Convert: ') + task['taskMess']
 else:
 task['taskMessage'] = _(u'Unknown Task: ') + task['taskMess']

@@ -709,11 +725,11 @@ def common_filters(allow_show_archived=False):
 pos_cc_list = current_user.allowed_column_value.split(',')
 pos_content_cc_filter = true() if pos_cc_list == [''] else \
 getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
 any(db.cc_classes[config.config_restricted_column].value.in_(pos_cc_list))
 neg_cc_list = current_user.denied_column_value.split(',')
 neg_content_cc_filter = false() if neg_cc_list == [''] else \
 getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
 any(db.cc_classes[config.config_restricted_column].value.in_(neg_cc_list))
 else:
 pos_content_cc_filter = true()
 neg_content_cc_filter = false()
@@ -733,8 +749,9 @@ def tags_filters():
 # Creates for all stored languages a translated speaking name in the array for the UI
 def speaking_language(languages=None):
 if not languages:
-languages = db.session.query(db.Languages).join(db.books_languages_link).join(db.Books).filter(common_filters())\
-.group_by(text('books_languages_link.lang_code')).all()
+languages = db.session.query(db.Languages).join(db.books_languages_link).join(db.Books)\
+    .filter(common_filters())\
+    .group_by(text('books_languages_link.lang_code')).all()
 for lang in languages:
 try:
 cur_l = LC.parse(lang.lang_code)
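Several hunks here and below only re-wrap long query chains with trailing backslashes. Parentheses give the same result without continuation characters; a minimal illustration with plain string methods (not project code):

    # Both wrappings produce the same chained call; the difference is purely layout.
    text_value = "  Calibre-Web  "

    with_backslashes = text_value.strip()\
        .lower()\
        .replace('-', ' ')

    with_parentheses = (text_value.strip()
                        .lower()
                        .replace('-', ' '))

    assert with_backslashes == with_parentheses == "calibre web"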
@@ -743,6 +760,7 @@ def speaking_language(languages=None):
 lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
 return languages

 # checks if domain is in database (including wildcards)
 # example SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
 # from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/

@@ -787,21 +805,25 @@ def fill_indexpage_with_archived_books(page, database, db_filter, order, allow_s
 randm = false()
 off = int(int(config.config_books_per_page) * (page - 1))
 pagination = Pagination(page, config.config_books_per_page,
-len(db.session.query(database).filter(db_filter).filter(common_filters(allow_show_archived)).all()))
-entries = db.session.query(database).join(*join, isouter=True).filter(db_filter).filter(common_filters(allow_show_archived)).\
-order_by(*order).offset(off).limit(config.config_books_per_page).all()
+len(db.session.query(database).filter(db_filter)
+    .filter(common_filters(allow_show_archived)).all()))
+entries = db.session.query(database).join(*join, isouter=True).filter(db_filter)\
+    .filter(common_filters(allow_show_archived))\
+    .order_by(*order).offset(off).limit(config.config_books_per_page).all()
 for book in entries:
 book = order_authors(book)
 return entries, randm, pagination

-def get_typeahead(database, query, replace=('',''), tag_filter=true()):
+def get_typeahead(database, query, replace=('', ''), tag_filter=true()):
 query = query or ''
 db.session.connection().connection.connection.create_function("lower", 1, lcase)
-entries = db.session.query(database).filter(tag_filter).filter(func.lower(database.name).ilike("%" + query + "%")).all()
+entries = db.session.query(database).filter(tag_filter).\
+    filter(func.lower(database.name).ilike("%" + query + "%")).all()
 json_dumps = json.dumps([dict(name=r.name.replace(*replace)) for r in entries])
 return json_dumps

 # read search results from calibre-database and return it (function is used for feed and simple search
 def get_search_results(term):
 db.session.connection().connection.connection.create_function("lower", 1, lcase)

@@ -820,6 +842,7 @@ def get_search_results(term):
 func.lower(db.Books.title).ilike("%" + term + "%")
 )).all()

 def get_cc_columns():
 tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
 if config.config_columns_to_ignore:

@@ -832,6 +855,7 @@ def get_cc_columns():
 cc = tmpcc
 return cc

 def get_download_link(book_id, book_format):
 book_format = book_format.split(".")[0]
 book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()

@@ -856,7 +880,8 @@ def get_download_link(book_id, book_format):
 else:
 abort(404)

-def check_exists_book(authr,title):
+def check_exists_book(authr, title):
 db.session.connection().connection.connection.create_function("lower", 1, lcase)
 q = list()
 authorterms = re.split(r'\s*&\s*', authr)

@@ -865,11 +890,12 @@ def check_exists_book(authr,title):
 return db.session.query(db.Books).filter(
 and_(db.Books.authors.any(and_(*q)),
 func.lower(db.Books.title).ilike("%" + title + "%")
 )).first()

 ############### Database Helper functions

 def lcase(s):
 try:
 return unidecode.unidecode(s.lower())
@@ -80,9 +80,13 @@ def formatdate_filter(val):
 formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S")
 return format_date(formatdate, format='medium', locale=get_locale())
 except AttributeError as e:
-log.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname)
+log.error('Babel error: %s, Current user locale: %s, Current User: %s', e,
+          current_user.locale,
+          current_user.nickname
+          )
 return formatdate

 @jinjia.app_template_filter('formatdateinput')
 def format_date_input(val):
 conformed_timestamp = re.sub(r"[:]|([-](?!((\d{2}[:]\d{2})|(\d{4}))$))", '', val)
cps/kobo.py (17 changed lines)

@@ -385,7 +385,7 @@ def get_metadata(book):
 name = get_series(book)
 metadata["Series"] = {
 "Name": get_series(book),
-"Number": book.series_index,
+"Number": book.series_index, # ToDo Check int() ?
 "NumberFloat": float(book.series_index),
 # Get a deterministic id based on the series name.
 "Id": uuid.uuid3(uuid.NAMESPACE_DNS, name),
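The "Id" field relies on uuid.uuid3 being deterministic for a given namespace and name, so the same series name always maps to the same UUID. A standalone check (illustration only, hypothetical series name):

    import uuid

    series_name = "Discworld"  # hypothetical series name, for illustration
    id_one = uuid.uuid3(uuid.NAMESPACE_DNS, series_name)
    id_two = uuid.uuid3(uuid.NAMESPACE_DNS, series_name)

    # uuid3 is a name-based (MD5) UUID: same namespace + name always gives the same value.
    assert id_one == id_two
    print(id_one)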
@@ -407,8 +407,10 @@ def HandleTagCreate():
 log.debug("Received malformed v1/library/tags request.")
 abort(400, description="Malformed tags POST request. Data is missing 'Name' or 'Items' field")

+# ToDO: Names are not unique ! -> filter only private shelfs
 shelf = ub.session.query(ub.Shelf).filter(and_(ub.Shelf.name) == name, ub.Shelf.user_id ==
-current_user.id).one_or_none()
+current_user.id).one_or_none() # ToDO: shouldn't it ) at the end
 if shelf and not shelf_lib.check_shelf_edit_permissions(shelf):
 abort(401, description="User is unauthaurized to edit shelf.")

@@ -517,6 +519,7 @@ def HandleTagRemoveItem(tag_id):
 log.debug("Received malformed v1/library/tags/<tag_id>/items/delete request.")
 abort(400, description="Malformed tags POST request. Data is missing 'Items' field")

+# insconsitent to above requests
 shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
 ub.Shelf.user_id == current_user.id).one_or_none()
 if not shelf:

@@ -552,7 +555,8 @@ def HandleTagRemoveItem(tag_id):
 def sync_shelves(sync_token, sync_results):
 new_tags_last_modified = sync_token.tags_last_modified

-for shelf in ub.session.query(ub.ShelfArchive).filter(func.datetime(ub.ShelfArchive.last_modified) > sync_token.tags_last_modified, ub.ShelfArchive.user_id == current_user.id):
+for shelf in ub.session.query(ub.ShelfArchive).filter(func.datetime(ub.ShelfArchive.last_modified) > sync_token.tags_last_modified,
+                                                      ub.ShelfArchive.user_id == current_user.id):
 new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)

 sync_results.append({

@@ -564,7 +568,8 @@ def sync_shelves(sync_token, sync_results):
 }
 })

-for shelf in ub.session.query(ub.Shelf).filter(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified, ub.Shelf.user_id == current_user.id):
+for shelf in ub.session.query(ub.Shelf).filter(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
+                                               ub.Shelf.user_id == current_user.id):
 if not shelf_lib.check_shelf_view_permissions(shelf):
 continue

@@ -600,6 +605,7 @@ def create_kobo_tag(shelf):
 book = db.session.query(db.Books).filter(db.Books.id == book_shelf.book_id).one_or_none()
 if not book:
 log.info(u"Book (id: %s) in BookShelf (id: %s) not found in book database", book_shelf.book_id, shelf.id)
+# ToDo shouldn't it continue?
 return None
 tag["Items"].append(
 {
@@ -769,7 +775,8 @@ def HandleCoverImageRequest(book_uuid, width, height,Quality, isGreyscale):
 height=height), 307)
 else:
 log.debug("Cover for unknown book: %s requested" % book_uuid)
-return redirect_or_proxy_request()
+# additional proxy request make no sense, -> direct return
+return make_response(jsonify({}))
 log.debug("Cover request received for book %s" % book_uuid)
 return book_cover
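For the unknown-book branch the handler now returns an empty JSON body instead of proxying the request onward. A hedged sketch of what those two Flask calls produce on their own (not the project's route):

    from flask import Flask, jsonify, make_response

    app = Flask(__name__)

    with app.test_request_context():
        # make_response(jsonify({})) builds a 200 response whose body is an empty JSON object,
        # which is what the changed line returns when the requested book is unknown.
        resp = make_response(jsonify({}))
        print(resp.status_code, resp.get_data(as_text=True))   # -> 200 {}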
@@ -108,7 +108,7 @@ def setup(log_file, log_level=None):
 r.setLevel(log_level)

 # Otherwise name get's destroyed on windows
 if log_file != LOG_TO_STDERR and log_file != LOG_TO_STDOUT:
 log_file = _absolute_log_file(log_file, DEFAULT_LOG_FILE)

 previous_handler = r.handlers[0] if r.handlers else None
@@ -30,7 +30,7 @@ except ImportError:
 from flask_dance.consumer.storage.sqla import SQLAlchemyStorage as SQLAlchemyBackend
 from flask_dance.consumer.storage.sqla import first, _get_real_user
 from sqlalchemy.orm.exc import NoResultFound
 backend_resultcode = True # prevent storing values with this resultcode
 except ImportError:
 pass

@@ -97,7 +97,7 @@ try:
 def set(self, blueprint, token, user=None, user_id=None):
 uid = first([user_id, self.user_id, blueprint.config.get("user_id")])
 u = first(_get_real_user(ref, self.anon_user)
 for ref in (user, self.user, blueprint.config.get("user")))

 if self.user_required and not u and not uid:
 raise ValueError("Cannot set OAuth token without an associated user")
cps/opds.py (64 changed lines)

@@ -56,8 +56,8 @@ def requires_basic_auth_if_no_ano(f):
 return decorated

-class FeedObject():
-def __init__(self,rating_id , rating_name):
+class FeedObject:
+def __init__(self, rating_id, rating_name):
 self.rating_id = rating_id
 self.rating_name = rating_name

@@ -101,7 +101,7 @@ def feed_normal_search():
 def feed_new():
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
 db.Books, True, [db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

@@ -119,7 +119,8 @@ def feed_discover():
 def feed_best_rated():
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.ratings.any(db.Ratings.rating > 9), [db.Books.timestamp.desc()])
+db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
+[db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

@@ -153,7 +154,8 @@ def feed_hot():
 def feed_authorindex():
 off = request.args.get("offset") or 0
 entries = db.session.query(db.Authors).join(db.books_authors_link).join(db.Books).filter(common_filters())\
-.group_by(text('books_authors_link.author')).order_by(db.Authors.sort).limit(config.config_books_per_page).offset(off)
+.group_by(text('books_authors_link.author')).order_by(db.Authors.sort).limit(config.config_books_per_page)\
+.offset(off)
 pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
 len(db.session.query(db.Authors).all()))
 return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_author', pagination=pagination)
@@ -164,7 +166,9 @@ def feed_authorindex():
 def feed_author(book_id):
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.authors.any(db.Authors.id == book_id), [db.Books.timestamp.desc()])
+db.Books,
+db.Books.authors.any(db.Authors.id == book_id),
+[db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

@@ -173,7 +177,8 @@ def feed_author(book_id):
 def feed_publisherindex():
 off = request.args.get("offset") or 0
 entries = db.session.query(db.Publishers).join(db.books_publishers_link).join(db.Books).filter(common_filters())\
-.group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort).limit(config.config_books_per_page).offset(off)
+.group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort)\
+.limit(config.config_books_per_page).offset(off)
 pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
 len(db.session.query(db.Publishers).all()))
 return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_publisher', pagination=pagination)

@@ -184,7 +189,8 @@ def feed_publisherindex():
 def feed_publisher(book_id):
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.publishers.any(db.Publishers.id == book_id),
+db.Books,
+db.Books.publishers.any(db.Publishers.id == book_id),
 [db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

@@ -205,7 +211,9 @@ def feed_categoryindex():
 def feed_category(book_id):
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.tags.any(db.Tags.id == book_id), [db.Books.timestamp.desc()])
+db.Books,
+db.Books.tags.any(db.Tags.id == book_id),
+[db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

@@ -225,9 +233,12 @@ def feed_seriesindex():
 def feed_series(book_id):
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.series.any(db.Series.id == book_id), [db.Books.series_index])
+db.Books,
+db.Books.series.any(db.Series.id == book_id),
+[db.Books.series_index])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

 @opds.route("/opds/ratings")
 @requires_basic_auth_if_no_ano
 def feed_ratingindex():
@@ -244,16 +255,18 @@ def feed_ratingindex():
 element.append(FeedObject(entry[0].id, "{} Stars".format(entry.name)))
 return render_xml_template('feed.xml', listelements=element, folder='opds.feed_ratings', pagination=pagination)

 @opds.route("/opds/ratings/<book_id>")
 @requires_basic_auth_if_no_ano
 def feed_ratings(book_id):
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.ratings.any(db.Ratings.id == book_id),[db.Books.timestamp.desc()])
+db.Books,
+db.Books.ratings.any(db.Ratings.id == book_id),
+[db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

 @opds.route("/opds/formats")
 @requires_basic_auth_if_no_ano
 def feed_formatindex():

@@ -274,7 +287,9 @@ def feed_formatindex():
 def feed_format(book_id):
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.data.any(db.Data.format == book_id.upper()), [db.Books.timestamp.desc()])
+db.Books,
+db.Books.data.any(db.Data.format == book_id.upper()),
+[db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)

@@ -306,7 +321,9 @@ def feed_languagesindex():
 def feed_languages(book_id):
 off = request.args.get("offset") or 0
 entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
-db.Books, db.Books.languages.any(db.Languages.id == book_id), [db.Books.timestamp.desc()])
+db.Books,
+db.Books.languages.any(db.Languages.id == book_id),
+[db.Books.timestamp.desc()])
 return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@@ -326,7 +343,8 @@ def feed_shelfindex():
 def feed_shelf(book_id):
 off = request.args.get("offset") or 0
 if current_user.is_anonymous:
-shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == book_id, not ub.Shelf.deleted).first()
+shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1,
+                                          ub.Shelf.id == book_id, not ub.Shelf.deleted).first()
 else:
 shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
 ub.Shelf.id == book_id),

@@ -349,11 +367,11 @@ def feed_shelf(book_id):
 @requires_basic_auth_if_no_ano
 @download_required
 def opds_download_link(book_id, book_format):
-return get_download_link(book_id,book_format.lower())
+return get_download_link(book_id, book_format.lower())

 @opds.route("/ajax/book/<string:uuid>/<library>")
-@opds.route("/ajax/book/<string:uuid>",defaults={'library': ""})
+@opds.route("/ajax/book/<string:uuid>", defaults={'library': ""})
 @requires_basic_auth_if_no_ano
 def get_metadata_calibre_companion(uuid, library):
 entry = db.session.query(db.Books).filter(db.Books.uuid.like("%" + uuid + "%")).first()

@@ -369,16 +387,17 @@ def get_metadata_calibre_companion(uuid, library):
 def feed_search(term):
 if term:
 term = term.strip().lower()
-entries = get_search_results( term)
+entries = get_search_results(term)
 entriescount = len(entries) if len(entries) > 0 else 1
 pagination = Pagination(1, entriescount, entriescount)
 return render_xml_template('feed.xml', searchterm=term, entries=entries, pagination=pagination)
 else:
 return render_xml_template('feed.xml', searchterm="")

 def check_auth(username, password):
 if sys.version_info.major == 3:
-username=username.encode('windows-1252')
+username = username.encode('windows-1252')
 user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) ==
 username.decode('utf-8').lower()).first()
 return bool(user and check_password_hash(str(user.password), password))

@@ -392,13 +411,14 @@ def authenticate():
 def render_xml_template(*args, **kwargs):
-#ToDo: return time in current timezone similar to %z
+# ToDo: return time in current timezone similar to %z
 currtime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00")
 xml = render_template(current_time=currtime, instance=config.config_calibre_web_title, *args, **kwargs)
 response = make_response(xml)
 response.headers["Content-Type"] = "application/atom+xml; charset=utf-8"
 return response

 @opds.route("/opds/thumb_240_240/<book_id>")
 @opds.route("/opds/cover_240_240/<book_id>")
 @opds.route("/opds/cover_90_90/<book_id>")

@@ -407,13 +427,15 @@ def render_xml_template(*args, **kwargs):
 def feed_get_cover(book_id):
 return get_book_cover(book_id)

 @opds.route("/opds/readbooks")
 @requires_basic_auth_if_no_ano
 def feed_read_books():
 off = request.args.get("offset") or 0
 result, pagination = render_read_books(int(off) / (int(config.config_books_per_page)) + 1, True, True)
 return render_xml_template('feed.xml', entries=result, pagination=pagination)

 @opds.route("/opds/unreadbooks")
 @requires_basic_auth_if_no_ano
 def feed_unread_books():
@@ -43,7 +43,6 @@ from . import logger
 log = logger.create()

 def _readable_listen_address(address, port):
 if ':' in address:
 address = "[" + address + "]"

@@ -84,7 +83,8 @@ class WebServer(object):
 if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
 self.ssl_args = dict(certfile=certfile_path, keyfile=keyfile_path)
 else:
-log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.')
+log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. '
+            'Ignoring ssl.')
 log.warning('Cert path: %s', certfile_path)
 log.warning('Key path: %s', keyfile_path)
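The re-wrapped warning relies on Python's implicit concatenation of adjacent string literals, so the logged message is unchanged. A one-line check (illustration only):

    # Adjacent string literals are concatenated at compile time, so splitting a long
    # message across lines does not change what gets logged.
    message = ('The specified paths for the ssl certificate file and/or key file seem to be broken. '
               'Ignoring ssl.')
    assert message.endswith('broken. Ignoring ssl.')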
@@ -49,10 +49,11 @@ def get_datetime_from_json(json_object, field_name):
 return datetime.min

-class SyncToken():
+class SyncToken:
 """ The SyncToken is used to persist state accross requests.
-When serialized over the response headers, the Kobo device will propagate the token onto following requests to the service.
-As an example use-case, the SyncToken is used to detect books that have been added to the library since the last time the device synced to the server.
+When serialized over the response headers, the Kobo device will propagate the token onto following
+requests to the service. As an example use-case, the SyncToken is used to detect books that have been added
+to the library since the last time the device synced to the server.

 Attributes:
 books_last_created: Datetime representing the newest book that the device knows about.

@@ -66,10 +67,11 @@ class SyncToken():
 token_schema = {
 "type": "object",
-"properties": {"version": {"type": "string"}, "data": {"type": "object"},},
+"properties": {"version": {"type": "string"}, "data": {"type": "object"}, },
 }
 # This Schema doesn't contain enough information to detect and propagate book deletions from Calibre to the device.
-# A potential solution might be to keep a list of all known book uuids in the token, and look for any missing from the db.
+# A potential solution might be to keep a list of all known book uuids in the token, and look for any missing
+# from the db.
 data_schema_v1 = {
 "type": "object",
 "properties": {
|
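The token_schema shown in this hunk is a plain jsonschema document; a minimal sketch of how such a schema can be checked (the SyncToken encode/decode plumbing is omitted and only assumed here):

from jsonschema import validate
from jsonschema.exceptions import ValidationError

token_schema = {
    "type": "object",
    "properties": {"version": {"type": "string"}, "data": {"type": "object"}, },
}


def is_wellformed_token(token_dict):
    # Return True when the decoded header payload matches the schema above.
    try:
        validate(token_dict, token_schema)
        return True
    except ValidationError:
        return False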
59 cps/shelf.py
@ -25,7 +25,7 @@ from __future__ import division, print_function, unicode_literals
|
|||||||
from flask import Blueprint, request, flash, redirect, url_for
|
from flask import Blueprint, request, flash, redirect, url_for
|
||||||
from flask_babel import gettext as _
|
from flask_babel import gettext as _
|
||||||
from flask_login import login_required, current_user
|
from flask_login import login_required, current_user
|
||||||
from sqlalchemy.sql.expression import func, or_, and_
|
from sqlalchemy.sql.expression import func
|
||||||
|
|
||||||
from . import logger, ub, searched_ids, db
|
from . import logger, ub, searched_ids, db
|
||||||
from .web import render_title_template
|
from .web import render_title_template
|
||||||
@ -35,6 +35,7 @@ from .helper import common_filters
|
|||||||
shelf = Blueprint('shelf', __name__)
|
shelf = Blueprint('shelf', __name__)
|
||||||
log = logger.create()
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
def check_shelf_edit_permissions(cur_shelf):
|
def check_shelf_edit_permissions(cur_shelf):
|
||||||
if not cur_shelf.is_public and not cur_shelf.user_id == int(current_user.id):
|
if not cur_shelf.is_public and not cur_shelf.user_id == int(current_user.id):
|
||||||
log.error("User %s not allowed to edit shelf %s", current_user, cur_shelf)
|
log.error("User %s not allowed to edit shelf %s", current_user, cur_shelf)
|
||||||
@ -74,7 +75,7 @@ def add_to_shelf(shelf_id, book_id):
|
|||||||
return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403
|
return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403
|
||||||
|
|
||||||
book_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
|
book_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
|
||||||
ub.BookShelf.book_id == book_id).first()
|
ub.BookShelf.book_id == book_id).first()
|
||||||
if book_in_shelf:
|
if book_in_shelf:
|
||||||
log.error("Book %s is already part of %s", book_id, shelf)
|
log.error("Book %s is already part of %s", book_id, shelf)
|
||||||
if not xhr:
|
if not xhr:
|
||||||
@ -195,7 +196,6 @@ def remove_from_shelf(shelf_id, book_id):
|
|||||||
return "Sorry you are not allowed to remove a book from this shelf: %s" % shelf.name, 403
|
return "Sorry you are not allowed to remove a book from this shelf: %s" % shelf.name, 403
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@shelf.route("/shelf/create", methods=["GET", "POST"])
|
@shelf.route("/shelf/create", methods=["GET", "POST"])
|
||||||
@login_required
|
@login_required
|
||||||
def create_shelf():
|
def create_shelf():
|
||||||
@ -214,21 +214,24 @@ def create_shelf():
|
|||||||
.first() is None
|
.first() is None
|
||||||
|
|
||||||
if not is_shelf_name_unique:
|
if not is_shelf_name_unique:
|
||||||
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
|
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
|
||||||
|
category="error")
|
||||||
else:
|
else:
|
||||||
is_shelf_name_unique = ub.session.query(ub.Shelf) \
|
is_shelf_name_unique = ub.session.query(ub.Shelf) \
|
||||||
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) & (ub.Shelf.user_id == int(current_user.id))) \
|
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
|
||||||
.first() is None
|
(ub.Shelf.user_id == int(current_user.id)))\
|
||||||
|
.first() is None
|
||||||
|
|
||||||
if not is_shelf_name_unique:
|
if not is_shelf_name_unique:
|
||||||
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
|
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
|
||||||
|
category="error")
|
||||||
|
|
||||||
if is_shelf_name_unique:
|
if is_shelf_name_unique:
|
||||||
try:
|
try:
|
||||||
ub.session.add(shelf)
|
ub.session.add(shelf)
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
|
flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
|
||||||
return redirect(url_for('shelf.show_shelf', shelf_id = shelf.id ))
|
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
|
||||||
except Exception:
|
except Exception:
|
||||||
flash(_(u"There was an error"), category="error")
|
flash(_(u"There was an error"), category="error")
|
||||||
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
|
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
|
||||||
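The wrapped uniqueness queries in create_shelf (and in edit_shelf below) follow a single pattern; a hedged sketch with the ORM objects passed in as parameters (shelf_name_is_unique is a hypothetical helper, not part of the commit):

def shelf_name_is_unique(session, Shelf, title, is_public, user_id, exclude_shelf_id=None):
    # Public shelves must be unique globally, private shelves only per user.
    if is_public:
        query = session.query(Shelf).filter((Shelf.name == title) & (Shelf.is_public == 1))
    else:
        query = session.query(Shelf).filter((Shelf.name == title) & (Shelf.is_public == 0) &
                                            (Shelf.user_id == int(user_id)))
    if exclude_shelf_id is not None:
        # edit_shelf ignores the shelf that is currently being renamed
        query = query.filter(Shelf.id != exclude_shelf_id)
    return query.first() is None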
@ -240,7 +243,7 @@ def create_shelf():
|
|||||||
@login_required
|
@login_required
|
||||||
def edit_shelf(shelf_id):
|
def edit_shelf(shelf_id):
|
||||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
|
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
to_save = request.form.to_dict()
|
to_save = request.form.to_dict()
|
||||||
|
|
||||||
is_shelf_name_unique = False
|
is_shelf_name_unique = False
|
||||||
@ -251,15 +254,18 @@ def edit_shelf(shelf_id):
|
|||||||
.first() is None
|
.first() is None
|
||||||
|
|
||||||
if not is_shelf_name_unique:
|
if not is_shelf_name_unique:
|
||||||
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
|
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
|
||||||
|
category="error")
|
||||||
else:
|
else:
|
||||||
is_shelf_name_unique = ub.session.query(ub.Shelf) \
|
is_shelf_name_unique = ub.session.query(ub.Shelf) \
|
||||||
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) & (ub.Shelf.user_id == int(current_user.id))) \
|
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
|
||||||
.filter(ub.Shelf.id != shelf_id) \
|
(ub.Shelf.user_id == int(current_user.id)))\
|
||||||
.first() is None
|
.filter(ub.Shelf.id != shelf_id)\
|
||||||
|
.first() is None
|
||||||
|
|
||||||
if not is_shelf_name_unique:
|
if not is_shelf_name_unique:
|
||||||
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
|
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
|
||||||
|
category="error")
|
||||||
|
|
||||||
if is_shelf_name_unique:
|
if is_shelf_name_unique:
|
||||||
shelf.name = to_save["title"]
|
shelf.name = to_save["title"]
|
||||||
@ -283,7 +289,7 @@ def delete_shelf_helper(cur_shelf):
|
|||||||
shelf_id = cur_shelf.id
|
shelf_id = cur_shelf.id
|
||||||
ub.session.delete(cur_shelf)
|
ub.session.delete(cur_shelf)
|
||||||
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
|
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
|
||||||
ub.session.add(ub.ShelfArchive(uuid = cur_shelf.uuid, user_id = cur_shelf.uuid))
|
ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.uuid))
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
log.info("successfully deleted %s", cur_shelf)
|
log.info("successfully deleted %s", cur_shelf)
|
||||||
|
|
||||||
@ -295,7 +301,7 @@ def delete_shelf(shelf_id):
|
|||||||
delete_shelf_helper(cur_shelf)
|
delete_shelf_helper(cur_shelf)
|
||||||
return redirect(url_for('web.index'))
|
return redirect(url_for('web.index'))
|
||||||
|
|
||||||
# @shelf.route("/shelfdown/<int:shelf_id>")
|
|
||||||
@shelf.route("/shelf/<int:shelf_id>", defaults={'shelf_type': 1})
|
@shelf.route("/shelf/<int:shelf_id>", defaults={'shelf_type': 1})
|
||||||
@shelf.route("/shelf/<int:shelf_id>/<int:shelf_type>")
|
@shelf.route("/shelf/<int:shelf_id>/<int:shelf_type>")
|
||||||
def show_shelf(shelf_type, shelf_id):
|
def show_shelf(shelf_type, shelf_id):
|
||||||
@ -319,13 +325,12 @@ def show_shelf(shelf_type, shelf_id):
|
|||||||
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book.book_id).delete()
|
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book.book_id).delete()
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
return render_title_template(page, entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name),
|
return render_title_template(page, entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name),
|
||||||
shelf=shelf, page="shelf")
|
shelf=shelf, page="shelf")
|
||||||
else:
|
else:
|
||||||
flash(_(u"Error opening shelf. Shelf does not exist or is not accessible"), category="error")
|
flash(_(u"Error opening shelf. Shelf does not exist or is not accessible"), category="error")
|
||||||
return redirect(url_for("web.index"))
|
return redirect(url_for("web.index"))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@shelf.route("/shelf/order/<int:shelf_id>", methods=["GET", "POST"])
|
@shelf.route("/shelf/order/<int:shelf_id>", methods=["GET", "POST"])
|
||||||
@login_required
|
@login_required
|
||||||
def order_shelf(shelf_id):
|
def order_shelf(shelf_id):
|
||||||
@ -347,17 +352,17 @@ def order_shelf(shelf_id):
|
|||||||
for book in books_in_shelf2:
|
for book in books_in_shelf2:
|
||||||
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).filter(common_filters()).first()
|
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).filter(common_filters()).first()
|
||||||
if cur_book:
|
if cur_book:
|
||||||
result.append({'title':cur_book.title,
|
result.append({'title': cur_book.title,
|
||||||
'id':cur_book.id,
|
'id': cur_book.id,
|
||||||
'author':cur_book.authors,
|
'author': cur_book.authors,
|
||||||
'series':cur_book.series,
|
'series': cur_book.series,
|
||||||
'series_index':cur_book.series_index})
|
'series_index': cur_book.series_index})
|
||||||
else:
|
else:
|
||||||
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
|
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
|
||||||
result.append({'title':_('Hidden Book'),
|
result.append({'title': _('Hidden Book'),
|
||||||
'id':cur_book.id,
|
'id': cur_book.id,
|
||||||
'author':[],
|
'author': [],
|
||||||
'series':[]})
|
'series': []})
|
||||||
return render_title_template('shelf_order.html', entries=result,
|
return render_title_template('shelf_order.html', entries=result,
|
||||||
title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name),
|
title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name),
|
||||||
shelf=shelf, page="shelforder")
|
shelf=shelf, page="shelforder")
|
||||||
|
@ -45,10 +45,10 @@ def process_open(command, quotes=(), env=None, sout=subprocess.PIPE, serr=subpro
|
|||||||
|
|
||||||
|
|
||||||
def process_wait(command, serr=subprocess.PIPE):
|
def process_wait(command, serr=subprocess.PIPE):
|
||||||
'''Run command, wait for process to terminate, and return an iterator over lines of its output.'''
|
# Run command, wait for process to terminate, and return an iterator over lines of its output.
|
||||||
p = process_open(command, serr=serr)
|
p = process_open(command, serr=serr)
|
||||||
p.wait()
|
p.wait()
|
||||||
for l in p.stdout.readlines():
|
for line in p.stdout.readlines():
|
||||||
if isinstance(l, bytes):
|
if isinstance(line, bytes):
|
||||||
l = l.decode('utf-8')
|
line = line.decode('utf-8')
|
||||||
yield l
|
yield line
|
||||||
|
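process_wait from the hunk above, reduced to a self-contained sketch (process_open is replaced by subprocess.Popen so the example runs on its own; the real helper also handles quoting and an environment, as its signature in the hunk header shows):

import subprocess


def process_wait(command, serr=subprocess.PIPE):
    # Run command, wait for the process to terminate, and yield decoded lines of its output.
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=serr)
    p.wait()
    for line in p.stdout.readlines():
        if isinstance(line, bytes):
            line = line.decode('utf-8')
        yield line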
@ -4,18 +4,18 @@
|
|||||||
|
|
||||||
<div class="filterheader hidden-xs hidden-sm">
|
<div class="filterheader hidden-xs hidden-sm">
|
||||||
{% if entries.__len__() %}
|
{% if entries.__len__() %}
|
||||||
{% if entries[0][0].sort %}
|
{% if data == 'author' %}
|
||||||
<button id="sort_name" class="btn btn-success"><b>B,A <-> A B</b></button>
|
<button id="sort_name" class="btn btn-primary"><b>B,A <-> A B</b></button>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<button id="desc" class="btn btn-success"><span class="glyphicon glyphicon-sort-by-alphabet"></span></button>
|
<button id="desc" class="btn btn-primary"><span class="glyphicon glyphicon-sort-by-alphabet"></span></button>
|
||||||
<button id="asc" class="btn btn-success"><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></button>
|
<button id="asc" class="btn btn-primary"><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></button>
|
||||||
{% if charlist|length %}
|
{% if charlist|length %}
|
||||||
<button id="all" class="btn btn-success">{{_('All')}}</button>
|
<button id="all" class="btn btn-primary">{{_('All')}}</button>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<div class="btn-group character" role="group">
|
<div class="btn-group character" role="group">
|
||||||
{% for char in charlist%}
|
{% for char in charlist%}
|
||||||
<button class="btn btn-success char">{{char.char}}</button>
|
<button class="btn btn-primary char">{{char.char}}</button>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
114 cps/ub.py
@ -42,11 +42,10 @@ from sqlalchemy import create_engine, exc, exists, event
|
|||||||
from sqlalchemy import Column, ForeignKey
|
from sqlalchemy import Column, ForeignKey
|
||||||
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float
|
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float
|
||||||
from sqlalchemy.ext.declarative import declarative_base
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
from sqlalchemy.orm import backref, foreign, relationship, remote, sessionmaker, Session
|
from sqlalchemy.orm import backref, relationship, sessionmaker, Session
|
||||||
from sqlalchemy.sql.expression import and_
|
|
||||||
from werkzeug.security import generate_password_hash
|
from werkzeug.security import generate_password_hash
|
||||||
|
|
||||||
from . import constants # , config
|
from . import constants
|
||||||
|
|
||||||
|
|
||||||
session = None
|
session = None
|
||||||
@ -57,39 +56,39 @@ def get_sidebar_config(kwargs=None):
|
|||||||
kwargs = kwargs or []
|
kwargs = kwargs or []
|
||||||
if 'content' in kwargs:
|
if 'content' in kwargs:
|
||||||
content = kwargs['content']
|
content = kwargs['content']
|
||||||
content = isinstance(content, (User,LocalProxy)) and not content.role_anonymous()
|
content = isinstance(content, (User, LocalProxy)) and not content.role_anonymous()
|
||||||
else:
|
else:
|
||||||
content = 'conf' in kwargs
|
content = 'conf' in kwargs
|
||||||
sidebar = list()
|
sidebar = list()
|
||||||
sidebar.append({"glyph": "glyphicon-book", "text": _('Recently Added'), "link": 'web.index', "id": "new",
|
sidebar.append({"glyph": "glyphicon-book", "text": _('Recently Added'), "link": 'web.index', "id": "new",
|
||||||
"visibility": constants.SIDEBAR_RECENT, 'public': True, "page": "root",
|
"visibility": constants.SIDEBAR_RECENT, 'public': True, "page": "root",
|
||||||
"show_text": _('Show recent books'), "config_show":True})
|
"show_text": _('Show recent books'), "config_show": True})
|
||||||
sidebar.append({"glyph": "glyphicon-fire", "text": _('Hot Books'), "link": 'web.books_list', "id": "hot",
|
sidebar.append({"glyph": "glyphicon-fire", "text": _('Hot Books'), "link": 'web.books_list', "id": "hot",
|
||||||
"visibility": constants.SIDEBAR_HOT, 'public': True, "page": "hot", "show_text": _('Show Hot Books'),
|
"visibility": constants.SIDEBAR_HOT, 'public': True, "page": "hot",
|
||||||
"config_show":True})
|
"show_text": _('Show Hot Books'), "config_show": True})
|
||||||
sidebar.append(
|
sidebar.append(
|
||||||
{"glyph": "glyphicon-star", "text": _('Top Rated Books'), "link": 'web.books_list', "id": "rated",
|
{"glyph": "glyphicon-star", "text": _('Top Rated Books'), "link": 'web.books_list', "id": "rated",
|
||||||
"visibility": constants.SIDEBAR_BEST_RATED, 'public': True, "page": "rated",
|
"visibility": constants.SIDEBAR_BEST_RATED, 'public': True, "page": "rated",
|
||||||
"show_text": _('Show Top Rated Books'), "config_show":True})
|
"show_text": _('Show Top Rated Books'), "config_show": True})
|
||||||
sidebar.append({"glyph": "glyphicon-eye-open", "text": _('Read Books'), "link": 'web.books_list', "id": "read",
|
sidebar.append({"glyph": "glyphicon-eye-open", "text": _('Read Books'), "link": 'web.books_list', "id": "read",
|
||||||
"visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "read",
|
"visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "read",
|
||||||
"show_text": _('Show read and unread'), "config_show": content})
|
"show_text": _('Show read and unread'), "config_show": content})
|
||||||
sidebar.append(
|
sidebar.append(
|
||||||
{"glyph": "glyphicon-eye-close", "text": _('Unread Books'), "link": 'web.books_list', "id": "unread",
|
{"glyph": "glyphicon-eye-close", "text": _('Unread Books'), "link": 'web.books_list', "id": "unread",
|
||||||
"visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "unread",
|
"visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "unread",
|
||||||
"show_text": _('Show unread'), "config_show":False})
|
"show_text": _('Show unread'), "config_show": False})
|
||||||
sidebar.append({"glyph": "glyphicon-random", "text": _('Discover'), "link": 'web.books_list', "id": "rand",
|
sidebar.append({"glyph": "glyphicon-random", "text": _('Discover'), "link": 'web.books_list', "id": "rand",
|
||||||
"visibility": constants.SIDEBAR_RANDOM, 'public': True, "page": "discover",
|
"visibility": constants.SIDEBAR_RANDOM, 'public': True, "page": "discover",
|
||||||
"show_text": _('Show random books'), "config_show":True})
|
"show_text": _('Show random books'), "config_show": True})
|
||||||
sidebar.append({"glyph": "glyphicon-inbox", "text": _('Categories'), "link": 'web.category_list', "id": "cat",
|
sidebar.append({"glyph": "glyphicon-inbox", "text": _('Categories'), "link": 'web.category_list', "id": "cat",
|
||||||
"visibility": constants.SIDEBAR_CATEGORY, 'public': True, "page": "category",
|
"visibility": constants.SIDEBAR_CATEGORY, 'public': True, "page": "category",
|
||||||
"show_text": _('Show category selection'), "config_show":True})
|
"show_text": _('Show category selection'), "config_show": True})
|
||||||
sidebar.append({"glyph": "glyphicon-bookmark", "text": _('Series'), "link": 'web.series_list', "id": "serie",
|
sidebar.append({"glyph": "glyphicon-bookmark", "text": _('Series'), "link": 'web.series_list', "id": "serie",
|
||||||
"visibility": constants.SIDEBAR_SERIES, 'public': True, "page": "series",
|
"visibility": constants.SIDEBAR_SERIES, 'public': True, "page": "series",
|
||||||
"show_text": _('Show series selection'), "config_show":True})
|
"show_text": _('Show series selection'), "config_show": True})
|
||||||
sidebar.append({"glyph": "glyphicon-user", "text": _('Authors'), "link": 'web.author_list', "id": "author",
|
sidebar.append({"glyph": "glyphicon-user", "text": _('Authors'), "link": 'web.author_list', "id": "author",
|
||||||
"visibility": constants.SIDEBAR_AUTHOR, 'public': True, "page": "author",
|
"visibility": constants.SIDEBAR_AUTHOR, 'public': True, "page": "author",
|
||||||
"show_text": _('Show author selection'), "config_show":True})
|
"show_text": _('Show author selection'), "config_show": True})
|
||||||
sidebar.append(
|
sidebar.append(
|
||||||
{"glyph": "glyphicon-text-size", "text": _('Publishers'), "link": 'web.publisher_list', "id": "publisher",
|
{"glyph": "glyphicon-text-size", "text": _('Publishers'), "link": 'web.publisher_list', "id": "publisher",
|
||||||
"visibility": constants.SIDEBAR_PUBLISHER, 'public': True, "page": "publisher",
|
"visibility": constants.SIDEBAR_PUBLISHER, 'public': True, "page": "publisher",
|
||||||
@ -97,13 +96,13 @@ def get_sidebar_config(kwargs=None):
|
|||||||
sidebar.append({"glyph": "glyphicon-flag", "text": _('Languages'), "link": 'web.language_overview', "id": "lang",
|
sidebar.append({"glyph": "glyphicon-flag", "text": _('Languages'), "link": 'web.language_overview', "id": "lang",
|
||||||
"visibility": constants.SIDEBAR_LANGUAGE, 'public': (g.user.filter_language() == 'all'),
|
"visibility": constants.SIDEBAR_LANGUAGE, 'public': (g.user.filter_language() == 'all'),
|
||||||
"page": "language",
|
"page": "language",
|
||||||
"show_text": _('Show language selection'), "config_show":True})
|
"show_text": _('Show language selection'), "config_show": True})
|
||||||
sidebar.append({"glyph": "glyphicon-star-empty", "text": _('Ratings'), "link": 'web.ratings_list', "id": "rate",
|
sidebar.append({"glyph": "glyphicon-star-empty", "text": _('Ratings'), "link": 'web.ratings_list', "id": "rate",
|
||||||
"visibility": constants.SIDEBAR_RATING, 'public': True,
|
"visibility": constants.SIDEBAR_RATING, 'public': True,
|
||||||
"page": "rating", "show_text": _('Show ratings selection'), "config_show":True})
|
"page": "rating", "show_text": _('Show ratings selection'), "config_show": True})
|
||||||
sidebar.append({"glyph": "glyphicon-file", "text": _('File formats'), "link": 'web.formats_list', "id": "format",
|
sidebar.append({"glyph": "glyphicon-file", "text": _('File formats'), "link": 'web.formats_list', "id": "format",
|
||||||
"visibility": constants.SIDEBAR_FORMAT, 'public': True,
|
"visibility": constants.SIDEBAR_FORMAT, 'public': True,
|
||||||
"page": "format", "show_text": _('Show file formats selection'), "config_show":True})
|
"page": "format", "show_text": _('Show file formats selection'), "config_show": True})
|
||||||
sidebar.append(
|
sidebar.append(
|
||||||
{"glyph": "glyphicon-trash", "text": _('Archived Books'), "link": 'web.books_list', "id": "archived",
|
{"glyph": "glyphicon-trash", "text": _('Archived Books'), "link": 'web.books_list', "id": "archived",
|
||||||
"visibility": constants.SIDEBAR_ARCHIVED, 'public': (not g.user.is_anonymous), "page": "archived",
|
"visibility": constants.SIDEBAR_ARCHIVED, 'public': (not g.user.is_anonymous), "page": "archived",
|
||||||
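Each sidebar.append(...) above registers one dict with the same keys; how such entries might be filtered against a user's sidebar_view bitmask is sketched below (the filter function itself is an assumption, only the dict keys and bit-flag constants come from the diff):

def visible_sidebar_entries(sidebar, sidebar_view, is_anonymous):
    # Yield the entries the current user is allowed to see and has enabled.
    for element in sidebar:
        if not element['public'] and is_anonymous:
            continue
        if sidebar_view & element['visibility']:
            yield element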
@ -236,7 +235,8 @@ class Anonymous(AnonymousUserMixin, UserBase):
|
|||||||
self.loadSettings()
|
self.loadSettings()
|
||||||
|
|
||||||
def loadSettings(self):
|
def loadSettings(self):
|
||||||
data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() # type: User
|
data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS)\
|
||||||
|
.first() # type: User
|
||||||
self.nickname = data.nickname
|
self.nickname = data.nickname
|
||||||
self.role = data.role
|
self.role = data.role
|
||||||
self.id=data.id
|
self.id=data.id
|
||||||
@ -259,7 +259,7 @@ class Anonymous(AnonymousUserMixin, UserBase):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def is_anonymous(self):
|
def is_anonymous(self):
|
||||||
return True # self.anon_browse
|
return True
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_authenticated(self):
|
def is_authenticated(self):
|
||||||
@ -271,7 +271,7 @@ class Shelf(Base):
|
|||||||
__tablename__ = 'shelf'
|
__tablename__ = 'shelf'
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
id = Column(Integer, primary_key=True)
|
||||||
uuid = Column(String, default=lambda : str(uuid.uuid4()))
|
uuid = Column(String, default=lambda: str(uuid.uuid4()))
|
||||||
name = Column(String)
|
name = Column(String)
|
||||||
is_public = Column(Integer, default=0)
|
is_public = Column(Integer, default=0)
|
||||||
user_id = Column(Integer, ForeignKey('user.id'))
|
user_id = Column(Integer, ForeignKey('user.id'))
|
||||||
@ -318,8 +318,12 @@ class ReadBook(Base):
|
|||||||
book_id = Column(Integer, unique=False)
|
book_id = Column(Integer, unique=False)
|
||||||
user_id = Column(Integer, ForeignKey('user.id'), unique=False)
|
user_id = Column(Integer, ForeignKey('user.id'), unique=False)
|
||||||
read_status = Column(Integer, unique=False, default=STATUS_UNREAD, nullable=False)
|
read_status = Column(Integer, unique=False, default=STATUS_UNREAD, nullable=False)
|
||||||
kobo_reading_state = relationship("KoboReadingState", uselist=False, primaryjoin="and_(ReadBook.user_id == foreign(KoboReadingState.user_id), "
|
kobo_reading_state = relationship("KoboReadingState", uselist=False,
|
||||||
"ReadBook.book_id == foreign(KoboReadingState.book_id))", cascade="all", backref=backref("book_read_link", uselist=False))
|
primaryjoin="and_(ReadBook.user_id == foreign(KoboReadingState.user_id), "
|
||||||
|
"ReadBook.book_id == foreign(KoboReadingState.book_id))",
|
||||||
|
cascade="all",
|
||||||
|
backref=backref("book_read_link",
|
||||||
|
uselist=False))
|
||||||
last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
|
last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
|
||||||
last_time_started_reading = Column(DateTime, nullable=True)
|
last_time_started_reading = Column(DateTime, nullable=True)
|
||||||
times_started_reading = Column(Integer, default=0, nullable=False)
|
times_started_reading = Column(Integer, default=0, nullable=False)
|
||||||
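Re-assembled in one piece, the wrapped kobo_reading_state relationship reads as follows (table layout trimmed to the columns needed for the join; everything else about the models is assumed):

import datetime

from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship

Base = declarative_base()


class KoboReadingState(Base):
    __tablename__ = 'kobo_reading_state'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer)
    book_id = Column(Integer)


class ReadBook(Base):
    __tablename__ = 'book_read_link'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer)
    book_id = Column(Integer)
    # join on user_id/book_id; foreign() marks the KoboReadingState side as the foreign columns
    kobo_reading_state = relationship("KoboReadingState", uselist=False,
                                      primaryjoin="and_(ReadBook.user_id == foreign(KoboReadingState.user_id), "
                                                  "ReadBook.book_id == foreign(KoboReadingState.book_id))",
                                      cascade="all",
                                      backref=backref("book_read_link", uselist=False))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)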
@ -334,6 +338,7 @@ class Bookmark(Base):
|
|||||||
format = Column(String(collation='NOCASE'))
|
format = Column(String(collation='NOCASE'))
|
||||||
bookmark_key = Column(String)
|
bookmark_key = Column(String)
|
||||||
|
|
||||||
|
|
||||||
# Baseclass representing books that are archived on the user's Kobo device.
|
# Baseclass representing books that are archived on the user's Kobo device.
|
||||||
class ArchivedBook(Base):
|
class ArchivedBook(Base):
|
||||||
__tablename__ = 'archived_book'
|
__tablename__ = 'archived_book'
|
||||||
@ -421,7 +426,6 @@ class Registration(Base):
|
|||||||
return u"<Registration('{0}')>".format(self.domain)
|
return u"<Registration('{0}')>".format(self.domain)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class RemoteAuthToken(Base):
|
class RemoteAuthToken(Base):
|
||||||
__tablename__ = 'remote_auth_token'
|
__tablename__ = 'remote_auth_token'
|
||||||
|
|
||||||
@ -486,7 +490,7 @@ def migrate_Database(session):
|
|||||||
conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
|
conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
|
||||||
conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
|
conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
|
||||||
conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
|
conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
|
||||||
conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
|
conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
|
||||||
session.commit()
|
session.commit()
|
||||||
try:
|
try:
|
||||||
session.query(exists().where(Shelf.uuid)).scalar()
|
session.query(exists().where(Shelf.uuid)).scalar()
|
||||||
@ -531,19 +535,13 @@ def migrate_Database(session):
|
|||||||
except exc.OperationalError:
|
except exc.OperationalError:
|
||||||
conn = engine.connect()
|
conn = engine.connect()
|
||||||
conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
|
conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
|
||||||
"+ series_books * :side_series + category_books * :side_category + hot_books * "
|
"+ series_books * :side_series + category_books * :side_category + hot_books * "
|
||||||
":side_hot + :side_autor + :detail_random)"
|
":side_hot + :side_autor + :detail_random)",
|
||||||
,{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
|
{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
|
||||||
'side_series': constants.SIDEBAR_SERIES,
|
'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
|
||||||
'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT,
|
'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
|
||||||
'side_autor': constants.SIDEBAR_AUTHOR,
|
'detail_random': constants.DETAIL_RANDOM})
|
||||||
'detail_random': constants.DETAIL_RANDOM})
|
|
||||||
session.commit()
|
session.commit()
|
||||||
'''try:
|
|
||||||
session.query(exists().where(User.mature_content)).scalar()
|
|
||||||
except exc.OperationalError:
|
|
||||||
conn = engine.connect()
|
|
||||||
conn.execute("ALTER TABLE user ADD column `mature_content` INTEGER DEFAULT 1")'''
|
|
||||||
try:
|
try:
|
||||||
session.query(exists().where(User.denied_tags)).scalar()
|
session.query(exists().where(User.denied_tags)).scalar()
|
||||||
except exc.OperationalError: # Database is not compatible, some columns are missing
|
except exc.OperationalError: # Database is not compatible, some columns are missing
|
||||||
@ -552,7 +550,8 @@ def migrate_Database(session):
|
|||||||
conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
|
conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
|
||||||
conn.execute("ALTER TABLE user ADD column `denied_column_value` DEFAULT ''")
|
conn.execute("ALTER TABLE user ADD column `denied_column_value` DEFAULT ''")
|
||||||
conn.execute("ALTER TABLE user ADD column `allowed_column_value` DEFAULT ''")
|
conn.execute("ALTER TABLE user ADD column `allowed_column_value` DEFAULT ''")
|
||||||
if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() is None:
|
if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
|
||||||
|
is None:
|
||||||
create_anonymous_user(session)
|
create_anonymous_user(session)
|
||||||
try:
|
try:
|
||||||
# check if one table with autoincrement is existing (should be user table)
|
# check if one table with autoincrement is existing (should be user table)
|
||||||
@ -562,20 +561,20 @@ def migrate_Database(session):
|
|||||||
# Create new table user_id and copy contents of table user into it
|
# Create new table user_id and copy contents of table user into it
|
||||||
conn = engine.connect()
|
conn = engine.connect()
|
||||||
conn.execute("CREATE TABLE user_id (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
|
conn.execute("CREATE TABLE user_id (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
|
||||||
"nickname VARCHAR(64),"
|
" nickname VARCHAR(64),"
|
||||||
"email VARCHAR(120),"
|
"email VARCHAR(120),"
|
||||||
"role SMALLINT,"
|
"role SMALLINT,"
|
||||||
"password VARCHAR,"
|
"password VARCHAR,"
|
||||||
"kindle_mail VARCHAR(120),"
|
"kindle_mail VARCHAR(120),"
|
||||||
"locale VARCHAR(2),"
|
"locale VARCHAR(2),"
|
||||||
"sidebar_view INTEGER,"
|
"sidebar_view INTEGER,"
|
||||||
"default_language VARCHAR(3),"
|
"default_language VARCHAR(3),"
|
||||||
"UNIQUE (nickname),"
|
"UNIQUE (nickname),"
|
||||||
"UNIQUE (email))")
|
"UNIQUE (email))")
|
||||||
conn.execute("INSERT INTO user_id(id, nickname, email, role, password, kindle_mail,locale,"
|
conn.execute("INSERT INTO user_id(id, nickname, email, role, password, kindle_mail,locale,"
|
||||||
"sidebar_view, default_language) "
|
"sidebar_view, default_language) "
|
||||||
"SELECT id, nickname, email, role, password, kindle_mail, locale,"
|
"SELECT id, nickname, email, role, password, kindle_mail, locale,"
|
||||||
"sidebar_view, default_language FROM user")
|
"sidebar_view, default_language FROM user")
|
||||||
# delete old user table and rename new user_id table to user:
|
# delete old user table and rename new user_id table to user:
|
||||||
conn.execute("DROP TABLE user")
|
conn.execute("DROP TABLE user")
|
||||||
conn.execute("ALTER TABLE user_id RENAME TO user")
|
conn.execute("ALTER TABLE user_id RENAME TO user")
|
||||||
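The column migrations in migrate_Database all follow the probe-and-alter pattern visible in these hunks; as a standalone sketch (User, session and engine stand in for the calibre-web objects, and the raw-string execute mirrors the SQLAlchemy 1.x style used in the diff):

from sqlalchemy import exc, exists


def ensure_denied_tags_column(session, engine, User):
    # Probe the column with a throwaway query; SQLite raises OperationalError when it is missing.
    try:
        session.query(exists().where(User.denied_tags)).scalar()
    except exc.OperationalError:
        conn = engine.connect()
        conn.execute("ALTER TABLE user ADD column `denied_tags` String DEFAULT ''")
        session.commit()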
@ -591,25 +590,26 @@ def clean_database(session):
|
|||||||
# Remove expired remote login tokens
|
# Remove expired remote login tokens
|
||||||
now = datetime.datetime.now()
|
now = datetime.datetime.now()
|
||||||
session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration).\
|
session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration).\
|
||||||
filter(RemoteAuthToken.token_type !=1 ).delete()
|
filter(RemoteAuthToken.token_type != 1).delete()
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
# Save downloaded books per user in calibre-web's own database
|
# Save downloaded books per user in calibre-web's own database
|
||||||
def update_download(book_id, user_id):
|
def update_download(book_id, user_id):
|
||||||
check = session.query(Downloads).filter(Downloads.user_id == user_id).filter(Downloads.book_id ==
|
check = session.query(Downloads).filter(Downloads.user_id == user_id).filter(Downloads.book_id == book_id).first()
|
||||||
book_id).first()
|
|
||||||
|
|
||||||
if not check:
|
if not check:
|
||||||
new_download = Downloads(user_id=user_id, book_id=book_id)
|
new_download = Downloads(user_id=user_id, book_id=book_id)
|
||||||
session.add(new_download)
|
session.add(new_download)
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
# Delete non exisiting downloaded books in calibre-web's own database
|
# Delete non exisiting downloaded books in calibre-web's own database
|
||||||
def delete_download(book_id):
|
def delete_download(book_id):
|
||||||
session.query(Downloads).filter(book_id == Downloads.book_id).delete()
|
session.query(Downloads).filter(book_id == Downloads.book_id).delete()
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
# Generate user Guest (translated text), as anoymous user, no rights
|
# Generate user Guest (translated text), as anoymous user, no rights
|
||||||
def create_anonymous_user(session):
|
def create_anonymous_user(session):
|
||||||
user = User()
|
user = User()
|
||||||
@ -667,8 +667,12 @@ def dispose():
|
|||||||
old_session = session
|
old_session = session
|
||||||
session = None
|
session = None
|
||||||
if old_session:
|
if old_session:
|
||||||
try: old_session.close()
|
try:
|
||||||
except: pass
|
old_session.close()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
if old_session.bind:
|
if old_session.bind:
|
||||||
try: old_session.bind.dispose()
|
try:
|
||||||
except: pass
|
old_session.bind.dispose()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
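The reformatted try/except blocks in dispose() amount to this pattern (the module-level session handling is stripped so the sketch stands alone):

def dispose(old_session):
    # Close the session and dispose its engine, swallowing errors during shutdown.
    if old_session:
        try:
            old_session.close()
        except Exception:
            pass
        if old_session.bind:
            try:
                old_session.bind.dispose()
            except Exception:
                pass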
@ -69,7 +69,7 @@ class Updater(threading.Thread):
|
|||||||
def get_available_updates(self, request_method, locale):
|
def get_available_updates(self, request_method, locale):
|
||||||
if config.config_updatechannel == constants.UPDATE_STABLE:
|
if config.config_updatechannel == constants.UPDATE_STABLE:
|
||||||
return self._stable_available_updates(request_method)
|
return self._stable_available_updates(request_method)
|
||||||
return self._nightly_available_updates(request_method,locale)
|
return self._nightly_available_updates(request_method, locale)
|
||||||
|
|
||||||
def do_work(self):
|
def do_work(self):
|
||||||
try:
|
try:
|
||||||
@ -132,7 +132,7 @@ class Updater(threading.Thread):
|
|||||||
def pause(self):
|
def pause(self):
|
||||||
self.can_run.clear()
|
self.can_run.clear()
|
||||||
|
|
||||||
#should just resume the thread
|
# should just resume the thread
|
||||||
def resume(self):
|
def resume(self):
|
||||||
self.can_run.set()
|
self.can_run.set()
|
||||||
|
|
||||||
@ -268,7 +268,7 @@ class Updater(threading.Thread):
|
|||||||
|
|
||||||
def is_venv(self):
|
def is_venv(self):
|
||||||
if (hasattr(sys, 'real_prefix')) or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
|
if (hasattr(sys, 'real_prefix')) or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
|
||||||
return os.sep + os.path.relpath(sys.prefix,constants.BASE_DIR)
|
return os.sep + os.path.relpath(sys.prefix, constants.BASE_DIR)
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
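is_venv() from the hunk above, as a self-contained helper (base_dir replaces constants.BASE_DIR):

import os
import sys


def is_venv(base_dir):
    # Detect a virtualenv/venv and return the interpreter prefix relative to base_dir, else False.
    if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
        return os.sep + os.path.relpath(sys.prefix, base_dir)
    return False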
@ -280,7 +280,7 @@ class Updater(threading.Thread):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _stable_version_info(cls):
|
def _stable_version_info(cls):
|
||||||
return constants.STABLE_VERSION # Current version
|
return constants.STABLE_VERSION # Current version
|
||||||
|
|
||||||
def _nightly_available_updates(self, request_method, locale):
|
def _nightly_available_updates(self, request_method, locale):
|
||||||
tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
|
tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
|
||||||
@ -436,7 +436,7 @@ class Updater(threading.Thread):
|
|||||||
patch_version_update > current_version[2]) or \
|
patch_version_update > current_version[2]) or \
|
||||||
minor_version_update > current_version[1]:
|
minor_version_update > current_version[1]:
|
||||||
parents.append([commit[i]['tag_name'], commit[i]['body'].replace('\r\n', '<p>')])
|
parents.append([commit[i]['tag_name'], commit[i]['body'].replace('\r\n', '<p>')])
|
||||||
newer=True
|
newer = True
|
||||||
i -= 1
|
i -= 1
|
||||||
continue
|
continue
|
||||||
if major_version_update < current_version[0]:
|
if major_version_update < current_version[0]:
|
||||||
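The version checks in the final hunk compare (major, minor, patch) values of a release against the running version; a hedged sketch of that comparison (the tuple layout is inferred from the surrounding code, which is not shown in full here):

def is_newer_release(update_version, current_version):
    # Both arguments are (major, minor, patch) tuples of ints.
    major_u, minor_u, patch_u = update_version
    major_c, minor_c, patch_c = current_version
    if major_u != major_c:
        return major_u > major_c
    if minor_u != minor_c:
        return minor_u > minor_c
    return patch_u > patch_c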
|