Mirror of https://github.com/janeczku/calibre-web (synced 2024-11-28 20:39:59 +00:00)

Commit b4324cd685: Merge remote-tracking branch 'upstream/master' into master
@@ -32,8 +32,8 @@ Calibre-Web is a web app providing a clean interface for browsing, reading and d

 ## Quick start

-1. Install dependencies by running `pip3 install --target vendor -r requirements.txt` (python3.x) or `pip install --target vendor -r requirements.txt` (python2.7).
+1. Install dependencies by running `pip3 install --target vendor -r requirements.txt` (python3.x). Alternativly set up a python virtual environment.
-2. Execute the command: `python cps.py` (or `nohup python cps.py` - recommended if you want to exit the terminal window)
+2. Execute the command: `python3 cps.py` (or `nohup python3 cps.py` - recommended if you want to exit the terminal window)
 3. Point your browser to `http://localhost:8083` or `http://localhost:8083/opds` for the OPDS catalog
 4. Set `Location of Calibre database` to the path of the folder where your Calibre library (metadata.db) lives, push "submit" button\
    Optionally a Google Drive can be used to host the calibre library [-> Using Google Drive integration](https://github.com/janeczku/calibre-web/wiki/Configuration#using-google-drive-integration)
@@ -48,7 +48,7 @@ Please note that running the above install command can fail on some versions of

 ## Requirements

-python 3.x+, (Python 2.7+)
+python 3.x+

 Optionally, to enable on-the-fly conversion from one ebook format to another when using the send-to-kindle feature, or during editing of ebooks metadata:
cps/admin.py (239 changed lines)
@@ -105,6 +105,7 @@ def unconfigured(f):
     return inner


 @admi.before_app_request
 def before_request():
     if current_user.is_authenticated:
@@ -118,8 +119,10 @@ def before_request():
         g.config_authors_max = config.config_authors_max
         g.shelves_access = ub.session.query(ub.Shelf).filter(
             or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == current_user.id)).order_by(ub.Shelf.name).all()
-    if not config.db_configured and request.endpoint not in (
-            'admin.basic_configuration', 'login', 'admin.config_pathchooser') and '/static/' not in request.path:
+    if '/static/' not in request.path and not config.db_configured and \
+            request.endpoint not in ('admin.basic_configuration',
+                                     'login',
+                                     'admin.config_pathchooser'):
         return redirect(url_for('admin.basic_configuration'))
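The reshaped condition above keeps the existing behaviour of `before_request`: while no Calibre database is configured, every request except static assets and the configuration endpoints is redirected to the basic-configuration page. A minimal stand-alone illustration of that guard pattern in plain Flask (the endpoint names and the `db_configured` flag are made up for the example, not the Calibre-Web code itself):

```python
from flask import Flask, redirect, request, url_for

app = Flask(__name__)
db_configured = False  # stand-in for config.db_configured
EXEMPT_ENDPOINTS = {'basic_configuration', 'login', 'config_pathchooser'}

@app.before_request
def force_setup():
    # Let static files and the setup pages through; push everything else to setup.
    if '/static/' not in request.path and not db_configured and \
            request.endpoint not in EXEMPT_ENDPOINTS:
        return redirect(url_for('basic_configuration'))

@app.route('/basic_configuration')
def basic_configuration():
    return "please configure the Calibre database location"
```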
@@ -201,12 +204,12 @@ def configuration():
 @login_required
 @admin_required
 def view_configuration():
-    readColumn = calibre_db.session.query(db.Custom_Columns)\
+    read_column = calibre_db.session.query(db.Custom_Columns)\
         .filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
-    restrictColumns= calibre_db.session.query(db.Custom_Columns)\
+    restrict_columns = calibre_db.session.query(db.Custom_Columns)\
         .filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
-    return render_title_template("config_view_edit.html", conf=config, readColumns=readColumn,
-                                 restrictColumns=restrictColumns,
+    return render_title_template("config_view_edit.html", conf=config, readColumns=read_column,
+                                 restrictColumns=restrict_columns,
                                  title=_(u"UI Configuration"), page="uiconfig")


@@ -271,11 +274,7 @@ def edit_domain(allow):
     vals = request.form.to_dict()
     answer = ub.session.query(ub.Registration).filter(ub.Registration.id == vals['pk']).first()
     answer.domain = vals['value'].replace('*', '%').replace('?', '_').lower()
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
-    return ""
+    return ub.session_commit("Registering Domains edited {}".format(answer.domain))


 @admi.route("/ajax/adddomain/<int:allow>", methods=['POST'])
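The recurring change throughout this commit is collapsing the repeated `try: ub.session.commit() / except OperationalError: ub.session.rollback()` blocks into a single `ub.session_commit(...)` call that also takes a log message. The helper itself is not shown in this diff; the sketch below is only a plausible shape for such a wrapper (the parameter names, logging behaviour and empty-string return value are assumptions, not the actual cps/ub.py implementation):

```python
from sqlalchemy.exc import OperationalError

def session_commit(success_message="", session=None, log=None):
    """Commit the given SQLAlchemy session, roll back on OperationalError.

    Sketch only: the real helper lives in cps/ub.py and binds its own
    session and logger; the details here are assumed.
    """
    try:
        session.commit()
        if success_message and log:
            log.info(success_message)
    except OperationalError:
        session.rollback()
    # Callers such as edit_domain() do `return ub.session_commit(...)` where they
    # previously returned "", so returning an empty string keeps that contract.
    return ""
```

Centralising the rollback this way removes a dozen or so near-identical four-line blocks from admin.py, kobo.py and the OAuth/remote-login modules further down in this diff.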
@@ -288,10 +287,7 @@ def add_domain(allow):
     if not check:
         new_domain = ub.Registration(domain=domain_name, allow=allow)
         ub.session.add(new_domain)
-        try:
-            ub.session.commit()
-        except OperationalError:
-            ub.session.rollback()
+        ub.session_commit("Registering Domains added {}".format(domain_name))
     return ""


@@ -302,18 +298,12 @@ def delete_domain():
     try:
         domain_id = request.form.to_dict()['domainid'].replace('*', '%').replace('?', '_').lower()
         ub.session.query(ub.Registration).filter(ub.Registration.id == domain_id).delete()
-        try:
-            ub.session.commit()
-        except OperationalError:
-            ub.session.rollback()
+        ub.session_commit("Registering Domains deleted {}".format(domain_id))
         # If last domain was deleted, add all domains by default
         if not ub.session.query(ub.Registration).filter(ub.Registration.allow == 1).count():
             new_domain = ub.Registration(domain="%.%", allow=1)
             ub.session.add(new_domain)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Last Registering Domain deleted, added *.* as default")
     except KeyError:
         pass
     return ""
@@ -330,6 +320,7 @@ def list_domain(allow):
     response.headers["Content-Type"] = "application/json; charset=utf-8"
     return response


 @admi.route("/ajax/editrestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
 @admi.route("/ajax/editrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
 @login_required
@@ -355,10 +346,7 @@ def edit_restriction(res_type, user_id):
             elementlist = usr.list_allowed_tags()
             elementlist[int(element['id'][1:])] = element['Element']
             usr.allowed_tags = ','.join(elementlist)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed allowed tags of user {} to {}".format(usr.nickname, usr.allowed_tags))
         if res_type == 3:  # CColumn per user
             if isinstance(user_id, int):
                 usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
@@ -367,10 +355,7 @@ def edit_restriction(res_type, user_id):
             elementlist = usr.list_allowed_column_values()
             elementlist[int(element['id'][1:])] = element['Element']
             usr.allowed_column_value = ','.join(elementlist)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed allowed columns of user {} to {}".format(usr.nickname, usr.allowed_column_value))
     if element['id'].startswith('d'):
         if res_type == 0:  # Tags as template
             elementlist = config.list_denied_tags()
@@ -390,10 +375,7 @@ def edit_restriction(res_type, user_id):
             elementlist = usr.list_denied_tags()
             elementlist[int(element['id'][1:])] = element['Element']
             usr.denied_tags = ','.join(elementlist)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed denied tags of user {} to {}".format(usr.nickname, usr.denied_tags))
         if res_type == 3:  # CColumn per user
             if isinstance(user_id, int):
                 usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
@@ -402,12 +384,10 @@ def edit_restriction(res_type, user_id):
             elementlist = usr.list_denied_column_values()
             elementlist[int(element['id'][1:])] = element['Element']
             usr.denied_column_value = ','.join(elementlist)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed denied columns of user {} to {}".format(usr.nickname, usr.denied_column_value))
     return ""


 def restriction_addition(element, list_func):
     elementlist = list_func()
     if elementlist == ['']:
@@ -451,16 +431,10 @@ def add_restriction(res_type, user_id):
             usr = current_user
         if 'submit_allow' in element:
             usr.allowed_tags = restriction_addition(element, usr.list_allowed_tags)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed allowed tags of user {} to {}".format(usr.nickname, usr.list_allowed_tags))
         elif 'submit_deny' in element:
             usr.denied_tags = restriction_addition(element, usr.list_denied_tags)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed denied tags of user {} to {}".format(usr.nickname, usr.list_denied_tags))
     if res_type == 3:  # CustomC per user
         if isinstance(user_id, int):
             usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
@@ -468,18 +442,15 @@ def add_restriction(res_type, user_id):
             usr = current_user
         if 'submit_allow' in element:
             usr.allowed_column_value = restriction_addition(element, usr.list_allowed_column_values)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed allowed columns of user {} to {}".format(usr.nickname,
+                                                                                usr.list_allowed_column_values))
         elif 'submit_deny' in element:
            usr.denied_column_value = restriction_addition(element, usr.list_denied_column_values)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Changed denied columns of user {} to {}".format(usr.nickname,
+                                                                               usr.list_denied_column_values))
     return ""


 @admi.route("/ajax/deleterestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
 @admi.route("/ajax/deleterestriction/<int:res_type>/<int:user_id>", methods=['POST'])
 @login_required
@@ -507,16 +478,10 @@ def delete_restriction(res_type, user_id):
             usr = current_user
         if element['id'].startswith('a'):
             usr.allowed_tags = restriction_deletion(element, usr.list_allowed_tags)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Deleted allowed tags of user {}: {}".format(usr.nickname, usr.list_allowed_tags))
         elif element['id'].startswith('d'):
             usr.denied_tags = restriction_deletion(element, usr.list_denied_tags)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Deleted denied tags of user {}: {}".format(usr.nickname, usr.list_allowed_tags))
     elif res_type == 3:  # Columns per user
         if isinstance(user_id, int):
             usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
@@ -524,18 +489,16 @@ def delete_restriction(res_type, user_id):
             usr = current_user
         if element['id'].startswith('a'):
             usr.allowed_column_value = restriction_deletion(element, usr.list_allowed_column_values)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Deleted allowed columns of user {}: {}".format(usr.nickname,
+                                                                              usr.list_allowed_column_values))
         elif element['id'].startswith('d'):
             usr.denied_column_value = restriction_deletion(element, usr.list_denied_column_values)
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit("Deleted denied columns of user {}: {}".format(usr.nickname,
+                                                                             usr.list_denied_column_values))
     return ""


 @admi.route("/ajax/listrestriction/<int:res_type>", defaults={"user_id": 0})
 @admi.route("/ajax/listrestriction/<int:res_type>/<int:user_id>")
 @login_required
@@ -580,6 +543,7 @@ def list_restriction(res_type, user_id):
     response.headers["Content-Type"] = "application/json; charset=utf-8"
     return response


 @admi.route("/basicconfig/pathchooser/")
 @unconfigured
 def config_pathchooser():
@@ -587,12 +551,14 @@ def config_pathchooser():
         return pathchooser()
     abort(403)


 @admi.route("/ajax/pathchooser/")
 @login_required
 @admin_required
 def ajax_pathchooser():
     return pathchooser()


 def pathchooser():
     browse_for = "folder"
     folder_only = request.args.get('folder', False) == "true"
@@ -605,12 +571,12 @@ def pathchooser():
     else:
         oldfile = ""

-    abs = False
+    absolute = False

     if os.path.isdir(path):
         # if os.path.isabs(path):
         cwd = os.path.realpath(path)
-        abs = True
+        absolute = True
         # else:
         #     cwd = os.path.relpath(path)
     else:
@@ -618,7 +584,7 @@ def pathchooser():

     cwd = os.path.normpath(os.path.realpath(cwd))
     parentdir = os.path.dirname(cwd)
-    if not abs:
+    if not absolute:
         if os.path.realpath(cwd) == os.path.realpath("/"):
             cwd = os.path.relpath(cwd)
         else:
@@ -705,8 +671,8 @@ def _configuration_gdrive_helper(to_save):
         config.config_use_google_drive = False

     gdrive_secrets = {}
-    gdriveError = gdriveutils.get_error_text(gdrive_secrets)
-    if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdriveError:
+    gdrive_error = gdriveutils.get_error_text(gdrive_secrets)
+    if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdrive_error:
         with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
             gdrive_secrets = json.load(settings)['web']
         if not gdrive_secrets:
@@ -718,10 +684,11 @@ def _configuration_gdrive_helper(to_save):
             )

     # always show google drive settings, but in case of error deny support
-    config.config_use_google_drive = (not gdriveError) and ("config_use_google_drive" in to_save)
+    config.config_use_google_drive = (not gdrive_error) and ("config_use_google_drive" in to_save)
     if _config_string(to_save, "config_google_drive_folder"):
         gdriveutils.deleteDatabaseOnChange()
-    return gdriveError
+    return gdrive_error


 def _configuration_oauth_helper(to_save):
     active_oauths = 0
@@ -744,22 +711,24 @@ def _configuration_oauth_helper(to_save):
                                     "active": element["active"]})
     return reboot_required

-def _configuration_logfile_helper(to_save, gdriveError):
+
+def _configuration_logfile_helper(to_save, gdrive_error):
     reboot_required = False
     reboot_required |= _config_int(to_save, "config_log_level")
     reboot_required |= _config_string(to_save, "config_logfile")
     if not logger.is_valid_logfile(config.config_logfile):
         return reboot_required, \
-               _configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'), gdriveError)
+               _configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'), gdrive_error)

     reboot_required |= _config_checkbox_int(to_save, "config_access_log")
     reboot_required |= _config_string(to_save, "config_access_logfile")
     if not logger.is_valid_logfile(config.config_access_logfile):
         return reboot_required, \
-               _configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'), gdriveError)
+               _configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'), gdrive_error)
     return reboot_required, None

-def _configuration_ldap_helper(to_save, gdriveError):
+
+def _configuration_ldap_helper(to_save, gdrive_error):
     reboot_required = False
     reboot_required |= _config_string(to_save, "config_ldap_provider_url")
     reboot_required |= _config_int(to_save, "config_ldap_port")
@@ -786,33 +755,33 @@ def _configuration_ldap_helper(to_save, gdriveError):
             or not config.config_ldap_dn \
             or not config.config_ldap_user_object:
         return reboot_required, _configuration_result(_('Please Enter a LDAP Provider, '
-                                                        'Port, DN and User Object Identifier'), gdriveError)
+                                                        'Port, DN and User Object Identifier'), gdrive_error)

     if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:
         if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:
             if not config.config_ldap_serv_username or not bool(config.config_ldap_serv_password):
                 return reboot_required, _configuration_result('Please Enter a LDAP Service Account and Password',
-                                                               gdriveError)
+                                                               gdrive_error)
         else:
             if not config.config_ldap_serv_username:
-                return reboot_required, _configuration_result('Please Enter a LDAP Service Account', gdriveError)
+                return reboot_required, _configuration_result('Please Enter a LDAP Service Account', gdrive_error)

     if config.config_ldap_group_object_filter:
         if config.config_ldap_group_object_filter.count("%s") != 1:
             return reboot_required, \
                 _configuration_result(_('LDAP Group Object Filter Needs to Have One "%s" Format Identifier'),
-                                      gdriveError)
+                                      gdrive_error)
         if config.config_ldap_group_object_filter.count("(") != config.config_ldap_group_object_filter.count(")"):
             return reboot_required, _configuration_result(_('LDAP Group Object Filter Has Unmatched Parenthesis'),
-                                                          gdriveError)
+                                                          gdrive_error)

     if config.config_ldap_user_object.count("%s") != 1:
         return reboot_required, \
             _configuration_result(_('LDAP User Object Filter needs to Have One "%s" Format Identifier'),
-                                  gdriveError)
+                                  gdrive_error)
     if config.config_ldap_user_object.count("(") != config.config_ldap_user_object.count(")"):
         return reboot_required, _configuration_result(_('LDAP User Object Filter Has Unmatched Parenthesis'),
-                                                      gdriveError)
+                                                      gdrive_error)

     if to_save["ldap_import_user_filter"] == '0':
         config.config_ldap_member_user_object = ""
@@ -820,10 +789,10 @@ def _configuration_ldap_helper(to_save, gdriveError):
         if config.config_ldap_member_user_object.count("%s") != 1:
             return reboot_required, \
                 _configuration_result(_('LDAP Member User Filter needs to Have One "%s" Format Identifier'),
-                                      gdriveError)
+                                      gdrive_error)
         if config.config_ldap_member_user_object.count("(") != config.config_ldap_member_user_object.count(")"):
             return reboot_required, _configuration_result(_('LDAP Member User Filter Has Unmatched Parenthesis'),
-                                                          gdriveError)
+                                                          gdrive_error)

     if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:
         if not (os.path.isfile(config.config_ldap_cacert_path) and
@@ -832,7 +801,7 @@ def _configuration_ldap_helper(to_save, gdriveError):
             return reboot_required, \
                 _configuration_result(_('LDAP CACertificate, Certificate or Key Location is not Valid, '
                                         'Please Enter Correct Path'),
-                                      gdriveError)
+                                      gdrive_error)
     return reboot_required, None


@@ -840,7 +809,7 @@ def _configuration_update_helper(configured):
     reboot_required = False
     db_change = False
     to_save = request.form.to_dict()
-    gdriveError = None
+    gdrive_error = None

     to_save['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
                                            '',
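The LDAP checks above all enforce the same two rules on an object filter: exactly one `%s` placeholder (later substituted with the user name) and balanced parentheses. A small stand-alone version of that validation, with made-up example filters:

```python
def validate_ldap_filter(ldap_filter):
    """Return an error message for an invalid LDAP object filter, else None.

    Mirrors the two checks used in the configuration helpers above.
    """
    if ldap_filter.count("%s") != 1:
        return 'filter needs exactly one "%s" format identifier'
    if ldap_filter.count("(") != ldap_filter.count(")"):
        return "filter has unmatched parenthesis"
    return None

print(validate_ldap_filter("(&(objectClass=person)(uid=%s))"))  # None -> valid
print(validate_ldap_filter("(&(objectClass=person)(uid=%s)"))   # unmatched parenthesis
```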
@@ -849,21 +818,21 @@ def _configuration_update_helper(configured):
     try:
         db_change |= _config_string(to_save, "config_calibre_dir")

-        # Google drive setup
-        gdriveError = _configuration_gdrive_helper(to_save)
+        # gdrive_error drive setup
+        gdrive_error = _configuration_gdrive_helper(to_save)

         reboot_required |= _config_int(to_save, "config_port")

         reboot_required |= _config_string(to_save, "config_keyfile")
         if config.config_keyfile and not os.path.isfile(config.config_keyfile):
             return _configuration_result(_('Keyfile Location is not Valid, Please Enter Correct Path'),
-                                         gdriveError,
+                                         gdrive_error,
                                          configured)

         reboot_required |= _config_string(to_save, "config_certfile")
         if config.config_certfile and not os.path.isfile(config.config_certfile):
             return _configuration_result(_('Certfile Location is not Valid, Please Enter Correct Path'),
-                                         gdriveError,
+                                         gdrive_error,
                                          configured)

         _config_checkbox_int(to_save, "config_uploading")
@@ -890,7 +859,7 @@ def _configuration_update_helper(configured):

         # LDAP configurator,
         if config.config_login_type == constants.LOGIN_LDAP:
-            reboot, message = _configuration_ldap_helper(to_save, gdriveError)
+            reboot, message = _configuration_ldap_helper(to_save, gdrive_error)
             if message:
                 return message
             reboot_required |= reboot
@@ -920,7 +889,7 @@ def _configuration_update_helper(configured):
         if config.config_login_type == constants.LOGIN_OAUTH:
             reboot_required |= _configuration_oauth_helper(to_save)

-        reboot, message = _configuration_logfile_helper(to_save, gdriveError)
+        reboot, message = _configuration_logfile_helper(to_save, gdrive_error)
         if message:
             return message
         reboot_required |= reboot
@@ -929,10 +898,10 @@ def _configuration_update_helper(configured):
         if "config_rarfile_location" in to_save:
             unrar_status = helper.check_unrar(config.config_rarfile_location)
             if unrar_status:
-                return _configuration_result(unrar_status, gdriveError, configured)
+                return _configuration_result(unrar_status, gdrive_error, configured)
     except (OperationalError, InvalidRequestError):
         ub.session.rollback()
-        _configuration_result(_(u"Settings DB is not Writeable"), gdriveError, configured)
+        _configuration_result(_(u"Settings DB is not Writeable"), gdrive_error, configured)

     try:
         metadata_db = os.path.join(config.config_calibre_dir, "metadata.db")
@@ -940,12 +909,12 @@ def _configuration_update_helper(configured):
             gdriveutils.downloadFile(None, "metadata.db", metadata_db)
             db_change = True
     except Exception as e:
-        return _configuration_result('%s' % e, gdriveError, configured)
+        return _configuration_result('%s' % e, gdrive_error, configured)

     if db_change:
         if not calibre_db.setup_db(config, ub.app_DB_path):
             return _configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
-                                         gdriveError,
+                                         gdrive_error,
                                          configured)
         if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
             flash(_(u"DB is not Writeable"), category="warning")
@@ -955,16 +924,16 @@ def _configuration_update_helper(configured):
     if reboot_required:
         web_server.stop(True)

-    return _configuration_result(None, gdriveError, configured)
+    return _configuration_result(None, gdrive_error, configured)


-def _configuration_result(error_flash=None, gdriveError=None, configured=True):
+def _configuration_result(error_flash=None, gdrive_error=None, configured=True):
     gdrive_authenticate = not is_gdrive_ready()
     gdrivefolders = []
-    if gdriveError is None:
-        gdriveError = gdriveutils.get_error_text()
-    if gdriveError:
-        gdriveError = _(gdriveError)
+    if gdrive_error is None:
+        gdrive_error = gdriveutils.get_error_text()
+    if gdrive_error:
+        gdrive_error = _(gdrive_error)
     else:
         # if config.config_use_google_drive and\
         if not gdrive_authenticate and gdrive_support:
@@ -977,10 +946,16 @@ def _configuration_result(error_flash=None, gdriveError=None, configured=True):
         flash(error_flash, category="error")
     show_login_button = False

-    return render_title_template("config_edit.html", config=config, provider=oauthblueprints,
-                                 show_back_button=show_back_button, show_login_button=show_login_button,
-                                 show_authenticate_google_drive=gdrive_authenticate, filepicker=configured,
-                                 gdriveError=gdriveError, gdrivefolders=gdrivefolders, feature_support=feature_support,
+    return render_title_template("config_edit.html",
+                                 config=config,
+                                 provider=oauthblueprints,
+                                 show_back_button=show_back_button,
+                                 show_login_button=show_login_button,
+                                 show_authenticate_google_drive=gdrive_authenticate,
+                                 filepicker=configured,
+                                 gdriveError=gdrive_error,
+                                 gdrivefolders=gdrivefolders,
+                                 feature_support=feature_support,
                                  title=_(u"Basic Configuration"), page="config")
@@ -1025,10 +1000,7 @@ def _handle_new_user(to_save, content,languages, translations, kobo_support):
         content.allowed_column_value = config.config_allowed_column_value
         content.denied_column_value = config.config_denied_column_value
         ub.session.add(content)
-        try:
-            ub.session.commit()
-        except OperationalError:
-            ub.session.rollback()
+        ub.session.commit()
         flash(_(u"User '%(user)s' created", user=content.nickname), category="success")
         return redirect(url_for('admin.admin'))
     except IntegrityError:
@@ -1044,10 +1016,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
         if ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                             ub.User.id != content.id).count():
             ub.session.query(ub.User).filter(ub.User.id == content.id).delete()
-            try:
-                ub.session.commit()
-            except OperationalError:
-                ub.session.rollback()
+            ub.session_commit()
             flash(_(u"User '%(nick)s' deleted", nick=content.nickname), category="success")
             return redirect(url_for('admin.admin'))
         else:
@@ -1055,8 +1024,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
             return redirect(url_for('admin.admin'))
         else:
             if not ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
-                                                    ub.User.id != content.id).count() and \
-                    not 'admin_role' in to_save:
+                                                    ub.User.id != content.id).count() and 'admin_role' not in to_save:
                 flash(_(u"No admin user remaining, can't remove admin role", nick=content.nickname), category="error")
                 return redirect(url_for('admin.admin'))

@@ -1075,7 +1043,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
             value = element['visibility']
             if value in val and not content.check_visibility(value):
                 content.sidebar_view |= value
-            elif not value in val and content.check_visibility(value):
+            elif value not in val and content.check_visibility(value):
                 content.sidebar_view &= ~value

     if "Show_detail_random" in to_save:
@@ -1122,10 +1090,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
     if "kindle_mail" in to_save and to_save["kindle_mail"] != content.kindle_mail:
         content.kindle_mail = to_save["kindle_mail"]
     try:
-        try:
-            ub.session.commit()
-        except OperationalError:
-            ub.session.rollback()
+        ub.session_commit()
         flash(_(u"User '%(nick)s' updated", nick=content.nickname), category="success")
     except IntegrityError:
         ub.session.rollback()
@@ -1247,9 +1212,8 @@ def reset_user_password(user_id):
 @login_required
 @admin_required
 def view_logfile():
-    logfiles = {}
-    logfiles[0] = logger.get_logfile(config.config_logfile)
-    logfiles[1] = logger.get_accesslogfile(config.config_access_logfile)
+    logfiles = {0: logger.get_logfile(config.config_logfile),
+                1: logger.get_accesslogfile(config.config_access_logfile)}
     return render_title_template("logviewer.html",
                                  title=_(u"Logfile viewer"),
                                  accesslog_enable=config.config_access_log,
@@ -1273,6 +1237,7 @@ def send_logfile(logtype):
     else:
         return ""


 @admi.route("/admin/logdownload/<int:logtype>")
 @login_required
 @admin_required
@@ -1390,7 +1355,7 @@ def import_ldap_users():
             kindlemail = ''
             if 'mail' in user_data:
                 useremail = user_data['mail'][0].decode('utf-8')
-                if (len(user_data['mail']) > 1):
+                if len(user_data['mail']) > 1:
                     kindlemail = user_data['mail'][1].decode('utf-8')

             else:
@@ -1428,19 +1393,21 @@ def import_ldap_users():


 def extract_user_data_from_field(user, field):
-    match = re.search(field + "=([\d\s\w-]+)", user, re.IGNORECASE | re.UNICODE)
+    match = re.search(field + r"=([\d\s\w-]+)", user, re.IGNORECASE | re.UNICODE)
     if match:
         return match.group(1)
     else:
         raise Exception("Could Not Parse LDAP User: {}".format(user))

-def extract_dynamic_field_from_filter(user, filter):
-    match = re.search("([a-zA-Z0-9-]+)=%s", filter, re.IGNORECASE | re.UNICODE)
+
+def extract_dynamic_field_from_filter(user, filtr):
+    match = re.search("([a-zA-Z0-9-]+)=%s", filtr, re.IGNORECASE | re.UNICODE)
     if match:
         return match.group(1)
     else:
         raise Exception("Could Not Parse LDAP Userfield: {}", user)

-def extract_user_identifier(user, filter):
-    dynamic_field = extract_dynamic_field_from_filter(user, filter)
+
+def extract_user_identifier(user, filtr):
+    dynamic_field = extract_dynamic_field_from_filter(user, filtr)
     return extract_user_data_from_field(user, dynamic_field)
@@ -275,7 +275,7 @@ class _ConfigSQL(object):
     def toDict(self):
         storage = {}
         for k, v in self.__dict__.items():
-            if k[0] != '_' or k.endswith("password"):
+            if k[0] != '_' and not k.endswith("password") and not k.endswith("secret"):
                 storage[k] = v
         return storage
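The `toDict()` change above inverts the filter: instead of exporting private attributes and passwords, the method now skips any private attribute and any key ending in `password` or `secret`, so credentials never leave the settings object. A small illustration of the new condition with made-up attribute names:

```python
class DemoConfig:
    """Toy object with the kinds of attributes _ConfigSQL carries (names invented)."""

    def __init__(self):
        self.config_port = 8083
        self.mail_password = "hunter2"
        self.config_kobo_proxy_secret = "abc123"
        self._private_cache = {}

    def to_dict(self):
        storage = {}
        for k, v in self.__dict__.items():
            # New rule: skip private attrs and anything ending in password/secret.
            if k[0] != '_' and not k.endswith("password") and not k.endswith("secret"):
                storage[k] = v
        return storage

print(DemoConfig().to_dict())  # {'config_port': 8083} - credentials stay out
```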
@@ -240,7 +240,7 @@ def delete_book(book_id, book_format, jsonResponse):
     ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
     ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
     ub.delete_download(book_id)
-    ub.session.commit()
+    ub.session_commit()

     # check if only this book links to:
     # author, language, series, tags, custom columns
cps/kobo.py (44 changed lines)
@@ -58,7 +58,7 @@ KOBO_FORMATS = {"KEPUB": ["KEPUB"], "EPUB": ["EPUB3", "EPUB"]}
 KOBO_STOREAPI_URL = "https://storeapi.kobo.com"
 KOBO_IMAGEHOST_URL = "https://kbimages1-a.akamaihd.net"

-SYNC_ITEM_LIMIT = 5
+SYNC_ITEM_LIMIT = 100

 kobo = Blueprint("kobo", __name__, url_prefix="/kobo/<auth_token>")
 kobo_auth.disable_failed_auth_redirect_for_blueprint(kobo)
@@ -462,10 +462,7 @@ def HandleTagCreate():
     items_unknown_to_calibre = add_items_to_shelf(items, shelf)
     if items_unknown_to_calibre:
         log.debug("Received request to add unknown books to a collection. Silently ignoring items.")
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()
     return make_response(jsonify(str(shelf.uuid)), 201)


@@ -497,10 +494,7 @@ def HandleTagUpdate(tag_id):

     shelf.name = name
     ub.session.merge(shelf)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()
     return make_response(' ', 200)


@@ -552,11 +546,7 @@ def HandleTagAddItem(tag_id):
         log.debug("Received request to add an unknown book to a collection. Silently ignoring item.")

     ub.session.merge(shelf)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()

     return make_response('', 201)


@@ -596,10 +586,7 @@ def HandleTagRemoveItem(tag_id):
             shelf.books.filter(ub.BookShelf.book_id == book.id).delete()
         except KeyError:
             items_unknown_to_calibre.append(item)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()

     if items_unknown_to_calibre:
         log.debug("Received request to remove an unknown book to a collecition. Silently ignoring item.")
@@ -645,10 +632,7 @@ def sync_shelves(sync_token, sync_results):
                 "ChangedTag": tag
             })
     sync_token.tags_last_modified = new_tags_last_modified
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()


 # Creates a Kobo "Tag" object from a ub.Shelf object
@@ -729,10 +713,7 @@ def HandleStateRequest(book_uuid):
         abort(400, description="Malformed request data is missing 'ReadingStates' key")

     ub.session.merge(kobo_reading_state)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()
     return jsonify({
         "RequestResult": "Success",
         "UpdateResults": [update_results_response],
@@ -770,10 +751,7 @@ def get_or_create_reading_state(book_id):
         kobo_reading_state.statistics = ub.KoboStatistics()
         book_read.kobo_reading_state = kobo_reading_state
     ub.session.add(book_read)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()
     return book_read.kobo_reading_state


@@ -876,11 +854,7 @@ def HandleBookDeletionRequest(book_uuid):
     archived_book.last_modified = datetime.datetime.utcnow()

     ub.session.merge(archived_book)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()

     return ("", 204)
@@ -148,10 +148,7 @@ def generate_auth_token(user_id):
     auth_token.token_type = 1

     ub.session.add(auth_token)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit()
     return render_title_template(
         "generate_kobo_auth_url.html",
         title=_(u"Kobo Setup"),
@@ -168,8 +165,5 @@ def delete_auth_token(user_id):
     # Invalidate any prevously generated Kobo Auth token for this user.
     ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.user_id == user_id)\
         .filter(ub.RemoteAuthToken.token_type==1).delete()
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
-    return ""
+    return ub.session_commit()
@@ -49,6 +49,13 @@ class _Logger(logging.Logger):
         else:
             self.error(message, stacklevel=2, *args, **kwargs)

+    def debug_no_auth(self, message, *args, **kwargs):
+        if message.startswith("send: AUTH"):
+            self.debug(message[:16], stacklevel=2, *args, **kwargs)
+        else:
+            self.debug(message, stacklevel=2, *args, **kwargs)
+

 def get(name=None):
     return logging.getLogger(name)
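The new `debug_no_auth()` method above keeps SMTP debug output useful while hiding credentials: any line beginning with "send: AUTH" is cut to its first 16 characters before being logged, so the Base64-encoded login data never reaches the logfile. A short usage sketch (logger name and messages are illustrative only, not taken from the source):

```python
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("mail_demo")  # hypothetical logger name

def debug_no_auth(message):
    # Same idea as _Logger.debug_no_auth: keep the AUTH line, drop the payload.
    if message.startswith("send: AUTH"):
        log.debug(message[:16])
    else:
        log.debug(message)

debug_no_auth("send: AUTH PLAIN dXNlcjpwYXNzd29yZA==")   # logged as "send: AUTH PLAIN"
debug_no_auth("send: MAIL FROM:<calibre@example.com>")   # logged unchanged
```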
|
@ -85,11 +85,7 @@ def register_user_with_oauth(user=None):
|
|||||||
except NoResultFound:
|
except NoResultFound:
|
||||||
# no found, return error
|
# no found, return error
|
||||||
return
|
return
|
||||||
try:
|
ub.session_commit("User {} with OAuth for provider {} registered".format(user.nickname, oauth_key))
|
||||||
ub.session.commit()
|
|
||||||
except Exception as e:
|
|
||||||
log.debug_or_exception(e)
|
|
||||||
ub.session.rollback()
|
|
||||||
|
|
||||||
|
|
||||||
def logout_oauth_user():
|
def logout_oauth_user():
|
||||||
@ -101,19 +97,12 @@ def logout_oauth_user():
|
|||||||
if ub.oauth_support:
|
if ub.oauth_support:
|
||||||
oauthblueprints = []
|
oauthblueprints = []
|
||||||
if not ub.session.query(ub.OAuthProvider).count():
|
if not ub.session.query(ub.OAuthProvider).count():
|
||||||
|
for provider in ("github", "google"):
|
||||||
oauthProvider = ub.OAuthProvider()
|
oauthProvider = ub.OAuthProvider()
|
||||||
oauthProvider.provider_name = "github"
|
oauthProvider.provider_name = provider
|
||||||
oauthProvider.active = False
|
oauthProvider.active = False
|
||||||
ub.session.add(oauthProvider)
|
ub.session.add(oauthProvider)
|
||||||
ub.session.commit()
|
ub.session_commit("{} Blueprint Created".format(provider))
|
||||||
oauthProvider = ub.OAuthProvider()
|
|
||||||
oauthProvider.provider_name = "google"
|
|
||||||
oauthProvider.active = False
|
|
||||||
ub.session.add(oauthProvider)
|
|
||||||
try:
|
|
||||||
ub.session.commit()
|
|
||||||
except OperationalError:
|
|
||||||
ub.session.rollback()
|
|
||||||
|
|
||||||
oauth_ids = ub.session.query(ub.OAuthProvider).all()
|
oauth_ids = ub.session.query(ub.OAuthProvider).all()
|
||||||
ele1 = dict(provider_name='github',
|
ele1 = dict(provider_name='github',
|
||||||
@ -203,12 +192,8 @@ if ub.oauth_support:
|
|||||||
provider_user_id=provider_user_id,
|
provider_user_id=provider_user_id,
|
||||||
token=token,
|
token=token,
|
||||||
)
|
)
|
||||||
try:
|
|
||||||
ub.session.add(oauth_entry)
|
ub.session.add(oauth_entry)
|
||||||
ub.session.commit()
|
ub.session_commit()
|
||||||
except Exception as e:
|
|
||||||
log.debug_or_exception(e)
|
|
||||||
ub.session.rollback()
|
|
||||||
|
|
||||||
# Disable Flask-Dance's default behavior for saving the OAuth token
|
# Disable Flask-Dance's default behavior for saving the OAuth token
|
||||||
# Value differrs depending on flask-dance version
|
# Value differrs depending on flask-dance version
|
||||||
|
@ -59,8 +59,7 @@ def remote_login_required(f):
|
|||||||
def remote_login():
|
def remote_login():
|
||||||
auth_token = ub.RemoteAuthToken()
|
auth_token = ub.RemoteAuthToken()
|
||||||
ub.session.add(auth_token)
|
ub.session.add(auth_token)
|
||||||
ub.session.commit()
|
ub.session_commit()
|
||||||
|
|
||||||
verify_url = url_for('remotelogin.verify_token', token=auth_token.auth_token, _external=true)
|
verify_url = url_for('remotelogin.verify_token', token=auth_token.auth_token, _external=true)
|
||||||
log.debug(u"Remot Login request with token: %s", auth_token.auth_token)
|
log.debug(u"Remot Login request with token: %s", auth_token.auth_token)
|
||||||
return render_title_template('remote_login.html', title=_(u"login"), token=auth_token.auth_token,
|
return render_title_template('remote_login.html', title=_(u"login"), token=auth_token.auth_token,
|
||||||
@@ -80,9 +79,9 @@ def verify_token(token):
         return redirect(url_for('web.index'))

     # Token expired
-    if datetime.now() > auth_token.expiration:
+    elif datetime.now() > auth_token.expiration:
         ub.session.delete(auth_token)
-        ub.session.commit()
+        ub.session_commit()

         flash(_(u"Token has expired"), category="error")
         log.error(u"Remote Login token expired")
@@ -91,7 +90,7 @@ def verify_token(token):
         # Update token with user information
         auth_token.user_id = current_user.id
         auth_token.verified = True
-        ub.session.commit()
+        ub.session_commit()

         flash(_(u"Success! Please return to your device"), category="success")
         log.debug(u"Remote Login token for userid %s verified", auth_token.user_id)
@@ -114,7 +113,7 @@ def token_verified():
     # Token expired
     elif datetime.now() > auth_token.expiration:
         ub.session.delete(auth_token)
-        ub.session.commit()
+        ub.session_commit()

         data['status'] = 'error'
         data['message'] = _(u"Token has expired")
@@ -127,7 +126,7 @@ def token_verified():
         login_user(user)

         ub.session.delete(auth_token)
-        ub.session.commit()
+        ub.session_commit("User {} logged in via remotelogin, token deleted".format(user.nickname))

         data['status'] = 'success'
         log.debug(u"Remote Login for userid %s succeded", user.id)

cps/shelf.py (126 changed lines)
@@ -27,7 +27,7 @@ import sys
 from flask import Blueprint, request, flash, redirect, url_for
 from flask_babel import gettext as _
 from flask_login import login_required, current_user
-from sqlalchemy.sql.expression import func
+from sqlalchemy.sql.expression import func, true
 from sqlalchemy.exc import OperationalError, InvalidRequestError

 from . import logger, ub, calibre_db, db
@@ -221,61 +221,60 @@ def remove_from_shelf(shelf_id, book_id):
 @login_required
 def create_shelf():
     shelf = ub.Shelf()
-    if request.method == "POST":
-        to_save = request.form.to_dict()
-        if "is_public" in to_save:
-            shelf.is_public = 1
-        shelf.name = to_save["title"]
-        shelf.user_id = int(current_user.id)
-
-        is_shelf_name_unique = False
-        if shelf.is_public == 1:
-            is_shelf_name_unique = ub.session.query(ub.Shelf) \
-                .filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1)) \
-                .first() is None
-
-            if not is_shelf_name_unique:
-                flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
-                      category="error")
-        else:
-            is_shelf_name_unique = ub.session.query(ub.Shelf) \
-                .filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
-                        (ub.Shelf.user_id == int(current_user.id)))\
-                .first() is None
-
-            if not is_shelf_name_unique:
-                flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
-                      category="error")
-
-        if is_shelf_name_unique:
-            try:
-                ub.session.add(shelf)
-                ub.session.commit()
-                flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
-                return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
-            except (OperationalError, InvalidRequestError):
-                ub.session.rollback()
-                flash(_(u"Settings DB is not Writeable"), category="error")
-            except Exception:
-                ub.session.rollback()
-                flash(_(u"There was an error"), category="error")
-        return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
-    else:
-        return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
+    return create_edit_shelf(shelf, title=_(u"Create a Shelf"), page="shelfcreate")


 @shelf.route("/shelf/edit/<int:shelf_id>", methods=["GET", "POST"])
 @login_required
 def edit_shelf(shelf_id):
     shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
+    return create_edit_shelf(shelf, title=_(u"Edit a shelf"), page="shelfedit", shelf_id=shelf_id)


+# if shelf ID is set, we are editing a shelf
+def create_edit_shelf(shelf, title, page, shelf_id=False):
     if request.method == "POST":
         to_save = request.form.to_dict()
+        if "is_public" in to_save:
+            shelf.is_public = 1
+        else:
+            shelf.is_public = 0
+        if check_shelf_is_unique(shelf, to_save, shelf_id):
+            shelf.name = to_save["title"]
+            # shelf.last_modified = datetime.utcnow()
+            if not shelf_id:
+                shelf.user_id = int(current_user.id)
+                ub.session.add(shelf)
+                shelf_action = "created"
+                flash_text = _(u"Shelf %(title)s created", title=to_save["title"])
+            else:
+                shelf_action = "changed"
+                flash_text = _(u"Shelf %(title)s changed", title=to_save["title"])
+            try:
+                ub.session.commit()
+                log.info(u"Shelf {} {}".format(to_save["title"], shelf_action))
+                flash(flash_text, category="success")
+                return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
+            except (OperationalError, InvalidRequestError) as e:
+                ub.session.rollback()
+                log.debug_or_exception(e)
+                flash(_(u"Settings DB is not Writeable"), category="error")
+            except Exception as e:
+                ub.session.rollback()
+                log.debug_or_exception(e)
+                flash(_(u"There was an error"), category="error")
+    return render_title_template('shelf_edit.html', shelf=shelf, title=title, page=page)

-        is_shelf_name_unique = False
+
+def check_shelf_is_unique(shelf, to_save, shelf_id=False):
+    if shelf_id:
+        ident = ub.Shelf.id != shelf_id
+    else:
+        ident = true()
     if shelf.is_public == 1:
         is_shelf_name_unique = ub.session.query(ub.Shelf) \
             .filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1)) \
-            .filter(ub.Shelf.id != shelf_id) \
+            .filter(ident) \
             .first() is None

         if not is_shelf_name_unique:
@@ -285,32 +284,13 @@ def edit_shelf(shelf_id):
         is_shelf_name_unique = ub.session.query(ub.Shelf) \
             .filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
                     (ub.Shelf.user_id == int(current_user.id))) \
-            .filter(ub.Shelf.id != shelf_id)\
+            .filter(ident) \
             .first() is None

         if not is_shelf_name_unique:
             flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
                   category="error")
-        if is_shelf_name_unique:
-            shelf.name = to_save["title"]
-            shelf.last_modified = datetime.utcnow()
-            if "is_public" in to_save:
-                shelf.is_public = 1
-            else:
-                shelf.is_public = 0
-            try:
-                ub.session.commit()
-                flash(_(u"Shelf %(title)s changed", title=to_save["title"]), category="success")
-            except (OperationalError, InvalidRequestError):
-                ub.session.rollback()
-                flash(_(u"Settings DB is not Writeable"), category="error")
-            except Exception:
-                ub.session.rollback()
-                flash(_(u"There was an error"), category="error")
-            return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
-    else:
-        return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
+    return is_shelf_name_unique


 def delete_shelf_helper(cur_shelf):
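The new check_shelf_is_unique() helper above folds the create and edit cases into one query by using SQLAlchemy's true() construct (imported in the cps/shelf.py import hunk) as a no-op filter: with no shelf_id, the extra .filter(ident) matches every row; with a shelf_id, it excludes the shelf currently being edited. A minimal, self-contained sketch of the same pattern, using a throw-away model and function rather than calibre-web's own tables:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy.sql.expression import true

    Base = declarative_base()

    class Shelf(Base):
        __tablename__ = "shelf"
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add_all([Shelf(name="books"), Shelf(name="comics")])
    session.commit()

    def name_is_unique(name, shelf_id=None):
        # Exclude the shelf being edited, or use true() as a no-op filter when creating.
        ident = Shelf.id != shelf_id if shelf_id else true()
        return session.query(Shelf).filter(Shelf.name == name).filter(ident).first() is None

    print(name_is_unique("books"))      # False: the name is already taken by shelf 1
    print(name_is_unique("books", 1))   # True: only shelf 1 uses it, and shelf 1 is being edited

Because ident is just a SQL expression, the same query text serves both code paths without duplicating the uniqueness check.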
@@ -320,12 +300,7 @@ def delete_shelf_helper(cur_shelf):
     ub.session.delete(cur_shelf)
     ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
     ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.user_id))
-    try:
-        ub.session.commit()
-        log.info("successfully deleted %s", cur_shelf)
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit("successfully deleted Shelf {}".format(cur_shelf.name))


 @shelf.route("/shelf/delete/<int:shelf_id>")
@@ -339,11 +314,13 @@ def delete_shelf(shelf_id):
         flash(_(u"Settings DB is not Writeable"), category="error")
     return redirect(url_for('web.index'))


 @shelf.route("/simpleshelf/<int:shelf_id>")
 @login_required_if_no_ano
 def show_simpleshelf(shelf_id):
     return render_show_shelf(2, shelf_id, 1, None)


 @shelf.route("/shelf/<int:shelf_id>", defaults={"sort_param": "order", 'page': 1})
 @shelf.route("/shelf/<int:shelf_id>/<sort_param>", defaults={'page': 1})
 @shelf.route("/shelf/<int:shelf_id>/<sort_param>/<int:page>")
@@ -381,6 +358,7 @@ def order_shelf(shelf_id):
                                  title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name),
                                  shelf=shelf, page="shelforder")


 def change_shelf_order(shelf_id, order):
     result = calibre_db.session.query(db.Books).join(ub.BookShelf, ub.BookShelf.book_id == db.Books.id)\
         .filter(ub.BookShelf.shelf == shelf_id).order_by(*order).all()
@@ -388,10 +366,8 @@ def change_shelf_order(shelf_id, order):
         book = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id) \
             .filter(ub.BookShelf.book_id == entry.id).first()
         book.order = index
-        try:
-            ub.session.commit()
-        except OperationalError:
-            ub.session.rollback()
+        ub.session_commit("Shelf-id:{} - Order changed".format(shelf_id))

 def render_show_shelf(shelf_type, shelf_id, page_no, sort_param):
     shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
@@ -66,9 +66,15 @@ class TaskConvert(CalibreTask):
             # if we're sending to kindle after converting, create a one-off task and run it immediately
             # todo: figure out how to incorporate this into the progress
             try:
-                worker_thread.add(self.user, TaskEmail(self.settings['subject'], self.results["path"],
-                                                       filename, self.settings, self.kindle_mail,
-                                                       self.settings['subject'], self.settings['body'], internal=True))
+                worker_thread.add(self.user, TaskEmail(self.settings['subject'],
+                                                       self.results["path"],
+                                                       filename,
+                                                       self.settings,
+                                                       self.kindle_mail,
+                                                       self.settings['subject'],
+                                                       self.settings['body'],
+                                                       internal=True)
+                                  )
             except Exception as e:
                 return self._handleError(str(e))

@@ -44,7 +44,7 @@ class EmailBase:

     def send(self, strg):
         """Send `strg' to the server."""
-        log.debug('send: %r', strg[:300])
+        log.debug_no_auth('send: {}'.format(strg[:300]))
         if hasattr(self, 'sock') and self.sock:
             try:
                 if self.transferSize:
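EmailBase.send() now logs through log.debug_no_auth() instead of log.debug(), and the logger hunk at the top of this diff hints at the wrapper behind it. The point is presumably to keep the Base64-encoded SMTP AUTH payload out of the debug log. The actual calibre-web implementation is not shown in this excerpt; a hypothetical wrapper in that spirit, with an illustrative logger name, might look like:

    import logging

    class SafeLogger(logging.Logger):
        """Logger with a helper that keeps SMTP credentials out of the log."""

        def debug_no_auth(self, message, *args, **kwargs):
            # Hypothetical sketch, not the calibre-web implementation: truncate
            # anything that looks like an AUTH exchange so the Base64 credential
            # payload is never written to the log file.
            if isinstance(message, str) and message.upper().startswith("SEND: AUTH"):
                self.debug(message[:16] + " ***", *args, **kwargs)
            else:
                self.debug(message, *args, **kwargs)

    logging.setLoggerClass(SafeLogger)
    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("smtp-example")

    log.debug_no_auth("send: AUTH PLAIN AGFkbWluAHNlY3JldA==")   # credential is masked
    log.debug_no_auth("send: MAIL FROM:<calibre@example.org>")   # logged unchanged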

cps/ub.py (13 changed lines)
@@ -46,8 +46,9 @@ from sqlalchemy.orm.attributes import flag_modified
 from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session
 from werkzeug.security import generate_password_hash

-from . import constants
+from . import constants, logger

+log = logger.create()

 session = None
 app_DB_path = None
@@ -695,3 +696,13 @@ def dispose():
             old_session.bind.dispose()
         except Exception:
             pass
+
+def session_commit(success=None):
+    try:
+        session.commit()
+        if success:
+            log.info(success)
+    except (exc.OperationalError, exc.InvalidRequestError) as e:
+        session.rollback()
+        log.debug_or_exception(e)
+    return ""
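The ub.session_commit() helper defined above is the common thread of this change set: it wraps session.commit() with rollback and logging, so the per-call-site try/except blocks removed throughout this diff collapse into a single call. A minimal sketch of the calling pattern, assuming the cps package is importable; the function name toggle_archived_example is illustrative, not part of the diff:

    from cps import ub  # assumes the cps package is importable and configured

    def toggle_archived_example(book_id, user_id):
        # Before this change, every caller carried its own error handling:
        #     try:
        #         ub.session.commit()
        #     except OperationalError:
        #         ub.session.rollback()
        archived_book = ub.ArchivedBook(user_id=user_id, book_id=book_id)
        archived_book.is_archived = True
        ub.session.merge(archived_book)
        # After: one call. Commit, the optional info-level message, and rollback on
        # OperationalError/InvalidRequestError all happen inside ub.session_commit().
        ub.session_commit("Book {} archivebit toggled".format(book_id))

Callers that do not need a log line simply call ub.session_commit() with no argument, as the bookmark and OAuth hunks do.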

cps/web.py (20 changed lines)
@@ -135,10 +135,7 @@ def bookmark(book_id, book_format):
                                              ub.Bookmark.book_id == book_id,
                                              ub.Bookmark.format == book_format)).delete()
     if not bookmark_key:
-        try:
-            ub.session.commit()
-        except OperationalError:
-            ub.session.rollback()
+        ub.session_commit()
         return "", 204

     lbookmark = ub.Bookmark(user_id=current_user.id,
@@ -146,10 +143,7 @@ def bookmark(book_id, book_format):
                             format=book_format,
                             bookmark_key=bookmark_key)
     ub.session.merge(lbookmark)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit("Bookmark for user {} in book {} created".format(current_user.id, book_id))
     return "", 201

@@ -174,10 +168,7 @@ def toggle_read(book_id):
             kobo_reading_state.statistics = ub.KoboStatistics()
             book.kobo_reading_state = kobo_reading_state
         ub.session.merge(book)
-        try:
-            ub.session.commit()
-        except OperationalError:
-            ub.session.rollback()
+        ub.session_commit("Book {} readbit toggled".format(book_id))
     else:
         try:
             calibre_db.update_title_sort(config)
@@ -211,10 +202,7 @@ def toggle_archived(book_id):
         archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
         archived_book.is_archived = True
     ub.session.merge(archived_book)
-    try:
-        ub.session.commit()
-    except OperationalError:
-        ub.session.rollback()
+    ub.session_commit("Book {} archivebit toggled".format(book_id))
     return ""