Mirror of https://github.com/janeczku/calibre-web (synced 2024-11-24 18:47:23 +00:00)

Commit b4324cd685: Merge remote-tracking branch 'upstream/master' into master
@@ -32,8 +32,8 @@ Calibre-Web is a web app providing a clean interface for browsing, reading and d

## Quick start

1. Install dependencies by running `pip3 install --target vendor -r requirements.txt` (python3.x) or `pip install --target vendor -r requirements.txt` (python2.7).
2. Execute the command: `python cps.py` (or `nohup python cps.py` - recommended if you want to exit the terminal window)
1. Install dependencies by running `pip3 install --target vendor -r requirements.txt` (python3.x). Alternativly set up a python virtual environment.
2. Execute the command: `python3 cps.py` (or `nohup python3 cps.py` - recommended if you want to exit the terminal window)
3. Point your browser to `http://localhost:8083` or `http://localhost:8083/opds` for the OPDS catalog
4. Set `Location of Calibre database` to the path of the folder where your Calibre library (metadata.db) lives, push "submit" button\
Optionally a Google Drive can be used to host the calibre library [-> Using Google Drive integration](https://github.com/janeczku/calibre-web/wiki/Configuration#using-google-drive-integration)
@@ -48,7 +48,7 @@ Please note that running the above install command can fail on some versions of

## Requirements

python 3.x+, (Python 2.7+)
python 3.x+

Optionally, to enable on-the-fly conversion from one ebook format to another when using the send-to-kindle feature, or during editing of ebooks metadata:
cps/admin.py (239 changed lines)
@@ -105,6 +105,7 @@ def unconfigured(f):
return inner

@admi.before_app_request
def before_request():
if current_user.is_authenticated:

@@ -118,8 +119,10 @@ def before_request():
g.config_authors_max = config.config_authors_max
g.shelves_access = ub.session.query(ub.Shelf).filter(
or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == current_user.id)).order_by(ub.Shelf.name).all()
if not config.db_configured and request.endpoint not in (
'admin.basic_configuration', 'login', 'admin.config_pathchooser') and '/static/' not in request.path:
if '/static/' not in request.path and not config.db_configured and \
request.endpoint not in ('admin.basic_configuration',
'login',
'admin.config_pathchooser'):
return redirect(url_for('admin.basic_configuration'))

@@ -201,12 +204,12 @@ def configuration():
@login_required
@admin_required
def view_configuration():
readColumn = calibre_db.session.query(db.Custom_Columns)\
read_column = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
restrictColumns= calibre_db.session.query(db.Custom_Columns)\
restrict_columns = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
return render_title_template("config_view_edit.html", conf=config, readColumns=readColumn,
restrictColumns=restrictColumns,
return render_title_template("config_view_edit.html", conf=config, readColumns=read_column,
restrictColumns=restrict_columns,
title=_(u"UI Configuration"), page="uiconfig")
@@ -271,11 +274,7 @@ def edit_domain(allow):
vals = request.form.to_dict()
answer = ub.session.query(ub.Registration).filter(ub.Registration.id == vals['pk']).first()
answer.domain = vals['value'].replace('*', '%').replace('?', '_').lower()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
return ""
return ub.session_commit("Registering Domains edited {}".format(answer.domain))

@admi.route("/ajax/adddomain/<int:allow>", methods=['POST'])

@@ -288,10 +287,7 @@ def add_domain(allow):
if not check:
new_domain = ub.Registration(domain=domain_name, allow=allow)
ub.session.add(new_domain)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Registering Domains added {}".format(domain_name))
return ""

@@ -302,18 +298,12 @@ def delete_domain():
try:
domain_id = request.form.to_dict()['domainid'].replace('*', '%').replace('?', '_').lower()
ub.session.query(ub.Registration).filter(ub.Registration.id == domain_id).delete()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Registering Domains deleted {}".format(domain_id))
# If last domain was deleted, add all domains by default
if not ub.session.query(ub.Registration).filter(ub.Registration.allow == 1).count():
new_domain = ub.Registration(domain="%.%", allow=1)
ub.session.add(new_domain)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Last Registering Domain deleted, added *.* as default")
except KeyError:
pass
return ""
@@ -330,6 +320,7 @@ def list_domain(allow):
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response

@admi.route("/ajax/editrestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
@admi.route("/ajax/editrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required

@@ -355,10 +346,7 @@ def edit_restriction(res_type, user_id):
elementlist = usr.list_allowed_tags()
elementlist[int(element['id'][1:])] = element['Element']
usr.allowed_tags = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed tags of user {} to {}".format(usr.nickname, usr.allowed_tags))
if res_type == 3: # CColumn per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()

@@ -367,10 +355,7 @@ def edit_restriction(res_type, user_id):
elementlist = usr.list_allowed_column_values()
elementlist[int(element['id'][1:])] = element['Element']
usr.allowed_column_value = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed columns of user {} to {}".format(usr.nickname, usr.allowed_column_value))
if element['id'].startswith('d'):
if res_type == 0: # Tags as template
elementlist = config.list_denied_tags()

@@ -390,10 +375,7 @@ def edit_restriction(res_type, user_id):
elementlist = usr.list_denied_tags()
elementlist[int(element['id'][1:])] = element['Element']
usr.denied_tags = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied tags of user {} to {}".format(usr.nickname, usr.denied_tags))
if res_type == 3: # CColumn per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()

@@ -402,12 +384,10 @@ def edit_restriction(res_type, user_id):
elementlist = usr.list_denied_column_values()
elementlist[int(element['id'][1:])] = element['Element']
usr.denied_column_value = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied columns of user {} to {}".format(usr.nickname, usr.denied_column_value))
return ""

def restriction_addition(element, list_func):
elementlist = list_func()
if elementlist == ['']:
@@ -451,16 +431,10 @@ def add_restriction(res_type, user_id):
usr = current_user
if 'submit_allow' in element:
usr.allowed_tags = restriction_addition(element, usr.list_allowed_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed tags of user {} to {}".format(usr.nickname, usr.list_allowed_tags))
elif 'submit_deny' in element:
usr.denied_tags = restriction_addition(element, usr.list_denied_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied tags of user {} to {}".format(usr.nickname, usr.list_denied_tags))
if res_type == 3: # CustomC per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()

@@ -468,18 +442,15 @@ def add_restriction(res_type, user_id):
usr = current_user
if 'submit_allow' in element:
usr.allowed_column_value = restriction_addition(element, usr.list_allowed_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed columns of user {} to {}".format(usr.nickname,
usr.list_allowed_column_values))
elif 'submit_deny' in element:
usr.denied_column_value = restriction_addition(element, usr.list_denied_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied columns of user {} to {}".format(usr.nickname,
usr.list_denied_column_values))
return ""

@admi.route("/ajax/deleterestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
@admi.route("/ajax/deleterestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@@ -507,16 +478,10 @@ def delete_restriction(res_type, user_id):
usr = current_user
if element['id'].startswith('a'):
usr.allowed_tags = restriction_deletion(element, usr.list_allowed_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted allowed tags of user {}: {}".format(usr.nickname, usr.list_allowed_tags))
elif element['id'].startswith('d'):
usr.denied_tags = restriction_deletion(element, usr.list_denied_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted denied tags of user {}: {}".format(usr.nickname, usr.list_allowed_tags))
elif res_type == 3: # Columns per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()

@@ -524,18 +489,16 @@ def delete_restriction(res_type, user_id):
usr = current_user
if element['id'].startswith('a'):
usr.allowed_column_value = restriction_deletion(element, usr.list_allowed_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted allowed columns of user {}: {}".format(usr.nickname,
usr.list_allowed_column_values))

elif element['id'].startswith('d'):
usr.denied_column_value = restriction_deletion(element, usr.list_denied_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted denied columns of user {}: {}".format(usr.nickname,
usr.list_denied_column_values))
return ""

@admi.route("/ajax/listrestriction/<int:res_type>", defaults={"user_id": 0})
@admi.route("/ajax/listrestriction/<int:res_type>/<int:user_id>")
@login_required

@@ -580,6 +543,7 @@ def list_restriction(res_type, user_id):
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response

@admi.route("/basicconfig/pathchooser/")
@unconfigured
def config_pathchooser():
@@ -587,12 +551,14 @@ def config_pathchooser():
return pathchooser()
abort(403)

@admi.route("/ajax/pathchooser/")
@login_required
@admin_required
def ajax_pathchooser():
return pathchooser()

def pathchooser():
browse_for = "folder"
folder_only = request.args.get('folder', False) == "true"

@@ -605,12 +571,12 @@ def pathchooser():
else:
oldfile = ""

abs = False
absolute = False

if os.path.isdir(path):
# if os.path.isabs(path):
cwd = os.path.realpath(path)
abs = True
absolute = True
# else:
# cwd = os.path.relpath(path)
else:

@@ -618,7 +584,7 @@ def pathchooser():

cwd = os.path.normpath(os.path.realpath(cwd))
parentdir = os.path.dirname(cwd)
if not abs:
if not absolute:
if os.path.realpath(cwd) == os.path.realpath("/"):
cwd = os.path.relpath(cwd)
else:

@@ -705,8 +671,8 @@ def _configuration_gdrive_helper(to_save):
config.config_use_google_drive = False

gdrive_secrets = {}
gdriveError = gdriveutils.get_error_text(gdrive_secrets)
if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdriveError:
gdrive_error = gdriveutils.get_error_text(gdrive_secrets)
if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdrive_error:
with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
gdrive_secrets = json.load(settings)['web']
if not gdrive_secrets:

@@ -718,10 +684,11 @@ def _configuration_gdrive_helper(to_save):
)

# always show google drive settings, but in case of error deny support
config.config_use_google_drive = (not gdriveError) and ("config_use_google_drive" in to_save)
config.config_use_google_drive = (not gdrive_error) and ("config_use_google_drive" in to_save)
if _config_string(to_save, "config_google_drive_folder"):
gdriveutils.deleteDatabaseOnChange()
return gdriveError
return gdrive_error

def _configuration_oauth_helper(to_save):
active_oauths = 0
@@ -744,22 +711,24 @@ def _configuration_oauth_helper(to_save):
"active": element["active"]})
return reboot_required

def _configuration_logfile_helper(to_save, gdriveError):

def _configuration_logfile_helper(to_save, gdrive_error):
reboot_required = False
reboot_required |= _config_int(to_save, "config_log_level")
reboot_required |= _config_string(to_save, "config_logfile")
if not logger.is_valid_logfile(config.config_logfile):
return reboot_required, \
_configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'), gdriveError)
_configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'), gdrive_error)

reboot_required |= _config_checkbox_int(to_save, "config_access_log")
reboot_required |= _config_string(to_save, "config_access_logfile")
if not logger.is_valid_logfile(config.config_access_logfile):
return reboot_required, \
_configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'), gdriveError)
_configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'), gdrive_error)
return reboot_required, None

def _configuration_ldap_helper(to_save, gdriveError):

def _configuration_ldap_helper(to_save, gdrive_error):
reboot_required = False
reboot_required |= _config_string(to_save, "config_ldap_provider_url")
reboot_required |= _config_int(to_save, "config_ldap_port")

@@ -786,33 +755,33 @@ def _configuration_ldap_helper(to_save, gdriveError):
or not config.config_ldap_dn \
or not config.config_ldap_user_object:
return reboot_required, _configuration_result(_('Please Enter a LDAP Provider, '
'Port, DN and User Object Identifier'), gdriveError)
'Port, DN and User Object Identifier'), gdrive_error)

if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:
if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:
if not config.config_ldap_serv_username or not bool(config.config_ldap_serv_password):
return reboot_required, _configuration_result('Please Enter a LDAP Service Account and Password',
gdriveError)
gdrive_error)
else:
if not config.config_ldap_serv_username:
return reboot_required, _configuration_result('Please Enter a LDAP Service Account', gdriveError)
return reboot_required, _configuration_result('Please Enter a LDAP Service Account', gdrive_error)

if config.config_ldap_group_object_filter:
if config.config_ldap_group_object_filter.count("%s") != 1:
return reboot_required, \
_configuration_result(_('LDAP Group Object Filter Needs to Have One "%s" Format Identifier'),
gdriveError)
gdrive_error)
if config.config_ldap_group_object_filter.count("(") != config.config_ldap_group_object_filter.count(")"):
return reboot_required, _configuration_result(_('LDAP Group Object Filter Has Unmatched Parenthesis'),
gdriveError)
gdrive_error)

if config.config_ldap_user_object.count("%s") != 1:
return reboot_required, \
_configuration_result(_('LDAP User Object Filter needs to Have One "%s" Format Identifier'),
gdriveError)
gdrive_error)
if config.config_ldap_user_object.count("(") != config.config_ldap_user_object.count(")"):
return reboot_required, _configuration_result(_('LDAP User Object Filter Has Unmatched Parenthesis'),
gdriveError)
gdrive_error)

if to_save["ldap_import_user_filter"] == '0':
config.config_ldap_member_user_object = ""

@@ -820,10 +789,10 @@ def _configuration_ldap_helper(to_save, gdriveError):
if config.config_ldap_member_user_object.count("%s") != 1:
return reboot_required, \
_configuration_result(_('LDAP Member User Filter needs to Have One "%s" Format Identifier'),
gdriveError)
gdrive_error)
if config.config_ldap_member_user_object.count("(") != config.config_ldap_member_user_object.count(")"):
return reboot_required, _configuration_result(_('LDAP Member User Filter Has Unmatched Parenthesis'),
gdriveError)
gdrive_error)

if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:
if not (os.path.isfile(config.config_ldap_cacert_path) and

@@ -832,7 +801,7 @@ def _configuration_ldap_helper(to_save, gdriveError):
return reboot_required, \
_configuration_result(_('LDAP CACertificate, Certificate or Key Location is not Valid, '
'Please Enter Correct Path'),
gdriveError)
gdrive_error)
return reboot_required, None
@@ -840,7 +809,7 @@ def _configuration_update_helper(configured):
reboot_required = False
db_change = False
to_save = request.form.to_dict()
gdriveError = None
gdrive_error = None

to_save['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
'',

@@ -849,21 +818,21 @@ def _configuration_update_helper(configured):
try:
db_change |= _config_string(to_save, "config_calibre_dir")

# Google drive setup
gdriveError = _configuration_gdrive_helper(to_save)
# gdrive_error drive setup
gdrive_error = _configuration_gdrive_helper(to_save)

reboot_required |= _config_int(to_save, "config_port")

reboot_required |= _config_string(to_save, "config_keyfile")
if config.config_keyfile and not os.path.isfile(config.config_keyfile):
return _configuration_result(_('Keyfile Location is not Valid, Please Enter Correct Path'),
gdriveError,
gdrive_error,
configured)

reboot_required |= _config_string(to_save, "config_certfile")
if config.config_certfile and not os.path.isfile(config.config_certfile):
return _configuration_result(_('Certfile Location is not Valid, Please Enter Correct Path'),
gdriveError,
gdrive_error,
configured)

_config_checkbox_int(to_save, "config_uploading")

@@ -890,7 +859,7 @@ def _configuration_update_helper(configured):

# LDAP configurator,
if config.config_login_type == constants.LOGIN_LDAP:
reboot, message = _configuration_ldap_helper(to_save, gdriveError)
reboot, message = _configuration_ldap_helper(to_save, gdrive_error)
if message:
return message
reboot_required |= reboot

@@ -920,7 +889,7 @@ def _configuration_update_helper(configured):
if config.config_login_type == constants.LOGIN_OAUTH:
reboot_required |= _configuration_oauth_helper(to_save)

reboot, message = _configuration_logfile_helper(to_save, gdriveError)
reboot, message = _configuration_logfile_helper(to_save, gdrive_error)
if message:
return message
reboot_required |= reboot

@@ -929,10 +898,10 @@ def _configuration_update_helper(configured):
if "config_rarfile_location" in to_save:
unrar_status = helper.check_unrar(config.config_rarfile_location)
if unrar_status:
return _configuration_result(unrar_status, gdriveError, configured)
return _configuration_result(unrar_status, gdrive_error, configured)
except (OperationalError, InvalidRequestError):
ub.session.rollback()
_configuration_result(_(u"Settings DB is not Writeable"), gdriveError, configured)
_configuration_result(_(u"Settings DB is not Writeable"), gdrive_error, configured)

try:
metadata_db = os.path.join(config.config_calibre_dir, "metadata.db")

@@ -940,12 +909,12 @@ def _configuration_update_helper(configured):
gdriveutils.downloadFile(None, "metadata.db", metadata_db)
db_change = True
except Exception as e:
return _configuration_result('%s' % e, gdriveError, configured)
return _configuration_result('%s' % e, gdrive_error, configured)

if db_change:
if not calibre_db.setup_db(config, ub.app_DB_path):
return _configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
gdriveError,
gdrive_error,
configured)
if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
flash(_(u"DB is not Writeable"), category="warning")

@@ -955,16 +924,16 @@ def _configuration_update_helper(configured):
if reboot_required:
web_server.stop(True)

return _configuration_result(None, gdriveError, configured)
return _configuration_result(None, gdrive_error, configured)

def _configuration_result(error_flash=None, gdriveError=None, configured=True):
def _configuration_result(error_flash=None, gdrive_error=None, configured=True):
gdrive_authenticate = not is_gdrive_ready()
gdrivefolders = []
if gdriveError is None:
gdriveError = gdriveutils.get_error_text()
if gdriveError:
gdriveError = _(gdriveError)
if gdrive_error is None:
gdrive_error = gdriveutils.get_error_text()
if gdrive_error:
gdrive_error = _(gdrive_error)
else:
# if config.config_use_google_drive and\
if not gdrive_authenticate and gdrive_support:
@@ -977,10 +946,16 @@ def _configuration_result(error_flash=None, gdriveError=None, configured=True):
flash(error_flash, category="error")
show_login_button = False

return render_title_template("config_edit.html", config=config, provider=oauthblueprints,
show_back_button=show_back_button, show_login_button=show_login_button,
show_authenticate_google_drive=gdrive_authenticate, filepicker=configured,
gdriveError=gdriveError, gdrivefolders=gdrivefolders, feature_support=feature_support,
return render_title_template("config_edit.html",
config=config,
provider=oauthblueprints,
show_back_button=show_back_button,
show_login_button=show_login_button,
show_authenticate_google_drive=gdrive_authenticate,
filepicker=configured,
gdriveError=gdrive_error,
gdrivefolders=gdrivefolders,
feature_support=feature_support,
title=_(u"Basic Configuration"), page="config")

@@ -1025,10 +1000,7 @@ def _handle_new_user(to_save, content,languages, translations, kobo_support):
content.allowed_column_value = config.config_allowed_column_value
content.denied_column_value = config.config_denied_column_value
ub.session.add(content)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
flash(_(u"User '%(user)s' created", user=content.nickname), category="success")
return redirect(url_for('admin.admin'))
except IntegrityError:

@@ -1044,10 +1016,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
if ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
ub.User.id != content.id).count():
ub.session.query(ub.User).filter(ub.User.id == content.id).delete()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
flash(_(u"User '%(nick)s' deleted", nick=content.nickname), category="success")
return redirect(url_for('admin.admin'))
else:

@@ -1055,8 +1024,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
return redirect(url_for('admin.admin'))
else:
if not ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
ub.User.id != content.id).count() and \
not 'admin_role' in to_save:
ub.User.id != content.id).count() and 'admin_role' not in to_save:
flash(_(u"No admin user remaining, can't remove admin role", nick=content.nickname), category="error")
return redirect(url_for('admin.admin'))
@@ -1075,7 +1043,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
value = element['visibility']
if value in val and not content.check_visibility(value):
content.sidebar_view |= value
elif not value in val and content.check_visibility(value):
elif value not in val and content.check_visibility(value):
content.sidebar_view &= ~value

if "Show_detail_random" in to_save:

@@ -1122,10 +1090,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
if "kindle_mail" in to_save and to_save["kindle_mail"] != content.kindle_mail:
content.kindle_mail = to_save["kindle_mail"]
try:
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
flash(_(u"User '%(nick)s' updated", nick=content.nickname), category="success")
except IntegrityError:
ub.session.rollback()

@@ -1247,9 +1212,8 @@ def reset_user_password(user_id):
@login_required
@admin_required
def view_logfile():
logfiles = {}
logfiles[0] = logger.get_logfile(config.config_logfile)
logfiles[1] = logger.get_accesslogfile(config.config_access_logfile)
logfiles = {0: logger.get_logfile(config.config_logfile),
1: logger.get_accesslogfile(config.config_access_logfile)}
return render_title_template("logviewer.html",
title=_(u"Logfile viewer"),
accesslog_enable=config.config_access_log,

@@ -1273,6 +1237,7 @@ def send_logfile(logtype):
else:
return ""

@admi.route("/admin/logdownload/<int:logtype>")
@login_required
@admin_required

@@ -1390,7 +1355,7 @@ def import_ldap_users():
kindlemail = ''
if 'mail' in user_data:
useremail = user_data['mail'][0].decode('utf-8')
if (len(user_data['mail']) > 1):
if len(user_data['mail']) > 1:
kindlemail = user_data['mail'][1].decode('utf-8')

else:

@@ -1428,19 +1393,21 @@ def import_ldap_users():

def extract_user_data_from_field(user, field):
match = re.search(field + "=([\d\s\w-]+)", user, re.IGNORECASE | re.UNICODE)
match = re.search(field + r"=([\d\s\w-]+)", user, re.IGNORECASE | re.UNICODE)
if match:
return match.group(1)
else:
raise Exception("Could Not Parse LDAP User: {}".format(user))

def extract_dynamic_field_from_filter(user, filter):
match = re.search("([a-zA-Z0-9-]+)=%s", filter, re.IGNORECASE | re.UNICODE)

def extract_dynamic_field_from_filter(user, filtr):
match = re.search("([a-zA-Z0-9-]+)=%s", filtr, re.IGNORECASE | re.UNICODE)
if match:
return match.group(1)
else:
raise Exception("Could Not Parse LDAP Userfield: {}", user)

def extract_user_identifier(user, filter):
dynamic_field = extract_dynamic_field_from_filter(user, filter)

def extract_user_identifier(user, filtr):
dynamic_field = extract_dynamic_field_from_filter(user, filtr)
return extract_user_data_from_field(user, dynamic_field)
@@ -275,7 +275,7 @@ class _ConfigSQL(object):
def toDict(self):
storage = {}
for k, v in self.__dict__.items():
if k[0] != '_' or k.endswith("password"):
if k[0] != '_' and not k.endswith("password") and not k.endswith("secret"):
storage[k] = v
return storage
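Note: the tightened condition in `toDict()` above drops private attributes and any key ending in "password" or "secret", so credentials stay out of the serialized settings. A minimal sketch of the effect (the attribute names here are hypothetical, not the real settings columns):

# Sketch only: hypothetical keys illustrating the new filter in toDict()
class DemoConfig:
    def __init__(self):
        self.config_port = 8083
        self.mail_password = "hunter2"            # ends with "password" -> now skipped
        self.config_api_secret = "abc"            # ends with "secret"   -> now skipped
        self._private = "internal"                # leading underscore   -> skipped

    def toDict(self):
        storage = {}
        for k, v in self.__dict__.items():
            if k[0] != '_' and not k.endswith("password") and not k.endswith("secret"):
                storage[k] = v
        return storage

print(DemoConfig().toDict())  # {'config_port': 8083}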
@@ -240,7 +240,7 @@ def delete_book(book_id, book_format, jsonResponse):
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
ub.delete_download(book_id)
ub.session.commit()
ub.session_commit()

# check if only this book links to:
# author, language, series, tags, custom columns
cps/kobo.py (44 changed lines)
@@ -58,7 +58,7 @@ KOBO_FORMATS = {"KEPUB": ["KEPUB"], "EPUB": ["EPUB3", "EPUB"]}
KOBO_STOREAPI_URL = "https://storeapi.kobo.com"
KOBO_IMAGEHOST_URL = "https://kbimages1-a.akamaihd.net"

SYNC_ITEM_LIMIT = 5
SYNC_ITEM_LIMIT = 100

kobo = Blueprint("kobo", __name__, url_prefix="/kobo/<auth_token>")
kobo_auth.disable_failed_auth_redirect_for_blueprint(kobo)

@@ -462,10 +462,7 @@ def HandleTagCreate():
items_unknown_to_calibre = add_items_to_shelf(items, shelf)
if items_unknown_to_calibre:
log.debug("Received request to add unknown books to a collection. Silently ignoring items.")
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return make_response(jsonify(str(shelf.uuid)), 201)

@@ -497,10 +494,7 @@ def HandleTagUpdate(tag_id):

shelf.name = name
ub.session.merge(shelf)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return make_response(' ', 200)

@@ -552,11 +546,7 @@ def HandleTagAddItem(tag_id):
log.debug("Received request to add an unknown book to a collection. Silently ignoring item.")

ub.session.merge(shelf)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()

ub.session_commit()
return make_response('', 201)

@@ -596,10 +586,7 @@ def HandleTagRemoveItem(tag_id):
shelf.books.filter(ub.BookShelf.book_id == book.id).delete()
except KeyError:
items_unknown_to_calibre.append(item)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()

if items_unknown_to_calibre:
log.debug("Received request to remove an unknown book to a collecition. Silently ignoring item.")

@@ -645,10 +632,7 @@ def sync_shelves(sync_token, sync_results):
"ChangedTag": tag
})
sync_token.tags_last_modified = new_tags_last_modified
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()

# Creates a Kobo "Tag" object from a ub.Shelf object

@@ -729,10 +713,7 @@ def HandleStateRequest(book_uuid):
abort(400, description="Malformed request data is missing 'ReadingStates' key")

ub.session.merge(kobo_reading_state)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return jsonify({
"RequestResult": "Success",
"UpdateResults": [update_results_response],

@@ -770,10 +751,7 @@ def get_or_create_reading_state(book_id):
kobo_reading_state.statistics = ub.KoboStatistics()
book_read.kobo_reading_state = kobo_reading_state
ub.session.add(book_read)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return book_read.kobo_reading_state

@@ -876,11 +854,7 @@ def HandleBookDeletionRequest(book_uuid):
archived_book.last_modified = datetime.datetime.utcnow()

ub.session.merge(archived_book)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()

ub.session_commit()
return ("", 204)
@@ -148,10 +148,7 @@ def generate_auth_token(user_id):
auth_token.token_type = 1

ub.session.add(auth_token)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return render_title_template(
"generate_kobo_auth_url.html",
title=_(u"Kobo Setup"),

@@ -168,8 +165,5 @@ def delete_auth_token(user_id):
# Invalidate any prevously generated Kobo Auth token for this user.
ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.user_id == user_id)\
.filter(ub.RemoteAuthToken.token_type==1).delete()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
return ""

return ub.session_commit()
@@ -49,6 +49,13 @@ class _Logger(logging.Logger):
else:
self.error(message, stacklevel=2, *args, **kwargs)

def debug_no_auth(self, message, *args, **kwargs):
if message.startswith("send: AUTH"):
self.debug(message[:16], stacklevel=2, *args, **kwargs)
else:
self.debug(message, stacklevel=2, *args, **kwargs)

def get(name=None):
return logging.getLogger(name)
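Note: the new `debug_no_auth()` helper above lets SMTP chatter be logged without recording credentials — any message beginning with "send: AUTH" is cut to its first 16 characters before it is logged. A rough usage sketch (the message strings are made up; `logger.create()` is the factory this module already exposes):

log = logger.create()

# Ordinary protocol lines are logged in full ...
log.debug_no_auth("send: 'EHLO example.org'")
# ... but an AUTH exchange is truncated to "send: AUTH PLAIN", so the
# base64-encoded username/password never reaches the logfile.
log.debug_no_auth("send: AUTH PLAIN AGFsaWNlAHMzY3IzdA==")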
@@ -85,11 +85,7 @@ def register_user_with_oauth(user=None):
except NoResultFound:
# no found, return error
return
try:
ub.session.commit()
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
ub.session_commit("User {} with OAuth for provider {} registered".format(user.nickname, oauth_key))

def logout_oauth_user():

@@ -101,19 +97,12 @@ def logout_oauth_user():
if ub.oauth_support:
oauthblueprints = []
if not ub.session.query(ub.OAuthProvider).count():
for provider in ("github", "google"):
oauthProvider = ub.OAuthProvider()
oauthProvider.provider_name = "github"
oauthProvider.provider_name = provider
oauthProvider.active = False
ub.session.add(oauthProvider)
ub.session.commit()
oauthProvider = ub.OAuthProvider()
oauthProvider.provider_name = "google"
oauthProvider.active = False
ub.session.add(oauthProvider)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("{} Blueprint Created".format(provider))

oauth_ids = ub.session.query(ub.OAuthProvider).all()
ele1 = dict(provider_name='github',

@@ -203,12 +192,8 @@ if ub.oauth_support:
provider_user_id=provider_user_id,
token=token,
)
try:
ub.session.add(oauth_entry)
ub.session.commit()
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
ub.session_commit()

# Disable Flask-Dance's default behavior for saving the OAuth token
# Value differrs depending on flask-dance version
|
||||
def remote_login():
|
||||
auth_token = ub.RemoteAuthToken()
|
||||
ub.session.add(auth_token)
|
||||
ub.session.commit()
|
||||
|
||||
ub.session_commit()
|
||||
verify_url = url_for('remotelogin.verify_token', token=auth_token.auth_token, _external=true)
|
||||
log.debug(u"Remot Login request with token: %s", auth_token.auth_token)
|
||||
return render_title_template('remote_login.html', title=_(u"login"), token=auth_token.auth_token,
|
||||
@ -80,9 +79,9 @@ def verify_token(token):
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
# Token expired
|
||||
if datetime.now() > auth_token.expiration:
|
||||
elif datetime.now() > auth_token.expiration:
|
||||
ub.session.delete(auth_token)
|
||||
ub.session.commit()
|
||||
ub.session_commit()
|
||||
|
||||
flash(_(u"Token has expired"), category="error")
|
||||
log.error(u"Remote Login token expired")
|
||||
@ -91,7 +90,7 @@ def verify_token(token):
|
||||
# Update token with user information
|
||||
auth_token.user_id = current_user.id
|
||||
auth_token.verified = True
|
||||
ub.session.commit()
|
||||
ub.session_commit()
|
||||
|
||||
flash(_(u"Success! Please return to your device"), category="success")
|
||||
log.debug(u"Remote Login token for userid %s verified", auth_token.user_id)
|
||||
@ -114,7 +113,7 @@ def token_verified():
|
||||
# Token expired
|
||||
elif datetime.now() > auth_token.expiration:
|
||||
ub.session.delete(auth_token)
|
||||
ub.session.commit()
|
||||
ub.session_commit()
|
||||
|
||||
data['status'] = 'error'
|
||||
data['message'] = _(u"Token has expired")
|
||||
@ -127,7 +126,7 @@ def token_verified():
|
||||
login_user(user)
|
||||
|
||||
ub.session.delete(auth_token)
|
||||
ub.session.commit()
|
||||
ub.session_commit("User {} logged in via remotelogin, token deleted".format(user.nickname))
|
||||
|
||||
data['status'] = 'success'
|
||||
log.debug(u"Remote Login for userid %s succeded", user.id)
|
||||
|
cps/shelf.py (126 changed lines)
@@ -27,7 +27,7 @@ import sys
from flask import Blueprint, request, flash, redirect, url_for
from flask_babel import gettext as _
from flask_login import login_required, current_user
from sqlalchemy.sql.expression import func
from sqlalchemy.sql.expression import func, true
from sqlalchemy.exc import OperationalError, InvalidRequestError

from . import logger, ub, calibre_db, db

@@ -221,61 +221,60 @@ def remove_from_shelf(shelf_id, book_id):
@login_required
def create_shelf():
shelf = ub.Shelf()
if request.method == "POST":
to_save = request.form.to_dict()
if "is_public" in to_save:
shelf.is_public = 1
shelf.name = to_save["title"]
shelf.user_id = int(current_user.id)

is_shelf_name_unique = False
if shelf.is_public == 1:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1)) \
.first() is None

if not is_shelf_name_unique:
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
else:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
(ub.Shelf.user_id == int(current_user.id)))\
.first() is None

if not is_shelf_name_unique:
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")

if is_shelf_name_unique:
try:
ub.session.add(shelf)
ub.session.commit()
flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
except (OperationalError, InvalidRequestError):
ub.session.rollback()
flash(_(u"Settings DB is not Writeable"), category="error")
except Exception:
ub.session.rollback()
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
else:
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
return create_edit_shelf(shelf, title=_(u"Create a Shelf"), page="shelfcreate")

@shelf.route("/shelf/edit/<int:shelf_id>", methods=["GET", "POST"])
@login_required
def edit_shelf(shelf_id):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
return create_edit_shelf(shelf, title=_(u"Edit a shelf"), page="shelfedit", shelf_id=shelf_id)

# if shelf ID is set, we are editing a shelf
def create_edit_shelf(shelf, title, page, shelf_id=False):
if request.method == "POST":
to_save = request.form.to_dict()
if "is_public" in to_save:
shelf.is_public = 1
else:
shelf.is_public = 0
if check_shelf_is_unique(shelf, to_save, shelf_id):
shelf.name = to_save["title"]
# shelf.last_modified = datetime.utcnow()
if not shelf_id:
shelf.user_id = int(current_user.id)
ub.session.add(shelf)
shelf_action = "created"
flash_text = _(u"Shelf %(title)s created", title=to_save["title"])
else:
shelf_action = "changed"
flash_text = _(u"Shelf %(title)s changed", title=to_save["title"])
try:
ub.session.commit()
log.info(u"Shelf {} {}".format(to_save["title"], shelf_action))
flash(flash_text, category="success")
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
except (OperationalError, InvalidRequestError) as e:
ub.session.rollback()
log.debug_or_exception(e)
flash(_(u"Settings DB is not Writeable"), category="error")
except Exception as e:
ub.session.rollback()
log.debug_or_exception(e)
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=title, page=page)

is_shelf_name_unique = False

def check_shelf_is_unique(shelf, to_save, shelf_id=False):
if shelf_id:
ident = ub.Shelf.id != shelf_id
else:
ident = true()
if shelf.is_public == 1:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1)) \
.filter(ub.Shelf.id != shelf_id) \
.filter(ident) \
.first() is None

if not is_shelf_name_unique:

@@ -285,32 +284,13 @@ def edit_shelf(shelf_id):
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
(ub.Shelf.user_id == int(current_user.id))) \
.filter(ub.Shelf.id != shelf_id)\
.filter(ident) \
.first() is None

if not is_shelf_name_unique:
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")

if is_shelf_name_unique:
shelf.name = to_save["title"]
shelf.last_modified = datetime.utcnow()
if "is_public" in to_save:
shelf.is_public = 1
else:
shelf.is_public = 0
try:
ub.session.commit()
flash(_(u"Shelf %(title)s changed", title=to_save["title"]), category="success")
except (OperationalError, InvalidRequestError):
ub.session.rollback()
flash(_(u"Settings DB is not Writeable"), category="error")
except Exception:
ub.session.rollback()
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
else:
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
return is_shelf_name_unique

def delete_shelf_helper(cur_shelf):
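Note: the refactored `check_shelf_is_unique()` above serves both the create and the edit path with one query by substituting SQLAlchemy's `true()` for the `ub.Shelf.id != shelf_id` filter when no shelf id is given. A simplified, self-contained sketch of that pattern (generic SQLAlchemy 1.4+ models, not the Calibre-Web ones):

from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.sql.expression import true
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Shelf(Base):
    __tablename__ = "shelf"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

def name_is_unique(session, name, shelf_id=None):
    # When editing, exclude the shelf being edited; when creating,
    # true() acts as a no-op filter so the same query serves both paths.
    ident = Shelf.id != shelf_id if shelf_id else true()
    return session.query(Shelf).filter(Shelf.name == name).filter(ident).first() is None

with Session(engine) as session:
    session.add(Shelf(id=1, name="Favourites"))
    session.commit()
    print(name_is_unique(session, "Favourites"))              # False: taken by shelf 1
    print(name_is_unique(session, "Favourites", shelf_id=1))  # True: editing shelf 1 itself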
@@ -320,12 +300,7 @@ def delete_shelf_helper(cur_shelf):
ub.session.delete(cur_shelf)
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.user_id))
try:
ub.session.commit()
log.info("successfully deleted %s", cur_shelf)
except OperationalError:
ub.session.rollback()

ub.session_commit("successfully deleted Shelf {}".format(cur_shelf.name))

@shelf.route("/shelf/delete/<int:shelf_id>")

@@ -339,11 +314,13 @@ def delete_shelf(shelf_id):
flash(_(u"Settings DB is not Writeable"), category="error")
return redirect(url_for('web.index'))

@shelf.route("/simpleshelf/<int:shelf_id>")
@login_required_if_no_ano
def show_simpleshelf(shelf_id):
return render_show_shelf(2, shelf_id, 1, None)

@shelf.route("/shelf/<int:shelf_id>", defaults={"sort_param": "order", 'page': 1})
@shelf.route("/shelf/<int:shelf_id>/<sort_param>", defaults={'page': 1})
@shelf.route("/shelf/<int:shelf_id>/<sort_param>/<int:page>")

@@ -381,6 +358,7 @@ def order_shelf(shelf_id):
title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name),
shelf=shelf, page="shelforder")

def change_shelf_order(shelf_id, order):
result = calibre_db.session.query(db.Books).join(ub.BookShelf,ub.BookShelf.book_id == db.Books.id)\
.filter(ub.BookShelf.shelf == shelf_id).order_by(*order).all()

@@ -388,10 +366,8 @@ def change_shelf_order(shelf_id, order):
book = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id) \
.filter(ub.BookShelf.book_id == entry.id).first()
book.order = index
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Shelf-id:{} - Order changed".format(shelf_id))

def render_show_shelf(shelf_type, shelf_id, page_no, sort_param):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
@@ -66,9 +66,15 @@ class TaskConvert(CalibreTask):
# if we're sending to kindle after converting, create a one-off task and run it immediately
# todo: figure out how to incorporate this into the progress
try:
worker_thread.add(self.user, TaskEmail(self.settings['subject'], self.results["path"],
filename, self.settings, self.kindle_mail,
self.settings['subject'], self.settings['body'], internal=True))
worker_thread.add(self.user, TaskEmail(self.settings['subject'],
self.results["path"],
filename,
self.settings,
self.kindle_mail,
self.settings['subject'],
self.settings['body'],
internal=True)
)
except Exception as e:
return self._handleError(str(e))
@@ -44,7 +44,7 @@ class EmailBase:

def send(self, strg):
"""Send `strg' to the server."""
log.debug('send: %r', strg[:300])
log.debug_no_auth('send: {}'.format(strg[:300]))
if hasattr(self, 'sock') and self.sock:
try:
if self.transferSize:
cps/ub.py (13 changed lines)
@@ -46,8 +46,9 @@ from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session
from werkzeug.security import generate_password_hash

from . import constants
from . import constants, logger

log = logger.create()

session = None
app_DB_path = None

@@ -695,3 +696,13 @@ def dispose():
old_session.bind.dispose()
except Exception:
pass

def session_commit(success=None):
try:
session.commit()
if success:
log.info(success)
except (exc.OperationalError, exc.InvalidRequestError) as e:
session.rollback()
log.debug_or_exception(e)
return ""
cps/web.py (20 changed lines)
@@ -135,10 +135,7 @@ def bookmark(book_id, book_format):
ub.Bookmark.book_id == book_id,
ub.Bookmark.format == book_format)).delete()
if not bookmark_key:
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return "", 204

lbookmark = ub.Bookmark(user_id=current_user.id,

@@ -146,10 +143,7 @@ def bookmark(book_id, book_format):
format=book_format,
bookmark_key=bookmark_key)
ub.session.merge(lbookmark)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Bookmark for user {} in book {} created".format(current_user.id, book_id))
return "", 201

@@ -174,10 +168,7 @@ def toggle_read(book_id):
kobo_reading_state.statistics = ub.KoboStatistics()
book.kobo_reading_state = kobo_reading_state
ub.session.merge(book)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Book {} readbit toggled".format(book_id))
else:
try:
calibre_db.update_title_sort(config)

@@ -211,10 +202,7 @@ def toggle_archived(book_id):
archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
archived_book.is_archived = True
ub.session.merge(archived_book)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Book {} archivebit toggled".format(book_id))
return ""