mirror of https://github.com/janeczku/calibre-web synced 2024-12-25 17:40:31 +00:00

Merge branch 'master' into Develop

Ozzieisaacs committed 2021-01-03 19:28:12 +01:00
commit 6fe4ed3e24
52 changed files with 13139 additions and 10960 deletions

View File

@ -105,6 +105,7 @@ def unconfigured(f):
return inner
@admi.before_app_request
def before_request():
if current_user.is_authenticated:
@ -118,8 +119,10 @@ def before_request():
g.config_authors_max = config.config_authors_max
g.shelves_access = ub.session.query(ub.Shelf).filter(
or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == current_user.id)).order_by(ub.Shelf.name).all()
if not config.db_configured and request.endpoint not in (
'admin.basic_configuration', 'login', 'admin.config_pathchooser') and '/static/' not in request.path:
if '/static/' not in request.path and not config.db_configured and \
request.endpoint not in ('admin.basic_configuration',
'login',
'admin.config_pathchooser'):
return redirect(url_for('admin.basic_configuration'))
@ -201,12 +204,12 @@ def configuration():
@login_required
@admin_required
def view_configuration():
readColumn = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
restrictColumns= calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
return render_title_template("config_view_edit.html", conf=config, readColumns=readColumn,
restrictColumns=restrictColumns,
read_column = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
restrict_columns = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
return render_title_template("config_view_edit.html", conf=config, readColumns=read_column,
restrictColumns=restrict_columns,
title=_(u"UI Configuration"), page="uiconfig")
@ -248,7 +251,7 @@ def update_view_configuration():
@admi.route("/ajax/loaddialogtexts/<element_id>")
@login_required
def load_dialogtexts(element_id):
texts = { "header": "", "main": "" }
texts = {"header": "", "main": ""}
if element_id == "config_delete_kobo_token":
texts["main"] = _('Do you really want to delete the Kobo Token?')
elif element_id == "btndeletedomain":
@ -271,11 +274,7 @@ def edit_domain(allow):
vals = request.form.to_dict()
answer = ub.session.query(ub.Registration).filter(ub.Registration.id == vals['pk']).first()
answer.domain = vals['value'].replace('*', '%').replace('?', '_').lower()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
return ""
return ub.session_commit("Registering Domains edited {}".format(answer.domain))
@admi.route("/ajax/adddomain/<int:allow>", methods=['POST'])
@ -288,10 +287,7 @@ def add_domain(allow):
if not check:
new_domain = ub.Registration(domain=domain_name, allow=allow)
ub.session.add(new_domain)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Registering Domains added {}".format(domain_name))
return ""
@ -302,18 +298,12 @@ def delete_domain():
try:
domain_id = request.form.to_dict()['domainid'].replace('*', '%').replace('?', '_').lower()
ub.session.query(ub.Registration).filter(ub.Registration.id == domain_id).delete()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Registering Domains deleted {}".format(domain_id))
# If last domain was deleted, add all domains by default
if not ub.session.query(ub.Registration).filter(ub.Registration.allow==1).count():
new_domain = ub.Registration(domain="%.%",allow=1)
if not ub.session.query(ub.Registration).filter(ub.Registration.allow == 1).count():
new_domain = ub.Registration(domain="%.%", allow=1)
ub.session.add(new_domain)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Last Registering Domain deleted, added *.* as default")
except KeyError:
pass
return ""
@ -330,7 +320,8 @@ def list_domain(allow):
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response
@admi.route("/ajax/editrestriction/<int:res_type>", defaults={"user_id":0}, methods=['POST'])
@admi.route("/ajax/editrestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
@admi.route("/ajax/editrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
@ -339,12 +330,12 @@ def edit_restriction(res_type, user_id):
if element['id'].startswith('a'):
if res_type == 0: # Tags as template
elementlist = config.list_allowed_tags()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
config.config_allowed_tags = ','.join(elementlist)
config.save()
if res_type == 1: # CustomC
elementlist = config.list_allowed_column_values()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
config.config_allowed_column_value = ','.join(elementlist)
config.save()
if res_type == 2: # Tags per user
@ -353,33 +344,27 @@ def edit_restriction(res_type, user_id):
else:
usr = current_user
elementlist = usr.list_allowed_tags()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
usr.allowed_tags = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed tags of user {} to {}".format(usr.nickname, usr.allowed_tags))
if res_type == 3: # CColumn per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
else:
usr = current_user
elementlist = usr.list_allowed_column_values()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
usr.allowed_column_value = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed columns of user {} to {}".format(usr.nickname, usr.allowed_column_value))
if element['id'].startswith('d'):
if res_type == 0: # Tags as template
elementlist = config.list_denied_tags()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
config.config_denied_tags = ','.join(elementlist)
config.save()
if res_type == 1: # CustomC
elementlist = config.list_denied_column_values()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
config.config_denied_column_value = ','.join(elementlist)
config.save()
if res_type == 2: # Tags per user
@ -388,26 +373,21 @@ def edit_restriction(res_type, user_id):
else:
usr = current_user
elementlist = usr.list_denied_tags()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
usr.denied_tags = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied tags of user {} to {}".format(usr.nickname, usr.denied_tags))
if res_type == 3: # CColumn per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
else:
usr = current_user
elementlist = usr.list_denied_column_values()
elementlist[int(element['id'][1:])]=element['Element']
elementlist[int(element['id'][1:])] = element['Element']
usr.denied_column_value = ','.join(elementlist)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied columns of user {} to {}".format(usr.nickname, usr.denied_column_value))
return ""
def restriction_addition(element, list_func):
elementlist = list_func()
if elementlist == ['']:
@ -424,7 +404,7 @@ def restriction_deletion(element, list_func):
return ','.join(elementlist)
@admi.route("/ajax/addrestriction/<int:res_type>", defaults={"user_id":0}, methods=['POST'])
@admi.route("/ajax/addrestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
@admi.route("/ajax/addrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
@ -451,16 +431,10 @@ def add_restriction(res_type, user_id):
usr = current_user
if 'submit_allow' in element:
usr.allowed_tags = restriction_addition(element, usr.list_allowed_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed tags of user {} to {}".format(usr.nickname, usr.list_allowed_tags))
elif 'submit_deny' in element:
usr.denied_tags = restriction_addition(element, usr.list_denied_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied tags of user {} to {}".format(usr.nickname, usr.list_denied_tags))
if res_type == 3: # CustomC per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
@ -468,19 +442,16 @@ def add_restriction(res_type, user_id):
usr = current_user
if 'submit_allow' in element:
usr.allowed_column_value = restriction_addition(element, usr.list_allowed_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed allowed columns of user {} to {}".format(usr.nickname,
usr.list_allowed_column_values))
elif 'submit_deny' in element:
usr.denied_column_value = restriction_addition(element, usr.list_denied_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Changed denied columns of user {} to {}".format(usr.nickname,
usr.list_denied_column_values))
return ""
@admi.route("/ajax/deleterestriction/<int:res_type>", defaults={"user_id":0}, methods=['POST'])
@admi.route("/ajax/deleterestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
@admi.route("/ajax/deleterestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
@ -507,16 +478,10 @@ def delete_restriction(res_type, user_id):
usr = current_user
if element['id'].startswith('a'):
usr.allowed_tags = restriction_deletion(element, usr.list_allowed_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted allowed tags of user {}: {}".format(usr.nickname, usr.list_allowed_tags))
elif element['id'].startswith('d'):
usr.denied_tags = restriction_deletion(element, usr.list_denied_tags)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted denied tags of user {}: {}".format(usr.nickname, usr.list_allowed_tags))
elif res_type == 3: # Columns per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
@ -524,62 +489,61 @@ def delete_restriction(res_type, user_id):
usr = current_user
if element['id'].startswith('a'):
usr.allowed_column_value = restriction_deletion(element, usr.list_allowed_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted allowed columns of user {}: {}".format(usr.nickname,
usr.list_allowed_column_values))
elif element['id'].startswith('d'):
usr.denied_column_value = restriction_deletion(element, usr.list_denied_column_values)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Deleted denied columns of user {}: {}".format(usr.nickname,
usr.list_denied_column_values))
return ""
@admi.route("/ajax/listrestriction/<int:res_type>", defaults={"user_id":0})
@admi.route("/ajax/listrestriction/<int:res_type>", defaults={"user_id": 0})
@admi.route("/ajax/listrestriction/<int:res_type>/<int:user_id>")
@login_required
@admin_required
def list_restriction(res_type, user_id):
if res_type == 0: # Tags as template
restrict = [{'Element': x, 'type':_('Deny'), 'id': 'd'+str(i) }
for i,x in enumerate(config.list_denied_tags()) if x != '' ]
allow = [{'Element': x, 'type':_('Allow'), 'id': 'a'+str(i) }
for i,x in enumerate(config.list_allowed_tags()) if x != '']
for i,x in enumerate(config.list_denied_tags()) if x != '']
allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
for i, x in enumerate(config.list_allowed_tags()) if x != '']
json_dumps = restrict + allow
elif res_type == 1: # CustomC as template
restrict = [{'Element': x, 'type':_('Deny'), 'id': 'd'+str(i) }
for i,x in enumerate(config.list_denied_column_values()) if x != '' ]
allow = [{'Element': x, 'type':_('Allow'), 'id': 'a'+str(i) }
for i,x in enumerate(config.list_allowed_column_values()) if x != '']
restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
for i, x in enumerate(config.list_denied_column_values()) if x != '']
allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
for i, x in enumerate(config.list_allowed_column_values()) if x != '']
json_dumps = restrict + allow
elif res_type == 2: # Tags per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
else:
usr = current_user
restrict = [{'Element': x, 'type':_('Deny'), 'id': 'd'+str(i) }
for i,x in enumerate(usr.list_denied_tags()) if x != '' ]
allow = [{'Element': x, 'type':_('Allow'), 'id': 'a'+str(i) }
for i,x in enumerate(usr.list_allowed_tags()) if x != '']
restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
for i, x in enumerate(usr.list_denied_tags()) if x != '']
allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
for i, x in enumerate(usr.list_allowed_tags()) if x != '']
json_dumps = restrict + allow
elif res_type == 3: # CustomC per user
if isinstance(user_id, int):
usr = ub.session.query(ub.User).filter(ub.User.id==user_id).first()
usr = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
else:
usr = current_user
restrict = [{'Element': x, 'type':_('Deny'), 'id': 'd'+str(i) }
for i,x in enumerate(usr.list_denied_column_values()) if x != '' ]
allow = [{'Element': x, 'type':_('Allow'), 'id': 'a'+str(i) }
for i,x in enumerate(usr.list_allowed_column_values()) if x != '']
restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
for i, x in enumerate(usr.list_denied_column_values()) if x != '']
allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
for i, x in enumerate(usr.list_allowed_column_values()) if x != '']
json_dumps = restrict + allow
else:
json_dumps=""
json_dumps = ""
js = json.dumps(json_dumps)
response = make_response(js.replace("'", '"'))
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response
@admi.route("/basicconfig/pathchooser/")
@unconfigured
def config_pathchooser():
@ -587,12 +551,14 @@ def config_pathchooser():
return pathchooser()
abort(403)
@admi.route("/ajax/pathchooser/")
@login_required
@admin_required
def ajax_pathchooser():
return pathchooser()
def pathchooser():
browse_for = "folder"
folder_only = request.args.get('folder', False) == "true"
@ -605,20 +571,20 @@ def pathchooser():
else:
oldfile = ""
abs = False
absolute = False
if os.path.isdir(path):
#if os.path.isabs(path):
# if os.path.isabs(path):
cwd = os.path.realpath(path)
abs = True
#else:
absolute = True
# else:
# cwd = os.path.relpath(path)
else:
cwd = os.getcwd()
cwd = os.path.normpath(os.path.realpath(cwd))
parentdir = os.path.dirname(cwd)
if not abs:
if not absolute:
if os.path.realpath(cwd) == os.path.realpath("/"):
cwd = os.path.relpath(cwd)
else:
@ -705,8 +671,8 @@ def _configuration_gdrive_helper(to_save):
config.config_use_google_drive = False
gdrive_secrets = {}
gdriveError = gdriveutils.get_error_text(gdrive_secrets)
if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdriveError:
gdrive_error = gdriveutils.get_error_text(gdrive_secrets)
if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdrive_error:
with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
gdrive_secrets = json.load(settings)['web']
if not gdrive_secrets:
@ -718,10 +684,11 @@ def _configuration_gdrive_helper(to_save):
)
# always show google drive settings, but in case of error deny support
config.config_use_google_drive = (not gdriveError) and ("config_use_google_drive" in to_save)
config.config_use_google_drive = (not gdrive_error) and ("config_use_google_drive" in to_save)
if _config_string(to_save, "config_google_drive_folder"):
gdriveutils.deleteDatabaseOnChange()
return gdriveError
return gdrive_error
def _configuration_oauth_helper(to_save):
active_oauths = 0
@ -744,22 +711,24 @@ def _configuration_oauth_helper(to_save):
"active": element["active"]})
return reboot_required
def _configuration_logfile_helper(to_save, gdriveError):
def _configuration_logfile_helper(to_save, gdrive_error):
reboot_required = False
reboot_required |= _config_int(to_save, "config_log_level")
reboot_required |= _config_string(to_save, "config_logfile")
if not logger.is_valid_logfile(config.config_logfile):
return reboot_required, \
_configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'), gdriveError)
_configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'), gdrive_error)
reboot_required |= _config_checkbox_int(to_save, "config_access_log")
reboot_required |= _config_string(to_save, "config_access_logfile")
if not logger.is_valid_logfile(config.config_access_logfile):
return reboot_required, \
_configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'), gdriveError)
_configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'), gdrive_error)
return reboot_required, None
def _configuration_ldap_helper(to_save, gdriveError):
def _configuration_ldap_helper(to_save, gdrive_error):
reboot_required = False
reboot_required |= _config_string(to_save, "config_ldap_provider_url")
reboot_required |= _config_int(to_save, "config_ldap_port")
@ -786,33 +755,33 @@ def _configuration_ldap_helper(to_save, gdriveError):
or not config.config_ldap_dn \
or not config.config_ldap_user_object:
return reboot_required, _configuration_result(_('Please Enter a LDAP Provider, '
'Port, DN and User Object Identifier'), gdriveError)
'Port, DN and User Object Identifier'), gdrive_error)
if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:
if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:
if not config.config_ldap_serv_username or not bool(config.config_ldap_serv_password):
return reboot_required, _configuration_result('Please Enter a LDAP Service Account and Password',
gdriveError)
gdrive_error)
else:
if not config.config_ldap_serv_username:
return reboot_required, _configuration_result('Please Enter a LDAP Service Account', gdriveError)
return reboot_required, _configuration_result('Please Enter a LDAP Service Account', gdrive_error)
if config.config_ldap_group_object_filter:
if config.config_ldap_group_object_filter.count("%s") != 1:
return reboot_required, \
_configuration_result(_('LDAP Group Object Filter Needs to Have One "%s" Format Identifier'),
gdriveError)
gdrive_error)
if config.config_ldap_group_object_filter.count("(") != config.config_ldap_group_object_filter.count(")"):
return reboot_required, _configuration_result(_('LDAP Group Object Filter Has Unmatched Parenthesis'),
gdriveError)
gdrive_error)
if config.config_ldap_user_object.count("%s") != 1:
return reboot_required, \
_configuration_result(_('LDAP User Object Filter needs to Have One "%s" Format Identifier'),
gdriveError)
gdrive_error)
if config.config_ldap_user_object.count("(") != config.config_ldap_user_object.count(")"):
return reboot_required, _configuration_result(_('LDAP User Object Filter Has Unmatched Parenthesis'),
gdriveError)
gdrive_error)
if to_save["ldap_import_user_filter"] == '0':
config.config_ldap_member_user_object = ""
@ -820,10 +789,10 @@ def _configuration_ldap_helper(to_save, gdriveError):
if config.config_ldap_member_user_object.count("%s") != 1:
return reboot_required, \
_configuration_result(_('LDAP Member User Filter needs to Have One "%s" Format Identifier'),
gdriveError)
gdrive_error)
if config.config_ldap_member_user_object.count("(") != config.config_ldap_member_user_object.count(")"):
return reboot_required, _configuration_result(_('LDAP Member User Filter Has Unmatched Parenthesis'),
gdriveError)
gdrive_error)
if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:
if not (os.path.isfile(config.config_ldap_cacert_path) and
@ -832,7 +801,7 @@ def _configuration_ldap_helper(to_save, gdriveError):
return reboot_required, \
_configuration_result(_('LDAP CACertificate, Certificate or Key Location is not Valid, '
'Please Enter Correct Path'),
gdriveError)
gdrive_error)
return reboot_required, None
@ -840,7 +809,7 @@ def _configuration_update_helper(configured):
reboot_required = False
db_change = False
to_save = request.form.to_dict()
gdriveError = None
gdrive_error = None
to_save['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
'',
@ -849,26 +818,26 @@ def _configuration_update_helper(configured):
try:
db_change |= _config_string(to_save, "config_calibre_dir")
# Google drive setup
gdriveError = _configuration_gdrive_helper(to_save)
# gdrive_error drive setup
gdrive_error = _configuration_gdrive_helper(to_save)
reboot_required |= _config_int(to_save, "config_port")
reboot_required |= _config_string(to_save, "config_keyfile")
if config.config_keyfile and not os.path.isfile(config.config_keyfile):
return _configuration_result(_('Keyfile Location is not Valid, Please Enter Correct Path'),
gdriveError,
gdrive_error,
configured)
reboot_required |= _config_string(to_save, "config_certfile")
if config.config_certfile and not os.path.isfile(config.config_certfile):
return _configuration_result(_('Certfile Location is not Valid, Please Enter Correct Path'),
gdriveError,
gdrive_error,
configured)
_config_checkbox_int(to_save, "config_uploading")
# Reboot on config_anonbrowse with enabled ldap, as decoraters are changed in this case
reboot_required |= (_config_checkbox_int(to_save, "config_anonbrowse")
reboot_required |= (_config_checkbox_int(to_save, "config_anonbrowse")
and config.config_login_type == constants.LOGIN_LDAP)
_config_checkbox_int(to_save, "config_public_reg")
_config_checkbox_int(to_save, "config_register_email")
@ -888,9 +857,9 @@ def _configuration_update_helper(configured):
reboot_required |= _config_int(to_save, "config_login_type")
#LDAP configurator,
# LDAP configurator,
if config.config_login_type == constants.LOGIN_LDAP:
reboot, message = _configuration_ldap_helper(to_save, gdriveError)
reboot, message = _configuration_ldap_helper(to_save, gdrive_error)
if message:
return message
reboot_required |= reboot
@ -899,7 +868,7 @@ def _configuration_update_helper(configured):
_config_checkbox(to_save, "config_remote_login")
if not config.config_remote_login:
ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.token_type==0).delete()
ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.token_type == 0).delete()
# Goodreads configuration
_config_checkbox(to_save, "config_use_goodreads")
@ -920,7 +889,7 @@ def _configuration_update_helper(configured):
if config.config_login_type == constants.LOGIN_OAUTH:
reboot_required |= _configuration_oauth_helper(to_save)
reboot, message = _configuration_logfile_helper(to_save, gdriveError)
reboot, message = _configuration_logfile_helper(to_save, gdrive_error)
if message:
return message
reboot_required |= reboot
@ -929,10 +898,10 @@ def _configuration_update_helper(configured):
if "config_rarfile_location" in to_save:
unrar_status = helper.check_unrar(config.config_rarfile_location)
if unrar_status:
return _configuration_result(unrar_status, gdriveError, configured)
return _configuration_result(unrar_status, gdrive_error, configured)
except (OperationalError, InvalidRequestError):
ub.session.rollback()
_configuration_result(_(u"Settings DB is not Writeable"), gdriveError, configured)
_configuration_result(_(u"Settings DB is not Writeable"), gdrive_error, configured)
try:
metadata_db = os.path.join(config.config_calibre_dir, "metadata.db")
@ -940,12 +909,12 @@ def _configuration_update_helper(configured):
gdriveutils.downloadFile(None, "metadata.db", metadata_db)
db_change = True
except Exception as e:
return _configuration_result('%s' % e, gdriveError, configured)
return _configuration_result('%s' % e, gdrive_error, configured)
if db_change:
if not calibre_db.setup_db(config, ub.app_DB_path):
return _configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
gdriveError,
gdrive_error,
configured)
if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
flash(_(u"DB is not Writeable"), category="warning")
@ -955,16 +924,16 @@ def _configuration_update_helper(configured):
if reboot_required:
web_server.stop(True)
return _configuration_result(None, gdriveError, configured)
return _configuration_result(None, gdrive_error, configured)
def _configuration_result(error_flash=None, gdriveError=None, configured=True):
def _configuration_result(error_flash=None, gdrive_error=None, configured=True):
gdrive_authenticate = not is_gdrive_ready()
gdrivefolders = []
if gdriveError is None:
gdriveError = gdriveutils.get_error_text()
if gdriveError:
gdriveError = _(gdriveError)
if gdrive_error is None:
gdrive_error = gdriveutils.get_error_text()
if gdrive_error:
gdrive_error = _(gdrive_error)
else:
# if config.config_use_google_drive and\
if not gdrive_authenticate and gdrive_support:
@ -977,14 +946,20 @@ def _configuration_result(error_flash=None, gdriveError=None, configured=True):
flash(error_flash, category="error")
show_login_button = False
return render_title_template("config_edit.html", config=config, provider=oauthblueprints,
show_back_button=show_back_button, show_login_button=show_login_button,
show_authenticate_google_drive=gdrive_authenticate, filepicker=configured,
gdriveError=gdriveError, gdrivefolders=gdrivefolders, feature_support=feature_support,
return render_title_template("config_edit.html",
config=config,
provider=oauthblueprints,
show_back_button=show_back_button,
show_login_button=show_login_button,
show_authenticate_google_drive=gdrive_authenticate,
filepicker=configured,
gdriveError=gdrive_error,
gdrivefolders=gdrivefolders,
feature_support=feature_support,
title=_(u"Basic Configuration"), page="config")
def _handle_new_user(to_save, content,languages, translations, kobo_support):
def _handle_new_user(to_save, content, languages, translations, kobo_support):
content.default_language = to_save["default_language"]
# content.mature_content = "Show_mature_content" in to_save
content.locale = to_save.get("locale", content.locale)
@ -1025,10 +1000,7 @@ def _handle_new_user(to_save, content,languages, translations, kobo_support):
content.allowed_column_value = config.config_allowed_column_value
content.denied_column_value = config.config_denied_column_value
ub.session.add(content)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session.commit()
flash(_(u"User '%(user)s' created", user=content.nickname), category="success")
return redirect(url_for('admin.admin'))
except IntegrityError:
@ -1039,15 +1011,12 @@ def _handle_new_user(to_save, content,languages, translations, kobo_support):
flash(_(u"Settings DB is not Writeable"), category="error")
def _handle_edit_user(to_save, content,languages, translations, kobo_support):
def _handle_edit_user(to_save, content, languages, translations, kobo_support):
if "delete" in to_save:
if ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
ub.User.id != content.id).count():
ub.session.query(ub.User).filter(ub.User.id == content.id).delete()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
flash(_(u"User '%(nick)s' deleted", nick=content.nickname), category="success")
return redirect(url_for('admin.admin'))
else:
@ -1055,8 +1024,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
return redirect(url_for('admin.admin'))
else:
if not ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
ub.User.id != content.id).count() and \
not 'admin_role' in to_save:
ub.User.id != content.id).count() and 'admin_role' not in to_save:
flash(_(u"No admin user remaining, can't remove admin role", nick=content.nickname), category="error")
return redirect(url_for('admin.admin'))
@ -1075,7 +1043,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
value = element['visibility']
if value in val and not content.check_visibility(value):
content.sidebar_view |= value
elif not value in val and content.check_visibility(value):
elif value not in val and content.check_visibility(value):
content.sidebar_view &= ~value
if "Show_detail_random" in to_save:
@ -1122,10 +1090,7 @@ def _handle_edit_user(to_save, content,languages, translations, kobo_support):
if "kindle_mail" in to_save and to_save["kindle_mail"] != content.kindle_mail:
content.kindle_mail = to_save["kindle_mail"]
try:
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
flash(_(u"User '%(nick)s' updated", nick=content.nickname), category="success")
except IntegrityError:
ub.session.rollback()
@ -1247,9 +1212,8 @@ def reset_user_password(user_id):
@login_required
@admin_required
def view_logfile():
logfiles = {}
logfiles[0] = logger.get_logfile(config.config_logfile)
logfiles[1] = logger.get_accesslogfile(config.config_access_logfile)
logfiles = {0: logger.get_logfile(config.config_logfile),
1: logger.get_accesslogfile(config.config_access_logfile)}
return render_title_template("logviewer.html",
title=_(u"Logfile viewer"),
accesslog_enable=config.config_access_log,
@ -1273,6 +1237,7 @@ def send_logfile(logtype):
else:
return ""
@admi.route("/admin/logdownload/<int:logtype>")
@login_required
@admin_required
@ -1332,7 +1297,7 @@ def get_updater_status():
elif request.method == "GET":
try:
status['status'] = updater_thread.get_update_status()
if status['status'] == -1:
if status['status'] == -1:
status['status'] = 7
except Exception:
status['status'] = 11
@ -1390,7 +1355,7 @@ def import_ldap_users():
kindlemail = ''
if 'mail' in user_data:
useremail = user_data['mail'][0].decode('utf-8')
if (len(user_data['mail']) > 1):
if len(user_data['mail']) > 1:
kindlemail = user_data['mail'][1].decode('utf-8')
else:
@ -1414,7 +1379,7 @@ def import_ldap_users():
ub.session.add(content)
try:
ub.session.commit()
imported +=1
imported += 1
except Exception as e:
log.warning("Failed to create LDAP user: %s - %s", user, e)
ub.session.rollback()
@ -1428,19 +1393,21 @@ def import_ldap_users():
def extract_user_data_from_field(user, field):
match = re.search(field + "=([\d\s\w-]+)", user, re.IGNORECASE | re.UNICODE)
match = re.search(field + r"=([\d\s\w-]+)", user, re.IGNORECASE | re.UNICODE)
if match:
return match.group(1)
else:
raise Exception("Could Not Parse LDAP User: {}".format(user))
def extract_dynamic_field_from_filter(user, filter):
match = re.search("([a-zA-Z0-9-]+)=%s", filter, re.IGNORECASE | re.UNICODE)
def extract_dynamic_field_from_filter(user, filtr):
match = re.search("([a-zA-Z0-9-]+)=%s", filtr, re.IGNORECASE | re.UNICODE)
if match:
return match.group(1)
else:
raise Exception("Could Not Parse LDAP Userfield: {}", user)
def extract_user_identifier(user, filter):
dynamic_field = extract_dynamic_field_from_filter(user, filter)
def extract_user_identifier(user, filtr):
dynamic_field = extract_dynamic_field_from_filter(user, filtr)
return extract_user_data_from_field(user, dynamic_field)
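
The /ajax/listrestriction handlers above build the allow/deny entries that the admin UI's restriction table consumes; the 'a'/'d' prefix on each id is what edit_restriction() later strips with element['id'][1:] to recover the list index. A plain-Python sketch of the resulting payload (translation via _() omitted, tag values invented):

denied, allowed = ['adult'], ['kids', 'scifi']          # hypothetical tag lists
restrict = [{'Element': x, 'type': 'Deny', 'id': 'd' + str(i)}
            for i, x in enumerate(denied) if x != '']
allow = [{'Element': x, 'type': 'Allow', 'id': 'a' + str(i)}
         for i, x in enumerate(allowed) if x != '']
print(restrict + allow)
# [{'Element': 'adult', 'type': 'Deny', 'id': 'd0'},
#  {'Element': 'kids', 'type': 'Allow', 'id': 'a0'},
#  {'Element': 'scifi', 'type': 'Allow', 'id': 'a1'}]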

View File

@ -275,7 +275,7 @@ class _ConfigSQL(object):
def toDict(self):
storage = {}
for k, v in self.__dict__.items():
if k[0] != '_' or k.endswith("password"):
if k[0] != '_' and not k.endswith("password") and not k.endswith("secret"):
storage[k] = v
return storage
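
The tightened toDict() condition keeps private attributes and anything ending in "password" or "secret" out of the exported settings dict, which the debug-info zip further down serializes to settings.txt. A standalone sketch of the same filter, with invented attribute names:

class DummyConfig:                       # toy object, not the project's _ConfigSQL
    def __init__(self):
        self.config_port = 8083
        self.mail_password = "hunter2"   # dropped: ends with "password"
        self.client_secret = "abc123"    # dropped: ends with "secret"
        self._session = object()         # dropped: private attribute

    def toDict(self):
        storage = {}
        for k, v in self.__dict__.items():
            if k[0] != '_' and not k.endswith("password") and not k.endswith("secret"):
                storage[k] = v
        return storage

print(DummyConfig().toDict())            # {'config_port': 8083}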

View File

@ -21,7 +21,12 @@ import shutil
import glob
import zipfile
import json
import io
from io import BytesIO
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import os
from flask import send_file
@ -32,11 +37,12 @@ from .about import collect_stats
log = logger.create()
def assemble_logfiles(file_name):
log_list = glob.glob(file_name + '*')
wfd = io.StringIO()
log_list = sorted(glob.glob(file_name + '*'), reverse=True)
wfd = StringIO()
for f in log_list:
with open(f, 'r') as fd:
shutil.copyfileobj(fd, wfd)
wfd.seek(0)
return send_file(wfd,
as_attachment=True,
attachment_filename=os.path.basename(file_name))
@ -47,7 +53,7 @@ def send_debug():
for element in [logger.LOG_TO_STDOUT, logger.LOG_TO_STDERR]:
if element in file_list:
file_list.remove(element)
memory_zip = io.BytesIO()
memory_zip = BytesIO()
with zipfile.ZipFile(memory_zip, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
zf.writestr('settings.txt', json.dumps(config.toDict()))
zf.writestr('libs.txt', json.dumps(collect_stats()))
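
The reworked assemble_logfiles() gathers the current logfile and its rotated copies, sorts the matches in reverse lexicographic order so higher rotation numbers (the older files) come first, concatenates them into an in-memory StringIO, and rewinds the buffer before handing it to Flask's send_file; the try/except around the StringIO import keeps Python 2 compatibility. A Flask-free sketch of the assembly step, with an assumed logfile name:

import glob
import shutil
from io import StringIO

def assemble_logs(base_name):            # e.g. base_name = "calibre-web.log" (assumed name)
    buf = StringIO()
    # "calibre-web.log.2" sorts after "calibre-web.log.1" after "calibre-web.log",
    # so reverse=True concatenates the oldest rotated file first
    for path in sorted(glob.glob(base_name + '*'), reverse=True):
        with open(path, 'r') as fd:
            shutil.copyfileobj(fd, buf)
    buf.seek(0)                          # rewind; the caller reads from the start
    return buf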

View File

@ -240,7 +240,7 @@ def delete_book(book_id, book_format, jsonResponse):
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
ub.delete_download(book_id)
ub.session.commit()
ub.session_commit()
# check if only this book links to:
# author, language, series, tags, custom columns

View File

@ -462,10 +462,7 @@ def HandleTagCreate():
items_unknown_to_calibre = add_items_to_shelf(items, shelf)
if items_unknown_to_calibre:
log.debug("Received request to add unknown books to a collection. Silently ignoring items.")
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return make_response(jsonify(str(shelf.uuid)), 201)
@ -497,10 +494,7 @@ def HandleTagUpdate(tag_id):
shelf.name = name
ub.session.merge(shelf)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return make_response(' ', 200)
@ -552,11 +546,7 @@ def HandleTagAddItem(tag_id):
log.debug("Received request to add an unknown book to a collection. Silently ignoring item.")
ub.session.merge(shelf)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return make_response('', 201)
@ -596,10 +586,7 @@ def HandleTagRemoveItem(tag_id):
shelf.books.filter(ub.BookShelf.book_id == book.id).delete()
except KeyError:
items_unknown_to_calibre.append(item)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
if items_unknown_to_calibre:
log.debug("Received request to remove an unknown book to a collecition. Silently ignoring item.")
@ -645,10 +632,7 @@ def sync_shelves(sync_token, sync_results):
"ChangedTag": tag
})
sync_token.tags_last_modified = new_tags_last_modified
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
# Creates a Kobo "Tag" object from a ub.Shelf object
@ -729,10 +713,7 @@ def HandleStateRequest(book_uuid):
abort(400, description="Malformed request data is missing 'ReadingStates' key")
ub.session.merge(kobo_reading_state)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return jsonify({
"RequestResult": "Success",
"UpdateResults": [update_results_response],
@ -770,10 +751,7 @@ def get_or_create_reading_state(book_id):
kobo_reading_state.statistics = ub.KoboStatistics()
book_read.kobo_reading_state = kobo_reading_state
ub.session.add(book_read)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return book_read.kobo_reading_state
@ -876,11 +854,7 @@ def HandleBookDeletionRequest(book_uuid):
archived_book.last_modified = datetime.datetime.utcnow()
ub.session.merge(archived_book)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return ("", 204)

View File

@ -148,10 +148,7 @@ def generate_auth_token(user_id):
auth_token.token_type = 1
ub.session.add(auth_token)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return render_title_template(
"generate_kobo_auth_url.html",
title=_(u"Kobo Setup"),
@ -168,8 +165,5 @@ def delete_auth_token(user_id):
# Invalidate any prevously generated Kobo Auth token for this user.
ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.user_id == user_id)\
.filter(ub.RemoteAuthToken.token_type==1).delete()
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
return ""
return ub.session_commit()

View File

@ -49,6 +49,13 @@ class _Logger(logging.Logger):
else:
self.error(message, stacklevel=2, *args, **kwargs)
def debug_no_auth(self, message, *args, **kwargs):
if message.startswith("send: AUTH"):
self.debug(message[:16], stacklevel=2, *args, **kwargs)
else:
self.debug(message, stacklevel=2, *args, **kwargs)
def get(name=None):
return logging.getLogger(name)
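
The new debug_no_auth() keeps SMTP credentials out of the debug log: for messages starting with "send: AUTH" only the first 16 characters are logged, just enough for "send: AUTH PLAIN" while dropping the base64 credentials that follow; the email sender's send() method further down switches from log.debug() to this helper. A standalone sketch of the truncation rule (credential string invented):

def scrub_auth(message):
    # mirror of the check in debug_no_auth(): truncate SMTP AUTH lines only
    return message[:16] if message.startswith("send: AUTH") else message

print(scrub_auth("send: AUTH PLAIN dXNlcjpwYXNzd29yZA=="))   # 'send: AUTH PLAIN'
print(scrub_auth("send: MAIL FROM:<reader@example.org>"))    # unchanged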

View File

@ -85,11 +85,7 @@ def register_user_with_oauth(user=None):
except NoResultFound:
# no found, return error
return
try:
ub.session.commit()
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
ub.session_commit("User {} with OAuth for provider {} registered".format(user.nickname, oauth_key))
def logout_oauth_user():
@ -101,19 +97,12 @@ def logout_oauth_user():
if ub.oauth_support:
oauthblueprints = []
if not ub.session.query(ub.OAuthProvider).count():
oauthProvider = ub.OAuthProvider()
oauthProvider.provider_name = "github"
oauthProvider.active = False
ub.session.add(oauthProvider)
ub.session.commit()
oauthProvider = ub.OAuthProvider()
oauthProvider.provider_name = "google"
oauthProvider.active = False
ub.session.add(oauthProvider)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
for provider in ("github", "google"):
oauthProvider = ub.OAuthProvider()
oauthProvider.provider_name = provider
oauthProvider.active = False
ub.session.add(oauthProvider)
ub.session_commit("{} Blueprint Created".format(provider))
oauth_ids = ub.session.query(ub.OAuthProvider).all()
ele1 = dict(provider_name='github',
@ -203,12 +192,8 @@ if ub.oauth_support:
provider_user_id=provider_user_id,
token=token,
)
try:
ub.session.add(oauth_entry)
ub.session.commit()
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
ub.session.add(oauth_entry)
ub.session_commit()
# Disable Flask-Dance's default behavior for saving the OAuth token
# Value differrs depending on flask-dance version

View File

@ -59,8 +59,7 @@ def remote_login_required(f):
def remote_login():
auth_token = ub.RemoteAuthToken()
ub.session.add(auth_token)
ub.session.commit()
ub.session_commit()
verify_url = url_for('remotelogin.verify_token', token=auth_token.auth_token, _external=true)
log.debug(u"Remot Login request with token: %s", auth_token.auth_token)
return render_title_template('remote_login.html', title=_(u"login"), token=auth_token.auth_token,
@ -80,9 +79,9 @@ def verify_token(token):
return redirect(url_for('web.index'))
# Token expired
if datetime.now() > auth_token.expiration:
elif datetime.now() > auth_token.expiration:
ub.session.delete(auth_token)
ub.session.commit()
ub.session_commit()
flash(_(u"Token has expired"), category="error")
log.error(u"Remote Login token expired")
@ -91,7 +90,7 @@ def verify_token(token):
# Update token with user information
auth_token.user_id = current_user.id
auth_token.verified = True
ub.session.commit()
ub.session_commit()
flash(_(u"Success! Please return to your device"), category="success")
log.debug(u"Remote Login token for userid %s verified", auth_token.user_id)
@ -114,7 +113,7 @@ def token_verified():
# Token expired
elif datetime.now() > auth_token.expiration:
ub.session.delete(auth_token)
ub.session.commit()
ub.session_commit()
data['status'] = 'error'
data['message'] = _(u"Token has expired")
@ -127,7 +126,7 @@ def token_verified():
login_user(user)
ub.session.delete(auth_token)
ub.session.commit()
ub.session_commit("User {} logged in via remotelogin, token deleted".format(user.nickname))
data['status'] = 'success'
log.debug(u"Remote Login for userid %s succeded", user.id)

View File

@ -27,7 +27,7 @@ import sys
from flask import Blueprint, request, flash, redirect, url_for
from flask_babel import gettext as _
from flask_login import login_required, current_user
from sqlalchemy.sql.expression import func
from sqlalchemy.sql.expression import func, true
from sqlalchemy.exc import OperationalError, InvalidRequestError
from . import logger, ub, calibre_db, db
@ -221,96 +221,76 @@ def remove_from_shelf(shelf_id, book_id):
@login_required
def create_shelf():
shelf = ub.Shelf()
if request.method == "POST":
to_save = request.form.to_dict()
if "is_public" in to_save:
shelf.is_public = 1
shelf.name = to_save["title"]
shelf.user_id = int(current_user.id)
is_shelf_name_unique = False
if shelf.is_public == 1:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1)) \
.first() is None
if not is_shelf_name_unique:
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
else:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
(ub.Shelf.user_id == int(current_user.id)))\
.first() is None
if not is_shelf_name_unique:
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
if is_shelf_name_unique:
try:
ub.session.add(shelf)
ub.session.commit()
flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
except (OperationalError, InvalidRequestError):
ub.session.rollback()
flash(_(u"Settings DB is not Writeable"), category="error")
except Exception:
ub.session.rollback()
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
else:
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
return create_edit_shelf(shelf, title=_(u"Create a Shelf"), page="shelfcreate")
@shelf.route("/shelf/edit/<int:shelf_id>", methods=["GET", "POST"])
@login_required
def edit_shelf(shelf_id):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
return create_edit_shelf(shelf, title=_(u"Edit a shelf"), page="shelfedit", shelf_id=shelf_id)
# if shelf ID is set, we are editing a shelf
def create_edit_shelf(shelf, title, page, shelf_id=False):
if request.method == "POST":
to_save = request.form.to_dict()
is_shelf_name_unique = False
if shelf.is_public == 1:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1)) \
.filter(ub.Shelf.id != shelf_id) \
.first() is None
if not is_shelf_name_unique:
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
if "is_public" in to_save:
shelf.is_public = 1
else:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
(ub.Shelf.user_id == int(current_user.id)))\
.filter(ub.Shelf.id != shelf_id)\
.first() is None
if not is_shelf_name_unique:
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
if is_shelf_name_unique:
shelf.is_public = 0
if check_shelf_is_unique(shelf, to_save, shelf_id):
shelf.name = to_save["title"]
shelf.last_modified = datetime.utcnow()
if "is_public" in to_save:
shelf.is_public = 1
# shelf.last_modified = datetime.utcnow()
if not shelf_id:
shelf.user_id = int(current_user.id)
ub.session.add(shelf)
shelf_action = "created"
flash_text = _(u"Shelf %(title)s created", title=to_save["title"])
else:
shelf.is_public = 0
shelf_action = "changed"
flash_text = _(u"Shelf %(title)s changed", title=to_save["title"])
try:
ub.session.commit()
flash(_(u"Shelf %(title)s changed", title=to_save["title"]), category="success")
except (OperationalError, InvalidRequestError):
log.info(u"Shelf {} {}".format(to_save["title"], shelf_action))
flash(flash_text, category="success")
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
except (OperationalError, InvalidRequestError) as e:
ub.session.rollback()
log.debug_or_exception(e)
flash(_(u"Settings DB is not Writeable"), category="error")
except Exception:
except Exception as e:
ub.session.rollback()
log.debug_or_exception(e)
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
return render_title_template('shelf_edit.html', shelf=shelf, title=title, page=page)
def check_shelf_is_unique(shelf, to_save, shelf_id=False):
if shelf_id:
ident = ub.Shelf.id != shelf_id
else:
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
ident = true()
if shelf.is_public == 1:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1)) \
.filter(ident) \
.first() is None
if not is_shelf_name_unique:
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
else:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
(ub.Shelf.user_id == int(current_user.id))) \
.filter(ident) \
.first() is None
if not is_shelf_name_unique:
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
return is_shelf_name_unique
def delete_shelf_helper(cur_shelf):
@ -320,12 +300,7 @@ def delete_shelf_helper(cur_shelf):
ub.session.delete(cur_shelf)
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.user_id))
try:
ub.session.commit()
log.info("successfully deleted %s", cur_shelf)
except OperationalError:
ub.session.rollback()
ub.session_commit("successfully deleted Shelf {}".format(cur_shelf.name))
@shelf.route("/shelf/delete/<int:shelf_id>")
@ -339,11 +314,13 @@ def delete_shelf(shelf_id):
flash(_(u"Settings DB is not Writeable"), category="error")
return redirect(url_for('web.index'))
@shelf.route("/simpleshelf/<int:shelf_id>")
@login_required_if_no_ano
def show_simpleshelf(shelf_id):
return render_show_shelf(2, shelf_id, 1, None)
@shelf.route("/shelf/<int:shelf_id>", defaults={"sort_param": "order", 'page': 1})
@shelf.route("/shelf/<int:shelf_id>/<sort_param>", defaults={'page': 1})
@shelf.route("/shelf/<int:shelf_id>/<sort_param>/<int:page>")
@ -381,6 +358,7 @@ def order_shelf(shelf_id):
title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name),
shelf=shelf, page="shelforder")
def change_shelf_order(shelf_id, order):
result = calibre_db.session.query(db.Books).join(ub.BookShelf,ub.BookShelf.book_id == db.Books.id)\
.filter(ub.BookShelf.shelf == shelf_id).order_by(*order).all()
@ -388,10 +366,8 @@ def change_shelf_order(shelf_id, order):
book = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id) \
.filter(ub.BookShelf.book_id == entry.id).first()
book.order = index
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Shelf-id:{} - Order changed".format(shelf_id))
def render_show_shelf(shelf_type, shelf_id, page_no, sort_param):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
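
create_shelf() and edit_shelf() now both delegate to create_edit_shelf(), and the duplicated uniqueness checks move into check_shelf_is_unique(), which plugs in SQLAlchemy's true() as a neutral filter when creating (no shelf_id) and ub.Shelf.id != shelf_id when editing. A self-contained toy showing that trick (SQLAlchemy 1.4+ assumed; model and data invented):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker
from sqlalchemy.sql.expression import true

Base = declarative_base()

class Shelf(Base):
    __tablename__ = 'shelf'
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Shelf(id=1, name='SciFi'), Shelf(id=2, name='Comics')])
session.commit()

def name_is_unique(name, shelf_id=None):
    # editing: exclude the shelf being edited; creating: true() makes the filter a no-op
    ident = (Shelf.id != shelf_id) if shelf_id else true()
    return session.query(Shelf).filter(Shelf.name == name).filter(ident).first() is None

print(name_is_unique('SciFi'))              # False: another shelf already has that name
print(name_is_unique('SciFi', shelf_id=1))  # True: it is the shelf being edited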

View File

@ -411,19 +411,6 @@ bitjs.archive = bitjs.archive || {};
return "unrar.js";
};
/**
* Unrarrer5
* @extends {bitjs.archive.Unarchiver}
* @constructor
*/
bitjs.archive.Unrarrer5 = function(arrayBuffer, optPathToBitJS) {
bitjs.base(this, arrayBuffer, optPathToBitJS);
};
bitjs.inherits(bitjs.archive.Unrarrer5, bitjs.archive.Unarchiver);
bitjs.archive.Unrarrer5.prototype.getScriptFileName = function() {
return "unrar5.js";
};
/**
* Untarrer
* @extends {bitjs.archive.Unarchiver}

View File

@ -14,10 +14,10 @@
/* global VM_FIXEDGLOBALSIZE, VM_GLOBALMEMSIZE, MAXWINMASK, VM_GLOBALMEMADDR, MAXWINSIZE */
// This file expects to be invoked as a Worker (see onmessage below).
/*importScripts("../io/bitstream.js");
importScripts("../io/bitstream.js");
importScripts("../io/bytebuffer.js");
importScripts("archive.js");
importScripts("rarvm.js");*/
importScripts("rarvm.js");
// Progress variables.
var currentFilename = "";
@ -29,21 +29,19 @@ var totalFilesInArchive = 0;
// Helper functions.
var info = function(str) {
console.log(str);
// postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
};
var err = function(str) {
console.log(str);
// postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
var postProgress = function() {
/*postMessage(new bitjs.archive.UnarchiveProgressEvent(
postMessage(new bitjs.archive.UnarchiveProgressEvent(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive));*/
totalFilesInArchive));
};
// shows a byte value as its hex representation
@ -1300,7 +1298,7 @@ var unrar = function(arrayBuffer) {
totalUncompressedBytesInArchive = 0;
totalFilesInArchive = 0;
//postMessage(new bitjs.archive.UnarchiveStartEvent());
postMessage(new bitjs.archive.UnarchiveStartEvent());
var bstream = new bitjs.io.BitStream(arrayBuffer, false /* rtl */);
var header = new RarVolumeHeader(bstream);
@ -1350,7 +1348,7 @@ var unrar = function(arrayBuffer) {
localfile.unrar();
if (localfile.isValid) {
// postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
postProgress();
}
}
@ -1360,7 +1358,7 @@ var unrar = function(arrayBuffer) {
} else {
err("Invalid RAR file");
}
// postMessage(new bitjs.archive.UnarchiveFinishEvent());
postMessage(new bitjs.archive.UnarchiveFinishEvent());
};
// event.data.file has the ArrayBuffer.

View File

@ -162,15 +162,10 @@ function initProgressClick() {
function loadFromArrayBuffer(ab) {
var start = (new Date).getTime();
var h = new Uint8Array(ab, 0, 10);
unrar5(ab);
var pathToBitJS = "../../static/js/archive/";
var lastCompletion = 0;
/*if (h[0] === 0x52 && h[1] === 0x61 && h[2] === 0x72 && h[3] === 0x21) { //Rar!
if (h[7] === 0x01) {
unarchiver = new bitjs.archive.Unrarrer(ab, pathToBitJS);
} else {
unarchiver = new bitjs.archive.Unrarrer5(ab, pathToBitJS);
}
if (h[0] === 0x52 && h[1] === 0x61 && h[2] === 0x72 && h[3] === 0x21) { //Rar!
unarchiver = new bitjs.archive.Unrarrer(ab, pathToBitJS);
} else if (h[0] === 80 && h[1] === 75) { //PK (Zip)
unarchiver = new bitjs.archive.Unzipper(ab, pathToBitJS);
} else if (h[0] === 255 && h[1] === 216) { // JPEG
@ -234,7 +229,7 @@ function loadFromArrayBuffer(ab) {
unarchiver.start();
} else {
alert("Some error");
}*/
}
}
function scrollTocToActive() {
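
loadFromArrayBuffer() above re-enables the magic-byte dispatch that was previously commented out (and drops the separate Unrarrer5 branch): "Rar!" selects the Unrarrer, "PK" the Unzipper, and 0xFF 0xD8 indicates a plain JPEG. The same sniffing rule, sketched in Python rather than JavaScript for brevity (byte values taken from the code above):

def sniff_archive(buf: bytes) -> str:
    if buf[:4] == b'Rar!':               # 0x52 0x61 0x72 0x21
        return 'rar'
    if buf[:2] == b'PK':                 # 80, 75 -> ZIP local file header
        return 'zip'
    if buf[:2] == b'\xff\xd8':           # 255, 216 -> JPEG start-of-image marker
        return 'jpeg'
    return 'unknown'

print(sniff_archive(b'PK\x03\x04'))      # 'zip'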

View File

@ -66,9 +66,15 @@ class TaskConvert(CalibreTask):
# if we're sending to kindle after converting, create a one-off task and run it immediately
# todo: figure out how to incorporate this into the progress
try:
worker_thread.add(self.user, TaskEmail(self.settings['subject'], self.results["path"],
filename, self.settings, self.kindle_mail,
self.settings['subject'], self.settings['body'], internal=True))
worker_thread.add(self.user, TaskEmail(self.settings['subject'],
self.results["path"],
filename,
self.settings,
self.kindle_mail,
self.settings['subject'],
self.settings['body'],
internal=True)
)
except Exception as e:
return self._handleError(str(e))

View File

@ -44,7 +44,7 @@ class EmailBase:
def send(self, strg):
"""Send `strg' to the server."""
log.debug('send: %r', strg[:300])
log.debug_no_auth('send: {}'.format(strg[:300]))
if hasattr(self, 'sock') and self.sock:
try:
if self.transferSize:

17 file diffs suppressed because they are too large.

View File

@ -46,8 +46,9 @@ from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session
from werkzeug.security import generate_password_hash
from . import constants
from . import constants, logger
log = logger.create()
session = None
app_DB_path = None
@ -695,3 +696,13 @@ def dispose():
old_session.bind.dispose()
except Exception:
pass
def session_commit(success=None):
try:
session.commit()
if success:
log.info(success)
except (exc.OperationalError, exc.InvalidRequestError) as e:
session.rollback()
log.debug_or_exception(e)
return ""
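
The new session_commit() helper defined above is what replaces the many inline try/commit/except blocks removed throughout this commit: it commits, logs the optional success message, rolls back and logs the exception on OperationalError or InvalidRequestError, and always returns "" so AJAX endpoints can hand its result straight back as their response (as edit_domain() does). A runnable toy mimicking that control flow without a database (names and the simulated failure are invented):

import logging
logging.basicConfig(level=logging.INFO, format="%(levelname)s:%(name)s:%(message)s")
log = logging.getLogger("ub")

class _ToySession:                       # stand-in for the scoped SQLAlchemy session
    def __init__(self, fail=False):
        self.fail = fail
    def commit(self):
        if self.fail:
            raise RuntimeError("attempt to write a readonly database")
    def rollback(self):
        log.info("rolled back")

def session_commit(session, success=None):
    # same flow as ub.session_commit(); RuntimeError stands in for the
    # OperationalError / InvalidRequestError that the real helper catches
    try:
        session.commit()
        if success:
            log.info(success)
    except RuntimeError as e:
        session.rollback()
        log.debug(e, exc_info=True)      # the project uses log.debug_or_exception(e)
    return ""

print(repr(session_commit(_ToySession(), "Registering Domains added example.org")))  # ''
print(repr(session_commit(_ToySession(fail=True), "never logged, commit failed")))   # ''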

View File

@ -135,10 +135,7 @@ def bookmark(book_id, book_format):
ub.Bookmark.book_id == book_id,
ub.Bookmark.format == book_format)).delete()
if not bookmark_key:
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit()
return "", 204
lbookmark = ub.Bookmark(user_id=current_user.id,
@ -146,10 +143,7 @@ def bookmark(book_id, book_format):
format=book_format,
bookmark_key=bookmark_key)
ub.session.merge(lbookmark)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Bookmark for user {} in book {} created".format(current_user.id, book_id))
return "", 201
@ -174,10 +168,7 @@ def toggle_read(book_id):
kobo_reading_state.statistics = ub.KoboStatistics()
book.kobo_reading_state = kobo_reading_state
ub.session.merge(book)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Book {} readbit toggled".format(book_id))
else:
try:
calibre_db.update_title_sort(config)
@ -211,10 +202,7 @@ def toggle_archived(book_id):
archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
archived_book.is_archived = True
ub.session.merge(archived_book)
try:
ub.session.commit()
except OperationalError:
ub.session.rollback()
ub.session_commit("Book {} archivebit toggled".format(book_id))
return ""

File diff suppressed because it is too large.