Suppress some errors
commit b75247ea3a (parent 9a963bbe79)
@@ -37,6 +37,7 @@ try:
 except ImportError:
     from flask_login.__about__ import __version__ as flask_loginVersion
 try:
+    # pylint: disable=unused-import
     import unidecode
     # _() necessary to make babel aware of string for translation
     unidecode_version = _(u'installed')
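
Note: the added `# pylint: disable=unused-import` is needed because unidecode is imported only to detect whether the optional dependency is available; the name is never used afterwards. An alternative sketch that avoids the suppression entirely, using importlib (names here are illustrative, not part of the commit):

    # Sketch: probe for an optional package without importing it,
    # so no unused-import warning is produced.
    import importlib.util

    def probe(package):
        # find_spec() returns None when the package cannot be found
        return 'installed' if importlib.util.find_spec(package) else 'not installed'

    unidecode_status = probe('unidecode')
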
@@ -49,7 +49,7 @@ def init_cache_busting(app):
             # compute version component
             rooted_filename = os.path.join(dirpath, filename)
             with open(rooted_filename, 'rb') as f:
-                file_hash = hashlib.md5(f.read()).hexdigest()[:7]
+                file_hash = hashlib.md5(f.read()).hexdigest()[:7] # nosec

             # save version to tables
             file_path = rooted_filename.replace(static_folder, "")
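
Note: Bandit flags every use of hashlib.md5 (B303 or B324, depending on the Bandit version); the `# nosec` marker tells it to skip this line, which is reasonable here because the digest only serves as a short cache-busting fingerprint, not as a security check. A minimal sketch of the idea (illustrative, not the project's exact code):

    import hashlib

    def short_fingerprint(path):
        # MD5 is acceptable here: the digest is only a cache key for static assets
        with open(path, 'rb') as f:
            return hashlib.md5(f.read()).hexdigest()[:7]  # nosec
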
@@ -64,6 +64,7 @@ def init_cache_busting(app):
         return filename.split("?", 1)[0]

     @app.url_defaults
+    # pylint: disable=unused-variable
     def reverse_to_cache_busted_url(endpoint, values):
         """
         Make `url_for` produce busted filenames when using the 'static' endpoint.
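
Note: `reverse_to_cache_busted_url` is referenced only through the `@app.url_defaults` decorator, so pylint reports it as an unused variable; the disable comment silences that. Roughly, the hook rewrites the values passed to `url_for('static', ...)` so each asset URL carries its content hash. A stripped-down sketch, assuming a `hash_table` dict filled in the loop above (names illustrative):

    def add_cache_busting(app, hash_table):
        @app.url_defaults
        # pylint: disable=unused-variable
        def reverse_to_cache_busted_url(endpoint, values):
            if endpoint == 'static' and values.get('filename') in hash_table:
                # url_for('static', filename='css/style.css')
                # then renders as /static/css/style.css?q=<hash>
                values['q'] = hash_table[values['filename']]
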
@@ -104,7 +104,7 @@ LDAP_AUTH_SIMPLE = 0

 DEFAULT_MAIL_SERVER = "mail.example.org"

-DEFAULT_PASSWORD = "admin123" # nosec # noqa
+DEFAULT_PASSWORD = "admin123" # noqa nosec
 DEFAULT_PORT = 8083
 env_CALIBRE_PORT = os.environ.get("CALIBRE_PORT", DEFAULT_PORT)
 try:
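
Note: the two markers address different tools: `# noqa` is read by flake8 and `# nosec` by Bandit (B105, hardcoded password string, for this constant). Both tools appear to scan the whole physical line for their marker, so the reordering is mostly cosmetic. Illustration (not project code):

    # Bandit would report B105 on the bare assignment; "# nosec" suppresses that,
    # while "# noqa" suppresses any flake8 finding on the same line.
    DEFAULT_PASSWORD = "admin123"  # noqa nosec
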
@@ -60,14 +60,8 @@ def init_errorhandler():
     if services.ldap:
         # Only way of catching the LDAPException upon logging in with LDAP server down
         @app.errorhandler(services.ldap.LDAPException)
+        # pylint: disable=unused-variable
         def handle_exception(e):
             log.debug('LDAP server not accessible while trying to login to opds feed')
             return error_http(FailedDependency())

-
-    # @app.errorhandler(InvalidRequestError)
-    #@app.errorhandler(OperationalError)
-    #def handle_db_exception(e):
-    #    db.session.rollback()
-    #    log.error('Database request error: %s',e)
-    #    return internal_error(InternalServerError(e))
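
Note: besides dropping the commented-out database handler, this hunk adds the same pylint suppression as elsewhere: `handle_exception` is registered purely through the decorator's side effect and its name is never read again, which pylint reports as unused-variable. A minimal illustration of the pattern (not project code):

    def register(handlers):
        def decorator(func):
            handlers.append(func)       # registration happens as a side effect
            return func
        return decorator

    def setup(handlers):
        @register(handlers)
        # pylint: disable=unused-variable
        def on_error(exc):              # the name is never used after decoration
            return str(exc)
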
@@ -142,7 +142,7 @@ def on_received_watch_confirmation():
         else:
             dbpath = os.path.join(config.config_calibre_dir, "metadata.db").encode()
         if not response['deleted'] and response['file']['title'] == 'metadata.db' \
-                and response['file']['md5Checksum'] != hashlib.md5(dbpath):
+                and response['file']['md5Checksum'] != hashlib.md5(dbpath): # nosec
             tmp_dir = os.path.join(tempfile.gettempdir(), 'calibre_web')
             if not os.path.isdir(tmp_dir):
                 os.mkdir(tmp_dir)
@@ -81,6 +81,7 @@ log = logger.create()

 def register_url_value_preprocessor(kobo):
     @kobo.url_value_preprocessor
+    # pylint: disable=unused-variable
     def pop_auth_token(__, values):
         g.auth_token = values.pop("auth_token")

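
Note: same pattern again: `pop_auth_token` exists only for its registration on the blueprint's `url_value_preprocessor` hook, which pops the `auth_token` URL segment into `flask.g` before any view runs. A self-contained sketch (blueprint name and prefix are illustrative):

    from flask import Blueprint, g

    kobo = Blueprint('kobo', __name__, url_prefix='/kobo/<auth_token>')

    @kobo.url_value_preprocessor
    # pylint: disable=unused-variable
    def pop_auth_token(__, values):
        # remove auth_token from the URL values so views need no auth_token argument
        g.auth_token = values.pop('auth_token')
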
@@ -22,7 +22,7 @@ import os
 import errno
 import signal
 import socket
-import subprocess
+import subprocess # nosec

 try:
     from gevent.pywsgi import WSGIServer
@@ -259,7 +259,7 @@ class WebServer(object):

         log.info("Performing restart of Calibre-Web")
         args = self._get_args_for_reloading()
-        subprocess.call(args, close_fds=True)
+        subprocess.call(args, close_fds=True) # nosec
         return True

     def _killServer(self, __, ___):
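
Note: Bandit flags both the import (B404) and the call (B603, subprocess call without shell-equals-true check) even though `args` is a fixed argument list and no shell is involved, hence the two `# nosec` markers in this file. A hedged sketch of the relaunch pattern (illustrative, not the exact restart logic):

    import subprocess  # nosec
    import sys

    def restart_self():
        # re-exec the current interpreter with the original arguments;
        # passing a list (not a string) keeps the shell out of the picture
        args = [sys.executable] + sys.argv
        subprocess.call(args, close_fds=True)  # nosec
        return True
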
@@ -22,6 +22,7 @@ from base64 import b64decode, b64encode
 from jsonschema import validate, exceptions, __version__
 from datetime import datetime
 try:
+    # pylint: disable=unused-import
     from urllib import unquote
 except ImportError:
     from urllib.parse import unquote
@@ -91,14 +92,14 @@ class SyncToken:

     def __init__(
         self,
-        raw_kobo_store_token="", # nosec
+        raw_kobo_store_token="",
         books_last_created=datetime.min,
         books_last_modified=datetime.min,
         archive_last_modified=datetime.min,
         reading_state_last_modified=datetime.min,
         tags_last_modified=datetime.min,
         books_last_id=-1
-    ):
+    ): # nosec
         self.raw_kobo_store_token = raw_kobo_store_token
         self.books_last_created = books_last_created
         self.books_last_modified = books_last_modified
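
Note: Bandit's B107 (hardcoded password default) fires because `raw_kobo_store_token` looks credential-like and has a string default; the commit moves the `# nosec` from the parameter line to the line that closes the signature, presumably where the Bandit version in use attaches the finding. One alternative sketch that avoids the finding altogether (illustrative):

    class SyncTokenSketch:
        def __init__(self, raw_kobo_store_token=None):
            # default to None rather than a string literal, then normalise
            self.raw_kobo_store_token = raw_kobo_store_token or ""
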
@@ -41,7 +41,7 @@ def process_open(command, quotes=(), env=None, sout=subprocess.PIPE, serr=subpro
     else:
         exc_command = [x for x in command]

-    return subprocess.Popen(exc_command, shell=False, stdout=sout, stderr=serr, universal_newlines=newlines, env=env)
+    return subprocess.Popen(exc_command, shell=False, stdout=sout, stderr=serr, universal_newlines=newlines, env=env) # nosec


 def process_wait(command, serr=subprocess.PIPE):
@@ -284,7 +284,8 @@ class Updater(threading.Thread):
     def _stable_version_info(cls):
         return constants.STABLE_VERSION # Current version

-    def _populate_parent_commits(self, update_data, status, locale, tz, parents):
+    @staticmethod
+    def _populate_parent_commits(update_data, status, locale, tz, parents):
         try:
             parent_commit = update_data['parents'][0]
             # limit the maximum search depth
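
Note: besides the suppressions, the commit converts two helpers to `@staticmethod` (here and in the next hunk); neither uses `self`, so the change quiets pylint's no-self-use hint and makes the lack of instance state explicit, while call sites stay unchanged. Small illustration (not project code):

    class Before:
        def helper(self, data):     # pylint: no-self-use, `self` is never touched
            return sorted(data)

    class After:
        @staticmethod
        def helper(data):           # same call sites, no instance state implied
            return sorted(data)
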
@@ -322,7 +323,8 @@ class Updater(threading.Thread):
             break
         return parents

-    def _load_nightly_data(self, repository_url, commit, status):
+    @staticmethod
+    def _load_nightly_data(repository_url, commit, status):
         try:
             headers = {'Accept': 'application/vnd.github.v3+json'}
             r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
@@ -191,7 +191,7 @@ def upload(uploadfile, rarExcecutable):

     filename = uploadfile.filename
     filename_root, file_extension = os.path.splitext(filename)
-    md5 = hashlib.md5(filename.encode('utf-8')).hexdigest()
+    md5 = hashlib.md5(filename.encode('utf-8')).hexdigest() # nosec
     tmp_file_path = os.path.join(tmp_dir, md5)
     log.debug("Temporary file: %s", tmp_file_path)
     uploadfile.save(tmp_file_path)
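
Note: as in the cache-buster hunk, MD5 only derives a temporary file name here, so `# nosec` is appropriate. On Python 3.9+ there is an alternative that recent Bandit releases recognise, the `usedforsecurity=False` flag; a sketch of that option (not what this commit does):

    import hashlib

    def temp_name(filename):
        # declares the non-security intent instead of suppressing the linter
        return hashlib.md5(filename.encode('utf-8'), usedforsecurity=False).hexdigest()
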