Mirror of https://github.com/janeczku/calibre-web (synced 2024-11-24 18:47:23 +00:00)
logging clean-up

- moved most constants to a separate file
- sorted and cleaned up imports
- moved logging setup to a separate file
Parent: a02f949d23
Commit: b89ab9ff10
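The new cps/logger.py that this commit introduces is not part of this capture; the hunks below only show its call sites (logger.create(), logger.is_valid_logfile()). As a rough, assumed sketch of what such a module provides — reconstructed from the handler setup removed from create_app() in cps/__init__.py, not from the committed file:

    # Sketch only -- names, signatures and defaults are assumptions, not the committed file.
    from __future__ import division, print_function, unicode_literals
    import logging
    from logging.handlers import RotatingFileHandler

    FORMATTER = logging.Formatter("[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")


    def create():
        # Every module does `log = logger.create()` at import time and logs through that object.
        return logging.getLogger('calibre-web')  # assumed logger name


    def setup(log_file, log_level=logging.INFO):
        # Central replacement for the RotatingFileHandler block removed from create_app().
        handler = RotatingFileHandler(log_file, maxBytes=50000, backupCount=2)
        handler.setFormatter(FORMATTER)
        log = create()
        log.addHandler(handler)
        log.setLevel(log_level)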
cps.py (10 changed lines):

@@ -17,14 +17,14 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import os
+from __future__ import absolute_import, division, print_function, unicode_literals
 import sys
+import os
 
 
-base_path = os.path.dirname(os.path.abspath(__file__))
 # Insert local directories into path
-sys.path.append(base_path)
-sys.path.append(os.path.join(base_path, 'cps'))
-sys.path.append(os.path.join(base_path, 'vendor'))
+sys.path.append(os.path.join(sys.path[0], 'vendor'))
 
 from cps import create_app
 from cps.opds import opds

cps/__init__.py:

@@ -20,29 +20,28 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-__all__ =['app']
 
-import mimetypes
-from flask import Flask, request, g
-from flask_login import LoginManager
-from flask_babel import Babel
-import cache_buster
-from reverseproxy import ReverseProxied
-import logging
-from logging.handlers import RotatingFileHandler
-from flask_principal import Principal
-from babel.core import UnknownLocaleError
-from babel import Locale as LC
-from babel import negotiate_locale
-import os
-import ub
+from __future__ import division, print_function, unicode_literals
 import sys
-from ub import Config, Settings
+import os
+import mimetypes
 try:
     import cPickle
 except ImportError:
     import pickle as cPickle
 
+from babel import Locale as LC
+from babel import negotiate_locale
+from babel.core import UnknownLocaleError
+from flask import Flask, request, g
+from flask_login import LoginManager
+from flask_babel import Babel
+from flask_principal import Principal
 
+from . import logger, cache_buster, ub
+from .constants import TRANSLATIONS_DIR as _TRANSLATIONS_DIR
+from .reverseproxy import ReverseProxied
 
 
 mimetypes.init()
 mimetypes.add_type('application/xhtml+xml', '.xhtml')

@@ -70,12 +69,11 @@ lm.anonymous_user = ub.Anonymous
 
 
 ub.init_db()
-config = Config()
+config = ub.Config()
 
 from . import db
 
 try:
-    with open(os.path.join(config.get_main_dir, 'cps/translations/iso639.pickle'), 'rb') as f:
+    with open(os.path.join(_TRANSLATIONS_DIR, 'iso639.pickle'), 'rb') as f:
         language_table = cPickle.load(f)
 except cPickle.UnpicklingError as error:
     # app.logger.error("Can't read file cps/translations/iso639.pickle: %s", error)

@@ -91,24 +89,14 @@ from .server import server
 Server = server()
 
 babel = Babel()
+log = logger.create()
 
 
 def create_app():
     app.wsgi_app = ReverseProxied(app.wsgi_app)
     cache_buster.init_cache_busting(app)
 
-    formatter = logging.Formatter(
-        "[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")
-    try:
-        file_handler = RotatingFileHandler(config.get_config_logfile(), maxBytes=50000, backupCount=2)
-    except IOError:
-        file_handler = RotatingFileHandler(os.path.join(config.get_main_dir, "calibre-web.log"),
-                                           maxBytes=50000, backupCount=2)
-        # ToDo: reset logfile value in config class
-    file_handler.setFormatter(formatter)
-    app.logger.addHandler(file_handler)
-    app.logger.setLevel(config.config_log_level)
-
-    app.logger.info('Starting Calibre Web...')
+    log.info('Starting Calibre Web...')
     Principal(app)
     lm.init_app(app)
     app.secret_key = os.getenv('SECRET_KEY', 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT')

@@ -132,7 +120,7 @@ def get_locale():
         try:
             preferred.append(str(LC.parse(x.replace('-', '_'))))
         except (UnknownLocaleError, ValueError) as e:
-            app.logger.debug("Could not parse locale: %s", e)
+            log.warning('Could not parse locale "%s": %s', x, e)
             preferred.append('en')
     return negotiate_locale(preferred, translations)
 

@@ -145,3 +133,6 @@ def get_timezone():
 
 from .updater import Updater
 updater_thread = Updater()
+
+
+__all__ = ['app']

cps/about.py (37 changed lines):

@@ -21,29 +21,30 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-from flask import Blueprint
-from flask_login import login_required
-from . import db
+from __future__ import division, print_function, unicode_literals
 import sys
-from .uploader import get_versions
-from babel import __version__ as babelVersion
-from sqlalchemy import __version__ as sqlalchemyVersion
-from flask_principal import __version__ as flask_principalVersion
-from iso639 import __version__ as iso639Version
-from pytz import __version__ as pytzVersion
-from flask import __version__ as flaskVersion
-from werkzeug import __version__ as werkzeugVersion
-from jinja2 import __version__ as jinja2Version
-from .converter import versioncheck
-from flask_babel import gettext as _
-from cps import Server
 import requests
-from .web import render_title_template
 
+from flask import Blueprint
+from flask import __version__ as flaskVersion
+from flask_babel import gettext as _
+from flask_principal import __version__ as flask_principalVersion
+from flask_login import login_required
 try:
     from flask_login import __version__ as flask_loginVersion
 except ImportError:
     from flask_login.__about__ import __version__ as flask_loginVersion
+from werkzeug import __version__ as werkzeugVersion
 
+from babel import __version__ as babelVersion
+from jinja2 import __version__ as jinja2Version
+from pytz import __version__ as pytzVersion
+from sqlalchemy import __version__ as sqlalchemyVersion
 
+from . import db, converter, Server, uploader
+from .isoLanguages import __version__ as iso639Version
+from .web import render_title_template
 
 
 about = Blueprint('about', __name__)
 

@@ -55,7 +56,7 @@ def stats():
     authors = db.session.query(db.Authors).count()
     categorys = db.session.query(db.Tags).count()
     series = db.session.query(db.Series).count()
-    versions = get_versions()
+    versions = uploader.get_versions()
     versions['Babel'] = 'v' + babelVersion
     versions['Sqlalchemy'] = 'v' + sqlalchemyVersion
     versions['Werkzeug'] = 'v' + werkzeugVersion

@@ -69,7 +70,7 @@ def stats():
     versions['Requests'] = 'v' + requests.__version__
     versions['pySqlite'] = 'v' + db.engine.dialect.dbapi.version
     versions['Sqlite'] = 'v' + db.engine.dialect.dbapi.sqlite_version
-    versions.update(versioncheck())
+    versions.update(converter.versioncheck())
     versions.update(Server.getNameVersion())
     versions['Python'] = sys.version
     return render_title_template('stats.html', bookcounter=counter, authorcounter=authors, versions=versions,

cps/admin.py (198 changed lines):

@@ -21,29 +21,32 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import division, print_function, unicode_literals
 import os
-from flask import Blueprint, flash, redirect, url_for
-from flask import abort, request, make_response
-from flask_login import login_required, current_user, logout_user
-from .web import admin_required, render_title_template, before_request, unconfigured, \
-    login_required_if_no_ano
-from . import db, ub, Server, get_locale, config, app, updater_thread, babel
 import json
-from datetime import datetime, timedelta
 import time
-from babel.dates import format_datetime
-from flask_babel import gettext as _
-from babel import Locale as LC
-from sqlalchemy.exc import IntegrityError
-from .gdriveutils import is_gdrive_ready, gdrive_support, downloadFile, deleteDatabaseOnChange, listRootFolders
-from .helper import speaking_language, check_valid_domain, check_unrar, send_test_mail, generate_random_password, \
-    send_registration_mail
-from werkzeug.security import generate_password_hash
+from datetime import datetime, timedelta
 try:
     from imp import reload
 except ImportError:
     pass
 
+from babel import Locale as LC
+from babel.dates import format_datetime
+from flask import Blueprint, flash, redirect, url_for, abort, request, make_response
+from flask_login import login_required, current_user, logout_user
+from flask_babel import gettext as _
+from sqlalchemy import and_
+from sqlalchemy.exc import IntegrityError
+from werkzeug.security import generate_password_hash
 
+from . import constants, logger
+from . import db, ub, Server, get_locale, config, updater_thread, babel, gdriveutils
+from .helper import speaking_language, check_valid_domain, check_unrar, send_test_mail, generate_random_password, \
+    send_registration_mail
+from .gdriveutils import is_gdrive_ready, gdrive_support, downloadFile, deleteDatabaseOnChange, listRootFolders
+from .web import admin_required, render_title_template, before_request, unconfigured, login_required_if_no_ano
 
 feature_support = dict()
 try:
     from goodreads.client import GoodreadsClient

@@ -51,11 +54,11 @@ try:
 except ImportError:
     feature_support['goodreads'] = False
 
-try:
-    import rarfile
-    feature_support['rar'] = True
-except ImportError:
-    feature_support['rar'] = False
+# try:
+#     import rarfile
+#     feature_support['rar'] = True
+# except ImportError:
+#     feature_support['rar'] = False
 
 try:
     import ldap

@@ -70,8 +73,10 @@ except ImportError:
     feature_support['oauth'] = False
     oauth_check = {}
 
 
 feature_support['gdrive'] = gdrive_support
 admi = Blueprint('admin', __name__)
+log = logger.create()
+
 
 
 @admi.route("/admin")

@@ -174,7 +179,7 @@ def view_configuration():
         if "config_mature_content_tags" in to_save:
             content.config_mature_content_tags = to_save["config_mature_content_tags"].strip()
         if "Show_mature_content" in to_save:
-            content.config_default_show = content.config_default_show + ub.MATURE_CONTENT
+            content.config_default_show |= constants.MATURE_CONTENT
 
         if "config_authors_max" in to_save:
             content.config_authors_max = int(to_save["config_authors_max"])

@@ -182,26 +187,26 @@ def view_configuration():
         # Default user configuration
         content.config_default_role = 0
         if "admin_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_ADMIN
+            content.config_default_role |= constants.ROLE_ADMIN
         if "download_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_DOWNLOAD
+            content.config_default_role |= constants.ROLE_DOWNLOAD
         if "viewer_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_VIEWER
+            content.config_default_role |= constants.ROLE_VIEWER
         if "upload_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_UPLOAD
+            content.config_default_role |= constants.ROLE_UPLOAD
         if "edit_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_EDIT
+            content.config_default_role |= constants.ROLE_EDIT
         if "delete_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_DELETE_BOOKS
+            content.config_default_role |= constants.ROLE_DELETE_BOOKS
         if "passwd_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_PASSWD
+            content.config_default_role |= constants.ROLE_PASSWD
         if "edit_shelf_role" in to_save:
-            content.config_default_role = content.config_default_role + ub.ROLE_EDIT_SHELFS
+            content.config_default_role |= constants.ROLE_EDIT_SHELFS
 
         val = 0
         for key,v in to_save.items():
             if key.startswith('show'):
-                val += int(key[5:])
+                val |= int(key[5:])
         content.config_default_show = val
 
         ub.session.commit()

@@ -215,9 +220,9 @@ def view_configuration():
         # stop Server
         Server.setRestartTyp(True)
         Server.stopServer()
-        app.logger.info('Reboot required, restarting')
+        log.info('Reboot required, restarting')
     readColumn = db.session.query(db.Custom_Columns)\
-        .filter(db.and_(db.Custom_Columns.datatype == 'bool',db.Custom_Columns.mark_for_delete == 0)).all()
+        .filter(and_(db.Custom_Columns.datatype == 'bool',db.Custom_Columns.mark_for_delete == 0)).all()
     return render_title_template("config_view_edit.html", conf=config, readColumns=readColumn,
                                  title=_(u"UI Configuration"), page="uiconfig")
 

@@ -294,10 +299,10 @@ def configuration_helper(origin):
     if not feature_support['gdrive']:
         gdriveError = _('Import of optional Google Drive requirements missing')
     else:
-        if not os.path.isfile(os.path.join(config.get_main_dir, 'client_secrets.json')):
+        if not os.path.isfile(gdriveutils.CLIENT_SECRETS):
            gdriveError = _('client_secrets.json is missing or not readable')
        else:
-            with open(os.path.join(config.get_main_dir, 'client_secrets.json'), 'r') as settings:
+            with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
                filedata = json.load(settings)
            if 'web' not in filedata:
                gdriveError = _('client_secrets.json is not configured for web application')

@@ -309,13 +314,13 @@ def configuration_helper(origin):
             content.config_calibre_dir = to_save["config_calibre_dir"]
             db_change = True
         # Google drive setup
-        if not os.path.isfile(os.path.join(config.get_main_dir, 'settings.yaml')):
+        if not os.path.isfile(gdriveutils.SETTINGS_YAML):
             content.config_use_google_drive = False
         if "config_use_google_drive" in to_save and not content.config_use_google_drive and not gdriveError:
             if filedata:
                 if filedata['web']['redirect_uris'][0].endswith('/'):
                     filedata['web']['redirect_uris'][0] = filedata['web']['redirect_uris'][0][:-1]
-                with open(os.path.join(config.get_main_dir, 'settings.yaml'), 'w') as f:
+                with open(gdriveutils.SETTINGS_YAML, 'w') as f:
                     yaml = "client_config_backend: settings\nclient_config_file: %(client_file)s\n" \
                            "client_config:\n" \
                            " client_id: %(client_id)s\n client_secret: %(client_secret)s\n" \

@@ -323,11 +328,11 @@ def configuration_helper(origin):
                            "save_credentials_backend: file\nsave_credentials_file: %(credential)s\n\n" \
                            "get_refresh_token: True\n\noauth_scope:\n" \
                            " - https://www.googleapis.com/auth/drive\n"
-                    f.write(yaml % {'client_file': os.path.join(config.get_main_dir, 'client_secrets.json'),
+                    f.write(yaml % {'client_file': gdriveutils.CLIENT_SECRETS,
                                     'client_id': filedata['web']['client_id'],
                                     'client_secret': filedata['web']['client_secret'],
                                     'redirect_uri': filedata['web']['redirect_uris'][0],
-                                    'credential': os.path.join(config.get_main_dir, 'gdrive_credentials')})
+                                    'credential': gdriveutils.CREDENTIALS})
                 else:
                     flash(_(u'client_secrets.json is not configured for web application'), category="error")
                     return render_title_template("config_edit.html", config=config, origin=origin,

@@ -397,7 +402,7 @@ def configuration_helper(origin):
                                              gdriveError=gdriveError, feature_support=feature_support,
                                              title=_(u"Basic Configuration"), page="config")
             else:
-                content.config_login_type = ub.LOGIN_LDAP
+                content.config_login_type = constants.LOGIN_LDAP
                 content.config_ldap_provider_url = to_save["config_ldap_provider_url"]
                 content.config_ldap_dn = to_save["config_ldap_dn"]
                 db_change = True

@@ -425,7 +430,7 @@ def configuration_helper(origin):
                                              gdriveError=gdriveError, feature_support=feature_support,
                                              title=_(u"Basic Configuration"), page="config")
             else:
-                content.config_login_type = ub.LOGIN_OAUTH_GITHUB
+                content.config_login_type = constants.LOGIN_OAUTH_GITHUB
                 content.config_github_oauth_client_id = to_save["config_github_oauth_client_id"]
                 content.config_github_oauth_client_secret = to_save["config_github_oauth_client_secret"]
                 reboot_required = True

@@ -439,31 +444,25 @@ def configuration_helper(origin):
                                              gdriveError=gdriveError, feature_support=feature_support,
                                              title=_(u"Basic Configuration"), page="config")
             else:
-                content.config_login_type = ub.LOGIN_OAUTH_GOOGLE
+                content.config_login_type = constants.LOGIN_OAUTH_GOOGLE
                 content.config_google_oauth_client_id = to_save["config_google_oauth_client_id"]
                 content.config_google_oauth_client_secret = to_save["config_google_oauth_client_secret"]
                 reboot_required = True
 
         if "config_login_type" in to_save and to_save["config_login_type"] == "0":
-            content.config_login_type = ub.LOGIN_STANDARD
+            content.config_login_type = constants.LOGIN_STANDARD
 
         if "config_log_level" in to_save:
             content.config_log_level = int(to_save["config_log_level"])
         if content.config_logfile != to_save["config_logfile"]:
             # check valid path, only path or file
-            if os.path.dirname(to_save["config_logfile"]):
-                if os.path.exists(os.path.dirname(to_save["config_logfile"])) and \
-                        os.path.basename(to_save["config_logfile"]) and not os.path.isdir(to_save["config_logfile"]):
-                    content.config_logfile = to_save["config_logfile"]
-                else:
+            if not logger.is_valid_logfile(to_save["config_logfile"]):
                 ub.session.commit()
                 flash(_(u'Logfile location is not valid, please enter correct path'), category="error")
                 return render_title_template("config_edit.html", config=config, origin=origin,
                                              gdriveError=gdriveError, feature_support=feature_support,
                                              title=_(u"Basic Configuration"), page="config")
-            else:
-                content.config_logfile = to_save["config_logfile"]
-            reboot_required = True
+            content.config_logfile = to_save["config_logfile"]
 
         # Rarfile Content configuration
         if "config_rarfile_location" in to_save and to_save['config_rarfile_location'] is not u"":
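The hunk above replaces the inline logfile-path check with a call to logger.is_valid_logfile(). The helper itself lives in the new cps/logger.py, which is not in this capture; a hedged reconstruction built only from the conditions visible in the deleted lines might look like this:

    import os

    def is_valid_logfile(file_path):
        # Assumed sketch. Mirrors the removed inline check: a bare file name (or empty
        # value) is accepted as-is; otherwise the parent directory must exist, a file
        # name must be given, and the target must not itself be a directory.
        log_dir = os.path.dirname(file_path)
        if not log_dir:
            return True
        return os.path.exists(log_dir) \
            and bool(os.path.basename(file_path)) \
            and not os.path.isdir(file_path)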
@@ -485,7 +484,6 @@ def configuration_helper(origin):
             ub.session.commit()
             flash(_(u"Calibre-Web configuration updated"), category="success")
             config.loadSettings()
-            app.logger.setLevel(config.config_log_level)
         except Exception as e:
             flash(e, category="error")
             return render_title_template("config_edit.html", config=config, origin=origin,

@@ -502,7 +500,7 @@ def configuration_helper(origin):
         # stop Server
         Server.setRestartTyp(True)
         Server.stopServer()
-        app.logger.info('Reboot required, restarting')
+        log.info('Reboot required, restarting')
     if origin:
         success = True
     if is_gdrive_ready() and feature_support['gdrive'] is True:  # and config.config_use_google_drive == True:

@@ -536,23 +534,23 @@ def new_user():
         content.sidebar_view = val
 
         if "show_detail_random" in to_save:
-            content.sidebar_view += ub.DETAIL_RANDOM
+            content.sidebar_view |= constants.DETAIL_RANDOM
 
         content.role = 0
         if "admin_role" in to_save:
-            content.role = content.role + ub.ROLE_ADMIN
+            content.role |= constants.ROLE_ADMIN
         if "download_role" in to_save:
-            content.role = content.role + ub.ROLE_DOWNLOAD
+            content.role |= constants.ROLE_DOWNLOAD
         if "upload_role" in to_save:
-            content.role = content.role + ub.ROLE_UPLOAD
+            content.role |= constants.ROLE_UPLOAD
         if "edit_role" in to_save:
-            content.role = content.role + ub.ROLE_EDIT
+            content.role |= constants.ROLE_EDIT
         if "delete_role" in to_save:
-            content.role = content.role + ub.ROLE_DELETE_BOOKS
+            content.role |= constants.ROLE_DELETE_BOOKS
         if "passwd_role" in to_save:
-            content.role = content.role + ub.ROLE_PASSWD
+            content.role |= constants.ROLE_PASSWD
         if "edit_shelf_role" in to_save:
-            content.role = content.role + ub.ROLE_EDIT_SHELFS
+            content.role |= constants.ROLE_EDIT_SHELFS
         if not to_save["nickname"] or not to_save["email"] or not to_save["password"]:
             flash(_(u"Please fill out all fields!"), category="error")
             return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,

@@ -576,7 +574,7 @@ def new_user():
     else:
         content.role = config.config_default_role
         content.sidebar_view = config.config_default_show
-        content.mature_content = bool(config.config_default_show & ub.MATURE_CONTENT)
+        content.mature_content = bool(config.config_default_show & constants.MATURE_CONTENT)
     return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,
                                  languages=languages, title=_(u"Add new user"), page="newuser",
                                  registered_oauth=oauth_check)

@@ -642,58 +640,58 @@ def edit_user(user_id):
         if "password" in to_save and to_save["password"]:
             content.password = generate_password_hash(to_save["password"])
 
-        if "admin_role" in to_save and not content.role_admin():
-            content.role = content.role + ub.ROLE_ADMIN
-        elif "admin_role" not in to_save and content.role_admin():
-            content.role = content.role - ub.ROLE_ADMIN
+        if "admin_role" in to_save:
+            content.role |= constants.ROLE_ADMIN
+        else:
+            content.role &= ~constants.ROLE_ADMIN
 
-        if "download_role" in to_save and not content.role_download():
-            content.role = content.role + ub.ROLE_DOWNLOAD
-        elif "download_role" not in to_save and content.role_download():
-            content.role = content.role - ub.ROLE_DOWNLOAD
+        if "download_role" in to_save:
+            content.role |= constants.ROLE_DOWNLOAD
+        else:
+            content.role &= ~constants.ROLE_DOWNLOAD
 
-        if "viewer_role" in to_save and not content.role_viewer():
-            content.role = content.role + ub.ROLE_VIEWER
-        elif "viewer_role" not in to_save and content.role_viewer():
-            content.role = content.role - ub.ROLE_VIEWER
+        if "viewer_role" in to_save:
+            content.role |= constants.ROLE_VIEWER
+        else:
+            content.role &= ~constants.ROLE_VIEWER
 
-        if "upload_role" in to_save and not content.role_upload():
-            content.role = content.role + ub.ROLE_UPLOAD
-        elif "upload_role" not in to_save and content.role_upload():
-            content.role = content.role - ub.ROLE_UPLOAD
+        if "upload_role" in to_save:
+            content.role |= constants.ROLE_UPLOAD
+        else:
+            content.role &= ~constants.ROLE_UPLOAD
 
-        if "edit_role" in to_save and not content.role_edit():
-            content.role = content.role + ub.ROLE_EDIT
-        elif "edit_role" not in to_save and content.role_edit():
-            content.role = content.role - ub.ROLE_EDIT
+        if "edit_role" in to_save:
+            content.role |= constants.ROLE_EDIT
+        else:
+            content.role &= ~constants.ROLE_EDIT
 
-        if "delete_role" in to_save and not content.role_delete_books():
-            content.role = content.role + ub.ROLE_DELETE_BOOKS
-        elif "delete_role" not in to_save and content.role_delete_books():
-            content.role = content.role - ub.ROLE_DELETE_BOOKS
+        if "delete_role" in to_save:
+            content.role |= constants.ROLE_DELETE_BOOKS
+        else:
+            content.role &= ~constants.ROLE_DELETE_BOOKS
 
-        if "passwd_role" in to_save and not content.role_passwd():
-            content.role = content.role + ub.ROLE_PASSWD
-        elif "passwd_role" not in to_save and content.role_passwd():
-            content.role = content.role - ub.ROLE_PASSWD
+        if "passwd_role" in to_save:
+            content.role |= constants.ROLE_PASSWD
+        else:
+            content.role &= ~constants.ROLE_PASSWD
 
-        if "edit_shelf_role" in to_save and not content.role_edit_shelfs():
-            content.role = content.role + ub.ROLE_EDIT_SHELFS
-        elif "edit_shelf_role" not in to_save and content.role_edit_shelfs():
-            content.role = content.role - ub.ROLE_EDIT_SHELFS
+        if "edit_shelf_role" in to_save:
+            content.role |= constants.ROLE_EDIT_SHELFS
+        else:
+            content.role &= ~constants.ROLE_EDIT_SHELFS
 
-        val = [int(k[5:]) for k, __ in to_save.items() if k.startswith('show')]
+        val = [int(k[5:]) for k, __ in to_save.items() if k.startswith('show_')]
         sidebar = ub.get_sidebar_config()
         for element in sidebar:
             if element['visibility'] in val and not content.check_visibility(element['visibility']):
-                content.sidebar_view += element['visibility']
+                content.sidebar_view |= element['visibility']
             elif not element['visibility'] in val and content.check_visibility(element['visibility']):
-                content.sidebar_view -= element['visibility']
+                content.sidebar_view &= ~element['visibility']
 
-        if "Show_detail_random" in to_save and not content.show_detail_random():
-            content.sidebar_view += ub.DETAIL_RANDOM
-        elif "Show_detail_random" not in to_save and content.show_detail_random():
-            content.sidebar_view -= ub.DETAIL_RANDOM
+        if "Show_detail_random" in to_save:
+            content.sidebar_view |= constants.DETAIL_RANDOM
+        else:
+            content.sidebar_view &= ~constants.DETAIL_RANDOM
 
         content.mature_content = "Show_mature_content" in to_save
 
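The edit_user() rewrite above swaps arithmetic role bookkeeping (guarded +/- updates) for plain bit operations, which are idempotent and need no role_admin()-style guard. A small standalone illustration, using flag values from the new cps/constants.py shown further down:

    ROLE_ADMIN = 1 << 0
    ROLE_DOWNLOAD = 1 << 1

    role = ROLE_DOWNLOAD

    # setting a flag twice is harmless, unlike `role = role + ROLE_ADMIN`
    role |= ROLE_ADMIN
    role |= ROLE_ADMIN
    assert role == ROLE_ADMIN | ROLE_DOWNLOAD

    # clearing a flag that is already clear is harmless, unlike `role = role - ROLE_ADMIN`
    role &= ~ROLE_ADMIN
    role &= ~ROLE_ADMIN
    assert role == ROLE_DOWNLOAD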

cps/cache_buster.py:

@@ -17,8 +17,14 @@
 # Inspired by https://github.com/ChrisTM/Flask-CacheBust
 # Uses query strings so CSS font files are found without having to resort to absolute URLs
 
-import hashlib
+from __future__ import division, print_function, unicode_literals
 import os
+import hashlib
+
+from . import logger
+
+
+log = logger.create()
 
 
 def init_cache_busting(app):

@@ -34,7 +40,7 @@ def init_cache_busting(app):
 
     hash_table = {}  # map of file hashes
 
-    app.logger.debug('Computing cache-busting values...')
+    log.debug('Computing cache-busting values...')
     # compute file hashes
     for dirpath, __, filenames in os.walk(static_folder):
         for filename in filenames:

@@ -47,7 +53,7 @@ def init_cache_busting(app):
             file_path = rooted_filename.replace(static_folder, "")
             file_path = file_path.replace("\\", "/")  # Convert Windows path to web path
             hash_table[file_path] = file_hash
-    app.logger.debug('Finished computing cache-busting values')
+    log.debug('Finished computing cache-busting values')
 
     def bust_filename(filename):
         return hash_table.get(filename, "")

cps/cli.py (21 changed lines):

@@ -18,9 +18,13 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import argparse
-import os
+from __future__ import division, print_function, unicode_literals
 import sys
+import os
+import argparse
+
+from .constants import CONFIG_DIR as _CONFIG_DIR
 
 
 parser = argparse.ArgumentParser(description='Calibre Web is a web app'
                                  ' providing a interface for browsing, reading and downloading eBooks\n', prog='cps.py')

@@ -33,17 +37,8 @@ parser.add_argument('-k', metavar='path',
 parser.add_argument('-v', action='store_true', help='shows version number and exits Calibre-web')
 args = parser.parse_args()
 
-generalPath = os.path.normpath(os.getenv("CALIBRE_DBPATH",
-                               os.path.dirname(os.path.realpath(__file__)) + os.sep + ".." + os.sep))
-if args.p:
-    settingspath = args.p
-else:
-    settingspath = os.path.join(generalPath, "app.db")
-
-if args.g:
-    gdpath = args.g
-else:
-    gdpath = os.path.join(generalPath, "gdrive.db")
+settingspath = args.p or os.path.join(_CONFIG_DIR, "app.db")
+gdpath = args.g or os.path.join(_CONFIG_DIR, "gdrive.db")
 
 certfilepath = None
 keyfilepath = None
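The collapsed cli.py logic relies on argparse leaving unset options as None, so `or` falls back to the default path; roughly (illustrative values only — _CONFIG_DIR really comes from cps/constants.py):

    import os
    from argparse import Namespace

    _CONFIG_DIR = '/var/calibre-web'              # example value for the sketch
    args = Namespace(p=None, g='/srv/gdrive.db')  # stand-in for parser.parse_args()

    settingspath = args.p or os.path.join(_CONFIG_DIR, 'app.db')   # '/var/calibre-web/app.db'
    gdpath = args.g or os.path.join(_CONFIG_DIR, 'gdrive.db')      # '/srv/gdrive.db'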

cps/comic.py (12 changed lines):

@@ -17,17 +17,21 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import division, print_function, unicode_literals
 import os
-from constants import BookMeta
-from cps import app
-from iso639 import languages as isoLanguages
+
+from . import logger, isoLanguages
+from .constants import BookMeta
+
+
+log = logger.create()
 
 
 try:
     from comicapi.comicarchive import ComicArchive, MetaDataStyle
     use_comic_meta = True
 except ImportError as e:
-    app.logger.warning('cannot import comicapi, extracting comic metadata will not work: %s', e)
+    log.warning('cannot import comicapi, extracting comic metadata will not work: %s', e)
     import zipfile
     import tarfile
     use_comic_meta = False

cps/constants.py:

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 
 # This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
-# Copyright (C) 2019 OzzieIsaacs
+# Copyright (C) 2019 OzzieIsaacs, pwr
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by

@@ -17,10 +17,99 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import division, print_function, unicode_literals
+import sys
+import os
 from collections import namedtuple
 
 
+BASE_DIR = sys.path[0]
+STATIC_DIR = os.path.join(BASE_DIR, 'cps', 'static')
+TEMPLATES_DIR = os.path.join(BASE_DIR, 'cps', 'templates')
+TRANSLATIONS_DIR = os.path.join(BASE_DIR, 'cps', 'translations')
+CONFIG_DIR = os.environ.get('CALIBRE_DBPATH', BASE_DIR)
+
+
+ROLE_USER = 0 << 0
+ROLE_ADMIN = 1 << 0
+ROLE_DOWNLOAD = 1 << 1
+ROLE_UPLOAD = 1 << 2
+ROLE_EDIT = 1 << 3
+ROLE_PASSWD = 1 << 4
+ROLE_ANONYMOUS = 1 << 5
+ROLE_EDIT_SHELFS = 1 << 6
+ROLE_DELETE_BOOKS = 1 << 7
+ROLE_VIEWER = 1 << 8
+
+ALL_ROLES = {
+    "admin_role": ROLE_ADMIN,
+    "download_role": ROLE_DOWNLOAD,
+    "upload_role": ROLE_UPLOAD,
+    "edit_role": ROLE_EDIT,
+    "passwd_role": ROLE_PASSWD,
+    "edit_shelf_role": ROLE_EDIT_SHELFS,
+    "delete_role": ROLE_DELETE_BOOKS,
+    "viewer_role": ROLE_VIEWER,
+}
+
+DETAIL_RANDOM = 1 << 0
+SIDEBAR_LANGUAGE = 1 << 1
+SIDEBAR_SERIES = 1 << 2
+SIDEBAR_CATEGORY = 1 << 3
+SIDEBAR_HOT = 1 << 4
+SIDEBAR_RANDOM = 1 << 5
+SIDEBAR_AUTHOR = 1 << 6
+SIDEBAR_BEST_RATED = 1 << 7
+SIDEBAR_READ_AND_UNREAD = 1 << 8
+SIDEBAR_RECENT = 1 << 9
+SIDEBAR_SORTED = 1 << 10
+MATURE_CONTENT = 1 << 11
+SIDEBAR_PUBLISHER = 1 << 12
+SIDEBAR_RATING = 1 << 13
+SIDEBAR_FORMAT = 1 << 14
+
+ADMIN_USER_ROLES = (ROLE_VIEWER << 1) - 1 - (ROLE_ANONYMOUS | ROLE_EDIT_SHELFS)
+ADMIN_USER_SIDEBAR = (SIDEBAR_FORMAT << 1) - 1
+
+UPDATE_STABLE = 0 << 0
+AUTO_UPDATE_STABLE = 1 << 0
+UPDATE_NIGHTLY = 1 << 1
+AUTO_UPDATE_NIGHTLY = 1 << 2
+
+LOGIN_STANDARD = 0
+LOGIN_LDAP = 1
+LOGIN_OAUTH_GITHUB = 2
+LOGIN_OAUTH_GOOGLE = 3
+
+
+DEFAULT_PASSWORD = "admin123"
+DEFAULT_PORT = 8083
+try:
+    env_CALIBRE_PORT = os.environ.get("CALIBRE_PORT", DEFAULT_PORT)
+    DEFAULT_PORT = int(env_CALIBRE_PORT)
+except ValueError:
+    print('Environment variable CALIBRE_PORT has invalid value (%s), faling back to default (8083)' % env_CALIBRE_PORT)
+del env_CALIBRE_PORT
+
+
+EXTENSIONS_AUDIO = {'mp3', 'm4a', 'm4b'}
+EXTENSIONS_CONVERT = {'pdf', 'epub', 'mobi', 'azw3', 'docx', 'rtf', 'fb2', 'lit', 'lrf', 'txt', 'htmlz', 'rtf', 'odt'}
+EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc', 'docx',
+                     'fb2', 'html', 'rtf', 'odt', 'mp3', 'm4a', 'm4b'}
+# EXTENSIONS_READER = set(['txt', 'pdf', 'epub', 'zip', 'cbz', 'tar', 'cbt'] +
+#                         (['rar','cbr'] if feature_support['rar'] else []))
+
+
+def has_flag(value, bit_flag):
+    return bit_flag == (bit_flag & (value or 0))
+
+
 """
 :rtype: BookMeta
 """
 BookMeta = namedtuple('BookMeta', 'file_path, extension, title, author, cover, description, tags, series, '
                                   'series_id, languages')
+
+
+# clean-up the module namespace
+del sys, os, namedtuple
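The new constants module keeps every permission and sidebar switch as a single bit, so a user's combined setting is one integer that can be tested with has_flag(). A quick illustration of how the values above compose (assumed usage, matching the definitions in the hunk):

    ROLE_ADMIN = 1 << 0
    ROLE_DOWNLOAD = 1 << 1
    ROLE_UPLOAD = 1 << 2


    def has_flag(value, bit_flag):
        return bit_flag == (bit_flag & (value or 0))


    role = ROLE_ADMIN | ROLE_UPLOAD          # stored as the single integer 5
    print(has_flag(role, ROLE_ADMIN))        # True
    print(has_flag(role, ROLE_DOWNLOAD))     # False
    print(has_flag(None, ROLE_ADMIN))        # False -- the `or 0` guards against unset columns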

cps/converter.py:

@@ -17,14 +17,14 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import division, print_function, unicode_literals
 import os
-# import subprocess
-import ub
 import re
 
 from flask_babel import gettext as _
-from subproc_wrapper import process_open
 from . import config
+from .subproc_wrapper import process_open
 
 
 def versionKindle():

cps/db.py (18 changed lines):

@@ -18,16 +18,20 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-from sqlalchemy import *
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import *
+from __future__ import division, print_function, unicode_literals
+import sys
 import os
 import re
 import ast
-from . import config
-import ub
-import sys
-import unidecode
+
+from sqlalchemy import create_engine
+from sqlalchemy import Table, Column, ForeignKey
+from sqlalchemy import String, Integer, Boolean
+from sqlalchemy.orm import relationship, sessionmaker, scoped_session
+from sqlalchemy.ext.declarative import declarative_base
+
+from . import config, ub
 
 
 session = None
 cc_exceptions = ['datetime', 'comments', 'float', 'composite', 'series']
@ -21,28 +21,25 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
# opds routing functions
|
from __future__ import division, print_function, unicode_literals
|
||||||
from . import config, language_table, get_locale, app, ub, global_WorkerThread, db
|
|
||||||
from flask import request, flash, redirect, url_for, abort, Markup, Response
|
|
||||||
from flask import Blueprint
|
|
||||||
import datetime
|
|
||||||
import os
|
import os
|
||||||
|
import datetime
|
||||||
import json
|
import json
|
||||||
from flask_babel import gettext as _
|
|
||||||
from uuid import uuid4
|
|
||||||
from . import helper
|
|
||||||
from .helper import order_authors, common_filters
|
|
||||||
from flask_login import current_user
|
|
||||||
from .web import login_required_if_no_ano, render_title_template, edit_required, \
|
|
||||||
upload_required, login_required, EXTENSIONS_UPLOAD
|
|
||||||
from . import gdriveutils
|
|
||||||
from shutil import move, copyfile
|
from shutil import move, copyfile
|
||||||
from . import uploader
|
from uuid import uuid4
|
||||||
from iso639 import languages as isoLanguages
|
|
||||||
|
from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response
|
||||||
|
from flask_babel import gettext as _
|
||||||
|
from flask_login import current_user
|
||||||
|
|
||||||
|
from . import constants, logger, isoLanguages, gdriveutils, uploader, helper
|
||||||
|
from . import config, get_locale, db, ub, global_WorkerThread, language_table
|
||||||
|
from .helper import order_authors, common_filters
|
||||||
|
from .web import login_required_if_no_ano, render_title_template, edit_required, upload_required, login_required
|
||||||
|
|
||||||
|
|
||||||
editbook = Blueprint('editbook', __name__)
|
editbook = Blueprint('editbook', __name__)
|
||||||
|
log = logger.create()
|
||||||
EXTENSIONS_CONVERT = {'pdf', 'epub', 'mobi', 'azw3', 'docx', 'rtf', 'fb2', 'lit', 'lrf', 'txt', 'htmlz', 'rtf', 'odt'}
|
|
||||||
|
|
||||||
|
|
||||||
# Modifies different Database objects, first check if elements have to be added to database, than check
|
# Modifies different Database objects, first check if elements have to be added to database, than check
|
||||||
@ -201,7 +198,7 @@ def delete_book(book_id, book_format):
|
|||||||
db.session.commit()
|
db.session.commit()
|
||||||
else:
|
else:
|
||||||
# book not found
|
# book not found
|
||||||
app.logger.info('Book with id "'+str(book_id)+'" could not be deleted')
|
log.error('Book with id "%s" could not be deleted: not found', book_id)
|
||||||
if book_format:
|
if book_format:
|
||||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||||
else:
|
else:
|
||||||
@ -231,16 +228,16 @@ def render_edit_book(book_id):
|
|||||||
valid_source_formats=list()
|
valid_source_formats=list()
|
||||||
if config.config_ebookconverter == 2:
|
if config.config_ebookconverter == 2:
|
||||||
for file in book.data:
|
for file in book.data:
|
||||||
if file.format.lower() in EXTENSIONS_CONVERT:
|
if file.format.lower() in constants.EXTENSIONS_CONVERT:
|
||||||
valid_source_formats.append(file.format.lower())
|
valid_source_formats.append(file.format.lower())
|
||||||
|
|
||||||
# Determine what formats don't already exist
|
# Determine what formats don't already exist
|
||||||
allowed_conversion_formats = EXTENSIONS_CONVERT.copy()
|
allowed_conversion_formats = constants.EXTENSIONS_CONVERT.copy()
|
||||||
for file in book.data:
|
for file in book.data:
|
||||||
try:
|
try:
|
||||||
allowed_conversion_formats.remove(file.format.lower())
|
allowed_conversion_formats.remove(file.format.lower())
|
||||||
except Exception:
|
except Exception:
|
||||||
app.logger.warning(file.format.lower() + ' already removed from list.')
|
log.warning('%s already removed from list.', file.format.lower())
|
||||||
|
|
||||||
return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
|
return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
|
||||||
title=_(u"edit metadata"), page="editbook",
|
title=_(u"edit metadata"), page="editbook",
|
||||||
@ -321,7 +318,7 @@ def upload_single_file(request, book, book_id):
|
|||||||
if requested_file.filename != '':
|
if requested_file.filename != '':
|
||||||
if '.' in requested_file.filename:
|
if '.' in requested_file.filename:
|
||||||
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
|
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
|
||||||
if file_ext not in EXTENSIONS_UPLOAD:
|
if file_ext not in constants.EXTENSIONS_UPLOAD:
|
||||||
flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
|
flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
|
||||||
category="error")
|
category="error")
|
||||||
return redirect(url_for('web.show_book', book_id=book.id))
|
return redirect(url_for('web.show_book', book_id=book.id))
|
||||||
@ -352,7 +349,7 @@ def upload_single_file(request, book, book_id):
|
|||||||
|
|
||||||
# Format entry already exists, no need to update the database
|
# Format entry already exists, no need to update the database
|
||||||
if is_format:
|
if is_format:
|
||||||
app.logger.info('Book format already existing')
|
log.warning('Book format %s already existing', file_ext.upper())
|
||||||
else:
|
else:
|
||||||
db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
|
db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
|
||||||
db.session.add(db_format)
|
db.session.add(db_format)
|
||||||
@ -530,7 +527,7 @@ def edit_book(book_id):
|
|||||||
res = list(language_table[get_locale()].keys())[invers_lang_table.index(lang)]
|
res = list(language_table[get_locale()].keys())[invers_lang_table.index(lang)]
|
||||||
input_l.append(res)
|
input_l.append(res)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
app.logger.error('%s is not a valid language' % lang)
|
log.error('%s is not a valid language', lang)
|
||||||
flash(_(u"%(langname)s is not a valid language", langname=lang), category="error")
|
flash(_(u"%(langname)s is not a valid language", langname=lang), category="error")
|
||||||
modify_database_object(input_l, book.languages, db.Languages, db.session, 'languages')
|
modify_database_object(input_l, book.languages, db.Languages, db.session, 'languages')
|
||||||
|
|
||||||
@ -569,7 +566,7 @@ def edit_book(book_id):
|
|||||||
flash(error, category="error")
|
flash(error, category="error")
|
||||||
return render_edit_book(book_id)
|
return render_edit_book(book_id)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.exception(e)
|
log.exception(e)
|
||||||
db.session.rollback()
|
db.session.rollback()
|
||||||
flash(_("Error editing book, please check logfile for details"), category="error")
|
flash(_("Error editing book, please check logfile for details"), category="error")
|
||||||
return redirect(url_for('web.show_book', book_id=book.id))
|
return redirect(url_for('web.show_book', book_id=book.id))
|
||||||
@ -590,7 +587,7 @@ def upload():
|
|||||||
# check if file extension is correct
|
# check if file extension is correct
|
||||||
if '.' in requested_file.filename:
|
if '.' in requested_file.filename:
|
||||||
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
|
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
|
||||||
if file_ext not in EXTENSIONS_UPLOAD:
|
if file_ext not in constants.EXTENSIONS_UPLOAD:
|
||||||
flash(
|
flash(
|
||||||
_("File extension '%(ext)s' is not allowed to be uploaded to this server",
|
_("File extension '%(ext)s' is not allowed to be uploaded to this server",
|
||||||
ext=file_ext), category="error")
|
ext=file_ext), category="error")
|
||||||
@ -631,7 +628,7 @@ def upload():
|
|||||||
|
|
||||||
if meta.cover is None:
|
if meta.cover is None:
|
||||||
has_cover = 0
|
has_cover = 0
|
||||||
copyfile(os.path.join(config.get_main_dir, "cps/static/generic_cover.jpg"),
|
copyfile(os.path.join(constants.STATIC_DIR, 'generic_cover.jpg'),
|
||||||
os.path.join(filepath, "cover.jpg"))
|
os.path.join(filepath, "cover.jpg"))
|
||||||
else:
|
else:
|
||||||
has_cover = 1
|
has_cover = 1
|
||||||
@ -741,9 +738,7 @@ def convert_bookformat(book_id):
|
|||||||
flash(_(u"Source or destination format for conversion missing"), category="error")
|
flash(_(u"Source or destination format for conversion missing"), category="error")
|
||||||
return redirect(request.environ["HTTP_REFERER"])
|
return redirect(request.environ["HTTP_REFERER"])
|
||||||
|
|
||||||
app.logger.debug('converting: book id: ' + str(book_id) +
|
log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
|
||||||
' from: ' + request.form['book_format_from'] +
|
|
||||||
' to: ' + request.form['book_format_to'])
|
|
||||||
rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
|
rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
|
||||||
book_format_to.upper(), current_user.nickname)
|
book_format_to.upper(), current_user.nickname)
|
||||||
|
|
||||||
|
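The hunk above also switches from building the log message by string concatenation to passing %-style arguments to the logging call, so interpolation is deferred until the record is actually emitted. A minimal illustrative sketch of the two styles (the values are placeholders):

    import logging

    log = logging.getLogger(__name__)
    book_id, book_format_from, book_format_to = 42, 'EPUB', 'MOBI'

    # Old style: the message string is built even if this level is filtered out.
    log.debug('converting: book id: ' + str(book_id) +
              ' from: ' + book_format_from +
              ' to: ' + book_format_to)

    # New style: the logging framework interpolates only when the record is handled.
    log.info('converting: book id: %s from: %s to: %s',
             book_id, book_format_from, book_format_to)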
@ -17,11 +17,13 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
|
import os
|
||||||
import zipfile
|
import zipfile
|
||||||
from lxml import etree
|
from lxml import etree
|
||||||
import os
|
|
||||||
|
from . import isoLanguages
|
||||||
from .constants import BookMeta
|
from .constants import BookMeta
|
||||||
import isoLanguages
|
|
||||||
|
|
||||||
|
|
||||||
def extractCover(zipFile, coverFile, coverpath, tmp_file_name):
|
def extractCover(zipFile, coverFile, coverpath, tmp_file_name):
|
||||||
@ -125,7 +127,7 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
|
|||||||
else:
|
else:
|
||||||
title = epub_metadata['title']
|
title = epub_metadata['title']
|
||||||
|
|
||||||
return uploader.BookMeta(
|
return BookMeta(
|
||||||
file_path=tmp_file_path,
|
file_path=tmp_file_path,
|
||||||
extension=original_file_extension,
|
extension=original_file_extension,
|
||||||
title=title.encode('utf-8').decode('utf-8'),
|
title=title.encode('utf-8').decode('utf-8'),
|
||||||
|
@ -17,7 +17,9 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
from lxml import etree
|
from lxml import etree
|
||||||
|
|
||||||
from .constants import BookMeta
|
from .constants import BookMeta
|
||||||
|
|
||||||
|
|
||||||
|
@ -20,26 +20,31 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
import os
|
import os
|
||||||
from flask import Blueprint
|
import hashlib
|
||||||
from . import gdriveutils
|
|
||||||
from flask import flash, request, redirect, url_for, abort
|
|
||||||
from flask_babel import gettext as _
|
|
||||||
from . import app, config, ub, db
|
|
||||||
from flask_login import login_required
|
|
||||||
import json
|
import json
|
||||||
|
import tempfile
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
from time import time
|
from time import time
|
||||||
import tempfile
|
|
||||||
from shutil import move, copyfile
|
from shutil import move, copyfile
|
||||||
from .web import admin_required
|
|
||||||
|
from flask import Blueprint, flash, request, redirect, url_for, abort
|
||||||
|
from flask_babel import gettext as _
|
||||||
|
from flask_login import login_required
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from googleapiclient.errors import HttpError
|
from googleapiclient.errors import HttpError
|
||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
from . import logger, gdriveutils, config, ub, db
|
||||||
|
from .web import admin_required
|
||||||
|
|
||||||
|
|
||||||
gdrive = Blueprint('gdrive', __name__)
|
gdrive = Blueprint('gdrive', __name__)
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
current_milli_time = lambda: int(round(time() * 1000))
|
current_milli_time = lambda: int(round(time() * 1000))
|
||||||
|
|
||||||
@ -66,10 +71,10 @@ def google_drive_callback():
|
|||||||
abort(403)
|
abort(403)
|
||||||
try:
|
try:
|
||||||
credentials = gdriveutils.Gauth.Instance().auth.flow.step2_exchange(auth_code)
|
credentials = gdriveutils.Gauth.Instance().auth.flow.step2_exchange(auth_code)
|
||||||
with open(os.path.join(config.get_main_dir,'gdrive_credentials'), 'w') as f:
|
with open(gdriveutils.CREDENTIALS, 'w') as f:
|
||||||
f.write(credentials.to_json())
|
f.write(credentials.to_json())
|
||||||
except ValueError as error:
|
except ValueError as error:
|
||||||
app.logger.error(error)
|
log.error(error)
|
||||||
return redirect(url_for('admin.configuration'))
|
return redirect(url_for('admin.configuration'))
|
||||||
|
|
||||||
|
|
||||||
@ -78,7 +83,7 @@ def google_drive_callback():
|
|||||||
@admin_required
|
@admin_required
|
||||||
def watch_gdrive():
|
def watch_gdrive():
|
||||||
if not config.config_google_drive_watch_changes_response:
|
if not config.config_google_drive_watch_changes_response:
|
||||||
with open(os.path.join(config.get_main_dir,'client_secrets.json'), 'r') as settings:
|
with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
|
||||||
filedata = json.load(settings)
|
filedata = json.load(settings)
|
||||||
if filedata['web']['redirect_uris'][0].endswith('/'):
|
if filedata['web']['redirect_uris'][0].endswith('/'):
|
||||||
filedata['web']['redirect_uris'][0] = filedata['web']['redirect_uris'][0][:-((len('/gdrive/callback')+1))]
|
filedata['web']['redirect_uris'][0] = filedata['web']['redirect_uris'][0][:-((len('/gdrive/callback')+1))]
|
||||||
@ -126,7 +131,7 @@ def revoke_watch_gdrive():
|
|||||||
|
|
||||||
@gdrive.route("/gdrive/watch/callback", methods=['GET', 'POST'])
|
@gdrive.route("/gdrive/watch/callback", methods=['GET', 'POST'])
|
||||||
def on_received_watch_confirmation():
|
def on_received_watch_confirmation():
|
||||||
app.logger.debug(request.headers)
|
log.debug('%r', request.headers)
|
||||||
if request.headers.get('X-Goog-Channel-Token') == gdrive_watch_callback_token \
|
if request.headers.get('X-Goog-Channel-Token') == gdrive_watch_callback_token \
|
||||||
and request.headers.get('X-Goog-Resource-State') == 'change' \
|
and request.headers.get('X-Goog-Resource-State') == 'change' \
|
||||||
and request.data:
|
and request.data:
|
||||||
@ -134,27 +139,26 @@ def on_received_watch_confirmation():
|
|||||||
data = request.data
|
data = request.data
|
||||||
|
|
||||||
def updateMetaData():
|
def updateMetaData():
|
||||||
app.logger.info('Change received from gdrive')
|
log.info('Change received from gdrive')
|
||||||
app.logger.debug(data)
|
log.debug('%r', data)
|
||||||
try:
|
try:
|
||||||
j = json.loads(data)
|
j = json.loads(data)
|
||||||
app.logger.info('Getting change details')
|
log.info('Getting change details')
|
||||||
response = gdriveutils.getChangeById(gdriveutils.Gdrive.Instance().drive, j['id'])
|
response = gdriveutils.getChangeById(gdriveutils.Gdrive.Instance().drive, j['id'])
|
||||||
app.logger.debug(response)
|
log.debug('%r', response)
|
||||||
if response:
|
if response:
|
||||||
dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
|
dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
|
||||||
if not response['deleted'] and response['file']['title'] == 'metadata.db' and response['file']['md5Checksum'] != hashlib.md5(dbpath):
|
if not response['deleted'] and response['file']['title'] == 'metadata.db' and response['file']['md5Checksum'] != hashlib.md5(dbpath):
|
||||||
tmpDir = tempfile.gettempdir()
|
tmpDir = tempfile.gettempdir()
|
||||||
app.logger.info('Database file updated')
|
log.info('Database file updated')
|
||||||
copyfile(dbpath, os.path.join(tmpDir, "metadata.db_" + str(current_milli_time())))
|
copyfile(dbpath, os.path.join(tmpDir, "metadata.db_" + str(current_milli_time())))
|
||||||
app.logger.info('Backing up existing and downloading updated metadata.db')
|
log.info('Backing up existing and downloading updated metadata.db')
|
||||||
gdriveutils.downloadFile(None, "metadata.db", os.path.join(tmpDir, "tmp_metadata.db"))
|
gdriveutils.downloadFile(None, "metadata.db", os.path.join(tmpDir, "tmp_metadata.db"))
|
||||||
app.logger.info('Setting up new DB')
|
log.info('Setting up new DB')
|
||||||
# prevent error on windows, as os.rename fails on existing files
|
# prevent error on windows, as os.rename fails on existing files
|
||||||
move(os.path.join(tmpDir, "tmp_metadata.db"), dbpath)
|
move(os.path.join(tmpDir, "tmp_metadata.db"), dbpath)
|
||||||
db.setup_db()
|
db.setup_db()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.info(e.message)
|
log.exception(e)
|
||||||
app.logger.exception(e)
|
|
||||||
updateMetaData()
|
updateMetaData()
|
||||||
return ''
|
return ''
|
||||||
|
@ -17,23 +17,35 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
from flask import Response, stream_with_context
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
from sqlalchemy import Column, UniqueConstraint
|
||||||
|
from sqlalchemy import String, Integer
|
||||||
|
from sqlalchemy.orm import sessionmaker, scoped_session
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from pydrive.auth import GoogleAuth
|
from pydrive.auth import GoogleAuth
|
||||||
from pydrive.drive import GoogleDrive
|
from pydrive.drive import GoogleDrive
|
||||||
from pydrive.auth import RefreshError, InvalidConfigError
|
from pydrive.auth import RefreshError
|
||||||
from apiclient import errors
|
from apiclient import errors
|
||||||
gdrive_support = True
|
gdrive_support = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
gdrive_support = False
|
gdrive_support = False
|
||||||
|
|
||||||
import os
|
from . import logger, cli, config
|
||||||
from . import config, app
|
from .constants import BASE_DIR as _BASE_DIR
|
||||||
import cli
|
|
||||||
import shutil
|
|
||||||
from flask import Response, stream_with_context
|
SETTINGS_YAML = os.path.join(_BASE_DIR, 'settings.yaml')
|
||||||
from sqlalchemy import *
|
CREDENTIALS = os.path.join(_BASE_DIR, 'gdrive_credentials')
|
||||||
from sqlalchemy.ext.declarative import declarative_base
|
CLIENT_SECRETS = os.path.join(_BASE_DIR, 'client_secrets.json')
|
||||||
from sqlalchemy.orm import *
|
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
class Singleton:
|
class Singleton:
|
||||||
@ -78,7 +90,7 @@ class Singleton:
|
|||||||
@Singleton
|
@Singleton
|
||||||
class Gauth:
|
class Gauth:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.auth = GoogleAuth(settings_file=os.path.join(config.get_main_dir,'settings.yaml'))
|
self.auth = GoogleAuth(settings_file=SETTINGS_YAML)
|
||||||
|
|
||||||
|
|
||||||
@Singleton
|
@Singleton
|
||||||
@ -87,8 +99,7 @@ class Gdrive:
|
|||||||
self.drive = getDrive(gauth=Gauth.Instance().auth)
|
self.drive = getDrive(gauth=Gauth.Instance().auth)
|
||||||
|
|
||||||
def is_gdrive_ready():
|
def is_gdrive_ready():
|
||||||
return os.path.exists(os.path.join(config.get_main_dir, 'settings.yaml')) and \
|
return os.path.exists(SETTINGS_YAML) and os.path.exists(CREDENTIALS)
|
||||||
os.path.exists(os.path.join(config.get_main_dir, 'gdrive_credentials'))
|
|
||||||
|
|
||||||
|
|
||||||
engine = create_engine('sqlite:///{0}'.format(cli.gdpath), echo=False)
|
engine = create_engine('sqlite:///{0}'.format(cli.gdpath), echo=False)
|
||||||
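For orientation, the Singleton-decorated classes above are reached through their Instance() accessor rather than constructed directly, which is how the rest of this commit uses them. A usage sketch consistent with the calls seen elsewhere in the diff (not a complete program; it assumes the cps package and the Google Drive dependencies are importable):

    from cps import gdriveutils

    # Every call returns the same shared object.
    auth = gdriveutils.Gauth.Instance().auth     # GoogleAuth built from SETTINGS_YAML
    drive = gdriveutils.Gdrive.Instance().drive  # GoogleDrive handle created via getDrive()

    # True only when both settings.yaml and gdrive_credentials exist in BASE_DIR.
    if gdriveutils.is_gdrive_ready():
        folders = gdriveutils.listRootFolders()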
@ -150,17 +161,17 @@ migrate()
|
|||||||
def getDrive(drive=None, gauth=None):
|
def getDrive(drive=None, gauth=None):
|
||||||
if not drive:
|
if not drive:
|
||||||
if not gauth:
|
if not gauth:
|
||||||
gauth = GoogleAuth(settings_file=os.path.join(config.get_main_dir,'settings.yaml'))
|
gauth = GoogleAuth(settings_file=SETTINGS_YAML)
|
||||||
# Try to load saved client credentials
|
# Try to load saved client credentials
|
||||||
gauth.LoadCredentialsFile(os.path.join(config.get_main_dir,'gdrive_credentials'))
|
gauth.LoadCredentialsFile(CREDENTIALS)
|
||||||
if gauth.access_token_expired:
|
if gauth.access_token_expired:
|
||||||
# Refresh them if expired
|
# Refresh them if expired
|
||||||
try:
|
try:
|
||||||
gauth.Refresh()
|
gauth.Refresh()
|
||||||
except RefreshError as e:
|
except RefreshError as e:
|
||||||
app.logger.error("Google Drive error: " + e.message)
|
log.error("Google Drive error: %s", e)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.exception(e)
|
log.exception(e)
|
||||||
else:
|
else:
|
||||||
# Initialize the saved creds
|
# Initialize the saved creds
|
||||||
gauth.Authorize()
|
gauth.Authorize()
|
||||||
@ -170,7 +181,7 @@ def getDrive(drive=None, gauth=None):
|
|||||||
try:
|
try:
|
||||||
drive.auth.Refresh()
|
drive.auth.Refresh()
|
||||||
except RefreshError as e:
|
except RefreshError as e:
|
||||||
app.logger.error("Google Drive error: " + e.message)
|
log.error("Google Drive error: %s", e)
|
||||||
return drive
|
return drive
|
||||||
|
|
||||||
def listRootFolders():
|
def listRootFolders():
|
||||||
@ -207,7 +218,7 @@ def getEbooksFolderId(drive=None):
|
|||||||
try:
|
try:
|
||||||
gDriveId.gdrive_id = getEbooksFolder(drive)['id']
|
gDriveId.gdrive_id = getEbooksFolder(drive)['id']
|
||||||
except Exception:
|
except Exception:
|
||||||
app.logger.error('Error gDrive, root ID not found')
|
log.error('Error gDrive, root ID not found')
|
||||||
gDriveId.path = '/'
|
gDriveId.path = '/'
|
||||||
session.merge(gDriveId)
|
session.merge(gDriveId)
|
||||||
session.commit()
|
session.commit()
|
||||||
@ -447,10 +458,10 @@ def getChangeById (drive, change_id):
|
|||||||
change = drive.auth.service.changes().get(changeId=change_id).execute()
|
change = drive.auth.service.changes().get(changeId=change_id).execute()
|
||||||
return change
|
return change
|
||||||
except (errors.HttpError) as error:
|
except (errors.HttpError) as error:
|
||||||
app.logger.info(error.message)
|
log.error(error)
|
||||||
return None
|
return None
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.info(e)
|
log.error(e)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@ -520,6 +531,6 @@ def do_gdrive_download(df, headers):
|
|||||||
if resp.status == 206:
|
if resp.status == 206:
|
||||||
yield content
|
yield content
|
||||||
else:
|
else:
|
||||||
app.logger.info('An error occurred: %s' % resp)
|
log.warning('An error occurred: %s', resp)
|
||||||
return
|
return
|
||||||
return Response(stream_with_context(stream()), headers=headers)
|
return Response(stream_with_context(stream()), headers=headers)
|
||||||
|
130
cps/helper.py
@ -18,40 +18,30 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
from . import config, global_WorkerThread, get_locale, db, mimetypes
|
|
||||||
from flask import current_app as app
|
|
||||||
from tempfile import gettempdir
|
|
||||||
import sys
|
import sys
|
||||||
import io
|
|
||||||
import os
|
import os
|
||||||
|
import io
|
||||||
|
import json
|
||||||
|
import mimetypes
|
||||||
|
import random
|
||||||
import re
|
import re
|
||||||
import unicodedata
|
import requests
|
||||||
from .worker import STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS, TASK_EMAIL, TASK_CONVERT, TASK_UPLOAD, \
|
import shutil
|
||||||
TASK_CONVERT_ANY
|
|
||||||
import time
|
import time
|
||||||
|
import unicodedata
|
||||||
|
from datetime import datetime
|
||||||
|
from functools import reduce
|
||||||
|
from tempfile import gettempdir
|
||||||
|
|
||||||
|
from babel import Locale as LC
|
||||||
|
from babel.core import UnknownLocaleError
|
||||||
|
from babel.dates import format_datetime
|
||||||
from flask import send_from_directory, make_response, redirect, abort
|
from flask import send_from_directory, make_response, redirect, abort
|
||||||
from flask_babel import gettext as _
|
from flask_babel import gettext as _
|
||||||
from flask_login import current_user
|
from flask_login import current_user
|
||||||
from babel.dates import format_datetime
|
from sqlalchemy.sql.expression import true, false, and_, or_, text, func
|
||||||
from babel.core import UnknownLocaleError
|
|
||||||
from datetime import datetime
|
|
||||||
from babel import Locale as LC
|
|
||||||
import shutil
|
|
||||||
import requests
|
|
||||||
from sqlalchemy.sql.expression import true, and_, false, text, func
|
|
||||||
from iso639 import languages as isoLanguages
|
|
||||||
from pagination import Pagination
|
|
||||||
from werkzeug.datastructures import Headers
|
from werkzeug.datastructures import Headers
|
||||||
import json
|
|
||||||
|
|
||||||
try:
|
|
||||||
import gdriveutils as gd
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
import random
|
|
||||||
from subproc_wrapper import process_open
|
|
||||||
import ub
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from urllib.parse import quote
|
from urllib.parse import quote
|
||||||
@ -70,17 +60,23 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
use_levenshtein = False
|
use_levenshtein = False
|
||||||
|
|
||||||
try:
|
|
||||||
from functools import reduce
|
|
||||||
except ImportError:
|
|
||||||
pass # We're not using Python 3
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
use_PIL = True
|
use_PIL = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
use_PIL = False
|
use_PIL = False
|
||||||
|
|
||||||
|
from . import logger, config, global_WorkerThread, get_locale, db, ub, isoLanguages
|
||||||
|
from . import gdriveutils as gd
|
||||||
|
from .constants import STATIC_DIR as _STATIC_DIR
|
||||||
|
from .pagination import Pagination
|
||||||
|
from .subproc_wrapper import process_open
|
||||||
|
from .worker import STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS
|
||||||
|
from .worker import TASK_EMAIL, TASK_CONVERT, TASK_UPLOAD, TASK_CONVERT_ANY
|
||||||
|
|
||||||
|
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
def update_download(book_id, user_id):
|
def update_download(book_id, user_id):
|
||||||
check = ub.session.query(ub.Downloads).filter(ub.Downloads.user_id == user_id).filter(ub.Downloads.book_id ==
|
check = ub.session.query(ub.Downloads).filter(ub.Downloads.user_id == user_id).filter(ub.Downloads.book_id ==
|
||||||
@ -96,7 +92,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
|
|||||||
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == old_book_format).first()
|
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == old_book_format).first()
|
||||||
if not data:
|
if not data:
|
||||||
error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id)
|
error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id)
|
||||||
app.logger.error("convert_book_format: " + error_message)
|
log.error("convert_book_format: %s", error_message)
|
||||||
return error_message
|
return error_message
|
||||||
if config.config_use_google_drive:
|
if config.config_use_google_drive:
|
||||||
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + old_book_format.lower())
|
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + old_book_format.lower())
|
||||||
@ -190,7 +186,7 @@ def check_send_to_kindle(entry):
|
|||||||
'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Azw3')})'''
|
'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Azw3')})'''
|
||||||
return bookformats
|
return bookformats
|
||||||
else:
|
else:
|
||||||
app.logger.error(u'Cannot find book entry %d', entry.id)
|
log.error(u'Cannot find book entry %d', entry.id)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@ -275,8 +271,8 @@ def get_sorted_author(value):
|
|||||||
value2 = value[-1] + ", " + " ".join(value[:-1])
|
value2 = value[-1] + ", " + " ".join(value[:-1])
|
||||||
else:
|
else:
|
||||||
value2 = value
|
value2 = value
|
||||||
except Exception:
|
except Exception as ex:
|
||||||
app.logger.error("Sorting author " + str(value) + "failed")
|
log.error("Sorting author %s failed: %s", value, ex)
|
||||||
value2 = value
|
value2 = value
|
||||||
return value2
|
return value2
|
||||||
|
|
||||||
@ -293,13 +289,12 @@ def delete_book_file(book, calibrepath, book_format=None):
|
|||||||
else:
|
else:
|
||||||
if os.path.isdir(path):
|
if os.path.isdir(path):
|
||||||
if len(next(os.walk(path))[1]):
|
if len(next(os.walk(path))[1]):
|
||||||
app.logger.error(
|
log.error("Deleting book %s failed, path has subfolders: %s", book.id, book.path)
|
||||||
"Deleting book " + str(book.id) + " failed, path has subfolders: " + book.path)
|
|
||||||
return False
|
return False
|
||||||
shutil.rmtree(path, ignore_errors=True)
|
shutil.rmtree(path, ignore_errors=True)
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
app.logger.error("Deleting book " + str(book.id) + " failed, book path not valid: " + book.path)
|
log.error("Deleting book %s failed, book path not valid: %s", book.id, book.path)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
@ -322,7 +317,7 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
|
|||||||
if not os.path.exists(new_title_path):
|
if not os.path.exists(new_title_path):
|
||||||
os.renames(path, new_title_path)
|
os.renames(path, new_title_path)
|
||||||
else:
|
else:
|
||||||
app.logger.info("Copying title: " + path + " into existing: " + new_title_path)
|
log.info("Copying title: %s into existing: %s", path, new_title_path)
|
||||||
for dir_name, __, file_list in os.walk(path):
|
for dir_name, __, file_list in os.walk(path):
|
||||||
for file in file_list:
|
for file in file_list:
|
||||||
os.renames(os.path.join(dir_name, file),
|
os.renames(os.path.join(dir_name, file),
|
||||||
@ -330,8 +325,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
|
|||||||
path = new_title_path
|
path = new_title_path
|
||||||
localbook.path = localbook.path.split('/')[0] + '/' + new_titledir
|
localbook.path = localbook.path.split('/')[0] + '/' + new_titledir
|
||||||
except OSError as ex:
|
except OSError as ex:
|
||||||
app.logger.error("Rename title from: " + path + " to " + new_title_path + ": " + str(ex))
|
log.error("Rename title from: %s to %s: %s", path, new_title_path, ex)
|
||||||
app.logger.debug(ex, exc_info=True)
|
log.debug(ex, exc_info=True)
|
||||||
return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||||
src=path, dest=new_title_path, error=str(ex))
|
src=path, dest=new_title_path, error=str(ex))
|
||||||
if authordir != new_authordir:
|
if authordir != new_authordir:
|
||||||
@ -340,8 +335,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
|
|||||||
os.renames(path, new_author_path)
|
os.renames(path, new_author_path)
|
||||||
localbook.path = new_authordir + '/' + localbook.path.split('/')[1]
|
localbook.path = new_authordir + '/' + localbook.path.split('/')[1]
|
||||||
except OSError as ex:
|
except OSError as ex:
|
||||||
app.logger.error("Rename author from: " + path + " to " + new_author_path + ": " + str(ex))
|
log.error("Rename author from: %s to %s: %s", path, new_author_path, ex)
|
||||||
app.logger.debug(ex, exc_info=True)
|
log.debug(ex, exc_info=True)
|
||||||
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||||
src=path, dest=new_author_path, error=str(ex))
|
src=path, dest=new_author_path, error=str(ex))
|
||||||
# Rename all files from old names to new names
|
# Rename all files from old names to new names
|
||||||
@ -354,8 +349,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
|
|||||||
os.path.join(path_name, new_name + '.' + file_format.format.lower()))
|
os.path.join(path_name, new_name + '.' + file_format.format.lower()))
|
||||||
file_format.name = new_name
|
file_format.name = new_name
|
||||||
except OSError as ex:
|
except OSError as ex:
|
||||||
app.logger.error("Rename file in path " + path + " to " + new_name + ": " + str(ex))
|
log.error("Rename file in path %s to %s: %s", path, new_name, ex)
|
||||||
app.logger.debug(ex, exc_info=True)
|
log.debug(ex, exc_info=True)
|
||||||
return _("Rename file in path '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
return _("Rename file in path '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||||
src=path, dest=new_name, error=str(ex))
|
src=path, dest=new_name, error=str(ex))
|
||||||
return False
|
return False
|
||||||
@ -454,26 +449,25 @@ def get_book_cover(book_id):
|
|||||||
if config.config_use_google_drive:
|
if config.config_use_google_drive:
|
||||||
try:
|
try:
|
||||||
if not gd.is_gdrive_ready():
|
if not gd.is_gdrive_ready():
|
||||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
|
return send_from_directory(_STATIC_DIR, "generic_cover.jpg")
|
||||||
path=gd.get_cover_via_gdrive(book.path)
|
path=gd.get_cover_via_gdrive(book.path)
|
||||||
if path:
|
if path:
|
||||||
return redirect(path)
|
return redirect(path)
|
||||||
else:
|
else:
|
||||||
app.logger.error(book.path + '/cover.jpg not found on Google Drive')
|
log.error('%s/cover.jpg not found on Google Drive', book.path)
|
||||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
|
return send_from_directory(_STATIC_DIR, "generic_cover.jpg")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.error("Error Message: " + e.message)
|
log.exception(e)
|
||||||
app.logger.exception(e)
|
|
||||||
# traceback.print_exc()
|
# traceback.print_exc()
|
||||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"),"generic_cover.jpg")
|
return send_from_directory(_STATIC_DIR,"generic_cover.jpg")
|
||||||
else:
|
else:
|
||||||
cover_file_path = os.path.join(config.config_calibre_dir, book.path)
|
cover_file_path = os.path.join(config.config_calibre_dir, book.path)
|
||||||
if os.path.isfile(os.path.join(cover_file_path, "cover.jpg")):
|
if os.path.isfile(os.path.join(cover_file_path, "cover.jpg")):
|
||||||
return send_from_directory(cover_file_path, "cover.jpg")
|
return send_from_directory(cover_file_path, "cover.jpg")
|
||||||
else:
|
else:
|
||||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"),"generic_cover.jpg")
|
return send_from_directory(_STATIC_DIR,"generic_cover.jpg")
|
||||||
else:
|
else:
|
||||||
return send_from_directory(os.path.join(os.path.dirname(__file__), "static"),"generic_cover.jpg")
|
return send_from_directory(_STATIC_DIR,"generic_cover.jpg")
|
||||||
|
|
||||||
|
|
||||||
# saves book cover from url
|
# saves book cover from url
|
||||||
@ -493,15 +487,15 @@ def save_cover_from_filestorage(filepath, saved_filename, img):
|
|||||||
try:
|
try:
|
||||||
os.makedirs(filepath)
|
os.makedirs(filepath)
|
||||||
except OSError:
|
except OSError:
|
||||||
app.logger.error(u"Failed to create path for cover")
|
log.error(u"Failed to create path for cover")
|
||||||
return False
|
return False
|
||||||
try:
|
try:
|
||||||
img.save(os.path.join(filepath, saved_filename))
|
img.save(os.path.join(filepath, saved_filename))
|
||||||
except OSError:
|
except OSError:
|
||||||
app.logger.error(u"Failed to store cover-file")
|
log.error(u"Failed to store cover-file")
|
||||||
return False
|
return False
|
||||||
except IOError:
|
except IOError:
|
||||||
app.logger.error(u"Cover-file is not a valid image file")
|
log.error(u"Cover-file is not a valid image file")
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@ -512,7 +506,7 @@ def save_cover(img, book_path):
|
|||||||
|
|
||||||
if use_PIL:
|
if use_PIL:
|
||||||
if content_type not in ('image/jpeg', 'image/png', 'image/webp'):
|
if content_type not in ('image/jpeg', 'image/png', 'image/webp'):
|
||||||
app.logger.error("Only jpg/jpeg/png/webp files are supported as coverfile")
|
log.error("Only jpg/jpeg/png/webp files are supported as coverfile")
|
||||||
return False
|
return False
|
||||||
# convert to jpg because calibre only supports jpg
|
# convert to jpg because calibre only supports jpg
|
||||||
if content_type in ('image/png', 'image/webp'):
|
if content_type in ('image/png', 'image/webp'):
|
||||||
@ -526,7 +520,7 @@ def save_cover(img, book_path):
|
|||||||
img._content = tmp_bytesio.getvalue()
|
img._content = tmp_bytesio.getvalue()
|
||||||
else:
|
else:
|
||||||
if content_type not in ('image/jpeg'):
|
if content_type not in ('image/jpeg'):
|
||||||
app.logger.error("Only jpg/jpeg files are supported as coverfile")
|
log.error("Only jpg/jpeg files are supported as coverfile")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if ub.config.config_use_google_drive:
|
if ub.config.config_use_google_drive:
|
||||||
@ -534,7 +528,7 @@ def save_cover(img, book_path):
|
|||||||
if save_cover_from_filestorage(tmpDir, "uploaded_cover.jpg", img) is True:
|
if save_cover_from_filestorage(tmpDir, "uploaded_cover.jpg", img) is True:
|
||||||
gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'),
|
gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'),
|
||||||
os.path.join(tmpDir, "uploaded_cover.jpg"))
|
os.path.join(tmpDir, "uploaded_cover.jpg"))
|
||||||
app.logger.info("Cover is saved on Google Drive")
|
log.info("Cover is saved on Google Drive")
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
@ -547,7 +541,7 @@ def do_download_file(book, book_format, data, headers):
|
|||||||
if config.config_use_google_drive:
|
if config.config_use_google_drive:
|
||||||
startTime = time.time()
|
startTime = time.time()
|
||||||
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
|
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
|
||||||
app.logger.debug(time.time() - startTime)
|
log.debug('%s', time.time() - startTime)
|
||||||
if df:
|
if df:
|
||||||
return gd.do_gdrive_download(df, headers)
|
return gd.do_gdrive_download(df, headers)
|
||||||
else:
|
else:
|
||||||
@ -556,7 +550,7 @@ def do_download_file(book, book_format, data, headers):
|
|||||||
filename = os.path.join(config.config_calibre_dir, book.path)
|
filename = os.path.join(config.config_calibre_dir, book.path)
|
||||||
if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)):
|
if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)):
|
||||||
# ToDo: improve error handling
|
# ToDo: improve error handling
|
||||||
app.logger.error('File not found: %s' % os.path.join(filename, data.name + "." + book_format))
|
log.error('File not found: %s', os.path.join(filename, data.name + "." + book_format))
|
||||||
response = make_response(send_from_directory(filename, data.name + "." + book_format))
|
response = make_response(send_from_directory(filename, data.name + "." + book_format))
|
||||||
response.headers = headers
|
response.headers = headers
|
||||||
return response
|
return response
|
||||||
@ -581,7 +575,7 @@ def check_unrar(unrarLocation):
|
|||||||
version = value.group(1)
|
version = value.group(1)
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
error = True
|
error = True
|
||||||
app.logger.exception(e)
|
log.exception(e)
|
||||||
version = _(u'Error executing UnRar')
|
version = _(u'Error executing UnRar')
|
||||||
else:
|
else:
|
||||||
version = _(u'Unrar binary file not found')
|
version = _(u'Unrar binary file not found')
|
||||||
@ -724,12 +718,12 @@ def get_search_results(term):
|
|||||||
db.Books.authors.any(db.func.lower(db.Authors.name).ilike("%" + term + "%"))
|
db.Books.authors.any(db.func.lower(db.Authors.name).ilike("%" + term + "%"))
|
||||||
|
|
||||||
return db.session.query(db.Books).filter(common_filters()).filter(
|
return db.session.query(db.Books).filter(common_filters()).filter(
|
||||||
db.or_(db.Books.tags.any(db.func.lower(db.Tags.name).ilike("%" + term + "%")),
|
or_(db.Books.tags.any(db.func.lower(db.Tags.name).ilike("%" + term + "%")),
|
||||||
db.Books.series.any(db.func.lower(db.Series.name).ilike("%" + term + "%")),
|
db.Books.series.any(db.func.lower(db.Series.name).ilike("%" + term + "%")),
|
||||||
db.Books.authors.any(and_(*q)),
|
db.Books.authors.any(and_(*q)),
|
||||||
db.Books.publishers.any(db.func.lower(db.Publishers.name).ilike("%" + term + "%")),
|
db.Books.publishers.any(db.func.lower(db.Publishers.name).ilike("%" + term + "%")),
|
||||||
db.func.lower(db.Books.title).ilike("%" + term + "%")
|
db.func.lower(db.Books.title).ilike("%" + term + "%")
|
||||||
)).all()
|
)).all()
|
||||||
|
|
||||||
def get_unique_other_books(library_books, author_books):
|
def get_unique_other_books(library_books, author_books):
|
||||||
# Get all identifiers (ISBN, Goodreads, etc) and filter author's books by that list so we show fewer duplicates
|
# Get all identifiers (ISBN, Goodreads, etc) and filter author's books by that list so we show fewer duplicates
|
||||||
|
@ -17,6 +17,9 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from iso639 import languages, __version__
|
from iso639 import languages, __version__
|
||||||
get = languages.get
|
get = languages.get
|
||||||
|
@ -23,15 +23,21 @@
|
|||||||
|
|
||||||
# custom jinja filters
|
# custom jinja filters
|
||||||
|
|
||||||
from flask import Blueprint, request, url_for
|
from __future__ import division, print_function, unicode_literals
|
||||||
import datetime
|
import datetime
|
||||||
|
import mimetypes
|
||||||
import re
|
import re
|
||||||
from . import mimetypes, app
|
|
||||||
from babel.dates import format_date
|
from babel.dates import format_date
|
||||||
|
from flask import Blueprint, request, url_for
|
||||||
from flask_babel import get_locale
|
from flask_babel import get_locale
|
||||||
from flask_login import current_user
|
from flask_login import current_user
|
||||||
|
|
||||||
|
from . import logger
|
||||||
|
|
||||||
|
|
||||||
jinjia = Blueprint('jinjia', __name__)
|
jinjia = Blueprint('jinjia', __name__)
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
# pagination links in jinja
|
# pagination links in jinja
|
||||||
@ -79,8 +85,7 @@ def formatdate_filter(val):
|
|||||||
formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S")
|
formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S")
|
||||||
return format_date(formatdate, format='medium', locale=get_locale())
|
return format_date(formatdate, format='medium', locale=get_locale())
|
||||||
except AttributeError as e:
|
except AttributeError as e:
|
||||||
app.logger.error('Babel error: %s, Current user locale: %s, Current User: %s' %
|
log.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname)
|
||||||
(e, current_user.locale, current_user.nickname))
|
|
||||||
return formatdate
|
return formatdate
|
||||||
|
|
||||||
@jinjia.app_template_filter('formatdateinput')
|
@jinjia.app_template_filter('formatdateinput')
|
||||||
|
126
cps/logger.py
Normal file
@ -0,0 +1,126 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||||
|
# Copyright (C) 2019 pwr
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
|
import os
|
||||||
|
import inspect
|
||||||
|
import logging
|
||||||
|
from logging import Formatter, StreamHandler
|
||||||
|
from logging.handlers import RotatingFileHandler
|
||||||
|
|
||||||
|
from .constants import BASE_DIR as _BASE_DIR
|
||||||
|
|
||||||
|
|
||||||
|
# FORMATTER = Formatter("[%(asctime)s] %(levelname)5s {%(name)s:%(lineno)d %(funcName)s} %(message)s")
|
||||||
|
FORMATTER = Formatter("[%(asctime)s] %(levelname)5s {%(name)s:%(lineno)d} %(message)s")
|
||||||
|
DEFAULT_LOG_LEVEL = logging.INFO
|
||||||
|
DEFAULT_LOG_FILE = os.path.join(_BASE_DIR, "calibre-web.log")
|
||||||
|
LOG_TO_STDERR = '/dev/stderr'
|
||||||
|
|
||||||
|
|
||||||
|
logging.addLevelName(logging.WARNING, "WARN")
|
||||||
|
logging.addLevelName(logging.CRITICAL, "CRIT")
|
||||||
|
|
||||||
|
|
||||||
|
def get(name=None):
|
||||||
|
return logging.getLogger(name)
|
||||||
|
|
||||||
|
|
||||||
|
def create():
|
||||||
|
parent_frame = inspect.stack(0)[1]
|
||||||
|
if hasattr(parent_frame, 'frame'):
|
||||||
|
parent_frame = parent_frame.frame
|
||||||
|
else:
|
||||||
|
parent_frame = parent_frame[0]
|
||||||
|
parent_module = inspect.getmodule(parent_frame)
|
||||||
|
return get(parent_module.__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def is_debug_enabled():
|
||||||
|
return logging.root.level <= logging.DEBUG
|
||||||
|
|
||||||
|
|
||||||
|
def get_level_name(level):
|
||||||
|
return logging.getLevelName(level)
|
||||||
|
|
||||||
|
|
||||||
|
def is_valid_logfile(file_path):
|
||||||
|
if not file_path:
|
||||||
|
return True
|
||||||
|
if os.path.isdir(file_path):
|
||||||
|
return False
|
||||||
|
log_dir = os.path.dirname(file_path)
|
||||||
|
return (not log_dir) or os.path.isdir(log_dir)
|
||||||
|
|
||||||
|
|
||||||
|
def setup(log_file, log_level=None):
|
||||||
|
if log_file:
|
||||||
|
if not os.path.dirname(log_file):
|
||||||
|
log_file = os.path.join(_BASE_DIR, log_file)
|
||||||
|
log_file = os.path.abspath(log_file)
|
||||||
|
else:
|
||||||
|
# log_file = LOG_TO_STDERR
|
||||||
|
log_file = DEFAULT_LOG_FILE
|
||||||
|
|
||||||
|
# print ('%r -- %r' % (log_level, log_file))
|
||||||
|
r = logging.root
|
||||||
|
r.setLevel(log_level or DEFAULT_LOG_LEVEL)
|
||||||
|
|
||||||
|
previous_handler = r.handlers[0] if r.handlers else None
|
||||||
|
# print ('previous %r' % previous_handler)
|
||||||
|
|
||||||
|
if previous_handler:
|
||||||
|
# if the log_file has not changed, don't create a new handler
|
||||||
|
if getattr(previous_handler, 'baseFilename', None) == log_file:
|
||||||
|
return
|
||||||
|
r.debug("logging to %s level %s", log_file, r.level)
|
||||||
|
|
||||||
|
if log_file == LOG_TO_STDERR:
|
||||||
|
file_handler = StreamHandler()
|
||||||
|
file_handler.baseFilename = LOG_TO_STDERR
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
file_handler = RotatingFileHandler(log_file, maxBytes=50000, backupCount=2)
|
||||||
|
except IOError:
|
||||||
|
if log_file == DEFAULT_LOG_FILE:
|
||||||
|
raise
|
||||||
|
file_handler = RotatingFileHandler(DEFAULT_LOG_FILE, maxBytes=50000, backupCount=2)
|
||||||
|
file_handler.setFormatter(FORMATTER)
|
||||||
|
|
||||||
|
for h in r.handlers:
|
||||||
|
r.removeHandler(h)
|
||||||
|
h.close()
|
||||||
|
r.addHandler(file_handler)
|
||||||
|
# print ('new handler %r' % file_handler)
|
||||||
|
|
||||||
|
|
||||||
|
# Enable logging of smtp lib debug output
|
||||||
|
class StderrLogger(object):
|
||||||
|
def __init__(self, name=None):
|
||||||
|
self.log = get(name or self.__class__.__name__)
|
||||||
|
self.buffer = ''
|
||||||
|
|
||||||
|
def write(self, message):
|
||||||
|
try:
|
||||||
|
if message == '\n':
|
||||||
|
self.log.debug(self.buffer.replace('\n', '\\n'))
|
||||||
|
self.buffer = ''
|
||||||
|
else:
|
||||||
|
self.buffer += message
|
||||||
|
except:
|
||||||
|
self.log.debug("Logging Error")
|
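The new module replaces the scattered app.logger calls: each module asks for its own named logger once, and the root handler is configured in a single place at startup. A short usage sketch based only on the functions defined above (the log file name is just an example):

    from cps import logger

    # Attach one RotatingFileHandler (or a stderr handler) to the root logger.
    logger.setup('calibre-web.log', logger.DEFAULT_LOG_LEVEL)

    # Returns logging.getLogger() named after the calling module.
    log = logger.create()
    log.info('server ready')
    log.debug('debug enabled: %s', logger.is_debug_enabled())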
@ -1,7 +1,10 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
from flask import session
|
from flask import session
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from flask_dance.consumer.backend.sqla import SQLAlchemyBackend, first, _get_real_user
|
from flask_dance.consumer.backend.sqla import SQLAlchemyBackend, first, _get_real_user
|
||||||
from sqlalchemy.orm.exc import NoResultFound
|
from sqlalchemy.orm.exc import NoResultFound
|
||||||
|
@ -21,30 +21,34 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>
|
# along with this program. If not, see <http://www.gnu.org/licenses/>
|
||||||
|
|
||||||
from flask_dance.contrib.github import make_github_blueprint, github
|
from __future__ import division, print_function, unicode_literals
|
||||||
from flask_dance.contrib.google import make_google_blueprint, google
|
|
||||||
from flask_dance.consumer import oauth_authorized, oauth_error
|
|
||||||
from oauth import OAuthBackend
|
|
||||||
from sqlalchemy.orm.exc import NoResultFound
|
|
||||||
from flask import session, request, make_response, abort
|
|
||||||
import json
|
import json
|
||||||
from cps import config, app
|
from functools import wraps
|
||||||
import ub
|
from oauth import OAuthBackend
|
||||||
from flask_login import login_user, current_user
|
|
||||||
|
from flask import session, request, make_response, abort
|
||||||
from flask import Blueprint, flash, redirect, url_for
|
from flask import Blueprint, flash, redirect, url_for
|
||||||
from flask_babel import gettext as _
|
from flask_babel import gettext as _
|
||||||
# from web import github_oauth_required
|
from flask_dance.consumer import oauth_authorized, oauth_error
|
||||||
from functools import wraps
|
from flask_dance.contrib.github import make_github_blueprint, github
|
||||||
from web import login_required
|
from flask_dance.contrib.google import make_google_blueprint, google
|
||||||
|
from flask_login import login_user, current_user
|
||||||
|
from sqlalchemy.orm.exc import NoResultFound
|
||||||
|
|
||||||
|
from . import constants, logger, config, app, ub
|
||||||
|
from .web import login_required
|
||||||
|
# from .web import github_oauth_required
|
||||||
|
|
||||||
|
|
||||||
oauth_check = {}
|
oauth_check = {}
|
||||||
oauth = Blueprint('oauth', __name__)
|
oauth = Blueprint('oauth', __name__)
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
def github_oauth_required(f):
|
def github_oauth_required(f):
|
||||||
@wraps(f)
|
@wraps(f)
|
||||||
def inner(*args, **kwargs):
|
def inner(*args, **kwargs):
|
||||||
if config.config_login_type == ub.LOGIN_OAUTH_GITHUB:
|
if config.config_login_type == constants.LOGIN_OAUTH_GITHUB:
|
||||||
return f(*args, **kwargs)
|
return f(*args, **kwargs)
|
||||||
if request.is_xhr:
|
if request.is_xhr:
|
||||||
data = {'status': 'error', 'message': 'Not Found'}
|
data = {'status': 'error', 'message': 'Not Found'}
|
||||||
@ -59,7 +63,7 @@ def github_oauth_required(f):
|
|||||||
def google_oauth_required(f):
|
def google_oauth_required(f):
|
||||||
@wraps(f)
|
@wraps(f)
|
||||||
def inner(*args, **kwargs):
|
def inner(*args, **kwargs):
|
||||||
if config.config_use_google_oauth == ub.LOGIN_OAUTH_GOOGLE:
|
if config.config_use_google_oauth == constants.LOGIN_OAUTH_GOOGLE:
|
||||||
return f(*args, **kwargs)
|
return f(*args, **kwargs)
|
||||||
if request.is_xhr:
|
if request.is_xhr:
|
||||||
data = {'status': 'error', 'message': 'Not Found'}
|
data = {'status': 'error', 'message': 'Not Found'}
|
||||||
@ -101,7 +105,7 @@ def register_user_with_oauth(user=None):
|
|||||||
try:
|
try:
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.exception(e)
|
log.exception(e)
|
||||||
ub.session.rollback()
|
ub.session.rollback()
|
||||||
|
|
||||||
|
|
||||||
@ -195,7 +199,7 @@ if ub.oauth_support:
|
|||||||
ub.session.add(oauth)
|
ub.session.add(oauth)
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.exception(e)
|
log.exception(e)
|
||||||
ub.session.rollback()
|
ub.session.rollback()
|
||||||
|
|
||||||
# Disable Flask-Dance's default behavior for saving the OAuth token
|
# Disable Flask-Dance's default behavior for saving the OAuth token
|
||||||
@ -221,7 +225,7 @@ if ub.oauth_support:
|
|||||||
ub.session.add(oauth)
|
ub.session.add(oauth)
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.exception(e)
|
log.exception(e)
|
||||||
ub.session.rollback()
|
ub.session.rollback()
|
||||||
return redirect(url_for('web.login'))
|
return redirect(url_for('web.login'))
|
||||||
#if config.config_public_reg:
|
#if config.config_public_reg:
|
||||||
@ -264,11 +268,11 @@ if ub.oauth_support:
|
|||||||
logout_oauth_user()
|
logout_oauth_user()
|
||||||
flash(_(u"Unlink to %(oauth)s success.", oauth=oauth_check[provider]), category="success")
|
flash(_(u"Unlink to %(oauth)s success.", oauth=oauth_check[provider]), category="success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
app.logger.exception(e)
|
log.exception(e)
|
||||||
ub.session.rollback()
|
ub.session.rollback()
|
||||||
flash(_(u"Unlink to %(oauth)s failed.", oauth=oauth_check[provider]), category="error")
|
flash(_(u"Unlink to %(oauth)s failed.", oauth=oauth_check[provider]), category="error")
|
||||||
except NoResultFound:
|
except NoResultFound:
|
||||||
app.logger.warning("oauth %s for user %d not found" % (provider, current_user.id))
|
log.warning("oauth %s for user %d not found", provider, current_user.id)
|
||||||
flash(_(u"Not linked to %(oauth)s.", oauth=oauth_check[provider]), category="error")
|
flash(_(u"Not linked to %(oauth)s.", oauth=oauth_check[provider]), category="error")
|
||||||
return redirect(url_for('web.profile'))
|
return redirect(url_for('web.profile'))
|
||||||
|
|
||||||
|
36
cps/opds.py
@ -21,22 +21,24 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
# opds routing functions
|
from __future__ import division, print_function, unicode_literals
|
||||||
from . import config, db
|
|
||||||
from flask import request, render_template, Response, g, make_response
|
|
||||||
from pagination import Pagination
|
|
||||||
from flask import Blueprint
|
|
||||||
import datetime
|
|
||||||
import ub
|
|
||||||
from flask_login import current_user
|
|
||||||
from functools import wraps
|
|
||||||
from .web import login_required_if_no_ano, common_filters, get_search_results, render_read_books, download_required
|
|
||||||
from sqlalchemy.sql.expression import func, text
|
|
||||||
from werkzeug.security import check_password_hash
|
|
||||||
from .helper import fill_indexpage, get_download_link, get_book_cover
|
|
||||||
import sys
|
import sys
|
||||||
|
import datetime
|
||||||
|
from functools import wraps
|
||||||
|
|
||||||
|
from flask import Blueprint, request, render_template, Response, g, make_response
|
||||||
|
from flask_login import current_user
|
||||||
|
from sqlalchemy.sql.expression import func, text, or_, and_
|
||||||
|
from werkzeug.security import check_password_hash
|
||||||
|
|
||||||
|
from . import logger, config, db, ub
|
||||||
|
from .helper import fill_indexpage, get_download_link, get_book_cover
|
||||||
|
from .pagination import Pagination
|
||||||
|
from .web import common_filters, get_search_results, render_read_books, download_required
|
||||||
|
|
||||||
|
|
||||||
opds = Blueprint('opds', __name__)
|
opds = Blueprint('opds', __name__)
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
def requires_basic_auth_if_no_ano(f):
|
def requires_basic_auth_if_no_ano(f):
|
||||||
@ -231,10 +233,10 @@ def feed_shelf(book_id):
|
|||||||
if current_user.is_anonymous:
|
if current_user.is_anonymous:
|
||||||
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == book_id).first()
|
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == book_id).first()
|
||||||
else:
|
else:
|
||||||
shelf = ub.session.query(ub.Shelf).filter(ub.or_(ub.and_(ub.Shelf.user_id == int(current_user.id),
|
shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
|
||||||
ub.Shelf.id == book_id),
|
ub.Shelf.id == book_id),
|
||||||
ub.and_(ub.Shelf.is_public == 1,
|
and_(ub.Shelf.is_public == 1,
|
||||||
ub.Shelf.id == book_id))).first()
|
ub.Shelf.id == book_id))).first()
|
||||||
result = list()
|
result = list()
|
||||||
# user is allowed to access shelf
|
# user is allowed to access shelf
|
||||||
if shelf:
|
if shelf:
|
||||||
|
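The shelf lookup above now composes or_ and and_ imported directly from sqlalchemy.sql.expression instead of reaching for them through ub. The same filter, pulled out of the diff interleaving for readability:

    from sqlalchemy.sql.expression import and_, or_

    shelf = ub.session.query(ub.Shelf).filter(
        or_(and_(ub.Shelf.user_id == int(current_user.id), ub.Shelf.id == book_id),
            and_(ub.Shelf.is_public == 1, ub.Shelf.id == book_id))
    ).first()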
@ -21,6 +21,7 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
from math import ceil
|
from math import ceil
|
||||||
|
|
||||||
|
|
||||||
|
@ -28,10 +28,12 @@
|
|||||||
|
|
||||||
# http://flask.pocoo.org/snippets/62/
|
# http://flask.pocoo.org/snippets/62/
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
try:
|
try:
|
||||||
from urllib.parse import urlparse, urljoin
|
from urllib.parse import urlparse, urljoin
|
||||||
except ImportError:
|
except ImportError:
|
||||||
from urlparse import urlparse, urljoin
|
from urlparse import urlparse, urljoin
|
||||||
|
|
||||||
from flask import request, url_for, redirect
|
from flask import request, url_for, redirect
|
||||||
|
|
||||||
|
|
||||||
|
@ -37,6 +37,8 @@
|
|||||||
#
|
#
|
||||||
# Inspired by http://flask.pocoo.org/snippets/35/
|
# Inspired by http://flask.pocoo.org/snippets/35/
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
|
|
||||||
|
|
||||||
class ReverseProxied(object):
|
class ReverseProxied(object):
|
||||||
"""Wrap the application in this middleware and configure the
|
"""Wrap the application in this middleware and configure the
|
||||||
|
129
cps/server.py
@ -17,12 +17,11 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
from socket import error as SocketError
|
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
import signal
|
import signal
|
||||||
from . import config, global_WorkerThread
|
import socket
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from gevent.pywsgi import WSGIServer
|
from gevent.pywsgi import WSGIServer
|
||||||
@ -36,6 +35,11 @@ except ImportError:
|
|||||||
from tornado import version as tornadoVersion
|
from tornado import version as tornadoVersion
|
||||||
gevent_present = False
|
gevent_present = False
|
||||||
|
|
||||||
|
from . import logger, config, global_WorkerThread
|
||||||
|
|
||||||
|
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
class server:
|
class server:
|
||||||
|
|
||||||
@ -49,76 +53,77 @@ class server:
|
|||||||
|
|
||||||
def init_app(self, application):
|
def init_app(self, application):
|
||||||
self.app = application
|
self.app = application
|
||||||
|
self.port = config.config_port
|
||||||
|
|
||||||
|
self.ssl_args = None
|
||||||
|
certfile_path = config.get_config_certfile()
|
||||||
|
keyfile_path = config.get_config_keyfile()
|
||||||
|
if certfile_path and keyfile_path:
|
||||||
|
if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
|
||||||
|
self.ssl_args = {"certfile": certfile_path,
|
||||||
|
"keyfile": keyfile_path}
|
||||||
|
else:
|
||||||
|
log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.')
|
||||||
|
log.warning('Cert path: %s', certfile_path)
|
||||||
|
log.warning('Key path: %s', keyfile_path)
|
||||||
|
|
||||||
|
def _make_gevent_socket(self):
|
||||||
|
if os.name == 'nt':
|
||||||
|
return ('0.0.0.0', self.port)
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = WSGIServer.get_listener(('', self.port), family=socket.AF_INET6)
|
||||||
|
except socket.error as ex:
|
||||||
|
log.error('%s', ex)
|
||||||
|
log.warning('Unable to listen on \'\', trying on IPv4 only...')
|
||||||
|
s = WSGIServer.get_listener(('', self.port), family=socket.AF_INET)
|
||||||
|
log.debug("%r %r", s._sock, s._sock.getsockname())
|
||||||
|
return s
|
||||||
|
|
||||||
def start_gevent(self):
|
def start_gevent(self):
|
||||||
ssl_args = dict()
|
ssl_args = self.ssl_args or {}
|
||||||
try:
|
log.info('Starting Gevent server')
|
||||||
certfile_path = config.get_config_certfile()
|
|
||||||
keyfile_path = config.get_config_keyfile()
|
|
||||||
if certfile_path and keyfile_path:
|
|
||||||
if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
|
|
||||||
ssl_args = {"certfile": certfile_path,
|
|
||||||
"keyfile": keyfile_path}
|
|
||||||
else:
|
|
||||||
self.app.logger.info('The specified paths for the ssl certificate file and/or key file seem '
|
|
||||||
'to be broken. Ignoring ssl. Cert path: %s | Key path: '
|
|
||||||
'%s' % (certfile_path, keyfile_path))
|
|
||||||
if os.name == 'nt':
|
|
||||||
self.wsgiserver = WSGIServer(('0.0.0.0', config.config_port), self.app, spawn=Pool(), **ssl_args)
|
|
||||||
else:
|
|
||||||
self.wsgiserver = WSGIServer(('', config.config_port), self.app, spawn=Pool(), **ssl_args)
|
|
||||||
self.wsgiserver.serve_forever()
|
|
||||||
|
|
||||||
except SocketError:
|
try:
|
||||||
try:
|
sock = self._make_gevent_socket()
|
||||||
self.app.logger.info('Unable to listen on \'\', trying on IPv4 only...')
|
self.wsgiserver = WSGIServer(sock, self.app, spawn=Pool(), **ssl_args)
|
||||||
self.wsgiserver = WSGIServer(('0.0.0.0', config.config_port), self.app, spawn=Pool(), **ssl_args)
|
self.wsgiserver.serve_forever()
|
||||||
self.wsgiserver.serve_forever()
|
except (OSError, socket.error) as e:
|
||||||
except (OSError, SocketError) as e:
|
log.info("Error starting server: %s", e.strerror)
|
||||||
self.app.logger.info("Error starting server: %s" % e.strerror)
|
print("Error starting server: %s" % e.strerror)
|
||||||
print("Error starting server: %s" % e.strerror)
|
global_WorkerThread.stop()
|
||||||
global_WorkerThread.stop()
|
sys.exit(1)
|
||||||
sys.exit(1)
|
|
||||||
except Exception:
|
except Exception:
|
||||||
self.app.logger.info("Unknown error while starting gevent")
|
log.exception("Unknown error while starting gevent")
|
||||||
|
|
||||||
|
def start_tornado(self):
|
||||||
|
log.info('Starting Tornado server')
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Max Buffersize set to 200MB
|
||||||
|
http_server = HTTPServer(WSGIContainer(self.app),
|
||||||
|
max_buffer_size = 209700000,
|
||||||
|
ssl_options=self.ssl_args)
|
||||||
|
http_server.listen(self.port)
|
||||||
|
self.wsgiserver=IOLoop.instance()
|
||||||
|
self.wsgiserver.start()
|
||||||
|
# wait for stop signal
|
||||||
|
self.wsgiserver.close(True)
|
||||||
|
except socket.error as err:
|
||||||
|
log.exception("Error starting tornado server")
|
||||||
|
print("Error starting server: %s" % err.strerror)
|
||||||
|
global_WorkerThread.stop()
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
def startServer(self):
|
def startServer(self):
|
||||||
if gevent_present:
|
if gevent_present:
|
||||||
self.app.logger.info('Starting Gevent server')
|
|
||||||
# leave subprocess out to allow forking for fetchers and processors
|
# leave subprocess out to allow forking for fetchers and processors
|
||||||
self.start_gevent()
|
self.start_gevent()
|
||||||
else:
|
else:
|
||||||
try:
|
self.start_tornado()
|
||||||
ssl = None
|
|
||||||
self.app.logger.info('Starting Tornado server')
|
|
||||||
certfile_path = config.get_config_certfile()
|
|
||||||
keyfile_path = config.get_config_keyfile()
|
|
||||||
if certfile_path and keyfile_path:
|
|
||||||
if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
|
|
||||||
ssl = {"certfile": certfile_path,
|
|
||||||
"keyfile": keyfile_path}
|
|
||||||
else:
|
|
||||||
self.app.logger.info('The specified paths for the ssl certificate file and/or key file '
|
|
||||||
'seem to be broken. Ignoring ssl. Cert path: %s | Key '
|
|
||||||
'path: %s' % (certfile_path, keyfile_path))
|
|
||||||
|
|
||||||
# Max Buffersize set to 200MB
|
|
||||||
http_server = HTTPServer(WSGIContainer(self.app),
|
|
||||||
max_buffer_size = 209700000,
|
|
||||||
ssl_options=ssl)
|
|
||||||
http_server.listen(config.config_port)
|
|
||||||
self.wsgiserver=IOLoop.instance()
|
|
||||||
self.wsgiserver.start()
|
|
||||||
# wait for stop signal
|
|
||||||
self.wsgiserver.close(True)
|
|
||||||
except SocketError as e:
|
|
||||||
self.app.logger.info("Error starting server: %s" % e.strerror)
|
|
||||||
print("Error starting server: %s" % e.strerror)
|
|
||||||
global_WorkerThread.stop()
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if self.restart is True:
|
if self.restart is True:
|
||||||
self.app.logger.info("Performing restart of Calibre-Web")
|
log.info("Performing restart of Calibre-Web")
|
||||||
global_WorkerThread.stop()
|
global_WorkerThread.stop()
|
||||||
if os.name == 'nt':
|
if os.name == 'nt':
|
||||||
arguments = ["\"" + sys.executable + "\""]
|
arguments = ["\"" + sys.executable + "\""]
|
||||||
@ -128,7 +133,7 @@ class server:
|
|||||||
else:
|
else:
|
||||||
os.execl(sys.executable, sys.executable, *sys.argv)
|
os.execl(sys.executable, sys.executable, *sys.argv)
|
||||||
else:
|
else:
|
||||||
self.app.logger.info("Performing shutdown of Calibre-Web")
|
log.info("Performing shutdown of Calibre-Web")
|
||||||
global_WorkerThread.stop()
|
global_WorkerThread.stop()
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
|
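
The new _make_gevent_socket helper follows a common dual-stack pattern: ask for an IPv6 listener on the wildcard address and fall back to IPv4 only if that fails. Below is a minimal standalone sketch of the same fallback using just the standard socket module (the real code obtains the listener from gevent's WSGIServer.get_listener instead):

import socket

def make_listener(port):
    # Try a dual-stack (IPv6) socket first; fall back to IPv4-only
    # when the host has no usable IPv6 support.
    try:
        s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(('', port))
    except (OSError, socket.error):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(('0.0.0.0', port))
    s.listen(50)
    return s
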
cps/shelf.py | 60

@@ -21,28 +21,34 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import division, print_function, unicode_literals
+
 from flask import Blueprint, request, flash, redirect, url_for
-from . import ub, searched_ids, app, db
 from flask_babel import gettext as _
-from sqlalchemy.sql.expression import func, or_
 from flask_login import login_required, current_user
+from sqlalchemy.sql.expression import func, or_, and_
+
+from . import logger, ub, searched_ids, db
 from .web import render_title_template
 
 
 shelf = Blueprint('shelf', __name__)
+log = logger.create()
+
 
 @shelf.route("/shelf/add/<int:shelf_id>/<int:book_id>")
 @login_required
 def add_to_shelf(shelf_id, book_id):
     shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
     if shelf is None:
-        app.logger.info("Invalid shelf specified")
+        log.error("Invalid shelf specified: %s", shelf_id)
         if not request.is_xhr:
             flash(_(u"Invalid shelf specified"), category="error")
             return redirect(url_for('web.index'))
         return "Invalid shelf specified", 400
 
     if not shelf.is_public and not shelf.user_id == int(current_user.id):
-        app.logger.info("Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name)
+        log.error("User %s not allowed to add a book to %s", current_user, shelf)
         if not request.is_xhr:
             flash(_(u"Sorry you are not allowed to add a book to the the shelf: %(shelfname)s", shelfname=shelf.name),
                   category="error")
@@ -50,7 +56,7 @@ def add_to_shelf(shelf_id, book_id):
         return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403
 
     if shelf.is_public and not current_user.role_edit_shelfs():
-        app.logger.info("User is not allowed to edit public shelves")
+        log.info("User %s not allowed to edit public shelves", current_user)
         if not request.is_xhr:
             flash(_(u"You are not allowed to edit public shelves"), category="error")
             return redirect(url_for('web.index'))
@@ -59,7 +65,7 @@ def add_to_shelf(shelf_id, book_id):
     book_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
                                                           ub.BookShelf.book_id == book_id).first()
     if book_in_shelf:
-        app.logger.info("Book is already part of the shelf: %s" % shelf.name)
+        log.error("Book %s is already part of %s", book_id, shelf)
         if not request.is_xhr:
             flash(_(u"Book is already part of the shelf: %(shelfname)s", shelfname=shelf.name), category="error")
             return redirect(url_for('web.index'))
@@ -88,17 +94,17 @@ def add_to_shelf(shelf_id, book_id):
 def search_to_shelf(shelf_id):
     shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
     if shelf is None:
-        app.logger.info("Invalid shelf specified")
+        log.error("Invalid shelf specified: %s", shelf_id)
         flash(_(u"Invalid shelf specified"), category="error")
         return redirect(url_for('web.index'))
 
     if not shelf.is_public and not shelf.user_id == int(current_user.id):
-        app.logger.info("You are not allowed to add a book to the the shelf: %s" % shelf.name)
+        log.error("User %s not allowed to add a book to %s", current_user, shelf)
         flash(_(u"You are not allowed to add a book to the the shelf: %(name)s", name=shelf.name), category="error")
         return redirect(url_for('web.index'))
 
     if shelf.is_public and not current_user.role_edit_shelfs():
-        app.logger.info("User is not allowed to edit public shelves")
+        log.error("User %s not allowed to edit public shelves", current_user)
         flash(_(u"User is not allowed to edit public shelves"), category="error")
         return redirect(url_for('web.index'))
 
@@ -116,7 +122,7 @@ def search_to_shelf(shelf_id):
         books_for_shelf = searched_ids[current_user.id]
 
         if not books_for_shelf:
-            app.logger.info("Books are already part of the shelf: %s" % shelf.name)
+            log.error("Books are already part of %s", shelf)
             flash(_(u"Books are already part of the shelf: %(name)s", name=shelf.name), category="error")
             return redirect(url_for('web.index'))
 
@@ -142,7 +148,7 @@ def search_to_shelf(shelf_id):
 def remove_from_shelf(shelf_id, book_id):
     shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
     if shelf is None:
-        app.logger.info("Invalid shelf specified")
+        log.error("Invalid shelf specified: %s", shelf_id)
         if not request.is_xhr:
             return redirect(url_for('web.index'))
         return "Invalid shelf specified", 400
@@ -161,7 +167,7 @@ def remove_from_shelf(shelf_id, book_id):
                                                          ub.BookShelf.book_id == book_id).first()
 
         if book_shelf is None:
-            app.logger.info("Book already removed from shelf")
+            log.error("Book %s already removed from %s", book_id, shelf)
             if not request.is_xhr:
                 return redirect(url_for('web.index'))
             return "Book already removed from shelf", 410
@@ -174,7 +180,7 @@ def remove_from_shelf(shelf_id, book_id):
             return redirect(request.environ["HTTP_REFERER"])
         return "", 204
     else:
-        app.logger.info("Sorry you are not allowed to remove a book from this shelf: %s" % shelf.name)
+        log.error("User %s not allowed to remove a book from %s", current_user, shelf)
        if not request.is_xhr:
            flash(_(u"Sorry you are not allowed to remove a book from this shelf: %(sname)s", sname=shelf.name),
                  category="error")
@@ -248,15 +254,15 @@ def delete_shelf(shelf_id):
     else:
         if (not cur_shelf.is_public and cur_shelf.user_id == int(current_user.id)) \
                 or (cur_shelf.is_public and current_user.role_edit_shelfs()):
-            deleted = ub.session.query(ub.Shelf).filter(ub.or_(ub.and_(ub.Shelf.user_id == int(current_user.id),
+            deleted = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
                                                                  ub.Shelf.id == shelf_id),
-                                                        ub.and_(ub.Shelf.is_public == 1,
+                                                            and_(ub.Shelf.is_public == 1,
                                                                  ub.Shelf.id == shelf_id))).delete()
 
     if deleted:
         ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
         ub.session.commit()
-        app.logger.info(_(u"successfully deleted shelf %(name)s", name=cur_shelf.name, category="success"))
+        log.info("successfully deleted %s", cur_shelf)
     return redirect(url_for('web.index'))
 
 # @shelf.route("/shelfdown/<int:shelf_id>")
@@ -267,10 +273,10 @@ def show_shelf(type, shelf_id):
     if current_user.is_anonymous:
         shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
     else:
-        shelf = ub.session.query(ub.Shelf).filter(ub.or_(ub.and_(ub.Shelf.user_id == int(current_user.id),
+        shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
                                                            ub.Shelf.id == shelf_id),
-                                                  ub.and_(ub.Shelf.is_public == 1,
+                                                      and_(ub.Shelf.is_public == 1,
                                                            ub.Shelf.id == shelf_id))).first()
     result = list()
     # user is allowed to access shelf
     if shelf:
@@ -283,7 +289,7 @@ def show_shelf(type, shelf_id):
             if cur_book:
                 result.append(cur_book)
             else:
-                app.logger.info('Not existing book %s in shelf %s deleted' % (book.book_id, shelf.id))
+                log.info('Not existing book %s in %s deleted', book.book_id, shelf)
                 ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book.book_id).delete()
                 ub.session.commit()
         return render_title_template(page, entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name),
@@ -309,10 +315,10 @@ def order_shelf(shelf_id):
     if current_user.is_anonymous:
         shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
     else:
-        shelf = ub.session.query(ub.Shelf).filter(ub.or_(ub.and_(ub.Shelf.user_id == int(current_user.id),
+        shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
                                                            ub.Shelf.id == shelf_id),
-                                                  ub.and_(ub.Shelf.is_public == 1,
+                                                      and_(ub.Shelf.is_public == 1,
                                                            ub.Shelf.id == shelf_id))).first()
     result = list()
     if shelf:
         books_in_shelf2 = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id) \
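
Throughout cps/shelf.py (and the other modules in this commit) the rewritten calls hand the format arguments to the logger instead of pre-formatting the message with %. A small illustration of the difference using the standard logging module; the logger name here is only an example:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("cps.shelf")

shelf_id = 42
# Old style: the message string is built before logging ever sees it.
log.error("Invalid shelf specified: %s" % shelf_id)
# New style: the logger interpolates the arguments itself, and only
# when the record is actually emitted by a handler.
log.error("Invalid shelf specified: %s", shelf_id)
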
@@ -16,9 +16,11 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-import subprocess
-import os
+
+from __future__ import division, print_function, unicode_literals
 import sys
+import os
+import subprocess
 
 
 def process_open(command, quotes=(), env=None, sout=subprocess.PIPE):
cps/ub.py | 251

@@ -18,79 +18,36 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-from sqlalchemy import *
-from sqlalchemy import exc
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import *
-from flask_login import AnonymousUserMixin
+from __future__ import division, print_function, unicode_literals
 import sys
 import os
-import logging
-from werkzeug.security import generate_password_hash
-import json
 import datetime
+import json
 from binascii import hexlify
-import cli
+
 from flask import g
 from flask_babel import gettext as _
+from flask_login import AnonymousUserMixin
 try:
     from flask_dance.consumer.backend.sqla import OAuthConsumerMixin
     oauth_support = True
 except ImportError:
     oauth_support = False
+from sqlalchemy import create_engine, exc, exists
+from sqlalchemy import Column, ForeignKey
+from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime
+from sqlalchemy.orm import relationship, sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+from werkzeug.security import generate_password_hash
+
 try:
     import ldap
 except ImportError:
     pass
 
-ROLE_USER = 0
-ROLE_ADMIN = 1
-ROLE_DOWNLOAD = 2
-ROLE_UPLOAD = 4
-ROLE_EDIT = 8
-ROLE_PASSWD = 16
-ROLE_ANONYMOUS = 32
-ROLE_EDIT_SHELFS = 64
-ROLE_DELETE_BOOKS = 128
-ROLE_VIEWER = 256
-
-
-DETAIL_RANDOM = 1
-SIDEBAR_LANGUAGE = 2
-SIDEBAR_SERIES = 4
-SIDEBAR_CATEGORY = 8
-SIDEBAR_HOT = 16
-SIDEBAR_RANDOM = 32
-SIDEBAR_AUTHOR = 64
-SIDEBAR_BEST_RATED = 128
-SIDEBAR_READ_AND_UNREAD = 256
-SIDEBAR_RECENT = 512
-SIDEBAR_SORTED = 1024
-MATURE_CONTENT = 2048
-SIDEBAR_PUBLISHER = 4096
-SIDEBAR_RATING = 8192
-SIDEBAR_FORMAT = 16384
-
-UPDATE_STABLE = 0
-AUTO_UPDATE_STABLE = 1
-UPDATE_NIGHTLY = 2
-AUTO_UPDATE_NIGHTLY = 4
-
-LOGIN_STANDARD = 0
-LOGIN_LDAP = 1
-LOGIN_OAUTH_GITHUB = 2
-LOGIN_OAUTH_GOOGLE = 3
-
-DEFAULT_PASS = "admin123"
-try:
-    DEFAULT_PORT = int(os.environ.get("CALIBRE_PORT", 8083))
-except ValueError:
-    print ('Environmentvariable CALIBRE_PORT is set to an invalid value: ' +
-           os.environ.get("CALIBRE_PORT", 8083) + ', faling back to default (8083)')
-    DEFAULT_PORT = 8083
+from . import constants, logger, cli
 
 session = None
 
@@ -109,47 +66,47 @@ def get_sidebar_config(kwargs=None):
     content = 'conf' in kwargs
     sidebar = list()
     sidebar.append({"glyph": "glyphicon-book", "text": _('Recently Added'), "link": 'web.index', "id": "new",
-                    "visibility": SIDEBAR_RECENT, 'public': True, "page": "root",
+                    "visibility": constants.SIDEBAR_RECENT, 'public': True, "page": "root",
                     "show_text": _('Show recent books'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-fire", "text": _('Hot Books'), "link": 'web.books_list', "id": "hot",
-                    "visibility": SIDEBAR_HOT, 'public': True, "page": "hot", "show_text": _('Show hot books'),
+                    "visibility": constants.SIDEBAR_HOT, 'public': True, "page": "hot", "show_text": _('Show hot books'),
                     "config_show":True})
     sidebar.append(
         {"glyph": "glyphicon-star", "text": _('Best rated Books'), "link": 'web.books_list', "id": "rated",
-         "visibility": SIDEBAR_BEST_RATED, 'public': True, "page": "rated",
+         "visibility": constants.SIDEBAR_BEST_RATED, 'public': True, "page": "rated",
          "show_text": _('Show best rated books'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-eye-open", "text": _('Read Books'), "link": 'web.books_list', "id": "read",
-                    "visibility": SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "read",
+                    "visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "read",
                     "show_text": _('Show read and unread'), "config_show": content})
     sidebar.append(
         {"glyph": "glyphicon-eye-close", "text": _('Unread Books'), "link": 'web.books_list', "id": "unread",
-         "visibility": SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "unread",
+         "visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "unread",
          "show_text": _('Show unread'), "config_show":False})
     sidebar.append({"glyph": "glyphicon-random", "text": _('Discover'), "link": 'web.books_list', "id": "rand",
-                    "visibility": SIDEBAR_RANDOM, 'public': True, "page": "discover",
+                    "visibility": constants.SIDEBAR_RANDOM, 'public': True, "page": "discover",
                     "show_text": _('Show random books'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-inbox", "text": _('Categories'), "link": 'web.category_list', "id": "cat",
-                    "visibility": SIDEBAR_CATEGORY, 'public': True, "page": "category",
+                    "visibility": constants.SIDEBAR_CATEGORY, 'public': True, "page": "category",
                     "show_text": _('Show category selection'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-bookmark", "text": _('Series'), "link": 'web.series_list', "id": "serie",
-                    "visibility": SIDEBAR_SERIES, 'public': True, "page": "series",
+                    "visibility": constants.SIDEBAR_SERIES, 'public': True, "page": "series",
                     "show_text": _('Show series selection'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-user", "text": _('Authors'), "link": 'web.author_list', "id": "author",
-                    "visibility": SIDEBAR_AUTHOR, 'public': True, "page": "author",
+                    "visibility": constants.SIDEBAR_AUTHOR, 'public': True, "page": "author",
                     "show_text": _('Show author selection'), "config_show":True})
     sidebar.append(
         {"glyph": "glyphicon-text-size", "text": _('Publishers'), "link": 'web.publisher_list', "id": "publisher",
-         "visibility": SIDEBAR_PUBLISHER, 'public': True, "page": "publisher",
+         "visibility": constants.SIDEBAR_PUBLISHER, 'public': True, "page": "publisher",
          "show_text": _('Show publisher selection'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-flag", "text": _('Languages'), "link": 'web.language_overview', "id": "lang",
-                    "visibility": SIDEBAR_LANGUAGE, 'public': (g.user.filter_language() == 'all'),
+                    "visibility": constants.SIDEBAR_LANGUAGE, 'public': (g.user.filter_language() == 'all'),
                     "page": "language",
                     "show_text": _('Show language selection'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-star-empty", "text": _('Ratings'), "link": 'web.ratings_list', "id": "rate",
-                    "visibility": SIDEBAR_RATING, 'public': True,
+                    "visibility": constants.SIDEBAR_RATING, 'public': True,
                     "page": "rating", "show_text": _('Show ratings selection'), "config_show":True})
     sidebar.append({"glyph": "glyphicon-file", "text": _('File formats'), "link": 'web.formats_list', "id": "format",
-                    "visibility": SIDEBAR_FORMAT, 'public': True,
+                    "visibility": constants.SIDEBAR_FORMAT, 'public': True,
                     "page": "format", "show_text": _('Show file formats selection'), "config_show":True})
     return sidebar
 
@@ -161,51 +118,35 @@ class UserBase:
     def is_authenticated(self):
         return True
 
+    def _has_role(self, role_flag):
+        return constants.has_flag(self.role, role_flag)
+
     def role_admin(self):
-        if self.role is not None:
-            return True if self.role & ROLE_ADMIN == ROLE_ADMIN else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_ADMIN)
 
     def role_download(self):
-        if self.role is not None:
-            return True if self.role & ROLE_DOWNLOAD == ROLE_DOWNLOAD else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_DOWNLOAD)
 
     def role_upload(self):
-        return bool((self.role is not None)and(self.role & ROLE_UPLOAD == ROLE_UPLOAD))
+        return self._has_role(constants.ROLE_UPLOAD)
 
     def role_edit(self):
-        if self.role is not None:
-            return True if self.role & ROLE_EDIT == ROLE_EDIT else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_EDIT)
 
     def role_passwd(self):
-        if self.role is not None:
-            return True if self.role & ROLE_PASSWD == ROLE_PASSWD else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_PASSWD)
 
     def role_anonymous(self):
-        if self.role is not None:
-            return True if self.role & ROLE_ANONYMOUS == ROLE_ANONYMOUS else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_ANONYMOUS)
 
     def role_edit_shelfs(self):
-        if self.role is not None:
-            return True if self.role & ROLE_EDIT_SHELFS == ROLE_EDIT_SHELFS else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_EDIT_SHELFS)
 
     def role_delete_books(self):
-        return bool((self.role is not None)and(self.role & ROLE_DELETE_BOOKS == ROLE_DELETE_BOOKS))
+        return self._has_role(constants.ROLE_DELETE_BOOKS)
 
     def role_viewer(self):
-        return bool((self.role is not None)and(self.role & ROLE_VIEWER == ROLE_VIEWER))
+        return self._has_role(constants.ROLE_VIEWER)
 
     @property
     def is_active(self):
@@ -222,10 +163,10 @@ class UserBase:
         return self.default_language
 
     def check_visibility(self, value):
-        return bool((self.sidebar_view is not None) and (self.sidebar_view & value == value))
+        return constants.has_flag(self.sidebar_view, value)
 
     def show_detail_random(self):
-        return bool((self.sidebar_view is not None)and(self.sidebar_view & DETAIL_RANDOM == DETAIL_RANDOM))
+        return self.check_visibility(constants.DETAIL_RANDOM)
 
     def __repr__(self):
         return '<User %r>' % self.nickname
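
The repeated bit-flag checks above collapse into _has_role, which delegates to constants.has_flag. That helper lives in the new cps/constants.py and is not part of this excerpt; judging by the code it replaces, it is presumably a None-safe bitmask test along these lines:

def has_flag(value, bit_flag):
    # Plausible sketch of constants.has_flag, inferred from the
    # "x is not None and x & FLAG == FLAG" checks it replaces.
    if value is None:
        return False
    return value & bit_flag == bit_flag

# e.g. has_flag(1 | 8, 1) -> True, has_flag(None, 1) -> False
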
@@ -246,7 +187,7 @@ class User(UserBase, Base):
     id = Column(Integer, primary_key=True)
     nickname = Column(String(64), unique=True)
     email = Column(String(120), unique=True, default="")
-    role = Column(SmallInteger, default=ROLE_USER)
+    role = Column(SmallInteger, default=constants.ROLE_USER)
     password = Column(String)
     kindle_mail = Column(String(120), default="")
     shelf = relationship('Shelf', backref='user', lazy='dynamic', order_by='Shelf.name')
@@ -270,7 +211,7 @@ class Anonymous(AnonymousUserMixin, UserBase):
         self.loadSettings()
 
     def loadSettings(self):
-        data = session.query(User).filter(User.role.op('&')(ROLE_ANONYMOUS) == ROLE_ANONYMOUS).first()  # type: User
+        data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first()  # type: User
         settings = session.query(Settings).first()
         self.nickname = data.nickname
         self.role = data.role
@@ -308,7 +249,7 @@ class Shelf(Base):
     user_id = Column(Integer, ForeignKey('user.id'))
 
     def __repr__(self):
-        return '<Shelf %r>' % self.name
+        return '<Shelf %d:%r>' % (self.id, self.name)
 
 
 # Baseclass representing Relationship between books and Shelfs in Calibre-Web in app.db (N:M)
@@ -379,7 +320,7 @@ class Settings(Base):
     mail_password = Column(String)
     mail_from = Column(String)
     config_calibre_dir = Column(String)
-    config_port = Column(Integer, default=DEFAULT_PORT)
+    config_port = Column(Integer, default=constants.DEFAULT_PORT)
     config_certfile = Column(String)
     config_keyfile = Column(String)
     config_calibre_web_title = Column(String, default=u'Calibre-Web')
@@ -388,7 +329,7 @@ class Settings(Base):
     config_authors_max = Column(Integer, default=0)
     config_read_column = Column(Integer, default=0)
     config_title_regex = Column(String, default=u'^(A|The|An|Der|Die|Das|Den|Ein|Eine|Einen|Dem|Des|Einem|Eines)\s+')
-    config_log_level = Column(SmallInteger, default=logging.INFO)
+    config_log_level = Column(SmallInteger, default=logger.DEFAULT_LOG_LEVEL)
     config_uploading = Column(SmallInteger, default=0)
     config_anonbrowse = Column(SmallInteger, default=0)
     config_public_reg = Column(SmallInteger, default=0)
@@ -445,8 +386,6 @@ class RemoteAuthToken(Base):
 # Class holds all application specific settings in calibre-web
 class Config:
     def __init__(self):
-        self.config_main_dir = os.path.join(os.path.normpath(os.path.dirname(
-            os.path.realpath(__file__)) + os.sep + ".." + os.sep))
         self.db_configured = None
         self.config_logfile = None
         self.loadSettings()
@@ -497,19 +436,12 @@ class Config:
         # self.config_use_google_oauth = data.config_use_google_oauth
         self.config_google_oauth_client_id = data.config_google_oauth_client_id
         self.config_google_oauth_client_secret = data.config_google_oauth_client_secret
-        if data.config_mature_content_tags:
-            self.config_mature_content_tags = data.config_mature_content_tags
-        else:
-            self.config_mature_content_tags = u''
-        if data.config_logfile:
-            self.config_logfile = data.config_logfile
+        self.config_mature_content_tags = data.config_mature_content_tags or u''
+        self.config_logfile = data.config_logfile or u''
         self.config_rarfile_location = data.config_rarfile_location
         self.config_theme = data.config_theme
         self.config_updatechannel = data.config_updatechannel
+        logger.setup(self.config_logfile, self.config_log_level)
 
-    @property
-    def get_main_dir(self):
-        return self.config_main_dir
-
     @property
     def get_update_channel(self):
@@ -533,72 +465,41 @@ class Config:
         else:
             return self.config_keyfile
 
-    def get_config_logfile(self):
-        if not self.config_logfile:
-            return os.path.join(self.get_main_dir, "calibre-web.log")
-        else:
-            if os.path.dirname(self.config_logfile):
-                return self.config_logfile
-            else:
-                return os.path.join(self.get_main_dir, self.config_logfile)
+    def _has_role(self, role_flag):
+        return constants.has_flag(self.config_default_role, role_flag)
 
     def role_admin(self):
-        if self.config_default_role is not None:
-            return True if self.config_default_role & ROLE_ADMIN == ROLE_ADMIN else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_ADMIN)
 
     def role_download(self):
-        if self.config_default_role is not None:
-            return True if self.config_default_role & ROLE_DOWNLOAD == ROLE_DOWNLOAD else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_DOWNLOAD)
 
     def role_viewer(self):
-        if self.config_default_role is not None:
-            return True if self.config_default_role & ROLE_VIEWER == ROLE_VIEWER else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_VIEWER)
 
     def role_upload(self):
-        if self.config_default_role is not None:
-            return True if self.config_default_role & ROLE_UPLOAD == ROLE_UPLOAD else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_UPLOAD)
 
     def role_edit(self):
-        if self.config_default_role is not None:
-            return True if self.config_default_role & ROLE_EDIT == ROLE_EDIT else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_EDIT)
 
     def role_passwd(self):
-        if self.config_default_role is not None:
-            return True if self.config_default_role & ROLE_PASSWD == ROLE_PASSWD else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_PASSWD)
 
     def role_edit_shelfs(self):
-        if self.config_default_role is not None:
-            return True if self.config_default_role & ROLE_EDIT_SHELFS == ROLE_EDIT_SHELFS else False
-        else:
-            return False
+        return self._has_role(constants.ROLE_EDIT_SHELFS)
 
     def role_delete_books(self):
-        return bool((self.config_default_role is not None) and
-                    (self.config_default_role & ROLE_DELETE_BOOKS == ROLE_DELETE_BOOKS))
+        return self._has_role(constants.ROLE_DELETE_BOOKS)
 
-    def show_detail_random(self):
-        return bool((self.config_default_show is not None) and
-                    (self.config_default_show & DETAIL_RANDOM == DETAIL_RANDOM))
-
     def show_element_new_user(self, value):
-        return bool((self.config_default_show is not None) and
-                    (self.config_default_show & value == value))
+        return constants.has_flag(self.config_default_show, value)
+
+    def show_detail_random(self):
+        return self.show_element_new_user(constants.DETAIL_RANDOM)
 
     def show_mature_content(self):
-        return bool((self.config_default_show is not None) and
-                    (self.config_default_show & MATURE_CONTENT == MATURE_CONTENT))
+        return self.show_element_new_user(constants.MATURE_CONTENT)
 
     def mature_content_tags(self):
         if sys.version_info > (3, 0):  # Python3 str, Python2 unicode
@@ -608,16 +509,7 @@ class Config:
         return list(map(lstrip, self.config_mature_content_tags.split(",")))
 
     def get_Log_Level(self):
-        ret_value = ""
-        if self.config_log_level == logging.INFO:
-            ret_value = 'INFO'
-        elif self.config_log_level == logging.DEBUG:
-            ret_value = 'DEBUG'
-        elif self.config_log_level == logging.WARNING:
-            ret_value = 'WARNING'
-        elif self.config_log_level == logging.ERROR:
-            ret_value = 'ERROR'
-        return ret_value
+        return logger.get_level_name(self.config_log_level)
 
 
 # Migrate database to current version, has to be updated after every database change. Currently migration from
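
config_log_level, logger.setup(...) and logger.get_level_name(...) all refer to the new cps/logger.py module, which is not shown in this excerpt. A rough sketch of the interface those calls assume; the names, formatter and rotation parameters below are guesses, not the project's actual implementation:

import logging
from logging.handlers import RotatingFileHandler

DEFAULT_LOG_LEVEL = logging.INFO

def get_level_name(level):
    # e.g. logging.INFO -> 'INFO'
    return logging.getLevelName(level)

def create(name=None):
    # Callers in the diff do `log = logger.create()`; a per-module name
    # would normally be derived from the caller, simplified here.
    return logging.getLogger(name or 'cps')

def setup(log_file, log_level=None):
    # Send everything below the 'cps' namespace to one rotating log file.
    root = logging.getLogger('cps')
    root.setLevel(log_level or DEFAULT_LOG_LEVEL)
    handler = RotatingFileHandler(log_file or 'calibre-web.log',
                                  maxBytes=100000, backupCount=2)
    handler.setFormatter(logging.Formatter(
        '[%(asctime)s] %(levelname)s {%(name)s} %(message)s'))
    root.addHandler(handler)
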
@@ -696,9 +588,9 @@ def migrate_Database():
         conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
                      "+ series_books * :side_series + category_books * :side_category + hot_books * "
                      ":side_hot + :side_autor + :detail_random)"
-                     ,{'side_random': SIDEBAR_RANDOM, 'side_lang': SIDEBAR_LANGUAGE, 'side_series': SIDEBAR_SERIES,
-                       'side_category': SIDEBAR_CATEGORY, 'side_hot': SIDEBAR_HOT, 'side_autor': SIDEBAR_AUTHOR,
-                       'detail_random': DETAIL_RANDOM})
+                     ,{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE, 'side_series': constants.SIDEBAR_SERIES,
+                       'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
+                       'detail_random': constants.DETAIL_RANDOM})
         session.commit()
     try:
         session.query(exists().where(User.mature_content)).scalar()
@@ -706,7 +598,7 @@ def migrate_Database():
         conn = engine.connect()
         conn.execute("ALTER TABLE user ADD column `mature_content` INTEGER DEFAULT 1")
 
-    if session.query(User).filter(User.role.op('&')(ROLE_ANONYMOUS) == ROLE_ANONYMOUS).first() is None:
+    if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() is None:
         create_anonymous_user()
     try:
         session.query(exists().where(Settings.config_remote_login)).scalar()
@@ -850,7 +742,7 @@ def create_anonymous_user():
     user = User()
     user.nickname = "Guest"
     user.email = 'no@email'
-    user.role = ROLE_ANONYMOUS
+    user.role = constants.ROLE_ANONYMOUS
     user.password = ''
 
     session.add(user)
@@ -864,13 +756,10 @@ def create_admin_user():
 def create_admin_user():
     user = User()
     user.nickname = "admin"
-    user.role = ROLE_USER + ROLE_ADMIN + ROLE_DOWNLOAD + ROLE_UPLOAD + ROLE_EDIT + ROLE_DELETE_BOOKS + ROLE_PASSWD +\
-                ROLE_VIEWER
-    user.sidebar_view = DETAIL_RANDOM + SIDEBAR_LANGUAGE + SIDEBAR_SERIES + SIDEBAR_CATEGORY + SIDEBAR_HOT + \
-                        SIDEBAR_RANDOM + SIDEBAR_AUTHOR + SIDEBAR_BEST_RATED + SIDEBAR_READ_AND_UNREAD + SIDEBAR_RECENT + \
-                        SIDEBAR_SORTED + MATURE_CONTENT + SIDEBAR_PUBLISHER + SIDEBAR_RATING + SIDEBAR_FORMAT
+    user.role = constants.ADMIN_USER_ROLES
+    user.sidebar_view = constants.ADMIN_USER_SIDEBAR
 
-    user.password = generate_password_hash(DEFAULT_PASS)
+    user.password = generate_password_hash(constants.DEFAULT_PASSWORD)
 
     session.add(user)
     try:
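
ADMIN_USER_ROLES and ADMIN_USER_SIDEBAR replace the long sums that create_admin_user used to build by hand. Assuming the flag values that were just removed from ub.py, the role aggregate presumably amounts to something like the following; the exact definition lives in the new cps/constants.py, which is not part of this excerpt:

# Flag values as they appeared in the old ub.py
ROLE_USER = 0
ROLE_ADMIN = 1
ROLE_DOWNLOAD = 2
ROLE_UPLOAD = 4
ROLE_EDIT = 8
ROLE_PASSWD = 16
ROLE_EDIT_SHELFS = 64
ROLE_DELETE_BOOKS = 128
ROLE_VIEWER = 256

# Presumed aggregate, mirroring the sum the old create_admin_user() built
ADMIN_USER_ROLES = (ROLE_USER | ROLE_ADMIN | ROLE_DOWNLOAD | ROLE_UPLOAD | ROLE_EDIT
                    | ROLE_PASSWD | ROLE_EDIT_SHELFS | ROLE_DELETE_BOOKS | ROLE_VIEWER)
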
@@ -17,22 +17,27 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-from . import config, get_locale, Server, app
-import threading
-import zipfile
-import requests
-import time
-from io import BytesIO
-import os
+from __future__ import division, print_function, unicode_literals
 import sys
-import shutil
-from ub import UPDATE_STABLE
-from tempfile import gettempdir
+import os
 import datetime
 import json
-from flask_babel import gettext as _
+import requests
+import shutil
+import threading
+import time
+import zipfile
+from io import BytesIO
+from tempfile import gettempdir
 
 from babel.dates import format_datetime
+from flask_babel import gettext as _
+
+from . import constants, logger, config, get_locale, Server
+
+
+log = logger.create()
+_REPOSITORY_API_URL = 'https://api.github.com/repos/janeczku/calibre-web'
 
 
 def is_sha1(sha1):
@@ -53,13 +58,13 @@ class Updater(threading.Thread):
         self.updateIndex = None
 
     def get_current_version_info(self):
-        if config.get_update_channel == UPDATE_STABLE:
+        if config.get_update_channel == constants.UPDATE_STABLE:
             return self._stable_version_info()
         else:
             return self._nightly_version_info()
 
     def get_available_updates(self, request_method):
-        if config.get_update_channel == UPDATE_STABLE:
+        if config.get_update_channel == constants.UPDATE_STABLE:
             return self._stable_available_updates(request_method)
         else:
             return self._nightly_available_updates(request_method)
@@ -67,45 +72,45 @@ class Updater(threading.Thread):
     def run(self):
         try:
             self.status = 1
-            app.logger.debug(u'Download update file')
+            log.debug(u'Download update file')
             headers = {'Accept': 'application/vnd.github.v3+json'}
             r = requests.get(self._get_request_path(), stream=True, headers=headers)
             r.raise_for_status()
 
             self.status = 2
-            app.logger.debug(u'Opening zipfile')
+            log.debug(u'Opening zipfile')
             z = zipfile.ZipFile(BytesIO(r.content))
             self.status = 3
-            app.logger.debug(u'Extracting zipfile')
+            log.debug(u'Extracting zipfile')
             tmp_dir = gettempdir()
             z.extractall(tmp_dir)
             foldername = os.path.join(tmp_dir, z.namelist()[0])[:-1]
             if not os.path.isdir(foldername):
                 self.status = 11
-                app.logger.info(u'Extracted contents of zipfile not found in temp folder')
+                log.info(u'Extracted contents of zipfile not found in temp folder')
                 return
             self.status = 4
-            app.logger.debug(u'Replacing files')
-            self.update_source(foldername, config.get_main_dir)
+            log.debug(u'Replacing files')
+            self.update_source(foldername, constants.BASE_DIR)
             self.status = 6
-            app.logger.debug(u'Preparing restart of server')
+            log.debug(u'Preparing restart of server')
             time.sleep(2)
             Server.setRestartTyp(True)
             Server.stopServer()
             self.status = 7
             time.sleep(2)
         except requests.exceptions.HTTPError as ex:
-            app.logger.info( u'HTTP Error' + ' ' + str(ex))
+            log.info(u'HTTP Error %s', ex)
             self.status = 8
         except requests.exceptions.ConnectionError:
-            app.logger.info(u'Connection error')
+            log.info(u'Connection error')
             self.status = 9
         except requests.exceptions.Timeout:
-            app.logger.info(u'Timeout while establishing connection')
+            log.info(u'Timeout while establishing connection')
             self.status = 10
         except requests.exceptions.RequestException:
             self.status = 11
-            app.logger.info(u'General error')
+            log.info(u'General error')
 
     def get_update_status(self):
         return self.status
@@ -153,14 +158,14 @@ class Updater(threading.Thread):
         if sys.platform == "win32" or sys.platform == "darwin":
             change_permissions = False
         else:
-            app.logger.debug('Update on OS-System : ' + sys.platform)
+            log.debug('Update on OS-System : %s', sys.platform)
             new_permissions = os.stat(root_dst_dir)
             # print new_permissions
         for src_dir, __, files in os.walk(root_src_dir):
             dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
             if not os.path.exists(dst_dir):
                 os.makedirs(dst_dir)
-                app.logger.debug('Create-Dir: '+dst_dir)
+                log.debug('Create-Dir: %s', dst_dir)
                 if change_permissions:
                     # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_uid))
                     os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
@@ -170,22 +175,22 @@ class Updater(threading.Thread):
             if os.path.exists(dst_file):
                 if change_permissions:
                     permission = os.stat(dst_file)
-                app.logger.debug('Remove file before copy: '+dst_file)
+                log.debug('Remove file before copy: %s', dst_file)
                 os.remove(dst_file)
             else:
                 if change_permissions:
                     permission = new_permissions
             shutil.move(src_file, dst_dir)
-            app.logger.debug('Move File '+src_file+' to '+dst_dir)
+            log.debug('Move File %s to %s', src_file, dst_dir)
             if change_permissions:
                 try:
                     os.chown(dst_file, permission.st_uid, permission.st_gid)
                 except (Exception) as e:
                     # ex = sys.exc_info()
                     old_permissions = os.stat(dst_file)
-                    app.logger.debug('Fail change permissions of ' + str(dst_file) + '. Before: '
-                                     + str(old_permissions.st_uid) + ':' + str(old_permissions.st_gid) + ' After: '
-                                     + str(permission.st_uid) + ':' + str(permission.st_gid) + ' error: '+str(e))
+                    log.debug('Fail change permissions of %s. Before: %s:%s After %s:%s error: %s',
+                              dst_file, old_permissions.st_uid, old_permissions.st_gid,
+                              permission.st_uid, permission.st_gid, e)
         return
 
     def update_source(self, source, destination):
@ -219,15 +224,15 @@ class Updater(threading.Thread):
|
|||||||
for item in remove_items:
|
for item in remove_items:
|
||||||
item_path = os.path.join(destination, item[1:])
|
item_path = os.path.join(destination, item[1:])
|
||||||
if os.path.isdir(item_path):
|
if os.path.isdir(item_path):
|
||||||
app.logger.debug("Delete dir " + item_path)
|
log.debug("Delete dir %s", item_path)
|
||||||
shutil.rmtree(item_path, ignore_errors=True)
|
shutil.rmtree(item_path, ignore_errors=True)
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
app.logger.debug("Delete file " + item_path)
|
log.debug("Delete file %s", item_path)
|
||||||
# log_from_thread("Delete file " + item_path)
|
# log_from_thread("Delete file " + item_path)
|
||||||
os.remove(item_path)
|
os.remove(item_path)
|
||||||
except Exception:
|
except Exception:
|
||||||
app.logger.debug("Could not remove:" + item_path)
|
log.debug("Could not remove: %s", item_path)
|
||||||
shutil.rmtree(source, ignore_errors=True)
|
shutil.rmtree(source, ignore_errors=True)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@ -248,7 +253,7 @@ class Updater(threading.Thread):
|
|||||||
def _nightly_available_updates(self, request_method):
|
def _nightly_available_updates(self, request_method):
|
||||||
tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
|
tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
|
||||||
if request_method == "GET":
|
if request_method == "GET":
|
||||||
repository_url = 'https://api.github.com/repos/janeczku/calibre-web'
|
repository_url = _REPOSITORY_API_URL
|
||||||
status, commit = self._load_remote_data(repository_url +'/git/refs/heads/master')
|
status, commit = self._load_remote_data(repository_url +'/git/refs/heads/master')
|
||||||
parents = []
|
parents = []
|
||||||
if status['message'] != '':
|
if status['message'] != '':
|
||||||
@ -348,7 +353,7 @@ class Updater(threading.Thread):
|
|||||||
if request_method == "GET":
|
if request_method == "GET":
|
||||||
parents = []
|
parents = []
|
||||||
# repository_url = 'https://api.github.com/repos/flatpak/flatpak/releases' # test URL
|
# repository_url = 'https://api.github.com/repos/flatpak/flatpak/releases' # test URL
|
||||||
repository_url = 'https://api.github.com/repos/janeczku/calibre-web/releases?per_page=100'
|
repository_url = _REPOSITORY_API_URL + '/releases?per_page=100'
|
||||||
status, commit = self._load_remote_data(repository_url)
|
status, commit = self._load_remote_data(repository_url)
|
||||||
if status['message'] != '':
|
if status['message'] != '':
|
||||||
return json.dumps(status)
|
return json.dumps(status)
|
||||||
@ -434,10 +439,10 @@ class Updater(threading.Thread):
|
|||||||
return json.dumps(status)
|
return json.dumps(status)
|
||||||
|
|
||||||
def _get_request_path(self):
|
def _get_request_path(self):
|
||||||
if config.get_update_channel == UPDATE_STABLE:
|
if config.get_update_channel == constants.UPDATE_STABLE:
|
||||||
return self.updateFile
|
return self.updateFile
|
||||||
else:
|
else:
|
||||||
return 'https://api.github.com/repos/janeczku/calibre-web/zipball/master'
|
return _REPOSITORY_API_URL + '/zipball/master'
|
||||||
|
|
||||||
def _load_remote_data(self, repository_url):
|
def _load_remote_data(self, repository_url):
|
||||||
status = {
|
status = {
|
||||||
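The recurring change in the updater hunks above is the switch from app.logger plus string concatenation to a module-level log = logger.create() with lazy %-style arguments, so the message is only rendered when the record is actually emitted. A minimal sketch of the idiom using the standard logging module (cps.logger.create() presumably hands back a compatible logger object; the logger name and path below are illustrative only):

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("cps.updater")

    dst_file = "/tmp/calibre-web/app.db"
    # Eager: the message string is built even though DEBUG is disabled here.
    log.debug("Remove file before copy: " + dst_file)
    # Lazy: %-interpolation is deferred until a handler actually emits the record.
    log.debug("Remove file before copy: %s", dst_file)

The lazy form costs almost nothing when DEBUG is off, which matters in tight loops such as the file-copy walk above.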
|
@ -17,13 +17,19 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import division, print_function, unicode_literals
|
||||||
from tempfile import gettempdir
|
|
||||||
import hashlib
|
|
||||||
import os
|
import os
|
||||||
|
import hashlib
|
||||||
|
from tempfile import gettempdir
|
||||||
|
|
||||||
from flask_babel import gettext as _
|
from flask_babel import gettext as _
|
||||||
import comic
|
|
||||||
from . import app
|
from . import logger, comic
|
||||||
|
from .constants import BookMeta
|
||||||
|
|
||||||
|
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from lxml.etree import LXML_VERSION as lxmlversion
|
from lxml.etree import LXML_VERSION as lxmlversion
|
||||||
@ -36,7 +42,7 @@ try:
|
|||||||
from wand.exceptions import PolicyError
|
from wand.exceptions import PolicyError
|
||||||
use_generic_pdf_cover = False
|
use_generic_pdf_cover = False
|
||||||
except (ImportError, RuntimeError) as e:
|
except (ImportError, RuntimeError) as e:
|
||||||
app.logger.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e)
|
log.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e)
|
||||||
use_generic_pdf_cover = True
|
use_generic_pdf_cover = True
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@ -44,29 +50,29 @@ try:
|
|||||||
from PyPDF2 import __version__ as PyPdfVersion
|
from PyPDF2 import __version__ as PyPdfVersion
|
||||||
use_pdf_meta = True
|
use_pdf_meta = True
|
||||||
except ImportError as e:
|
except ImportError as e:
|
||||||
app.logger.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e)
|
log.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e)
|
||||||
use_pdf_meta = False
|
use_pdf_meta = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import epub
|
from . import epub
|
||||||
use_epub_meta = True
|
use_epub_meta = True
|
||||||
except ImportError as e:
|
except ImportError as e:
|
||||||
app.logger.warning('cannot import epub, extracting epub metadata will not work: %s', e)
|
log.warning('cannot import epub, extracting epub metadata will not work: %s', e)
|
||||||
use_epub_meta = False
|
use_epub_meta = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import fb2
|
from . import fb2
|
||||||
use_fb2_meta = True
|
use_fb2_meta = True
|
||||||
except ImportError as e:
|
except ImportError as e:
|
||||||
app.logger.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e)
|
log.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e)
|
||||||
use_fb2_meta = False
|
use_fb2_meta = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
from PIL import __version__ as PILversion
|
from PIL import __version__ as PILversion
|
||||||
use_PIL = True
|
use_PIL = True
|
||||||
except ImportError:
|
except ImportError as e:
|
||||||
app.logger.warning('cannot import Pillow, using png and webp images as cover will not work: %s', e)
|
log.warning('cannot import Pillow, using png and webp images as cover will not work: %s', e)
|
||||||
use_generic_pdf_cover = True
|
use_generic_pdf_cover = True
|
||||||
use_PIL = False
|
use_PIL = False
|
||||||
|
|
||||||
@ -88,7 +94,7 @@ def process(tmp_file_path, original_file_name, original_file_extension):
|
|||||||
meta = comic.get_comic_info(tmp_file_path, original_file_name, original_file_extension)
|
meta = comic.get_comic_info(tmp_file_path, original_file_name, original_file_extension)
|
||||||
|
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
app.logger.warning('cannot parse metadata, using default: %s', ex)
|
log.warning('cannot parse metadata, using default: %s', ex)
|
||||||
|
|
||||||
if meta and meta.title.strip() and meta.author.strip():
|
if meta and meta.title.strip() and meta.author.strip():
|
||||||
return meta
|
return meta
|
||||||
@ -192,10 +198,10 @@ def pdf_preview(tmp_file_path, tmp_dir):
|
|||||||
img.save(filename=os.path.join(tmp_dir, cover_file_name))
|
img.save(filename=os.path.join(tmp_dir, cover_file_name))
|
||||||
return cover_file_name
|
return cover_file_name
|
||||||
except PolicyError as ex:
|
except PolicyError as ex:
|
||||||
app.logger.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex)
|
log.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex)
|
||||||
return None
|
return None
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
app.logger.warning('Cannot extract cover image, using default: %s', ex)
|
log.warning('Cannot extract cover image, using default: %s', ex)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
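All of the uploader hunks follow the same optional-dependency pattern: try the import, warn through the new module logger if it fails, and record a feature flag that callers check later. A short, self-contained sketch of that guard (the warning text is taken from the hunk; the helper function is illustrative):

    import logging

    log = logging.getLogger("cps.uploader")

    try:
        from PIL import Image  # optional: cover handling for png/webp uploads
        use_PIL = True
    except ImportError as e:
        log.warning("cannot import Pillow, using png and webp images as cover will not work: %s", e)
        use_PIL = False

    def cover_supported():
        # Callers consult the flag instead of repeating the import attempt.
        return use_PIL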
|
143 cps/web.py
@ -21,35 +21,39 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from . import mimetypes, global_WorkerThread, searched_ids, lm, babel, ub, config, get_locale, language_table, app, db
|
from __future__ import division, print_function, unicode_literals
|
||||||
from .helper import common_filters, get_search_results, fill_indexpage, speaking_language, check_valid_domain, \
|
import os
|
||||||
order_authors, get_typeahead, render_task_status, json_serial, get_unique_other_books, get_cc_columns, \
|
import base64
|
||||||
get_book_cover, get_download_link, send_mail, generate_random_password, send_registration_mail, \
|
import datetime
|
||||||
check_send_to_kindle, check_read_formats, lcase
|
import json
|
||||||
from flask import render_template, request, redirect, send_from_directory, make_response, g, flash, abort, url_for
|
import mimetypes
|
||||||
from flask_login import login_user, logout_user, login_required, current_user
|
|
||||||
from werkzeug.exceptions import default_exceptions
|
|
||||||
from werkzeug.security import generate_password_hash, check_password_hash
|
|
||||||
from werkzeug.datastructures import Headers
|
|
||||||
from redirect import redirect_back
|
|
||||||
from pagination import Pagination
|
|
||||||
from babel import Locale as LC
|
from babel import Locale as LC
|
||||||
from babel.dates import format_date
|
from babel.dates import format_date
|
||||||
from babel.core import UnknownLocaleError
|
from babel.core import UnknownLocaleError
|
||||||
|
from flask import Blueprint
|
||||||
|
from flask import render_template, request, redirect, send_from_directory, make_response, g, flash, abort, url_for
|
||||||
from flask_babel import gettext as _
|
from flask_babel import gettext as _
|
||||||
from sqlalchemy.sql.expression import text, func, true, false, not_
|
from flask_login import login_user, logout_user, login_required, current_user
|
||||||
from sqlalchemy.exc import IntegrityError
|
from sqlalchemy.exc import IntegrityError
|
||||||
import base64
|
from sqlalchemy.sql.expression import text, func, true, false, not_, and_
|
||||||
import os.path
|
from werkzeug.exceptions import default_exceptions
|
||||||
import json
|
from werkzeug.datastructures import Headers
|
||||||
import datetime
|
from werkzeug.security import generate_password_hash, check_password_hash
|
||||||
import isoLanguages
|
|
||||||
from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download
|
|
||||||
|
|
||||||
|
from . import constants, logger, isoLanguages
|
||||||
|
from . import global_WorkerThread, searched_ids, lm, babel, db, ub, config, get_locale, app, language_table
|
||||||
|
from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download
|
||||||
|
from .helper import common_filters, get_search_results, fill_indexpage, speaking_language, check_valid_domain, \
|
||||||
|
order_authors, get_typeahead, render_task_status, json_serial, get_unique_other_books, get_cc_columns, \
|
||||||
|
get_book_cover, get_download_link, send_mail, generate_random_password, send_registration_mail, \
|
||||||
|
check_send_to_kindle, check_read_formats, lcase
|
||||||
|
from .pagination import Pagination
|
||||||
|
from .redirect import redirect_back
|
||||||
|
|
||||||
feature_support = dict()
|
feature_support = dict()
|
||||||
try:
|
try:
|
||||||
from oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
|
from .oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
|
||||||
feature_support['oauth'] = True
|
feature_support['oauth'] = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
feature_support['oauth'] = False
|
feature_support['oauth'] = False
|
||||||
@ -72,32 +76,17 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass # We're not using Python 3
|
pass # We're not using Python 3
|
||||||
|
|
||||||
try:
|
# try:
|
||||||
import rarfile
|
# import rarfile
|
||||||
feature_support['rar'] = True
|
# feature_support['rar'] = True
|
||||||
except ImportError:
|
# except ImportError:
|
||||||
feature_support['rar'] = False
|
# feature_support['rar'] = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from natsort import natsorted as sort
|
from natsort import natsorted as sort
|
||||||
except ImportError:
|
except ImportError:
|
||||||
sort = sorted # Just use regular sort then, may cause issues with badly named pages in cbz/cbr files
|
sort = sorted # Just use regular sort then, may cause issues with badly named pages in cbz/cbr files
|
||||||
|
|
||||||
from flask import Blueprint
|
|
||||||
|
|
||||||
# Global variables
|
|
||||||
|
|
||||||
EXTENSIONS_AUDIO = {'mp3', 'm4a', 'm4b'}
|
|
||||||
|
|
||||||
EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc', 'docx',
|
|
||||||
'fb2', 'html', 'rtf', 'odt', 'mp3', 'm4a', 'm4b'}
|
|
||||||
|
|
||||||
|
|
||||||
'''EXTENSIONS_READER = set(['txt', 'pdf', 'epub', 'zip', 'cbz', 'tar', 'cbt'] +
|
|
||||||
(['rar','cbr'] if feature_support['rar'] else []))'''
|
|
||||||
|
|
||||||
|
|
||||||
# with app.app_context():
|
|
||||||
|
|
||||||
# custom error page
|
# custom error page
|
||||||
def error_http(error):
|
def error_http(error):
|
||||||
@ -116,6 +105,7 @@ for ex in default_exceptions:
|
|||||||
|
|
||||||
|
|
||||||
web = Blueprint('web', __name__)
|
web = Blueprint('web', __name__)
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
# ################################### Login logic and rights management ###############################################
|
# ################################### Login logic and rights management ###############################################
|
||||||
|
|
||||||
@ -238,7 +228,7 @@ def edit_required(f):
|
|||||||
# Returns the template for rendering and includes the instance name
|
# Returns the template for rendering and includes the instance name
|
||||||
def render_title_template(*args, **kwargs):
|
def render_title_template(*args, **kwargs):
|
||||||
sidebar=ub.get_sidebar_config(kwargs)
|
sidebar=ub.get_sidebar_config(kwargs)
|
||||||
return render_template(instance=config.config_calibre_web_title, sidebar=sidebar, accept=EXTENSIONS_UPLOAD,
|
return render_template(instance=config.config_calibre_web_title, sidebar=sidebar, accept=constants.EXTENSIONS_UPLOAD,
|
||||||
*args, **kwargs)
|
*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
@ -272,9 +262,9 @@ def get_email_status_json():
|
|||||||
@login_required
|
@login_required
|
||||||
def bookmark(book_id, book_format):
|
def bookmark(book_id, book_format):
|
||||||
bookmark_key = request.form["bookmark"]
|
bookmark_key = request.form["bookmark"]
|
||||||
ub.session.query(ub.Bookmark).filter(ub.and_(ub.Bookmark.user_id == int(current_user.id),
|
ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
|
||||||
ub.Bookmark.book_id == book_id,
|
ub.Bookmark.book_id == book_id,
|
||||||
ub.Bookmark.format == book_format)).delete()
|
ub.Bookmark.format == book_format)).delete()
|
||||||
if not bookmark_key:
|
if not bookmark_key:
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
return "", 204
|
return "", 204
|
||||||
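This and the following hunks also drop the re-exported ub.and_ in favour of and_ imported directly from sqlalchemy.sql.expression. A self-contained sketch of the same filter shape, with table and column names copied from the bookmark() hunk and SQLAlchemy 1.4+ import paths assumed:

    from sqlalchemy import Column, Integer, String, and_, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Bookmark(Base):
        __tablename__ = "bookmark"
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer)
        book_id = Column(Integer)
        format = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # and_() combines the predicates into a single WHERE clause, mirroring
        # the bookmark() filter; delete() drops any previous bookmark row.
        session.query(Bookmark).filter(and_(Bookmark.user_id == 1,
                                            Bookmark.book_id == 42,
                                            Bookmark.format == "EPUB")
                                       ).delete(synchronize_session=False)
        session.commit()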
@ -292,8 +282,8 @@ def bookmark(book_id, book_format):
|
|||||||
@login_required
|
@login_required
|
||||||
def toggle_read(book_id):
|
def toggle_read(book_id):
|
||||||
if not config.config_read_column:
|
if not config.config_read_column:
|
||||||
book = ub.session.query(ub.ReadBook).filter(ub.and_(ub.ReadBook.user_id == int(current_user.id),
|
book = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(current_user.id),
|
||||||
ub.ReadBook.book_id == book_id)).first()
|
ub.ReadBook.book_id == book_id)).first()
|
||||||
if book:
|
if book:
|
||||||
book.is_read = not book.is_read
|
book.is_read = not book.is_read
|
||||||
else:
|
else:
|
||||||
@ -318,8 +308,7 @@ def toggle_read(book_id):
|
|||||||
db.session.add(new_cc)
|
db.session.add(new_cc)
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
except KeyError:
|
except KeyError:
|
||||||
app.logger.error(
|
log.error(u"Custom Column No.%d is not exisiting in calibre database", config.config_read_column)
|
||||||
u"Custom Column No.%d is not exisiting in calibre database" % config.config_read_column)
|
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
'''
|
'''
|
||||||
@ -342,10 +331,10 @@ def get_comic_book(book_id, book_format, page):
|
|||||||
extract = lambda page: rf.read(names[page])
|
extract = lambda page: rf.read(names[page])
|
||||||
except:
|
except:
|
||||||
# rarfile not valid
|
# rarfile not valid
|
||||||
app.logger.error('Unrar binary not found, or unable to decompress file ' + cbr_file)
|
log.error('Unrar binary not found, or unable to decompress file %s', cbr_file)
|
||||||
return "", 204
|
return "", 204
|
||||||
else:
|
else:
|
||||||
app.logger.info('Unrar is not supported please install python rarfile extension')
|
log.info('Unrar is not supported, please install the python rarfile extension')
|
||||||
# no support means return nothing
|
# no support means return nothing
|
||||||
return "", 204
|
return "", 204
|
||||||
elif book_format in ("cbz", "zip"):
|
elif book_format in ("cbz", "zip"):
|
||||||
@ -357,7 +346,7 @@ def get_comic_book(book_id, book_format, page):
|
|||||||
names=sort(tf.getnames())
|
names=sort(tf.getnames())
|
||||||
extract = lambda page: tf.extractfile(names[page]).read()
|
extract = lambda page: tf.extractfile(names[page]).read()
|
||||||
else:
|
else:
|
||||||
app.logger.error('unsupported comic format')
|
log.error('unsupported comic format')
|
||||||
return "", 204
|
return "", 204
|
||||||
|
|
||||||
if sys.version_info.major >= 3:
|
if sys.version_info.major >= 3:
|
||||||
@ -477,7 +466,7 @@ def books_list(data, sort, book_id, page):
|
|||||||
order = [db.Books.timestamp]
|
order = [db.Books.timestamp]
|
||||||
|
|
||||||
if data == "rated":
|
if data == "rated":
|
||||||
if current_user.check_visibility(ub.SIDEBAR_BEST_RATED):
|
if current_user.check_visibility(constants.SIDEBAR_BEST_RATED):
|
||||||
entries, random, pagination = fill_indexpage(page, db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
|
entries, random, pagination = fill_indexpage(page, db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
|
||||||
order)
|
order)
|
||||||
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
|
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
|
||||||
@ -485,7 +474,7 @@ def books_list(data, sort, book_id, page):
|
|||||||
else:
|
else:
|
||||||
abort(404)
|
abort(404)
|
||||||
elif data == "discover":
|
elif data == "discover":
|
||||||
if current_user.check_visibility(ub.SIDEBAR_RANDOM):
|
if current_user.check_visibility(constants.SIDEBAR_RANDOM):
|
||||||
entries, __, pagination = fill_indexpage(page, db.Books, True, [func.randomblob(2)])
|
entries, __, pagination = fill_indexpage(page, db.Books, True, [func.randomblob(2)])
|
||||||
pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
|
pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
|
||||||
return render_title_template('discover.html', entries=entries, pagination=pagination,
|
return render_title_template('discover.html', entries=entries, pagination=pagination,
|
||||||
@ -517,7 +506,7 @@ def books_list(data, sort, book_id, page):
|
|||||||
|
|
||||||
|
|
||||||
def render_hot_books(page):
|
def render_hot_books(page):
|
||||||
if current_user.check_visibility(ub.SIDEBAR_HOT):
|
if current_user.check_visibility(constants.SIDEBAR_HOT):
|
||||||
if current_user.show_detail_random():
|
if current_user.show_detail_random():
|
||||||
random = db.session.query(db.Books).filter(common_filters()) \
|
random = db.session.query(db.Books).filter(common_filters()) \
|
||||||
.order_by(func.random()).limit(config.config_random_books)
|
.order_by(func.random()).limit(config.config_random_books)
|
||||||
@ -564,7 +553,7 @@ def render_author_books(page, book_id, order):
|
|||||||
other_books = get_unique_other_books(entries.all(), author_info.books)
|
other_books = get_unique_other_books(entries.all(), author_info.books)
|
||||||
except Exception:
|
except Exception:
|
||||||
# Skip goodreads, if site is down/inaccessible
|
# Skip goodreads, if site is down/inaccessible
|
||||||
app.logger.error('Goodreads website is down/inaccessible')
|
log.error('Goodreads website is down/inaccessible')
|
||||||
|
|
||||||
return render_title_template('author.html', entries=entries, pagination=pagination,
|
return render_title_template('author.html', entries=entries, pagination=pagination,
|
||||||
title=name, author=author_info, other_books=other_books, page="author")
|
title=name, author=author_info, other_books=other_books, page="author")
|
||||||
@ -630,7 +619,7 @@ def render_category_books(page, book_id, order):
|
|||||||
@web.route("/author")
|
@web.route("/author")
|
||||||
@login_required_if_no_ano
|
@login_required_if_no_ano
|
||||||
def author_list():
|
def author_list():
|
||||||
if current_user.check_visibility(ub.SIDEBAR_AUTHOR):
|
if current_user.check_visibility(constants.SIDEBAR_AUTHOR):
|
||||||
entries = db.session.query(db.Authors, func.count('books_authors_link.book').label('count'))\
|
entries = db.session.query(db.Authors, func.count('books_authors_link.book').label('count'))\
|
||||||
.join(db.books_authors_link).join(db.Books).filter(common_filters())\
|
.join(db.books_authors_link).join(db.Books).filter(common_filters())\
|
||||||
.group_by(text('books_authors_link.author')).order_by(db.Authors.sort).all()
|
.group_by(text('books_authors_link.author')).order_by(db.Authors.sort).all()
|
||||||
@ -648,7 +637,7 @@ def author_list():
|
|||||||
@web.route("/publisher")
|
@web.route("/publisher")
|
||||||
@login_required_if_no_ano
|
@login_required_if_no_ano
|
||||||
def publisher_list():
|
def publisher_list():
|
||||||
if current_user.check_visibility(ub.SIDEBAR_PUBLISHER):
|
if current_user.check_visibility(constants.SIDEBAR_PUBLISHER):
|
||||||
entries = db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count'))\
|
entries = db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count'))\
|
||||||
.join(db.books_publishers_link).join(db.Books).filter(common_filters())\
|
.join(db.books_publishers_link).join(db.Books).filter(common_filters())\
|
||||||
.group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort).all()
|
.group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort).all()
|
||||||
@ -664,7 +653,7 @@ def publisher_list():
|
|||||||
@web.route("/series")
|
@web.route("/series")
|
||||||
@login_required_if_no_ano
|
@login_required_if_no_ano
|
||||||
def series_list():
|
def series_list():
|
||||||
if current_user.check_visibility(ub.SIDEBAR_SERIES):
|
if current_user.check_visibility(constants.SIDEBAR_SERIES):
|
||||||
entries = db.session.query(db.Series, func.count('books_series_link.book').label('count'))\
|
entries = db.session.query(db.Series, func.count('books_series_link.book').label('count'))\
|
||||||
.join(db.books_series_link).join(db.Books).filter(common_filters())\
|
.join(db.books_series_link).join(db.Books).filter(common_filters())\
|
||||||
.group_by(text('books_series_link.series')).order_by(db.Series.sort).all()
|
.group_by(text('books_series_link.series')).order_by(db.Series.sort).all()
|
||||||
@ -680,7 +669,7 @@ def series_list():
|
|||||||
@web.route("/ratings")
|
@web.route("/ratings")
|
||||||
@login_required_if_no_ano
|
@login_required_if_no_ano
|
||||||
def ratings_list():
|
def ratings_list():
|
||||||
if current_user.check_visibility(ub.SIDEBAR_RATING):
|
if current_user.check_visibility(constants.SIDEBAR_RATING):
|
||||||
entries = db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
|
entries = db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
|
||||||
(db.Ratings.rating/2).label('name'))\
|
(db.Ratings.rating/2).label('name'))\
|
||||||
.join(db.books_ratings_link).join(db.Books).filter(common_filters())\
|
.join(db.books_ratings_link).join(db.Books).filter(common_filters())\
|
||||||
@ -694,7 +683,7 @@ def ratings_list():
|
|||||||
@web.route("/formats")
|
@web.route("/formats")
|
||||||
@login_required_if_no_ano
|
@login_required_if_no_ano
|
||||||
def formats_list():
|
def formats_list():
|
||||||
if current_user.check_visibility(ub.SIDEBAR_FORMAT):
|
if current_user.check_visibility(constants.SIDEBAR_FORMAT):
|
||||||
entries = db.session.query(db.Data, func.count('data.book').label('count'),db.Data.format.label('format'))\
|
entries = db.session.query(db.Data, func.count('data.book').label('count'),db.Data.format.label('format'))\
|
||||||
.join(db.Books).filter(common_filters())\
|
.join(db.Books).filter(common_filters())\
|
||||||
.group_by(db.Data.format).order_by(db.Data.format).all()
|
.group_by(db.Data.format).order_by(db.Data.format).all()
|
||||||
@ -707,7 +696,7 @@ def formats_list():
|
|||||||
@web.route("/language")
|
@web.route("/language")
|
||||||
@login_required_if_no_ano
|
@login_required_if_no_ano
|
||||||
def language_overview():
|
def language_overview():
|
||||||
if current_user.check_visibility(ub.SIDEBAR_LANGUAGE):
|
if current_user.check_visibility(constants.SIDEBAR_LANGUAGE):
|
||||||
charlist = list()
|
charlist = list()
|
||||||
if current_user.filter_language() == u"all":
|
if current_user.filter_language() == u"all":
|
||||||
languages = speaking_language()
|
languages = speaking_language()
|
||||||
@ -753,7 +742,7 @@ def language(name, page):
|
|||||||
@web.route("/category")
|
@web.route("/category")
|
||||||
@login_required_if_no_ano
|
@login_required_if_no_ano
|
||||||
def category_list():
|
def category_list():
|
||||||
if current_user.check_visibility(ub.SIDEBAR_CATEGORY):
|
if current_user.check_visibility(constants.SIDEBAR_CATEGORY):
|
||||||
entries = db.session.query(db.Tags, func.count('books_tags_link.book').label('count'))\
|
entries = db.session.query(db.Tags, func.count('books_tags_link.book').label('count'))\
|
||||||
.join(db.books_tags_link).join(db.Books).order_by(db.Tags.name).filter(common_filters())\
|
.join(db.books_tags_link).join(db.Books).order_by(db.Tags.name).filter(common_filters())\
|
||||||
.group_by(text('books_tags_link.tag')).all()
|
.group_by(text('books_tags_link.tag')).all()
|
||||||
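Each sidebar route above now reads its visibility flag from the new constants module instead of ub. Later hunks in this file (config_default_show & constants.MATURE_CONTENT, sidebar_view += constants.DETAIL_RANDOM) indicate these are bitmask values combined into a single integer. A small sketch of that kind of check; the flag values below are invented for illustration, only the bitwise idiom is taken from the diff:

    # Hypothetical flag values; the real ones live in cps/constants.py.
    SIDEBAR_HOT = 1 << 1
    SIDEBAR_RANDOM = 1 << 2
    SIDEBAR_AUTHOR = 1 << 3

    def check_visibility(sidebar_view, flag):
        # A sidebar entry is shown when its bit is set in the user's view mask.
        return bool(sidebar_view & flag)

    user_view = SIDEBAR_HOT | SIDEBAR_AUTHOR
    print(check_visibility(user_view, SIDEBAR_AUTHOR))  # True
    print(check_visibility(user_view, SIDEBAR_RANDOM))  # False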
@ -945,7 +934,7 @@ def render_read_books(page, are_read, as_xml=False, order=None):
|
|||||||
.filter(db.cc_classes[config.config_read_column].value is True).all()
|
.filter(db.cc_classes[config.config_read_column].value is True).all()
|
||||||
readBookIds = [x.book for x in readBooks]
|
readBookIds = [x.book for x in readBooks]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
app.logger.error(u"Custom Column No.%d is not existing in calibre database" % config.config_read_column)
|
log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
|
||||||
readBookIds = []
|
readBookIds = []
|
||||||
|
|
||||||
if are_read:
|
if are_read:
|
||||||
@ -988,7 +977,7 @@ def serve_book(book_id, book_format):
|
|||||||
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
||||||
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == book_format.upper())\
|
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == book_format.upper())\
|
||||||
.first()
|
.first()
|
||||||
app.logger.info('Serving book: %s', data.name)
|
log.info('Serving book: %s', data.name)
|
||||||
if config.config_use_google_drive:
|
if config.config_use_google_drive:
|
||||||
headers = Headers()
|
headers = Headers()
|
||||||
try:
|
try:
|
||||||
@ -1058,7 +1047,7 @@ def register():
|
|||||||
content.password = generate_password_hash(password)
|
content.password = generate_password_hash(password)
|
||||||
content.role = config.config_default_role
|
content.role = config.config_default_role
|
||||||
content.sidebar_view = config.config_default_show
|
content.sidebar_view = config.config_default_show
|
||||||
content.mature_content = bool(config.config_default_show & ub.MATURE_CONTENT)
|
content.mature_content = bool(config.config_default_show & constants.MATURE_CONTENT)
|
||||||
try:
|
try:
|
||||||
ub.session.add(content)
|
ub.session.add(content)
|
||||||
ub.session.commit()
|
ub.session.commit()
|
||||||
@ -1071,8 +1060,7 @@ def register():
|
|||||||
return render_title_template('register.html', title=_(u"register"), page="register")
|
return render_title_template('register.html', title=_(u"register"), page="register")
|
||||||
else:
|
else:
|
||||||
flash(_(u"Your e-mail is not allowed to register"), category="error")
|
flash(_(u"Your e-mail is not allowed to register"), category="error")
|
||||||
app.logger.info('Registering failed for user "' + to_save['nickname'] + '" e-mail adress: ' +
|
log.info('Registering failed for user "%s" e-mail address: %s', to_save['nickname'], to_save["email"])
|
||||||
to_save["email"])
|
|
||||||
return render_title_template('register.html', title=_(u"register"), page="register")
|
return render_title_template('register.html', title=_(u"register"), page="register")
|
||||||
flash(_(u"Confirmation e-mail was send to your e-mail account."), category="success")
|
flash(_(u"Confirmation e-mail was send to your e-mail account."), category="success")
|
||||||
return redirect(url_for('web.login'))
|
return redirect(url_for('web.login'))
|
||||||
@ -1104,10 +1092,10 @@ def login():
|
|||||||
return redirect_back(url_for("web.index"))
|
return redirect_back(url_for("web.index"))
|
||||||
except ldap.INVALID_CREDENTIALS:
|
except ldap.INVALID_CREDENTIALS:
|
||||||
ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr)
|
ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||||
app.logger.info('LDAP Login failed for user "' + form['username'] + '" IP-adress: ' + ipAdress)
|
log.info('LDAP Login failed for user "%s" IP-address: %s', form['username'], ipAdress)
|
||||||
flash(_(u"Wrong Username or Password"), category="error")
|
flash(_(u"Wrong Username or Password"), category="error")
|
||||||
except ldap.SERVER_DOWN:
|
except ldap.SERVER_DOWN:
|
||||||
app.logger.info('LDAP Login failed, LDAP Server down')
|
log.info('LDAP Login failed, LDAP Server down')
|
||||||
flash(_(u"Could not login. LDAP server down, please contact your administrator"), category="error")
|
flash(_(u"Could not login. LDAP server down, please contact your administrator"), category="error")
|
||||||
else:
|
else:
|
||||||
if user and check_password_hash(user.password, form['password']) and user.nickname is not "Guest":
|
if user and check_password_hash(user.password, form['password']) and user.nickname is not "Guest":
|
||||||
@ -1116,7 +1104,7 @@ def login():
|
|||||||
return redirect_back(url_for("web.index"))
|
return redirect_back(url_for("web.index"))
|
||||||
else:
|
else:
|
||||||
ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr)
|
ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||||
app.logger.info('Login failed for user "' + form['username'] + '" IP-adress: ' + ipAdress)
|
log.info('Login failed for user "%s" IP-address: %s', form['username'], ipAdress)
|
||||||
flash(_(u"Wrong Username or Password"), category="error")
|
flash(_(u"Wrong Username or Password"), category="error")
|
||||||
|
|
||||||
next_url = url_for('web.index')
|
next_url = url_for('web.index')
|
||||||
@ -1263,7 +1251,7 @@ def profile():
|
|||||||
val += int(key[5:])
|
val += int(key[5:])
|
||||||
current_user.sidebar_view = val
|
current_user.sidebar_view = val
|
||||||
if "Show_detail_random" in to_save:
|
if "Show_detail_random" in to_save:
|
||||||
current_user.sidebar_view += ub.DETAIL_RANDOM
|
current_user.sidebar_view += constants.DETAIL_RANDOM
|
||||||
|
|
||||||
current_user.mature_content = "Show_mature_content" in to_save
|
current_user.mature_content = "Show_mature_content" in to_save
|
||||||
|
|
||||||
@ -1297,9 +1285,9 @@ def read_book(book_id, book_format):
|
|||||||
# check if book has bookmark
|
# check if book has bookmark
|
||||||
bookmark = None
|
bookmark = None
|
||||||
if current_user.is_authenticated:
|
if current_user.is_authenticated:
|
||||||
bookmark = ub.session.query(ub.Bookmark).filter(ub.and_(ub.Bookmark.user_id == int(current_user.id),
|
bookmark = ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
|
||||||
ub.Bookmark.book_id == book_id,
|
ub.Bookmark.book_id == book_id,
|
||||||
ub.Bookmark.format == book_format.upper())).first()
|
ub.Bookmark.format == book_format.upper())).first()
|
||||||
if book_format.lower() == "epub":
|
if book_format.lower() == "epub":
|
||||||
return render_title_template('read.html', bookid=book_id, title=_(u"Read a Book"), bookmark=bookmark)
|
return render_title_template('read.html', bookid=book_id, title=_(u"Read a Book"), bookmark=bookmark)
|
||||||
elif book_format.lower() == "pdf":
|
elif book_format.lower() == "pdf":
|
||||||
@ -1350,15 +1338,14 @@ def show_book(book_id):
|
|||||||
if not current_user.is_anonymous:
|
if not current_user.is_anonymous:
|
||||||
if not config.config_read_column:
|
if not config.config_read_column:
|
||||||
matching_have_read_book = ub.session.query(ub.ReadBook).\
|
matching_have_read_book = ub.session.query(ub.ReadBook).\
|
||||||
filter(ub.and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == book_id)).all()
|
filter(and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == book_id)).all()
|
||||||
have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].is_read
|
have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].is_read
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
matching_have_read_book = getattr(entries, 'custom_column_'+str(config.config_read_column))
|
matching_have_read_book = getattr(entries, 'custom_column_'+str(config.config_read_column))
|
||||||
have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].value
|
have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].value
|
||||||
except KeyError:
|
except KeyError:
|
||||||
app.logger.error(
|
log.error("Custom Column No.%d is not exisiting in calibre database", config.config_read_column)
|
||||||
u"Custom Column No.%d is not exisiting in calibre database" % config.config_read_column)
|
|
||||||
have_read = None
|
have_read = None
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@ -1373,7 +1360,7 @@ def show_book(book_id):
|
|||||||
|
|
||||||
audioentries = []
|
audioentries = []
|
||||||
for media_format in entries.data:
|
for media_format in entries.data:
|
||||||
if media_format.format.lower() in EXTENSIONS_AUDIO:
|
if media_format.format.lower() in constants.EXTENSIONS_AUDIO:
|
||||||
audioentries.append(media_format.format.lower())
|
audioentries.append(media_format.format.lower())
|
||||||
|
|
||||||
return render_title_template('detail.html', entry=entries, audioentries=audioentries, cc=cc,
|
return render_title_template('detail.html', entry=entries, audioentries=audioentries, cc=cc,
|
||||||
|
@ -17,21 +17,15 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from __future__ import print_function
|
from __future__ import division, print_function, unicode_literals
|
||||||
import smtplib
|
|
||||||
import threading
|
|
||||||
from datetime import datetime
|
|
||||||
import logging
|
|
||||||
import time
|
|
||||||
import socket
|
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
from email.generator import Generator
|
|
||||||
from . import config, db, app
|
|
||||||
from flask_babel import gettext as _
|
|
||||||
import re
|
import re
|
||||||
from .gdriveutils import getFileFromEbooksFolder, updateGdriveCalibreFromLocal
|
import smtplib
|
||||||
from .subproc_wrapper import process_open
|
import socket
|
||||||
|
import time
|
||||||
|
import threading
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from StringIO import StringIO
|
from StringIO import StringIO
|
||||||
@ -47,6 +41,14 @@ except ImportError:
|
|||||||
from email import encoders
|
from email import encoders
|
||||||
from email.utils import formatdate
|
from email.utils import formatdate
|
||||||
from email.utils import make_msgid
|
from email.utils import make_msgid
|
||||||
|
from email.generator import Generator
|
||||||
|
from flask_babel import gettext as _
|
||||||
|
|
||||||
|
from . import logger, config, db, gdriveutils
|
||||||
|
from .subproc_wrapper import process_open
|
||||||
|
|
||||||
|
|
||||||
|
log = logger.create()
|
||||||
|
|
||||||
chunksize = 8192
|
chunksize = 8192
|
||||||
# task 'status' consts
|
# task 'status' consts
|
||||||
@ -70,7 +72,7 @@ def get_attachment(bookpath, filename):
|
|||||||
"""Get file as MIMEBase message"""
|
"""Get file as MIMEBase message"""
|
||||||
calibrepath = config.config_calibre_dir
|
calibrepath = config.config_calibre_dir
|
||||||
if config.config_use_google_drive:
|
if config.config_use_google_drive:
|
||||||
df = getFileFromEbooksFolder(bookpath, filename)
|
df = gdriveutils.getFileFromEbooksFolder(bookpath, filename)
|
||||||
if df:
|
if df:
|
||||||
datafile = os.path.join(calibrepath, bookpath, filename)
|
datafile = os.path.join(calibrepath, bookpath, filename)
|
||||||
if not os.path.exists(os.path.join(calibrepath, bookpath)):
|
if not os.path.exists(os.path.join(calibrepath, bookpath)):
|
||||||
@ -88,8 +90,8 @@ def get_attachment(bookpath, filename):
|
|||||||
data = file_.read()
|
data = file_.read()
|
||||||
file_.close()
|
file_.close()
|
||||||
except IOError as e:
|
except IOError as e:
|
||||||
app.logger.exception(e) # traceback.print_exc()
|
log.exception(e) # traceback.print_exc()
|
||||||
app.logger.error(u'The requested file could not be read. Maybe wrong permissions?')
|
log.error(u'The requested file could not be read. Maybe wrong permissions?')
|
||||||
return None
|
return None
|
||||||
|
|
||||||
attachment = MIMEBase('application', 'octet-stream')
|
attachment = MIMEBase('application', 'octet-stream')
|
||||||
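The two hunks above only change the gdriveutils call and the logging calls inside get_attachment(); the function's actual job is wrapping the book file as a base64-encoded MIME part. A compact sketch of that idiom with the standard email package (file handling simplified, names illustrative):

    import os
    from email import encoders
    from email.mime.base import MIMEBase

    def build_attachment(path):
        # Read the book file and wrap it as an octet-stream MIME part,
        # roughly what get_attachment() does once the file is located.
        with open(path, "rb") as handle:
            data = handle.read()
        attachment = MIMEBase("application", "octet-stream")
        attachment.set_payload(data)
        encoders.encode_base64(attachment)
        attachment.add_header("Content-Disposition", "attachment",
                              filename=os.path.basename(path))
        return attachment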
@ -114,7 +116,7 @@ class emailbase():
|
|||||||
|
|
||||||
def send(self, strg):
|
def send(self, strg):
|
||||||
"""Send `strg' to the server."""
|
"""Send `strg' to the server."""
|
||||||
app.logger.debug('send:' + repr(strg[:300]))
|
log.debug('send: %r', strg[:300])
|
||||||
if hasattr(self, 'sock') and self.sock:
|
if hasattr(self, 'sock') and self.sock:
|
||||||
try:
|
try:
|
||||||
if self.transferSize:
|
if self.transferSize:
|
||||||
@ -139,7 +141,7 @@ class emailbase():
|
|||||||
raise smtplib.SMTPServerDisconnected('please run connect() first')
|
raise smtplib.SMTPServerDisconnected('please run connect() first')
|
||||||
|
|
||||||
def _print_debug(self, *args):
|
def _print_debug(self, *args):
|
||||||
app.logger.debug(args)
|
log.debug(args)
|
||||||
|
|
||||||
def getTransferStatus(self):
|
def getTransferStatus(self):
|
||||||
if self.transferSize:
|
if self.transferSize:
|
||||||
@ -236,7 +238,7 @@ class WorkerThread(threading.Thread):
|
|||||||
filename = self._convert_ebook_format()
|
filename = self._convert_ebook_format()
|
||||||
if filename:
|
if filename:
|
||||||
if config.config_use_google_drive:
|
if config.config_use_google_drive:
|
||||||
updateGdriveCalibreFromLocal()
|
gdriveutils.updateGdriveCalibreFromLocal()
|
||||||
if curr_task == TASK_CONVERT:
|
if curr_task == TASK_CONVERT:
|
||||||
self.add_email(self.queue[self.current]['settings']['subject'], self.queue[self.current]['path'],
|
self.add_email(self.queue[self.current]['settings']['subject'], self.queue[self.current]['path'],
|
||||||
filename, self.queue[self.current]['settings'], self.queue[self.current]['kindle'],
|
filename, self.queue[self.current]['settings'], self.queue[self.current]['kindle'],
|
||||||
@ -254,14 +256,14 @@ class WorkerThread(threading.Thread):
|
|||||||
# if it does - mark the conversion task as complete and return a success
|
# if it does - mark the conversion task as complete and return a success
|
||||||
# this will allow send to kindle workflow to continue to work
|
# this will allow send to kindle workflow to continue to work
|
||||||
if os.path.isfile(file_path + format_new_ext):
|
if os.path.isfile(file_path + format_new_ext):
|
||||||
app.logger.info("Book id %d already converted to %s", bookid, format_new_ext)
|
log.info("Book id %d already converted to %s", bookid, format_new_ext)
|
||||||
cur_book = db.session.query(db.Books).filter(db.Books.id == bookid).first()
|
cur_book = db.session.query(db.Books).filter(db.Books.id == bookid).first()
|
||||||
self.queue[self.current]['path'] = file_path
|
self.queue[self.current]['path'] = file_path
|
||||||
self.queue[self.current]['title'] = cur_book.title
|
self.queue[self.current]['title'] = cur_book.title
|
||||||
self._handleSuccess()
|
self._handleSuccess()
|
||||||
return file_path + format_new_ext
|
return file_path + format_new_ext
|
||||||
else:
|
else:
|
||||||
app.logger.info("Book id %d - target format of %s does not exist. Moving forward with convert.", bookid, format_new_ext)
|
log.info("Book id %d - target format of %s does not exist. Moving forward with convert.", bookid, format_new_ext)
|
||||||
|
|
||||||
# check if converter-executable is existing
|
# check if converter-executable is existing
|
||||||
if not os.path.exists(config.config_converterpath):
|
if not os.path.exists(config.config_converterpath):
|
||||||
@ -317,13 +319,13 @@ class WorkerThread(threading.Thread):
|
|||||||
if conv_error:
|
if conv_error:
|
||||||
error_message = _(u"Kindlegen failed with Error %(error)s. Message: %(message)s",
|
error_message = _(u"Kindlegen failed with Error %(error)s. Message: %(message)s",
|
||||||
error=conv_error.group(1), message=conv_error.group(2).strip())
|
error=conv_error.group(1), message=conv_error.group(2).strip())
|
||||||
app.logger.debug("convert_kindlegen: " + nextline)
|
log.debug("convert_kindlegen: %s", nextline)
|
||||||
else:
|
else:
|
||||||
while p.poll() is None:
|
while p.poll() is None:
|
||||||
nextline = p.stdout.readline()
|
nextline = p.stdout.readline()
|
||||||
if os.name == 'nt' and sys.version_info < (3, 0):
|
if os.name == 'nt' and sys.version_info < (3, 0):
|
||||||
nextline = nextline.decode('windows-1252')
|
nextline = nextline.decode('windows-1252')
|
||||||
app.logger.debug(nextline.strip('\r\n'))
|
log.debug(nextline.strip('\r\n'))
|
||||||
# parse progress string from calibre-converter
|
# parse progress string from calibre-converter
|
||||||
progress = re.search("(\d+)%\s.*", nextline)
|
progress = re.search("(\d+)%\s.*", nextline)
|
||||||
if progress:
|
if progress:
|
||||||
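The converter loop touched above parses calibre's progress output with a regular expression before logging it. A tiny sketch of that parse, reusing the pattern that appears in the hunk:

    import re

    def parse_progress(line):
        # ebook-convert prints lines such as "34% Converting input to HTML..."
        match = re.search(r"(\d+)%\s.*", line)
        return int(match.group(1)) if match else None

    print(parse_progress("34% Converting input to HTML..."))  # 34
    print(parse_progress("no progress here"))                 # None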
@ -353,7 +355,7 @@ class WorkerThread(threading.Thread):
|
|||||||
return file_path + format_new_ext
|
return file_path + format_new_ext
|
||||||
else:
|
else:
|
||||||
error_message = format_new_ext.upper() + ' format not found on disk'
|
error_message = format_new_ext.upper() + ' format not found on disk'
|
||||||
app.logger.info("ebook converter failed with error while converting book")
|
log.info("ebook converter failed with error while converting book")
|
||||||
if not error_message:
|
if not error_message:
|
||||||
error_message = 'Ebook converter failed with unknown error'
|
error_message = 'Ebook converter failed with unknown error'
|
||||||
self._handleError(error_message)
|
self._handleError(error_message)
|
||||||
@ -449,7 +451,7 @@ class WorkerThread(threading.Thread):
|
|||||||
# _print_debug function
|
# _print_debug function
|
||||||
if sys.version_info < (3, 0):
|
if sys.version_info < (3, 0):
|
||||||
org_smtpstderr = smtplib.stderr
|
org_smtpstderr = smtplib.stderr
|
||||||
smtplib.stderr = StderrLogger()
|
smtplib.stderr = logger.StderrLogger('worker.smtp')
|
||||||
|
|
||||||
if use_ssl == 2:
|
if use_ssl == 2:
|
||||||
self.asyncSMTP = email_SSL(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout)
|
self.asyncSMTP = email_SSL(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout)
|
||||||
@ -457,9 +459,7 @@ class WorkerThread(threading.Thread):
|
|||||||
self.asyncSMTP = email(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout)
|
self.asyncSMTP = email(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout)
|
||||||
|
|
||||||
# link to logginglevel
|
# link to logginglevel
|
||||||
if config.config_log_level != logging.DEBUG:
|
if logger.is_debug_enabled():
|
||||||
self.asyncSMTP.set_debuglevel(0)
|
|
||||||
else:
|
|
||||||
self.asyncSMTP.set_debuglevel(1)
|
self.asyncSMTP.set_debuglevel(1)
|
||||||
if use_ssl == 1:
|
if use_ssl == 1:
|
||||||
self.asyncSMTP.starttls()
|
self.asyncSMTP.starttls()
|
||||||
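The debug-level toggle a few hunks up is also simplified: instead of comparing config.config_log_level with logging.DEBUG, the worker asks logger.is_debug_enabled() and only then switches on smtplib's wire trace. A sketch of the equivalent check with the standard library; the body of is_debug_enabled() is a guess, only the function name comes from the diff:

    import logging
    import smtplib

    log = logging.getLogger("cps.worker")

    def is_debug_enabled():
        # Presumably what cps.logger.is_debug_enabled() boils down to.
        return log.isEnabledFor(logging.DEBUG)

    def open_smtp(server, port, timeout=30):
        conn = smtplib.SMTP(server, port, timeout=timeout)
        if is_debug_enabled():
            # Enable the SMTP protocol trace only when debug logging is wanted.
            conn.set_debuglevel(1)
        return conn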
@ -501,7 +501,7 @@ class WorkerThread(threading.Thread):
|
|||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
def _handleError(self, error_message):
|
def _handleError(self, error_message):
|
||||||
app.logger.error(error_message)
|
log.error(error_message)
|
||||||
self.UIqueue[self.current]['stat'] = STAT_FAIL
|
self.UIqueue[self.current]['stat'] = STAT_FAIL
|
||||||
self.UIqueue[self.current]['progress'] = "100 %"
|
self.UIqueue[self.current]['progress'] = "100 %"
|
||||||
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
|
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
|
||||||
@ -513,22 +513,3 @@ class WorkerThread(threading.Thread):
|
|||||||
self.UIqueue[self.current]['progress'] = "100 %"
|
self.UIqueue[self.current]['progress'] = "100 %"
|
||||||
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
|
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
|
||||||
datetime.now() - self.queue[self.current]['starttime'])
|
datetime.now() - self.queue[self.current]['starttime'])
|
||||||
|
|
||||||
|
|
||||||
# Enable logging of smtp lib debug output
|
|
||||||
class StderrLogger(object):
|
|
||||||
|
|
||||||
buffer = ''
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.logger = app.logger
|
|
||||||
|
|
||||||
def write(self, message):
|
|
||||||
try:
|
|
||||||
if message == '\n':
|
|
||||||
self.logger.debug(self.buffer.replace("\n","\\n"))
|
|
||||||
self.buffer = ''
|
|
||||||
else:
|
|
||||||
self.buffer += message
|
|
||||||
except:
|
|
||||||
self.logger.debug("Logging Error")
|
|
||||||
|
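The in-module StderrLogger removed at the end of this file is replaced by logger.StderrLogger('worker.smtp'). Its new implementation is not part of this excerpt, but the deleted code makes the idea clear enough for a sketch: buffer whatever smtplib writes to its debug stream and forward complete lines to a named logger (only the constructor argument is taken from the new call site):

    import logging

    class StderrLogger(object):
        """File-like object that redirects smtplib's debug output to a logger."""

        def __init__(self, name="worker.smtp"):
            self.log = logging.getLogger(name)
            self.buffer = ""

        def write(self, message):
            if message == "\n":
                # smtplib terminates each debug line with a bare newline.
                self.log.debug(self.buffer.replace("\n", "\\n"))
                self.buffer = ""
            else:
                self.buffer += message

Centralising the adapter in the logger module lets other callers reuse it with their own logger name instead of duplicating the class per module.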