2015-08-02 18:59:11 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
# -*- coding: utf-8 -*-
|
2017-02-20 18:34:37 +00:00
|
|
|
from pydrive.auth import GoogleAuth
|
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
import mimetypes
|
2016-03-28 19:07:13 +00:00
|
|
|
import logging
|
2016-04-15 16:23:00 +00:00
|
|
|
from logging.handlers import RotatingFileHandler
|
2016-04-03 21:33:29 +00:00
|
|
|
import textwrap
|
2016-12-23 08:53:39 +00:00
|
|
|
from flask import Flask, render_template, session, request, Response, redirect, url_for, send_from_directory, \
|
2017-03-06 22:50:24 +00:00
|
|
|
make_response, g, flash, abort, send_file, Markup, \
|
|
|
|
stream_with_context
|
2017-02-23 18:58:56 +00:00
|
|
|
from flask import __version__ as flaskVersion
|
2017-01-28 19:16:40 +00:00
|
|
|
import ub
|
|
|
|
from ub import config
|
|
|
|
import helper
|
2015-08-02 18:59:11 +00:00
|
|
|
import os
|
2016-06-17 18:01:27 +00:00
|
|
|
import errno
|
2015-08-02 18:59:11 +00:00
|
|
|
from sqlalchemy.sql.expression import func
|
2016-03-26 15:12:29 +00:00
|
|
|
from sqlalchemy.sql.expression import false
|
2015-10-13 16:07:17 +00:00
|
|
|
from sqlalchemy.exc import IntegrityError
|
2017-02-23 18:58:56 +00:00
|
|
|
from sqlalchemy import __version__ as sqlalchemyVersion
|
2015-08-02 18:59:11 +00:00
|
|
|
from math import ceil
|
2017-01-12 19:43:36 +00:00
|
|
|
from flask_login import LoginManager, login_user, logout_user, login_required, current_user
|
2016-11-09 18:24:33 +00:00
|
|
|
from flask_principal import Principal, Identity, AnonymousIdentity, identity_changed
|
2017-03-07 18:46:23 +00:00
|
|
|
from flask_principal import __version__ as flask_principalVersion
|
2016-11-09 18:24:33 +00:00
|
|
|
from flask_babel import Babel
|
|
|
|
from flask_babel import gettext as _
|
2017-01-28 19:16:40 +00:00
|
|
|
import requests
|
|
|
|
import zipfile
|
2015-08-02 18:59:11 +00:00
|
|
|
from werkzeug.security import generate_password_hash, check_password_hash
|
2016-11-09 18:24:33 +00:00
|
|
|
from babel import Locale as LC
|
|
|
|
from babel import negotiate_locale
|
2017-02-23 18:58:56 +00:00
|
|
|
from babel import __version__ as babelVersion
|
2017-02-15 17:09:17 +00:00
|
|
|
from babel.dates import format_date
|
2015-10-13 16:07:17 +00:00
|
|
|
from functools import wraps
|
2015-10-13 17:06:37 +00:00
|
|
|
import base64
|
2016-03-26 15:12:29 +00:00
|
|
|
from sqlalchemy.sql import *
|
2016-03-28 22:09:11 +00:00
|
|
|
import json
|
2016-07-09 08:54:13 +00:00
|
|
|
import urllib
|
2016-04-03 21:52:32 +00:00
|
|
|
import datetime
|
2016-11-09 18:24:33 +00:00
|
|
|
from iso639 import languages as isoLanguages
|
2017-02-23 18:58:56 +00:00
|
|
|
from iso639 import __version__ as iso639Version
|
2016-04-03 21:52:32 +00:00
|
|
|
from uuid import uuid4
|
2016-07-16 08:44:47 +00:00
|
|
|
import os.path
|
2016-12-23 08:53:39 +00:00
|
|
|
import sys
|
|
|
|
import subprocess
|
2016-07-16 08:44:47 +00:00
|
|
|
import re
|
2017-01-22 15:44:37 +00:00
|
|
|
import db
|
|
|
|
from shutil import move, copyfile
|
|
|
|
from tornado.ioloop import IOLoop
|
2017-02-22 22:06:59 +00:00
|
|
|
import shutil
|
2017-01-30 17:58:36 +00:00
|
|
|
import StringIO
|
2017-02-20 18:34:37 +00:00
|
|
|
import gdriveutils
|
2017-03-02 00:28:30 +00:00
|
|
|
import tempfile
|
2017-02-20 18:34:37 +00:00
|
|
|
import io
|
|
|
|
import hashlib
|
|
|
|
import threading
|
|
|
|
|
2017-02-23 18:58:56 +00:00
|
|
|
from tornado import version as tornadoVersion
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-03-05 09:40:39 +00:00
|
|
|
try:
|
2017-03-07 22:03:10 +00:00
|
|
|
from urllib.parse import quote
|
2017-03-05 09:40:39 +00:00
|
|
|
from imp import reload
|
|
|
|
from past.builtins import xrange
|
2017-03-06 03:44:54 +00:00
|
|
|
except ImportError as e:
|
2017-03-05 10:48:59 +00:00
|
|
|
from urllib import quote
|
2017-03-05 09:40:39 +00:00
|
|
|
|
2017-03-07 18:46:23 +00:00
|
|
|
# flask-login moved its version string between releases; try the modern
# location first, then the legacy __about__ module.
try:
    from flask_login import __version__ as flask_loginVersion
except ImportError as e:
    # Fixed: "except ImportError, e" is Python-2-only syntax and is a
    # SyntaxError on Python 3; the rest of this file uses the portable form.
    from flask_login.__about__ import __version__ as flask_loginVersion
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-02-20 18:34:37 +00:00
|
|
|
import time
|
|
|
|
|
|
|
|
def current_milli_time():
    """Return the current UNIX time in whole milliseconds."""
    # A named function instead of the former lambda assignment (PEP 8 E731);
    # callers are unchanged -- the name is still callable with no arguments.
    return int(round(time.time() * 1000))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-04-08 20:57:16 +00:00
|
|
|
# Optional dependency: wand (ImageMagick bindings). When it is missing,
# fall back to a generic PDF cover instead of rendering one.
try:
    from wand.image import Image

    use_generic_pdf_cover = False
except ImportError as e:
    use_generic_pdf_cover = True
from cgi import escape  # NOTE(review): cgi.escape is deprecated on Python 3 -- confirm replacement plan
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
# Global variables

# Token embedded in the gdrive change-watch channel so incoming callbacks
# can be recognized as ours.
gdrive_watch_callback_token='target=calibreweb-watch_files'
# Presumably tracks a long-running background job (None when idle) -- TODO confirm against users of this flag.
global_task = None

# Whitelist of file extensions accepted by the upload feature.
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc', 'docx', 'fb2'])
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-02-20 18:34:37 +00:00
|
|
|
def md5(fname):
    """Return the hexadecimal MD5 digest of the file at *fname*.

    The file is read in 4 KiB chunks so arbitrarily large files can be
    hashed without loading them into memory.
    """
    digest = hashlib.md5()
    with open(fname, "rb") as stream:
        chunk = stream.read(4096)
        while chunk:
            digest.update(chunk)
            chunk = stream.read(4096)
    return digest.hexdigest()
|
|
|
|
|
|
|
|
class Singleton:
    """
    A non-thread-safe helper that turns a class into a singleton.

    Apply it as a decorator -- not a metaclass.  The decorated class may
    define one `__init__` taking only `self`, and cannot be inherited
    from; otherwise there are no restrictions.

    Fetch the single instance through `Instance()`.  Calling the
    decorated name directly raises `TypeError`.
    """

    def __init__(self, decorated):
        self._decorated = decorated

    def Instance(self):
        """
        Return the singleton instance, creating it (and running the
        decorated class's `__init__`) on the first call only.
        """
        if not hasattr(self, '_instance'):
            self._instance = self._decorated()
        return self._instance

    def __call__(self):
        raise TypeError('Singletons must be accessed through `Instance()`.')

    def __instancecheck__(self, inst):
        # isinstance(x, DecoratedClass) checks against the wrapped class
        return isinstance(inst, self._decorated)
|
|
|
|
|
|
|
|
@Singleton
class Gauth:
    """Process-wide holder for the PyDrive GoogleAuth object."""

    def __init__(self):
        # credentials/settings come from settings.yaml in the working directory
        self.auth = GoogleAuth(settings_file='settings.yaml')
|
|
|
|
|
|
|
|
@Singleton
class Gdrive:
    """Process-wide holder for the authenticated Google Drive client."""

    def __init__(self):
        # reuses the Gauth singleton's credentials
        self.drive = gdriveutils.getDrive(Gauth.Instance().auth)
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-07-14 00:12:21 +00:00
|
|
|
class ReverseProxied(object):
    """WSGI middleware for running the app behind a reverse proxy.

    Lets the front-end server quietly bind the app to a URL prefix other
    than / and to a different HTTP scheme, by injecting headers.

    Code courtesy of: http://flask.pocoo.org/snippets/35/

    In nginx:
    location /myprefix {
        proxy_pass http://127.0.0.1:8083;
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Scheme $scheme;
        proxy_set_header X-Script-Name /myprefix;
    }
    """

    def __init__(self, application):
        self.app = application

    def __call__(self, environ, start_response):
        # honour the proxy-supplied URL prefix
        prefix = environ.get('HTTP_X_SCRIPT_NAME', '')
        if prefix:
            environ['SCRIPT_NAME'] = prefix
            path = environ.get('PATH_INFO', '')
            if path and path.startswith(prefix):
                environ['PATH_INFO'] = path[len(prefix):]

        # honour the proxy-supplied scheme (http/https)
        scheme = environ.get('HTTP_X_SCHEME', '')
        if scheme:
            environ['wsgi.url_scheme'] = scheme
        # honour the proxy-supplied host name
        host = environ.get('HTTP_X_FORWARDED_SERVER', '')
        if host:
            environ['HTTP_HOST'] = host
        return self.app(environ, start_response)
|
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
|
|
|
# Main code

# Register MIME types for ebook formats so downloads are served with the
# correct Content-Type header.
mimetypes.init()
mimetypes.add_type('application/xhtml+xml', '.xhtml')
mimetypes.add_type('application/epub+zip', '.epub')
mimetypes.add_type('application/x-mobipocket-ebook', '.mobi')
mimetypes.add_type('application/x-mobipocket-ebook', '.prc')
mimetypes.add_type('application/vnd.amazon.ebook', '.azw')
mimetypes.add_type('application/x-cbr', '.cbr')
mimetypes.add_type('application/x-cbz', '.cbz')
mimetypes.add_type('application/x-cbt', '.cbt')
mimetypes.add_type('image/vnd.djvu', '.djvu')

# Flask application, wrapped so it works behind a reverse proxy (see ReverseProxied)
app = (Flask(__name__))
app.wsgi_app = ReverseProxied(app.wsgi_app)
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-01-29 20:06:08 +00:00
|
|
|
# Log to calibre-web.log next to the app, rotated at ~50 kB with two backups.
formatter = logging.Formatter(
    "[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")
file_handler = RotatingFileHandler(os.path.join(config.get_main_dir, "calibre-web.log"), maxBytes=50000, backupCount=2)
file_handler.setFormatter(formatter)
app.logger.addHandler(file_handler)
app.logger.setLevel(config.config_log_level)

app.logger.info('Starting Calibre Web...')
# route the book_formats module's logging into the same file
logging.getLogger("book_formats").addHandler(file_handler)
logging.getLogger("book_formats").setLevel(config.config_log_level)
|
2016-06-05 16:42:18 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
Principal(app)  # flask-principal: identity/permission handling

babel = Babel(app)  # i18n support; selectors are registered below

# imported here rather than at the top -- presumably to avoid a circular
# import before app/logging exist; TODO confirm
import uploader

# flask-login session management
lm = LoginManager(app)
lm.init_app(app)
lm.login_view = 'login'
lm.anonymous_user = ub.Anonymous
# NOTE(review): hard-coded secret key -- with public source code, sessions
# are forgeable; this should be generated/configured per installation.
app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
db.setup_db()
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-03-07 18:10:17 +00:00
|
|
|
# In debug mode, mirror SQLAlchemy's own loggers into the application log file.
if config.config_log_level == logging.DEBUG :
    logging.getLogger("sqlalchemy.engine").addHandler(file_handler)
    # NOTE(review): engine level is pinned to INFO while pool/orm follow
    # config.config_log_level -- looks inconsistent; confirm intent.
    logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
    logging.getLogger("sqlalchemy.pool").addHandler(file_handler)
    logging.getLogger("sqlalchemy.pool").setLevel(config.config_log_level)
    logging.getLogger("sqlalchemy.orm").addHandler(file_handler)
    logging.getLogger("sqlalchemy.orm").setLevel(config.config_log_level)
|
|
|
|
|
|
|
|
|
2017-02-22 22:06:59 +00:00
|
|
|
def is_gdrive_ready():
    """True when both the gdrive settings and credential files are present."""
    return all(os.path.exists(name) for name in ('settings.yaml', 'gdrive_credentials'))
|
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
@babel.localeselector
def get_locale():
    """Pick the UI locale: the user's stored preference wins, otherwise
    negotiate against the request's Accept-Language header."""
    user = getattr(g, 'user', None)
    if user is not None and hasattr(user, "locale"):
        return user.locale
    supported = [trans.language for trans in babel.list_translations()] + ['en']
    requested = [accepted.replace('-', '_') for accepted in request.accept_languages.values()]
    return negotiate_locale(requested, supported)
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
@babel.timezoneselector
def get_timezone():
    """Return the logged-in user's time zone, or None when no user is bound to g."""
    user = getattr(g, 'user', None)
    return user.timezone if user is not None else None
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@lm.user_loader
def load_user(user_id):
    """flask-login callback: fetch a User row by its numeric id (or None)."""
    uid = int(user_id)
    return ub.session.query(ub.User).filter(ub.User.id == uid).first()
|
2015-08-02 18:59:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
@lm.header_loader
def load_user_from_header(header_val):
    """flask-login callback: authenticate a user from an HTTP Basic header.

    Returns the matching User when the decoded credentials check out,
    otherwise None.
    """
    token = header_val
    if token.startswith('Basic '):
        token = token.replace('Basic ', '', 1)
    username = password = ''
    try:
        decoded = base64.b64decode(token)
        username = decoded.split(':')[0]
        password = decoded.split(':')[1]
    except TypeError:
        # malformed base64 -> fall through with empty credentials
        pass
    user = ub.session.query(ub.User).filter(ub.User.nickname == username).first()
    if user and check_password_hash(user.password, password):
        return user
    return
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-10-13 17:06:37 +00:00
|
|
|
def check_auth(username, password):
    """Return True when *username*/*password* match a stored account."""
    account = ub.session.query(ub.User).filter(ub.User.nickname == username).first()
    return bool(account and check_password_hash(account.password, password))
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-10-13 17:06:37 +00:00
|
|
|
def authenticate():
    """Return a 401 response that makes the client prompt for Basic auth."""
    message = ('Could not verify your access level for that URL.\n'
               'You have to login with proper credentials')
    return Response(message, 401, {'WWW-Authenticate': 'Basic realm="Login Required"'})
|
|
|
|
|
2017-02-20 18:34:37 +00:00
|
|
|
def updateGdriveCalibreFromLocal():
    """Back up the Calibre DB to gdrive, mirror the library folder there,
    then delete the now-uploaded local book directories."""
    drive = Gdrive.Instance().drive
    gdriveutils.backupCalibreDbAndOptionalDownload(drive)
    gdriveutils.copyToDrive(drive, config.config_calibre_dir, False, True)
    for entry in os.listdir(config.config_calibre_dir):
        full_path = os.path.join(config.config_calibre_dir, entry)
        if os.path.isdir(full_path):
            shutil.rmtree(full_path)
|
2015-10-13 17:06:37 +00:00
|
|
|
|
2016-04-27 16:29:45 +00:00
|
|
|
def requires_basic_auth_if_no_ano(f):
    """Decorator: demand valid HTTP Basic credentials unless anonymous
    browsing is enabled in the configuration."""
    @wraps(f)
    def decorated(*args, **kwargs):
        if config.config_anonbrowse != 1:
            credentials = request.authorization
            if not credentials or not check_auth(credentials.username, credentials.password):
                return authenticate()
        return f(*args, **kwargs)

    return decorated
|
2015-08-02 18:59:11 +00:00
|
|
|
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
# simple pagination for the feed
class Pagination(object):
    """Computes page counts, offsets and the page-number window for the
    paginated book listings and OPDS feeds."""

    def __init__(self, page, per_page, total_count):
        self.page = int(page)            # current page, 1-based
        self.per_page = int(per_page)    # entries per page
        self.total_count = int(total_count)

    @property
    def next_offset(self):
        return int(self.page * self.per_page)

    @property
    def previous_offset(self):
        # offset of the page before the current one; only meaningful
        # when has_prev is True (negative for page 1)
        return int((self.page - 2) * self.per_page)

    @property
    def last_offset(self):
        last = int(self.total_count) - int(self.per_page)
        if last < 0:
            last = 0
        return int(last)

    @property
    def pages(self):
        return int(ceil(self.total_count / float(self.per_page)))

    @property
    def has_prev(self):
        return self.page > 1

    @property
    def has_next(self):
        return self.page < self.pages

    def iter_pages(self, left_edge=2, left_current=2,
                   right_current=5, right_edge=2):
        """Yield the page numbers to display, with None marking a gap."""
        last = 0
        # Fixed: range() instead of xrange -- xrange only existed via the
        # optional past.builtins import on Python 3, making this a
        # portability bug; range behaves identically here.
        for num in range(1, self.pages + 1):
            if num <= left_edge or (num > self.page - left_current - 1 and num < self.page + right_current) \
                    or num > self.pages - right_edge:
                if last + 1 != num:
                    yield None
                yield num
                last = num
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
# pagination links in jinja
def url_for_other_page(page):
    """Build the current endpoint's URL with only the page argument swapped."""
    view_args = request.view_args.copy()
    view_args['page'] = page
    return url_for(request.endpoint, **view_args)


app.jinja_env.globals['url_for_other_page'] = url_for_other_page
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-04-27 08:35:23 +00:00
|
|
|
def login_required_if_no_ano(func):
    """Apply flask-login's login_required unless anonymous browsing is on."""
    return func if config.config_anonbrowse == 1 else login_required(func)
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
# custom jinja filters
|
2016-04-03 21:33:29 +00:00
|
|
|
@app.template_filter('shortentitle')
def shortentitle_filter(s):
    """Jinja filter: shorten long titles -- drop the subtitle after ':',
    then hard-wrap at 60 chars with a '[...]' marker if still too long."""
    if len(s) <= 60:
        return s
    s = s.split(':', 1)[0]
    if len(s) > 60:
        s = textwrap.wrap(s, 60, break_long_words=False)[0] + ' [...]'
    return s
|
2015-10-13 17:06:37 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2017-01-15 11:23:08 +00:00
|
|
|
@app.template_filter('mimetype')
def mimetype_filter(val):
    """Jinja filter: map a file extension to its MIME type,
    defaulting to application/octet-stream for unknown extensions."""
    # dict.get replaces the previous broad `except Exception` (with an
    # unused exception variable), which could mask unrelated errors.
    return mimetypes.types_map.get('.' + val, 'application/octet-stream')
|
|
|
|
|
2017-02-15 17:09:17 +00:00
|
|
|
@app.template_filter('formatdate')
def formatdate(val):
    """Jinja filter: render an ISO-like timestamp string as a localized
    medium-format date."""
    # strip ':' and '-' separators -- except a trailing '-HH:MM' or '-YYYY'
    # suffix -- so strptime sees a compact 'YYYYMMDD HHMMSS' prefix
    conformed_timestamp = re.sub(r"[:]|([-](?!((\d{2}[:]\d{2})|(\d{4}))$))", '', val)
    formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S")
    return format_date(formatdate, format='medium',locale=get_locale())
|
|
|
|
|
2017-02-20 18:34:37 +00:00
|
|
|
@app.template_filter('strftime')
def timestamptodate(date, fmt=None):
    """Jinja filter: format a millisecond epoch timestamp; defaults to
    '%d %m %Y - %H:%S' when no format is given."""
    stamp = datetime.datetime.fromtimestamp(
        int(date)/1000
    )
    native = stamp.replace(tzinfo=None)
    # local renamed from `format`, which shadowed the builtin
    pattern = fmt if fmt else '%d %m %Y - %H:%S'
    return native.strftime(pattern)
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-10-13 16:07:17 +00:00
|
|
|
def admin_required(f):
    """Restrict a view to users with the admin role; others get 403."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not current_user.role_admin():
            abort(403)
        return f(*args, **kwargs)

    return inner
|
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2017-01-22 15:44:37 +00:00
|
|
|
def unconfigured(f):
    """
    Allows access only while the database is not yet configured
    (i.e. during initial setup); otherwise aborts with 403.
    """

    @wraps(f)
    def inner(*args, **kwargs):
        if not config.db_configured:
            return f(*args, **kwargs)
        abort(403)

    return inner
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-04-27 08:35:23 +00:00
|
|
|
def download_required(f):
    """Restrict a view to users with download (or admin) rights."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not (current_user.role_download() or current_user.role_admin()):
            abort(403)
        return f(*args, **kwargs)

    return inner
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
|
2016-04-27 08:35:23 +00:00
|
|
|
def upload_required(f):
    """Restrict a view to users with upload (or admin) rights."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not (current_user.role_upload() or current_user.role_admin()):
            abort(403)
        return f(*args, **kwargs)

    return inner
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
|
2016-04-27 08:35:23 +00:00
|
|
|
def edit_required(f):
    """Restrict a view to users with edit (or admin) rights."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not (current_user.role_edit() or current_user.role_admin()):
            abort(403)
        return f(*args, **kwargs)

    return inner
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
# Fill indexpage with all requested data from database
def fill_indexpage(page, database, db_filter, order):
    """Return (entries, random picks, Pagination) for one index page,
    restricted by *db_filter* and the user's language filter."""
    lang = current_user.filter_language()
    # renamed from `filter`, which shadowed the builtin
    if lang != "all":
        lang_filter = db.Books.languages.any(db.Languages.lang_code == lang)
    else:
        lang_filter = True
    if current_user.show_detail_random():
        random = db.session.query(db.Books).filter(lang_filter).order_by(func.random()).limit(config.config_random_books)
    else:
        random = false
    off = int(int(config.config_books_per_page) * (page - 1))
    pagination = Pagination(page, config.config_books_per_page,
                            len(db.session.query(database).filter(db_filter).filter(lang_filter).all()))
    entries = db.session.query(database).filter(db_filter).filter(lang_filter).order_by(order).offset(off).limit(
        config.config_books_per_page)
    return entries, random, pagination
|
|
|
|
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
def modify_database_object(input_elements, db_book_object, db_object, db_session, type):
    """Synchronise a book's many-to-many collection with a new name list.

    input_elements: the desired element names (empty strings are ignored)
    db_book_object: the book's current collection (e.g. book.tags)
    db_object:      the mapped class for the elements (Tags, Authors, ...)
    db_session:     session used for add/delete
    type:           one of 'author', 'series', 'languages', or tag-like
                    (NOTE: parameter name shadows the builtin `type`)
    """
    input_elements = [x for x in input_elements if x != '']
    # we have all input element (authors, series, tags) names now
    # 1. search for elements to remove
    del_elements = []
    for c_elements in db_book_object:
        found = False
        for inp_element in input_elements:
            if inp_element == c_elements.name:
                found = True
                break
        # if the element was not found in the new list, add it to remove list
        if not found:
            del_elements.append(c_elements)
    # 2. search for elements that need to be added
    add_elements = []
    for inp_element in input_elements:
        found = False
        for c_elements in db_book_object:
            if inp_element == c_elements.name:
                found = True
                break
        if not found:
            add_elements.append(inp_element)
    # if there are elements to remove, we remove them now
    if len(del_elements) > 0:
        for del_element in del_elements:
            db_book_object.remove(del_element)
            # element no longer referenced by any book -> delete it entirely
            if len(del_element.books) == 0:
                db_session.delete(del_element)
    # if there are elements to add, we add them now!
    if len(add_elements) > 0:
        # languages are matched on lang_code, everything else on name
        if type == 'languages':
            db_filter = db_object.lang_code
        else:
            db_filter = db_object.name
        for add_element in add_elements:
            # check if a element with that name exists
            new_element = db_session.query(db_object).filter(db_filter == add_element).first()
            # if no element is found add it
            if new_element is None:
                # constructor arity differs per element type
                if type == 'author':
                    new_element = db_object(add_element, add_element, "")
                else:
                    if type == 'series':
                        new_element = db_object(add_element, add_element)
                    else:  # type should be tag, or languages
                        new_element = db_object(add_element)
                db_session.add(new_element)
                # re-fetch so the appended object is session-attached
                # NOTE(review): uses db.session here vs db_session above -- confirm intent
                new_element = db.session.query(db_object).filter(db_filter == add_element).first()
            # add element to book
            db_book_object.append(new_element)
|
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2017-01-22 20:30:36 +00:00
|
|
|
def render_title_template(*args, **kwargs):
    """render_template wrapper injecting the configured site title as `instance`."""
    return render_template(instance=config.config_calibre_web_title, *args, **kwargs)
|
|
|
|
|
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.before_request
def before_request():
    """Populate per-request globals and force initial setup when unconfigured."""
    if ub.DEVELOPMENT:
        reload(ub)  # hot-reload the user-DB module while developing
    g.user = current_user
    g.allow_registration = config.config_public_reg
    g.allow_upload = config.config_uploading
    g.public_shelfes = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1).all()
    # until the Calibre DB is configured, everything except the setup page,
    # login and static assets redirects to basic_configuration
    if not config.db_configured and request.endpoint not in ('basic_configuration', 'login') and '/static/' not in request.path:
        return redirect(url_for('basic_configuration'))
|
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
# Routing functions
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds")
@requires_basic_auth_if_no_ano
def feed_index():
    """Serve the OPDS catalog root document as XML."""
    # Removed: a language-filter expression was computed here but never
    # used anywhere in this view -- dead code.
    xml = render_title_template('index.xml')
    response = make_response(xml)
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds/osd")
@requires_basic_auth_if_no_ano
def feed_osd():
    """Serve the OpenSearch description document as XML."""
    response = make_response(render_title_template('osd.xml',lang='de-DE'))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2017-01-12 19:43:36 +00:00
|
|
|
@app.route("/opds/search/<query>")
@requires_basic_auth_if_no_ano
def feed_cc_search(query):
    """OPDS search endpoint for clients that put the term in the URL path."""
    return feed_search(query.strip())
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds/search", methods=["GET"])
@requires_basic_auth_if_no_ano
def feed_normal_search():
    """OPDS search endpoint for clients that pass the term as ?query=."""
    return feed_search(request.args.get("query").strip())
|
|
|
|
|
2017-01-15 11:37:58 +00:00
|
|
|
|
2017-01-12 19:43:36 +00:00
|
|
|
def feed_search(term):
    """Render feed.xml for books whose tags, series, authors, publishers
    or title match *term*, restricted to the user's language filter."""
    lang = current_user.filter_language()
    if lang != "all":
        lang_filter = db.Books.languages.any(db.Languages.lang_code == lang)
    else:
        lang_filter = True
    if term:
        pattern = "%" + term + "%"
        entries = db.session.query(db.Books).filter(db.or_(
            db.Books.tags.any(db.Tags.name.like(pattern)),
            db.Books.series.any(db.Series.name.like(pattern)),
            db.Books.authors.any(db.Authors.name.like(pattern)),
            db.Books.publishers.any(db.Publishers.name.like(pattern)),
            db.Books.title.like(pattern))).filter(lang_filter).all()
        # at least 1 so Pagination never divides by zero
        entriescount = len(entries) if len(entries) > 0 else 1
        pagination = Pagination(1, entriescount, entriescount)
        xml = render_title_template('feed.xml', searchterm=term, entries=entries, pagination=pagination)
    else:
        xml = render_title_template('feed.xml', searchterm="")
    response = make_response(xml)
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds/new")
@requires_basic_auth_if_no_ano
def feed_new():
    """OPDS feed of the newest books, paginated by the offset query arg."""
    off = request.args.get("offset") or 0
    entries, random, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
                                                 db.Books, True, db.Books.timestamp.desc())
    response = make_response(render_title_template('feed.xml', entries=entries, pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds/discover")
@requires_basic_auth_if_no_ano
def feed_discover():
    """OPDS feed with a random selection of books in the user's language."""
    lang = current_user.filter_language()
    if lang != "all":
        lang_filter = db.Books.languages.any(db.Languages.lang_code == lang)
    else:
        lang_filter = True
    entries = db.session.query(db.Books).filter(lang_filter).order_by(func.random()).limit(config.config_books_per_page)
    pagination = Pagination(1, config.config_books_per_page, int(config.config_books_per_page))
    response = make_response(render_title_template('feed.xml', entries=entries, pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-02-04 13:28:18 +00:00
|
|
|
@app.route("/opds/rated")
@requires_basic_auth_if_no_ano
def feed_best_rated():
    """OPDS feed of books whose rating exceeds 9, newest first."""
    off = request.args.get("offset") or 0
    entries, random, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
                                                 db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
                                                 db.Books.timestamp.desc())
    response = make_response(render_title_template('feed.xml', entries=entries, pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2017-02-04 13:28:18 +00:00
|
|
|
@app.route("/opds/hot")
@requires_basic_auth_if_no_ano
def feed_hot():
    """OPDS feed of the most-downloaded books.

    Ranks books by download count from the app database (ub.Downloads) and
    resolves each to its calibre record; stale download rows whose book no
    longer exists are purged on the fly.
    """
    off = request.args.get("offset")
    if not off:
        off = 0
    if current_user.filter_language() != "all":
        filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
    else:
        filter = True
    # Most-downloaded first.
    all_books = ub.session.query(ub.Downloads, ub.func.count(ub.Downloads.book_id)).order_by(
        ub.func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
    hot_books = all_books.offset(off).limit(config.config_books_per_page)
    entries = list()
    for book in hot_books:
        downloadBook = db.session.query(db.Books).filter(db.Books.id == book.Downloads.book_id).first()
        if downloadBook:
            entry = db.session.query(db.Books).filter(filter).filter(db.Books.id == book.Downloads.book_id).first()
            # Guard: the language filter may hide an existing book; the previous
            # code appended None here, which broke the feed template.
            if entry:
                entries.append(entry)
        else:
            # Book was removed from the calibre library: drop its download records.
            ub.session.query(ub.Downloads).filter(book.Downloads.book_id == ub.Downloads.book_id).delete()
            ub.session.commit()
    numBooks = len(entries)
    # Floor division keeps the page number an int on Python 3 as well.
    pagination = Pagination((int(off) // int(config.config_books_per_page)) + 1,
                            config.config_books_per_page, numBooks)
    xml = render_title_template('feed.xml', entries=entries, pagination=pagination)
    response = make_response(xml)
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds/author")
@requires_basic_auth_if_no_ano
def feed_authorindex():
    """OPDS navigation feed listing authors, paged via ?offset=."""
    off = request.args.get("offset") or 0
    lang_code = current_user.filter_language()
    if lang_code == "all":
        filter = True
    else:
        filter = db.Books.languages.any(db.Languages.lang_code == lang_code)
    entries = db.session.query(db.Authors).join(db.books_authors_link).join(db.Books).filter(filter)\
        .group_by('books_authors_link.author').order_by(db.Authors.sort).limit(config.config_books_per_page).offset(off)
    pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
                            len(db.session.query(db.Authors).all()))
    response = make_response(
        render_title_template('feed.xml', listelements=entries, folder='feed_author', pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-01-09 18:10:39 +00:00
|
|
|
@app.route("/opds/author/<int:id>")
@requires_basic_auth_if_no_ano
def feed_author(id):
    """OPDS acquisition feed: books by one author, newest first."""
    off = request.args.get("offset") or 0
    page = int(off) / (int(config.config_books_per_page)) + 1
    entries, random, pagination = fill_indexpage(page, db.Books,
                                                 db.Books.authors.any(db.Authors.id == id),
                                                 db.Books.timestamp.desc())
    response = make_response(render_title_template('feed.xml', entries=entries, pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds/category")
@requires_basic_auth_if_no_ano
def feed_categoryindex():
    """OPDS navigation feed listing categories (tags), paged via ?offset=."""
    off = request.args.get("offset") or 0
    lang_code = current_user.filter_language()
    if lang_code == "all":
        filter = True
    else:
        filter = db.Books.languages.any(db.Languages.lang_code == lang_code)
    entries = db.session.query(db.Tags).join(db.books_tags_link).join(db.Books).filter(filter).\
        group_by('books_tags_link.tag').order_by(db.Tags.name).offset(off).limit(config.config_books_per_page)
    pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
                            len(db.session.query(db.Tags).all()))
    response = make_response(
        render_title_template('feed.xml', listelements=entries, folder='feed_category', pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
|
|
|
|
2017-01-09 18:10:39 +00:00
|
|
|
@app.route("/opds/category/<int:id>")
@requires_basic_auth_if_no_ano
def feed_category(id):
    """OPDS acquisition feed: books in one category (tag), newest first."""
    off = request.args.get("offset") or 0
    page = int(off) / (int(config.config_books_per_page)) + 1
    entries, random, pagination = fill_indexpage(page, db.Books,
                                                 db.Books.tags.any(db.Tags.id == id),
                                                 db.Books.timestamp.desc())
    response = make_response(render_title_template('feed.xml', entries=entries, pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-12-28 14:51:58 +00:00
|
|
|
@app.route("/opds/series")
@requires_basic_auth_if_no_ano
def feed_seriesindex():
    """OPDS navigation feed listing series, paged via ?offset=."""
    off = request.args.get("offset")
    if not off:
        off = 0
    if current_user.filter_language() != "all":
        filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
    else:
        filter = True
    # Page the result: the previous ".offset(off).all()" lacked a limit and
    # returned every remaining series (cf. feed_authorindex/feed_categoryindex).
    entries = db.session.query(db.Series).join(db.books_series_link).join(db.Books).filter(filter).\
        group_by('books_series_link.series').order_by(db.Series.sort).offset(off).limit(config.config_books_per_page)
    pagination = Pagination((int(off) // int(config.config_books_per_page)) + 1, config.config_books_per_page,
                            len(db.session.query(db.Series).all()))
    xml = render_title_template('feed.xml', listelements=entries, folder='feed_series', pagination=pagination)
    response = make_response(xml)
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
|
|
|
|
2017-01-09 18:10:39 +00:00
|
|
|
@app.route("/opds/series/<int:id>")
@requires_basic_auth_if_no_ano
def feed_series(id):
    """OPDS acquisition feed: books of one series, ordered by series index."""
    off = request.args.get("offset") or 0
    page = int(off) / (int(config.config_books_per_page)) + 1
    entries, random, pagination = fill_indexpage(page, db.Books,
                                                 db.Books.series.any(db.Series.id == id),
                                                 db.Books.series_index)
    response = make_response(render_title_template('feed.xml', entries=entries, pagination=pagination))
    response.headers["Content-Type"] = "application/xml"
    return response
|
|
|
|
|
|
|
|
|
2017-03-06 20:58:15 +00:00
|
|
|
def partial(total_byte_len, part_size_limit):
    """Split a byte length into inclusive [first, last] ranges.

    Each range covers at most part_size_limit bytes; the final range is
    clipped to total_byte_len - 1. Returns an empty list for length 0.
    """
    return [[start, min(total_byte_len - 1, start + part_size_limit - 1)]
            for start in range(0, total_byte_len, part_size_limit)]
|
|
|
|
|
|
|
|
def do_gdrive_download(df, headers):
    """Stream a Google Drive file to the client in ranged chunks.

    df:      pydrive file object with 'fileSize'/'downloadUrl' metadata.
    headers: response headers to attach (e.g. Content-Disposition).

    Issues one HTTP Range request per 1 MiB chunk so large files are never
    held in memory at once, and streams the chunks with the request context.
    """
    total_size = int(df.metadata.get('fileSize'))
    download_url = df.metadata.get('downloadUrl')
    # 1 MiB chunk size (the old comment claimed 100M, which was wrong).
    ranges = partial(total_size, 1024 * 1024)

    def stream():
        # 'range_headers' deliberately does not shadow the outer 'headers'
        # parameter, and 'byte_range' no longer shadows the builtin 'bytes'.
        for byte_range in ranges:
            range_headers = {"Range": 'bytes=%s-%s' % (byte_range[0], byte_range[1])}
            resp, content = df.auth.Get_Http_Object().request(download_url, headers=range_headers)
            if resp.status == 206:  # Partial Content: chunk retrieved
                yield content
            else:
                app.logger.info('An error occurred: %s' % resp)
                return
    return Response(stream_with_context(stream()), headers=headers)
|
2017-03-06 20:58:15 +00:00
|
|
|
|
2017-01-15 10:05:28 +00:00
|
|
|
@app.route("/opds/download/<book_id>/<format>/")
@requires_basic_auth_if_no_ano
@download_required
def get_opds_download_link(book_id, format):
    """Serve one book file (by id and format) for OPDS clients.

    Records the download for authenticated users, builds a sanitized
    "Author-Title.format" attachment filename, and serves the file either
    from Google Drive (streamed) or from the local calibre directory.
    """
    # Timing instrumentation; elapsed time is logged before the file is served.
    startTime=time.time()
    # Clients may append an extension ("epub.epub"); keep only the format part.
    format = format.split(".")[0]
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == format.upper()).first()
    app.logger.info (data.name)
    if current_user.is_authenticated:
        # Track the download so the "hot books" ranking stays current.
        helper.update_download(book_id, int(current_user.id))
    file_name = book.title
    if len(book.authors) > 0:
        file_name = book.authors[0].name + '-' + file_name
    # Strip characters that are unsafe in filenames.
    file_name = helper.get_valid_filename(file_name)
    headers={}
    # RFC 5987 encoded filename so non-ASCII titles survive the header.
    headers["Content-Disposition"] = "attachment; filename*=UTF-8''%s.%s" % (urllib.quote(file_name.encode('utf8')), format)
    app.logger.info (time.time()-startTime)
    startTime=time.time()
    if config.config_use_google_drive:
        # Resolve the file inside the book's Drive folder and stream it in chunks.
        df=gdriveutils.getFileFromEbooksFolder(Gdrive.Instance().drive, book.path, data.name + "." + format)
        return do_gdrive_download(df, headers)
    else:
        # file_name = helper.get_valid_filename(file_name)
        response = make_response(send_from_directory(os.path.join(config.config_calibre_dir, book.path), data.name + "." + format))
        response.headers=headers
        return response
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2017-01-09 20:18:49 +00:00
|
|
|
@app.route("/ajax/book/<string:uuid>")
@requires_basic_auth_if_no_ano
def get_metadata_calibre_companion(uuid):
    """Return book metadata as JSON for a (partial) calibre UUID, or "" if unknown."""
    entry = db.session.query(db.Books).filter(db.Books.uuid.like("%" + uuid + "%")).first()
    if entry is None:
        return ""
    response = make_response(render_template('json.txt', entry=entry))
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
@app.route("/get_authors_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_authors_json():
    """Typeahead endpoint: authors whose name contains ?q=, as a JSON list."""
    if request.method == "GET":
        query = request.args.get('q')
        # Bind the search term as a parameter instead of concatenating it into
        # the SQL string: prevents SQL injection and fixes names containing "'".
        entries = db.session.execute("select name from authors where name like :name",
                                     {'name': "%" + query + "%"})
        json_dumps = json.dumps([dict(r) for r in entries])
        return json_dumps
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
@app.route("/get_tags_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_tags_json():
    """Typeahead endpoint: tags whose name contains ?q=, as a JSON list."""
    if request.method == "GET":
        query = request.args.get('q')
        # Parameterized query: the old string concatenation was SQL-injectable.
        entries = db.session.execute("select name from tags where name like :name",
                                     {'name': "%" + query + "%"})
        json_dumps = json.dumps([dict(r) for r in entries])
        return json_dumps
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2017-01-30 17:58:36 +00:00
|
|
|
@app.route("/get_update_status", methods=['GET'])
@login_required_if_no_ano
def get_update_status():
    """Check GitHub for a newer calibre-web commit; return {'status', 'commit'} as JSON."""
    status = {}
    if request.method == "GET":
        # should be automatically replaced by git with current commit hash
        commit_id = '$Format:%H$'
        commit = requests.get('https://api.github.com/repos/janeczku/calibre-web/git/refs/heads/master').json()
        if "object" in commit and commit['object']['sha'] != commit_id:
            status['status'] = True
            commitdate = requests.get('https://api.github.com/repos/janeczku/calibre-web/git/commits/' + commit['object']['sha']).json()
            if "committer" in commitdate:
                status['commit'] = commitdate['committer']['date']
            else:
                status['commit'] = u'Unknown'
        else:
            status['status'] = False
    return json.dumps(status)
|
|
|
|
|
2017-02-20 18:52:00 +00:00
|
|
|
@app.route("/get_updater_status", methods=['GET','POST'])
@login_required
@admin_required
def get_updater_status():
    """Start the in-place updater (POST start=True) or poll its progress (GET).

    Returns JSON: 'status' is the updater's numeric step; on start, 'text'
    maps step numbers to translated progress messages for the UI dialog.
    """
    status = {}
    if request.method == "POST":
        commit = request.form.to_dict()
        if "start" in commit and commit['start'] == 'True':
            # Step-number -> message mapping consumed by the frontend progress dialog.
            text={
                "1": _(u'Requesting update package'),
                "2": _(u'Downloading update package'),
                "3": _(u'Unzipping update package'),
                "4": _(u'Files are replaced'),
                "5": _(u'Database connections are closed'),
                "6": _(u'Server is stopped'),
                "7": _(u'Update finished, please press okay and reload page')
            }
            status['text']=text
            # Launch the background update thread; stored module-wide on helper
            # so subsequent GET polls can reach it.
            helper.updater_thread = helper.Updater()
            helper.updater_thread.start()
            status['status']=helper.updater_thread.get_update_status()
    elif request.method == "GET":
        try:
            status['status']=helper.updater_thread.get_update_status()
        except Exception as e:
            # Thread object no longer available (e.g. after the restart step):
            # report the final step so the UI closes the dialog.
            status['status'] = 7
    return json.dumps(status)
|
2017-01-30 17:58:36 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
@app.route("/get_languages_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_languages_json():
    """Typeahead endpoint: localized language names containing ?q=, as JSON."""
    if request.method == "GET":
        query = request.args.get('q').lower()
        languages = db.session.query(db.Languages).all()
        for lang in languages:
            # Prefer the locale-aware name; fall back to the ISO-639-3 English name.
            try:
                lang.name = LC.parse(lang.lang_code).get_language_name(get_locale())
            except Exception:
                lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
        matches = [lang for lang in languages if query in lang.name.lower()]
        return json.dumps([dict(name=r.name) for r in matches])
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
@app.route("/get_series_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_series_json():
    """Typeahead endpoint: series whose name contains ?q=, as a JSON list."""
    if request.method == "GET":
        query = request.args.get('q')
        # Parameterized query: the old string concatenation was SQL-injectable.
        entries = db.session.execute("select name from series where name like :name",
                                     {'name': "%" + query + "%"})
        json_dumps = json.dumps([dict(r) for r in entries])
        return json_dumps
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.route("/get_matching_tags", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_matching_tags():
    """Return the tag ids present on books matching the given author/title/tag
    constraints, as JSON {'tags': [...]} (for the advanced-search form)."""
    tag_dict = {'tags': []}
    if request.method == "GET":
        author_input = request.args.get('author_name')
        title_input = request.args.get('book_title')
        include_tag_inputs = request.args.getlist('include_tag')
        exclude_tag_inputs = request.args.getlist('exclude_tag')
        q = db.session.query(db.Books).filter(
            db.Books.authors.any(db.Authors.name.like("%" + author_input + "%")),
            db.Books.title.like("%" + title_input + "%"))
        # Narrow by required tags, then by excluded tags.
        for tag in include_tag_inputs:
            q = q.filter(db.Books.tags.any(db.Tags.id == tag))
        for tag in exclude_tag_inputs:
            q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
        for book in q:
            for tag in book.tags:
                if tag.id not in tag_dict['tags']:
                    tag_dict['tags'].append(tag.id)
    json_dumps = json.dumps(tag_dict)
    return json_dumps
|
2015-10-13 17:06:37 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/", defaults={'page': 1})
@app.route('/page/<int:page>')
@login_required_if_no_ano
def index(page):
    """Landing page: newest books first, one page at a time."""
    entries, random, pagination = fill_indexpage(page, db.Books, True, db.Books.timestamp.desc())
    return render_title_template('index.html',
                                 entries=entries,
                                 random=random,
                                 pagination=pagination,
                                 title=_(u"Latest Books"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
|
|
|
@app.route("/hot", defaults={'page': 1})
@app.route('/hot/page/<int:page>')
@login_required_if_no_ano
def hot_books(page):
    """Render the most-downloaded books, paged, honoring the language filter.

    Download counts come from the app database (ub.Downloads); stale rows
    whose book no longer exists in the calibre library are purged here.
    """
    if current_user.filter_language() != "all":
        filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
    else:
        filter = True
    if current_user.show_detail_random():
        random = db.session.query(db.Books).filter(filter).order_by(func.random()).limit(config.config_random_books)
    else:
        random = false
    off = int(int(config.config_books_per_page) * (page - 1))
    all_books = ub.session.query(ub.Downloads, ub.func.count(ub.Downloads.book_id)).order_by(
        ub.func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
    hot_books = all_books.offset(off).limit(config.config_books_per_page)
    entries = list()
    for book in hot_books:
        downloadBook = db.session.query(db.Books).filter(db.Books.id == book.Downloads.book_id).first()
        if downloadBook:
            entry = db.session.query(db.Books).filter(filter).filter(db.Books.id == book.Downloads.book_id).first()
            # Guard: the language filter may hide an existing book; the previous
            # code appended None here, which broke the template.
            if entry:
                entries.append(entry)
        else:
            # Book was removed from the calibre library: drop its download records.
            ub.session.query(ub.Downloads).filter(book.Downloads.book_id == ub.Downloads.book_id).delete()
            ub.session.commit()
    numBooks = len(entries)
    pagination = Pagination(page, config.config_books_per_page, numBooks)
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(u"Hot Books (most downloaded)"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-02-04 13:28:18 +00:00
|
|
|
@app.route("/rated", defaults={'page': 1})
@app.route('/rated/page/<int:page>')
@login_required_if_no_ano
def best_rated_books(page):
    """Render books with a rating above 9, newest first, one page at a time."""
    rating_filter = db.Books.ratings.any(db.Ratings.rating > 9)
    entries, random, pagination = fill_indexpage(page, db.Books, rating_filter, db.Books.timestamp.desc())
    return render_title_template('index.html',
                                 entries=entries,
                                 random=random,
                                 pagination=pagination,
                                 title=_(u"Best rated books"))
|
|
|
|
|
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/discover", defaults={'page': 1})
@app.route('/discover/page/<int:page>')
@login_required_if_no_ano
def discover(page):
    """Render one page of randomly ordered books."""
    entries, random, pagination = fill_indexpage(page, db.Books, True, func.randomblob(2))
    # Random order makes real paging meaningless; present a single full page.
    pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
    return render_title_template('discover.html', entries=entries, pagination=pagination,
                                 title=_(u"Random Books"))
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
@app.route("/author")
@login_required_if_no_ano
def author_list():
    """List every author with a book count, honoring the language filter."""
    lang_code = current_user.filter_language()
    if lang_code == "all":
        filter = True
    else:
        filter = db.Books.languages.any(db.Languages.lang_code == lang_code)
    entries = db.session.query(db.Authors, func.count('books_authors_link.book').label('count')) \
        .join(db.books_authors_link).join(db.Books) \
        .filter(filter).group_by('books_authors_link.author').order_by(db.Authors.sort).all()
    return render_title_template('list.html', entries=entries, folder='author', title=_(u"Author list"))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2017-02-03 12:20:35 +00:00
|
|
|
@app.route("/author/<int:id>", defaults={'page': 1})
@app.route("/author/<int:id>/<int:page>'")
@login_required_if_no_ano
def author(id, page):
    """Render the paged list of books by one author.

    NOTE(review): the second route literally ends with a quote character
    ("...<int:page>'"), mirroring the series route — looks like a typo, but
    changing the URL would break existing links; confirm before fixing.
    """
    entries, random, pagination = fill_indexpage(page, db.Books, db.Books.authors.any(db.Authors.id == id),
                                                 db.Books.timestamp.desc())
    name = db.session.query(db.Authors).filter(db.Authors.id == id).first().name
    if entries:
        # Pass pagination through so page links render; it was computed but
        # previously never handed to the template (cf. series()).
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Author: %(name)s", name=name))
    else:
        flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
        return redirect(url_for("index"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
|
|
|
|
@app.route("/series")
@login_required_if_no_ano
def series_list():
    """List every series with a book count, honoring the language filter."""
    lang_code = current_user.filter_language()
    if lang_code == "all":
        filter = True
    else:
        filter = db.Books.languages.any(db.Languages.lang_code == lang_code)
    entries = db.session.query(db.Series, func.count('books_series_link.book').label('count')) \
        .join(db.books_series_link).join(db.Books) \
        .filter(filter).group_by('books_series_link.series').order_by(db.Series.sort).all()
    return render_title_template('list.html', entries=entries, folder='series', title=_(u"Series list"))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-02-03 12:20:35 +00:00
|
|
|
@app.route("/series/<int:id>/", defaults={'page': 1})
@app.route("/series/<int:id>/<int:page>'")
@login_required_if_no_ano
def series(id, page):
    """Render the paged list of books in one series, ordered by series index."""
    entries, random, pagination = fill_indexpage(page, db.Books, db.Books.series.any(db.Series.id == id),
                                                 db.Books.series_index)
    name = db.session.query(db.Series).filter(db.Series.id == id).first().name
    # Guard clause: nothing to show -> flash an error and go back to the index.
    if not entries:
        flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
        return redirect(url_for("index"))
    return render_title_template('index.html', random=random, pagination=pagination, entries=entries,
                                 title=_(u"Series: %(serie)s", serie=name))
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
@app.route("/language")
@login_required_if_no_ano
def language_overview():
    """Show all available book languages with per-language book counts.

    With filter 'all', every language in the library is listed; otherwise
    only the user's single filtered language. Names are localized via Babel
    when the code parses, else taken from the ISO-639-3 table.
    """
    if current_user.filter_language() == u"all":
        languages = db.session.query(db.Languages).all()
        for lang in languages:
            try:
                cur_l = LC.parse(lang.lang_code)
                lang.name = cur_l.get_language_name(get_locale())
            except Exception as e:
                # Babel could not parse the code: fall back to the ISO name.
                lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
    else:
        try:
            # langfound tracks whether Babel could parse the filtered language
            # code; it decides which naming fallback is used below.
            langfound = 1
            cur_l = LC.parse(current_user.filter_language())
        except Exception as e:
            langfound = 0
        languages = db.session.query(db.Languages).filter(
            db.Languages.lang_code == current_user.filter_language()).all()
        if langfound:
            languages[0].name = cur_l.get_language_name(get_locale())
        else:
            languages[0].name = _(isoLanguages.get(part3=languages[0].lang_code).name)
    # Book count per language code, for display next to each language.
    lang_counter = db.session.query(db.books_languages_link,
                                    func.count('books_languages_link.book').label('bookcount')).group_by(
        'books_languages_link.lang_code').all()
    return render_title_template('languages.html', languages=languages, lang_counter=lang_counter,
                                 title=_(u"Available languages"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
@app.route("/language/<name>", defaults={'page': 1})
@app.route('/language/<name>/page/<int:page>')
@login_required_if_no_ano
def language(name, page):
    """Render the paged list of books in one language, newest first."""
    entries, random, pagination = fill_indexpage(page, db.Books,
                                                 db.Books.languages.any(db.Languages.lang_code == name),
                                                 db.Books.timestamp.desc())
    # Localize the language name for the page title; fall back to the ISO name.
    try:
        name = LC.parse(name).get_language_name(get_locale())
    except Exception:
        name = _(isoLanguages.get(part3=name).name)
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(u"Language: %(name)s", name=name))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
|
|
|
|
@app.route("/category")
@login_required_if_no_ano
def category_list():
    """List every category (tag) with a book count, honoring the language filter."""
    lang_code = current_user.filter_language()
    if lang_code == "all":
        filter = True
    else:
        filter = db.Books.languages.any(db.Languages.lang_code == lang_code)
    entries = db.session.query(db.Tags, func.count('books_tags_link.book').label('count')) \
        .join(db.books_tags_link).join(db.Books) \
        .filter(filter).group_by('books_tags_link.tag').all()
    return render_title_template('list.html', entries=entries, folder='category', title=_(u"Category list"))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-02-03 12:20:35 +00:00
|
|
|
@app.route("/category/<int:id>", defaults={'page': 1})
@app.route('/category/<int:id>/<int:page>')
@login_required_if_no_ano
def category(id, page):
    """Render the paged list of books in one category (tag), newest first."""
    entries, random, pagination = fill_indexpage(page, db.Books, db.Books.tags.any(db.Tags.id == id),
                                                 db.Books.timestamp.desc())
    name = db.session.query(db.Tags).filter(db.Tags.id == id).first().name
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(u"Category: %(name)s", name=name))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-02-19 20:08:22 +00:00
|
|
|
@app.route("/ajax/toggleread/<int:id>", methods=['POST'])
@login_required
def toggle_read(id):
    """Flip the current user's read flag for a book, creating the record if needed."""
    book = ub.session.query(ub.ReadBook).filter(ub.and_(ub.ReadBook.user_id == int(current_user.id),
                                                        ub.ReadBook.book_id == id)).first()
    if book is None:
        # First toggle for this user/book pair: create it already marked read.
        book = ub.ReadBook()
        book.user_id = int(current_user.id)
        book.book_id = id
        book.is_read = True
    else:
        book.is_read = not book.is_read
    ub.session.merge(book)
    ub.session.commit()
    return ""
|
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
@app.route("/book/<int:id>")
@login_required_if_no_ano
def show_book(id):
    """Render the detail page for one book, or redirect to the index if it is
    missing or hidden by the user's language filter.

    Gathers localized language names, visible custom columns, the shelves
    containing the book, and (for logged-in users) the read flag.
    """
    if current_user.filter_language() != "all":
        filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
    else:
        filter = True
    entries = db.session.query(db.Books).filter(db.Books.id == id).filter(filter).first()
    if entries:
        # Localize each language name; fall back to the ISO-639-3 table when
        # Babel cannot parse the code.
        for index in range(0, len(entries.languages)):
            try:
                entries.languages[index].language_name = LC.parse(entries.languages[index].lang_code).get_language_name(
                    get_locale())
            except Exception as e:
                entries.languages[index].language_name = _(
                    isoLanguages.get(part3=entries.languages[index].lang_code).name)
        # Custom columns, excluding datatypes the UI cannot render.
        tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()

        if config.config_columns_to_ignore:
            cc=[]
            for col in tmpcc:
                r= re.compile(config.config_columns_to_ignore)
                # NOTE(review): columns MATCHING the "ignore" pattern are kept,
                # which reads inverted relative to the setting's name — confirm
                # the intended semantics before changing.
                if r.match(col.label):
                    cc.append(col)
        else:
            cc=tmpcc
        # Shelf ids this book is already on (used to offer add/remove actions).
        book_in_shelfs = []
        shelfs = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == id).all()
        for entry in shelfs:
            book_in_shelfs.append(entry.shelf)

        #return render_title_template('detail.html', entry=entries, cc=cc,
        #                             title=entries.title, books_shelfs=book_in_shelfs)
        # Read flag only exists for real users; anonymous sessions get None.
        if not current_user.is_anonymous():
            matching_have_read_book=ub.session.query(ub.ReadBook).filter(ub.and_(ub.ReadBook.user_id == int(current_user.id),
                                                     ub.ReadBook.book_id == id)).all()
            have_read=len(matching_have_read_book) > 0 and matching_have_read_book[0].is_read
        else:
            have_read=None

        return render_title_template('detail.html', entry=entries, cc=cc,
                                     title=entries.title, books_shelfs=book_in_shelfs, have_read=have_read)
    else:
        flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
        return redirect(url_for("index"))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-01-02 17:52:33 +00:00
|
|
|
@app.route("/admin")
|
2016-04-27 08:35:23 +00:00
|
|
|
@login_required
|
2017-01-02 17:52:33 +00:00
|
|
|
def admin_forbidden():
|
2015-10-13 16:07:17 +00:00
|
|
|
abort(403)
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
@app.route("/stats")
|
|
|
|
@login_required
|
|
|
|
def stats():
|
|
|
|
counter = len(db.session.query(db.Books).all())
|
|
|
|
authors = len(db.session.query(db.Authors).all())
|
2017-01-28 19:54:31 +00:00
|
|
|
categorys = len(db.session.query(db.Tags).all())
|
|
|
|
series = len(db.session.query(db.Series).all())
|
2017-01-28 19:16:40 +00:00
|
|
|
versions = uploader.book_formats.get_versions()
|
2017-03-01 09:20:58 +00:00
|
|
|
vendorpath = os.path.join(config.get_main_dir, "vendor")
|
2016-12-23 08:53:39 +00:00
|
|
|
if sys.platform == "win32":
|
2017-01-22 20:30:36 +00:00
|
|
|
kindlegen = os.path.join(vendorpath, u"kindlegen.exe")
|
2016-12-23 08:53:39 +00:00
|
|
|
else:
|
2017-01-22 20:30:36 +00:00
|
|
|
kindlegen = os.path.join(vendorpath, u"kindlegen")
|
2017-01-28 19:16:40 +00:00
|
|
|
versions['KindlegenVersion'] = _('not installed')
|
2016-12-23 08:53:39 +00:00
|
|
|
if os.path.exists(kindlegen):
|
2017-01-28 19:16:40 +00:00
|
|
|
p = subprocess.Popen(kindlegen, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
|
|
|
|
stdin=subprocess.PIPE)
|
|
|
|
p.wait()
|
2016-12-23 08:53:39 +00:00
|
|
|
for lines in p.stdout.readlines():
|
2017-03-06 05:42:00 +00:00
|
|
|
if type(lines) is bytes:
|
|
|
|
lines = lines.decode('utf-8')
|
2016-12-23 08:53:39 +00:00
|
|
|
if re.search('Amazon kindlegen\(', lines):
|
2017-01-28 19:16:40 +00:00
|
|
|
versions['KindlegenVersion'] = lines
|
|
|
|
versions['PythonVersion'] = sys.version
|
2017-02-23 18:58:56 +00:00
|
|
|
versions['babel'] = babelVersion
|
|
|
|
versions['sqlalchemy'] = sqlalchemyVersion
|
|
|
|
versions['flask'] = flaskVersion
|
|
|
|
versions['flasklogin'] = flask_loginVersion
|
|
|
|
versions['flask_principal'] = flask_principalVersion
|
|
|
|
versions['tornado'] = tornadoVersion
|
|
|
|
versions['iso639'] = iso639Version
|
|
|
|
versions['requests'] = requests.__version__
|
2017-02-25 08:02:59 +00:00
|
|
|
versions['pysqlite'] = db.engine.dialect.dbapi.version
|
|
|
|
versions['sqlite'] = db.engine.dialect.dbapi.sqlite_version
|
2017-02-23 18:58:56 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
return render_title_template('stats.html', bookcounter=counter, authorcounter=authors, versions=versions,
|
2017-01-28 19:54:31 +00:00
|
|
|
categorycounter=categorys, seriecounter=series, title=_(u"Statistics"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
|
2017-02-20 18:34:37 +00:00
|
|
|
#@app.route("/load_gdrive")
|
|
|
|
#@login_required
|
|
|
|
#@admin_required
|
|
|
|
#def load_all_gdrive_folder_ids():
|
|
|
|
# books=db.session.query(db.Books).all()
|
|
|
|
# for book in books:
|
|
|
|
# gdriveutils.getFolderId(book.path, Gdrive.Instance().drive)
|
2017-02-22 22:45:19 +00:00
|
|
|
# return
|
2017-02-20 18:34:37 +00:00
|
|
|
|
|
|
|
@app.route("/gdrive/authenticate")
|
|
|
|
@login_required
|
|
|
|
@admin_required
|
|
|
|
def authenticate_google_drive():
|
|
|
|
authUrl=Gauth.Instance().auth.GetAuthUrl()
|
|
|
|
return redirect(authUrl)
|
|
|
|
|
|
|
|
@app.route("/gdrive/callback")
|
|
|
|
def google_drive_callback():
|
|
|
|
auth_code = request.args.get('code')
|
|
|
|
credentials = Gauth.Instance().auth.flow.step2_exchange(auth_code)
|
|
|
|
with open('gdrive_credentials' ,'w') as f:
|
|
|
|
f.write(credentials.to_json())
|
|
|
|
return redirect(url_for('configuration'))
|
|
|
|
|
|
|
|
@app.route("/gdrive/watch/subscribe")
|
|
|
|
@login_required
|
|
|
|
@admin_required
|
|
|
|
def watch_gdrive():
|
|
|
|
if not config.config_google_drive_watch_changes_response:
|
2017-02-23 00:17:14 +00:00
|
|
|
address = '%sgdrive/watch/callback' % config.config_google_drive_calibre_url_base
|
2017-02-20 18:34:37 +00:00
|
|
|
notification_id=str(uuid4())
|
|
|
|
result = gdriveutils.watchChange(Gdrive.Instance().drive, notification_id,
|
|
|
|
'web_hook', address, gdrive_watch_callback_token, current_milli_time() + 604800*1000)
|
|
|
|
print (result)
|
|
|
|
settings = ub.session.query(ub.Settings).first()
|
|
|
|
settings.config_google_drive_watch_changes_response=json.dumps(result)
|
|
|
|
ub.session.merge(settings)
|
|
|
|
ub.session.commit()
|
|
|
|
settings = ub.session.query(ub.Settings).first()
|
|
|
|
config.loadSettings()
|
|
|
|
|
|
|
|
print (settings.config_google_drive_watch_changes_response)
|
|
|
|
|
|
|
|
return redirect(url_for('configuration'))
|
|
|
|
|
|
|
|
@app.route("/gdrive/watch/revoke")
|
|
|
|
@login_required
|
|
|
|
@admin_required
|
|
|
|
def revoke_watch_gdrive():
|
|
|
|
last_watch_response=config.config_google_drive_watch_changes_response
|
|
|
|
if last_watch_response:
|
|
|
|
response=gdriveutils.stopChannel(Gdrive.Instance().drive, last_watch_response['id'], last_watch_response['resourceId'])
|
|
|
|
settings = ub.session.query(ub.Settings).first()
|
|
|
|
settings.config_google_drive_watch_changes_response=None
|
|
|
|
ub.session.merge(settings)
|
|
|
|
ub.session.commit()
|
|
|
|
config.loadSettings()
|
|
|
|
return redirect(url_for('configuration'))
|
|
|
|
|
|
|
|
@app.route("/gdrive/watch/callback", methods=['GET', 'POST'])
|
|
|
|
def on_received_watch_confirmation():
|
|
|
|
app.logger.info (request.headers)
|
|
|
|
if request.headers.get('X-Goog-Channel-Token') == gdrive_watch_callback_token \
|
|
|
|
and request.headers.get('X-Goog-Resource-State') == 'change' \
|
|
|
|
and request.data:
|
|
|
|
|
|
|
|
data=request.data
|
|
|
|
|
|
|
|
def updateMetaData():
|
|
|
|
app.logger.info ('Change received from gdrive')
|
2017-02-22 22:45:19 +00:00
|
|
|
app.logger.info (data)
|
2017-02-20 18:34:37 +00:00
|
|
|
try:
|
|
|
|
j=json.loads(data)
|
2017-02-22 22:45:19 +00:00
|
|
|
app.logger.info ('Getting change details')
|
2017-02-20 18:34:37 +00:00
|
|
|
response=gdriveutils.getChangeById(Gdrive.Instance().drive, j['id'])
|
|
|
|
app.logger.info (response)
|
|
|
|
if response:
|
|
|
|
dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
|
|
|
|
if not response['deleted'] and response['file']['title'] == 'metadata.db' and response['file']['md5Checksum'] != md5(dbpath):
|
2017-03-02 00:28:30 +00:00
|
|
|
tmpDir=tempfile.gettempdir()
|
2017-02-20 18:34:37 +00:00
|
|
|
app.logger.info ('Database file updated')
|
2017-03-02 00:28:30 +00:00
|
|
|
copyfile (dbpath, tmpDir + "/metadata.db_" + str(current_milli_time()))
|
2017-02-20 18:34:37 +00:00
|
|
|
app.logger.info ('Backing up existing and downloading updated metadata.db')
|
2017-03-02 00:28:30 +00:00
|
|
|
gdriveutils.downloadFile(Gdrive.Instance().drive, None, "metadata.db", tmpDir + "/tmp_metadata.db")
|
2017-02-20 18:34:37 +00:00
|
|
|
app.logger.info ('Setting up new DB')
|
2017-03-02 00:28:30 +00:00
|
|
|
os.rename(tmpDir + "/tmp_metadata.db", dbpath)
|
2017-02-20 18:34:37 +00:00
|
|
|
db.setup_db()
|
|
|
|
except Exception, e:
|
|
|
|
app.logger.exception(e)
|
|
|
|
|
|
|
|
updateMetaData()
|
|
|
|
return ''
|
|
|
|
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
@app.route("/shutdown")
|
2017-01-17 16:45:23 +00:00
|
|
|
@login_required
|
2017-01-22 15:44:37 +00:00
|
|
|
@admin_required
|
2016-12-23 08:53:39 +00:00
|
|
|
def shutdown():
|
2017-02-21 18:40:22 +00:00
|
|
|
# global global_task
|
2017-01-22 15:44:37 +00:00
|
|
|
task = int(request.args.get("parameter").strip())
|
2017-02-21 18:40:22 +00:00
|
|
|
helper.global_task = task
|
2017-01-28 19:16:40 +00:00
|
|
|
if task == 1 or task == 0: # valid commandos received
|
2017-01-22 15:44:37 +00:00
|
|
|
# close all database connections
|
|
|
|
db.session.close()
|
|
|
|
db.engine.dispose()
|
|
|
|
ub.session.close()
|
|
|
|
ub.engine.dispose()
|
|
|
|
# stop tornado server
|
2017-01-28 19:16:40 +00:00
|
|
|
server = IOLoop.instance()
|
2017-01-22 15:44:37 +00:00
|
|
|
server.add_callback(server.stop)
|
2017-01-28 19:16:40 +00:00
|
|
|
showtext = {}
|
2017-01-22 15:44:37 +00:00
|
|
|
if task == 0:
|
2017-02-20 18:52:00 +00:00
|
|
|
showtext['text'] = _(u'Server restarted, please reload page')
|
2017-01-22 15:44:37 +00:00
|
|
|
else:
|
2017-01-28 19:16:40 +00:00
|
|
|
showtext['text'] = _(u'Performing shutdown of server, please close window')
|
|
|
|
return json.dumps(showtext)
|
2017-01-22 15:44:37 +00:00
|
|
|
else:
|
2017-03-07 18:10:17 +00:00
|
|
|
if task == 2:
|
|
|
|
db.session.close()
|
|
|
|
db.engine.dispose()
|
|
|
|
db.setup_db()
|
|
|
|
return json.dumps({})
|
2017-01-22 15:44:37 +00:00
|
|
|
abort(404)
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-01-30 17:58:36 +00:00
|
|
|
@app.route("/update")
|
|
|
|
@login_required
|
|
|
|
@admin_required
|
|
|
|
def update():
|
2017-02-20 18:52:00 +00:00
|
|
|
helper.updater_thread = helper.Updater()
|
2017-01-30 17:58:36 +00:00
|
|
|
flash(_(u"Update done"), category="info")
|
2017-02-20 18:52:00 +00:00
|
|
|
return ""
|
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/search", methods=["GET"])
|
2016-04-27 08:35:23 +00:00
|
|
|
@login_required_if_no_ano
|
2015-08-02 18:59:11 +00:00
|
|
|
def search():
|
2016-11-09 18:24:33 +00:00
|
|
|
term = request.args.get("query").strip()
|
2015-08-02 18:59:11 +00:00
|
|
|
if term:
|
2016-11-09 18:24:33 +00:00
|
|
|
if current_user.filter_language() != "all":
|
|
|
|
filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
|
|
|
|
else:
|
|
|
|
filter = True
|
2016-12-23 08:53:39 +00:00
|
|
|
entries = db.session.query(db.Books).filter(db.or_(db.Books.tags.any(db.Tags.name.like("%" + term + "%")),
|
2017-02-22 19:59:48 +00:00
|
|
|
db.Books.series.any(db.Series.name.like("%" + term + "%")),
|
|
|
|
db.Books.authors.any(db.Authors.name.like("%" + term + "%")),
|
|
|
|
db.Books.publishers.any(db.Publishers.name.like("%" + term + "%")),
|
|
|
|
db.Books.title.like("%" + term + "%"))).filter(filter).all()
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('search.html', searchterm=term, entries=entries)
|
2015-08-02 18:59:11 +00:00
|
|
|
else:
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('search.html', searchterm="")
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-05-02 20:43:50 +00:00
|
|
|
@app.route("/advanced_search", methods=["GET"])
|
|
|
|
@login_required_if_no_ano
|
|
|
|
def advanced_search():
|
|
|
|
if request.method == 'GET':
|
|
|
|
q = db.session.query(db.Books)
|
2016-05-03 12:27:38 +00:00
|
|
|
include_tag_inputs = request.args.getlist('include_tag')
|
|
|
|
exclude_tag_inputs = request.args.getlist('exclude_tag')
|
2016-12-27 15:07:25 +00:00
|
|
|
include_series_inputs = request.args.getlist('include_serie')
|
|
|
|
exclude_series_inputs = request.args.getlist('exclude_serie')
|
|
|
|
include_languages_inputs = request.args.getlist('include_language')
|
|
|
|
exclude_languages_inputs = request.args.getlist('exclude_language')
|
|
|
|
|
2016-10-03 10:08:34 +00:00
|
|
|
author_name = request.args.get("author_name")
|
|
|
|
book_title = request.args.get("book_title")
|
2017-02-22 19:59:48 +00:00
|
|
|
publisher = request.args.get("publisher")
|
2016-12-23 08:53:39 +00:00
|
|
|
if author_name: author_name = author_name.strip()
|
|
|
|
if book_title: book_title = book_title.strip()
|
2017-02-22 19:59:48 +00:00
|
|
|
if publisher: publisher = publisher.strip()
|
2016-12-27 15:07:25 +00:00
|
|
|
if include_tag_inputs or exclude_tag_inputs or include_series_inputs or exclude_series_inputs or \
|
2017-02-22 19:59:48 +00:00
|
|
|
include_languages_inputs or exclude_languages_inputs or author_name or book_title or publisher:
|
2016-05-03 09:00:45 +00:00
|
|
|
searchterm = []
|
2017-02-22 19:59:48 +00:00
|
|
|
searchterm.extend((author_name, book_title, publisher))
|
2016-05-03 12:27:38 +00:00
|
|
|
tag_names = db.session.query(db.Tags).filter(db.Tags.id.in_(include_tag_inputs)).all()
|
2016-05-03 09:00:45 +00:00
|
|
|
searchterm.extend(tag.name for tag in tag_names)
|
2016-12-27 15:07:25 +00:00
|
|
|
# searchterm = " + ".join(filter(None, searchterm))
|
|
|
|
serie_names = db.session.query(db.Series).filter(db.Series.id.in_(include_series_inputs)).all()
|
|
|
|
searchterm.extend(serie.name for serie in serie_names)
|
|
|
|
language_names = db.session.query(db.Languages).filter(db.Languages.id.in_(include_languages_inputs)).all()
|
|
|
|
for lang in language_names:
|
|
|
|
try:
|
|
|
|
cur_l = LC.parse(lang.lang_code)
|
|
|
|
lang.name = cur_l.get_language_name(get_locale())
|
2017-03-06 03:53:17 +00:00
|
|
|
except Exception as e:
|
2016-12-27 15:07:25 +00:00
|
|
|
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
|
|
|
|
searchterm.extend(language.name for language in language_names)
|
2016-05-03 09:00:45 +00:00
|
|
|
searchterm = " + ".join(filter(None, searchterm))
|
2016-12-23 08:53:39 +00:00
|
|
|
q = q.filter(db.Books.authors.any(db.Authors.name.like("%" + author_name + "%")),
|
2017-02-22 19:59:48 +00:00
|
|
|
db.Books.title.like("%" + book_title + "%"),
|
|
|
|
db.Books.publishers.any(db.Publishers.name.like("%" + publisher + "%")))
|
2016-05-03 12:27:38 +00:00
|
|
|
for tag in include_tag_inputs:
|
2016-05-02 20:43:50 +00:00
|
|
|
q = q.filter(db.Books.tags.any(db.Tags.id == tag))
|
2016-05-03 12:27:38 +00:00
|
|
|
for tag in exclude_tag_inputs:
|
|
|
|
q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
|
2016-12-27 15:07:25 +00:00
|
|
|
for serie in include_series_inputs:
|
|
|
|
q = q.filter(db.Books.series.any(db.Series.id == serie))
|
|
|
|
for serie in exclude_series_inputs:
|
|
|
|
q = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
|
2016-11-09 18:24:33 +00:00
|
|
|
if current_user.filter_language() != "all":
|
|
|
|
q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
|
2016-12-27 15:07:25 +00:00
|
|
|
else:
|
|
|
|
for language in include_languages_inputs:
|
|
|
|
q = q.filter(db.Books.languages.any(db.Languages.id == language))
|
|
|
|
for language in exclude_languages_inputs:
|
|
|
|
q = q.filter(not_(db.Books.series.any(db.Languages.id == language)))
|
2016-05-02 20:43:50 +00:00
|
|
|
q = q.all()
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('search.html', searchterm=searchterm, entries=q, title=_(u"search"))
|
2016-05-02 20:43:50 +00:00
|
|
|
tags = db.session.query(db.Tags).order_by(db.Tags.name).all()
|
2016-12-27 15:07:25 +00:00
|
|
|
series = db.session.query(db.Series).order_by(db.Series.name).all()
|
|
|
|
if current_user.filter_language() == u"all":
|
|
|
|
languages = db.session.query(db.Languages).all()
|
|
|
|
for lang in languages:
|
|
|
|
try:
|
|
|
|
cur_l = LC.parse(lang.lang_code)
|
|
|
|
lang.name = cur_l.get_language_name(get_locale())
|
2017-03-06 03:53:17 +00:00
|
|
|
except Exception as e:
|
2016-12-27 15:07:25 +00:00
|
|
|
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
|
|
|
|
else:
|
2017-01-28 19:16:40 +00:00
|
|
|
languages = None
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('search_form.html', tags=tags, languages=languages, series=series, title=_(u"search"))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-03-02 23:55:32 +00:00
|
|
|
def get_cover_via_gdrive(cover_path):
    """Return a shareable Google Drive URL for a book's cover.jpg.

    Looks up ``cover.jpg`` inside the book folder ``cover_path`` on
    Drive; on first access it grants anyone-with-link read permission
    and records that grant locally so the permission is only inserted
    once per file.
    """
    df=gdriveutils.getFileFromEbooksFolder(Gdrive.Instance().drive, cover_path, 'cover.jpg')
    # Only add the public-read permission if we haven't recorded doing so
    # for this Drive file id before.
    if not gdriveutils.session.query(gdriveutils.PermissionAdded).filter(gdriveutils.PermissionAdded.gdrive_id == df['id']).first():
        # NOTE(review): the return value of GetPermissions() is never used;
        # presumably the call (or a fetch side effect of it) is still
        # required before InsertPermission — confirm against the pydrive
        # API before removing it.
        permissions=df.GetPermissions()
        # Anyone with the link may read the cover image.
        df.InsertPermission({
            'type': 'anyone',
            'value': 'anyone',
            'role': 'reader',
            'withLink' : True})
        # Remember locally that this file is already shared.
        permissionAdded=gdriveutils.PermissionAdded()
        permissionAdded.gdrive_id=df['id']
        gdriveutils.session.add(permissionAdded)
        gdriveutils.session.commit()
    # Direct-download link served by Google.
    return df.metadata.get('webContentLink')
|
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/cover/<path:cover_path>")
|
2016-04-27 08:35:23 +00:00
|
|
|
@login_required_if_no_ano
|
2015-08-02 18:59:11 +00:00
|
|
|
def get_cover(cover_path):
|
2017-02-20 18:34:37 +00:00
|
|
|
if config.config_use_google_drive:
|
2017-03-02 23:55:32 +00:00
|
|
|
return redirect(get_cover_via_gdrive(cover_path))
|
2017-02-20 18:34:37 +00:00
|
|
|
else:
|
|
|
|
return send_from_directory(os.path.join(config.config_calibre_dir, cover_path), "cover.jpg")
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2017-01-09 18:10:39 +00:00
|
|
|
@app.route("/opds/thumb_240_240/<path:book_id>")
|
2017-01-09 20:18:49 +00:00
|
|
|
@app.route("/opds/cover_240_240/<path:book_id>")
|
2017-01-08 15:46:57 +00:00
|
|
|
@app.route("/opds/cover_90_90/<path:book_id>")
|
2017-01-07 17:08:12 +00:00
|
|
|
@app.route("/opds/cover/<path:book_id>")
|
2016-11-09 18:24:33 +00:00
|
|
|
@requires_basic_auth_if_no_ano
|
2017-01-07 17:08:12 +00:00
|
|
|
def feed_get_cover(book_id):
|
|
|
|
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
2017-02-20 18:34:37 +00:00
|
|
|
if config.config_use_google_drive:
|
2017-03-02 23:55:32 +00:00
|
|
|
return redirect(get_cover_via_gdrive(book.path))
|
2017-02-20 18:34:37 +00:00
|
|
|
else:
|
2017-03-02 23:55:32 +00:00
|
|
|
return send_from_directory(os.path.join(config.config_calibre_dir, book.path), "cover.jpg")
|
2017-02-19 20:08:22 +00:00
|
|
|
|
|
|
|
def render_read_books(page, are_read, as_xml=False):
    """Render the current user's read or unread books.

    ``are_read`` selects books whose ids appear in the user's ReadBook
    records (with ``is_read`` set); otherwise their complement.
    ``as_xml`` switches between the OPDS feed and the HTML index page.
    """
    read_records = ub.session.query(ub.ReadBook) \
        .filter(ub.ReadBook.user_id == int(current_user.id)) \
        .filter(ub.ReadBook.is_read == True).all()
    read_ids = [record.book_id for record in read_records]
    membership = db.Books.id.in_(read_ids)
    db_filter = membership if are_read else ~membership

    entries, random, pagination = fill_indexpage(page, db.Books,
                                                 db_filter, db.Books.timestamp.desc())
    if as_xml:
        feed = render_title_template('feed.xml', entries=entries, pagination=pagination)
        response = make_response(feed)
        response.headers["Content-Type"] = "application/xml"
        return response

    name = u'Read Books' if are_read else u'Unread Books'
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(name, name=name))
|
|
|
|
|
|
|
|
@app.route("/opds/readbooks/")
|
|
|
|
@login_required_if_no_ano
|
|
|
|
def feed_read_books():
|
|
|
|
off = request.args.get("offset")
|
|
|
|
if not off:
|
|
|
|
off = 0
|
|
|
|
return render_read_books(int(off) / (int(config.config_books_per_page)) + 1, True, True)
|
2016-11-09 18:24:33 +00:00
|
|
|
|
2017-02-19 20:08:22 +00:00
|
|
|
@app.route("/readbooks/", defaults={'page': 1})
|
|
|
|
@app.route("/readbooks/<int:page>'")
|
|
|
|
@login_required_if_no_ano
|
|
|
|
def read_books(page):
|
|
|
|
return render_read_books(page, True)
|
|
|
|
|
|
|
|
@app.route("/opds/unreadbooks/")
|
|
|
|
@login_required_if_no_ano
|
|
|
|
def feed_unread_books():
|
|
|
|
off = request.args.get("offset")
|
|
|
|
if not off:
|
|
|
|
off = 0
|
|
|
|
return render_read_books(int(off) / (int(config.config_books_per_page)) + 1, False, True)
|
|
|
|
|
|
|
|
@app.route("/unreadbooks/", defaults={'page': 1})
|
|
|
|
@app.route("/unreadbooks/<int:page>'")
|
|
|
|
@login_required_if_no_ano
|
|
|
|
def unread_books(page):
|
|
|
|
return render_read_books(page, False)
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-07-16 08:44:47 +00:00
|
|
|
@app.route("/read/<int:book_id>/<format>")
|
2017-02-25 10:14:45 +00:00
|
|
|
@login_required_if_no_ano
|
2016-12-23 08:53:39 +00:00
|
|
|
def read_book(book_id, format):
|
2015-08-02 18:59:11 +00:00
|
|
|
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
2016-12-23 08:53:39 +00:00
|
|
|
if book:
|
2017-01-22 20:30:36 +00:00
|
|
|
book_dir = os.path.join(config.get_main_dir, "cps", "static", str(book_id))
|
2016-10-30 10:44:02 +00:00
|
|
|
if not os.path.exists(book_dir):
|
|
|
|
os.mkdir(book_dir)
|
2016-11-01 17:49:47 +00:00
|
|
|
if format.lower() == "epub":
|
2016-12-23 08:53:39 +00:00
|
|
|
# check if mimetype file is exists
|
|
|
|
mime_file = str(book_id) + "/mimetype"
|
|
|
|
if not os.path.exists(mime_file):
|
2017-01-22 20:30:36 +00:00
|
|
|
epub_file = os.path.join(config.config_calibre_dir, book.path, book.data[0].name) + ".epub"
|
2016-11-01 17:49:47 +00:00
|
|
|
if not os.path.isfile(epub_file):
|
|
|
|
raise ValueError('Error opening eBook. File does not exist: ', epub_file)
|
|
|
|
zfile = zipfile.ZipFile(epub_file)
|
|
|
|
for name in zfile.namelist():
|
|
|
|
(dirName, fileName) = os.path.split(name)
|
|
|
|
newDir = os.path.join(book_dir, dirName)
|
|
|
|
if not os.path.exists(newDir):
|
|
|
|
try:
|
|
|
|
os.makedirs(newDir)
|
|
|
|
except OSError as exception:
|
|
|
|
if exception.errno == errno.EEXIST:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
raise
|
|
|
|
if fileName:
|
|
|
|
fd = open(os.path.join(newDir, fileName), "wb")
|
|
|
|
fd.write(zfile.read(name))
|
|
|
|
fd.close()
|
|
|
|
zfile.close()
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('read.html', bookid=book_id, title=_(u"Read a Book"))
|
2016-11-01 17:49:47 +00:00
|
|
|
elif format.lower() == "pdf":
|
2017-03-07 08:32:55 +00:00
|
|
|
all_name = str(book_id) + "/" + book.data[0].name + ".pdf"
|
|
|
|
tmp_file = os.path.join(book_dir, book.data[0].name) + ".pdf"
|
2016-12-23 08:53:39 +00:00
|
|
|
if not os.path.exists(tmp_file):
|
2017-01-22 20:30:36 +00:00
|
|
|
pdf_file = os.path.join(config.config_calibre_dir, book.path, book.data[0].name) + ".pdf"
|
2016-12-23 08:53:39 +00:00
|
|
|
copyfile(pdf_file, tmp_file)
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('readpdf.html', pdffile=all_name, title=_(u"Read a Book"))
|
2016-12-23 08:53:39 +00:00
|
|
|
elif format.lower() == "txt":
|
2017-03-07 08:32:55 +00:00
|
|
|
all_name = str(book_id) + "/" + book.data[0].name + ".txt"
|
|
|
|
tmp_file = os.path.join(book_dir, book.data[0].name) + ".txt"
|
2016-12-23 08:53:39 +00:00
|
|
|
if not os.path.exists(all_name):
|
2017-01-22 20:30:36 +00:00
|
|
|
txt_file = os.path.join(config.config_calibre_dir, book.path, book.data[0].name) + ".txt"
|
2016-12-23 08:53:39 +00:00
|
|
|
copyfile(txt_file, tmp_file)
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('readtxt.html', txtfile=all_name, title=_(u"Read a Book"))
|
2016-12-23 08:53:39 +00:00
|
|
|
elif format.lower() == "cbr":
|
2017-03-07 08:32:55 +00:00
|
|
|
all_name = str(book_id) + "/" + book.data[0].name + ".cbr"
|
|
|
|
tmp_file = os.path.join(book_dir, book.data[0].name) + ".cbr"
|
2016-12-23 08:53:39 +00:00
|
|
|
if not os.path.exists(all_name):
|
2017-01-22 20:30:36 +00:00
|
|
|
cbr_file = os.path.join(config.config_calibre_dir, book.path, book.data[0].name) + ".cbr"
|
2016-12-23 08:53:39 +00:00
|
|
|
copyfile(cbr_file, tmp_file)
|
2017-01-22 20:30:36 +00:00
|
|
|
return render_title_template('readcbr.html', comicfile=all_name, title=_(u"Read a Book"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
else:
|
2016-11-09 18:24:33 +00:00
|
|
|
flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
|
2017-01-02 17:52:33 +00:00
|
|
|
return redirect(url_for("index"))
|
2016-10-30 10:44:02 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/download/<int:book_id>/<format>")
|
2017-01-12 19:43:36 +00:00
|
|
|
@login_required_if_no_ano
|
2016-04-27 08:35:23 +00:00
|
|
|
@download_required
|
2015-08-02 18:59:11 +00:00
|
|
|
def get_download_link(book_id, format):
|
|
|
|
format = format.split(".")[0]
|
|
|
|
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
|
|
|
|
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == format.upper()).first()
|
2017-01-15 11:23:08 +00:00
|
|
|
if data:
|
2017-01-28 19:16:40 +00:00
|
|
|
# collect downloaded books only for registered user and not for anonymous user
|
|
|
|
if current_user.is_authenticated:
|
2017-01-15 11:23:08 +00:00
|
|
|
helper.update_download(book_id, int(current_user.id))
|
|
|
|
file_name = book.title
|
2017-02-15 17:09:17 +00:00
|
|
|
if len(book.authors) > 0:
|
|
|
|
file_name = book.authors[0].name + '-' + file_name
|
2017-01-15 11:23:08 +00:00
|
|
|
file_name = helper.get_valid_filename(file_name)
|
2017-03-06 23:17:57 +00:00
|
|
|
headers={}
|
|
|
|
try:
|
|
|
|
headers["Content-Type"] = mimetypes.types_map['.' + format]
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
headers["Content-Disposition"] = "attachment; filename*=UTF-8''%s.%s" % (urllib.quote(file_name.encode('utf-8')), format)
|
2017-02-20 18:34:37 +00:00
|
|
|
if config.config_use_google_drive:
|
|
|
|
df=gdriveutils.getFileFromEbooksFolder(Gdrive.Instance().drive, book.path, '%s.%s' % (data.name, format))
|
2017-03-06 23:17:57 +00:00
|
|
|
return do_gdrive_download(df, headers)
|
2017-02-20 18:34:37 +00:00
|
|
|
else:
|
|
|
|
response = make_response(send_from_directory(os.path.join(config.config_calibre_dir, book.path), data.name + "." + format))
|
2017-03-06 23:17:57 +00:00
|
|
|
response.headers=headers
|
2017-01-15 11:23:08 +00:00
|
|
|
return response
|
|
|
|
else:
|
|
|
|
abort(404)
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Self-service user registration (only when public registration is on).

    GET renders the form; POST validates the submitted nickname/email/
    password, rejects duplicates, creates the user with the configured
    default role and redirects to the login page on success.
    """
    if not config.config_public_reg:
        abort(404)
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('index'))

    if request.method == "POST":
        to_save = request.form.to_dict()
        # All three fields are mandatory.
        if not all(to_save[field] for field in ("nickname", "email", "password")):
            flash(_(u"Please fill out all fields!"), category="error")
            return render_title_template('register.html', title=_(u"register"))

        existing_user = ub.session.query(ub.User).filter(ub.User.nickname == to_save["nickname"]).first()
        existing_email = ub.session.query(ub.User).filter(ub.User.email == to_save["email"]).first()
        if existing_user or existing_email:
            flash(_(u"This username or email address is already in use."), category="error")
            return render_title_template('register.html', title=_(u"register"))

        content = ub.User()
        content.password = generate_password_hash(to_save["password"])
        content.nickname = to_save["nickname"]
        content.email = to_save["email"]
        content.role = config.config_default_role
        try:
            ub.session.add(content)
            ub.session.commit()
        except Exception:
            ub.session.rollback()
            flash(_(u"An unknown error occured. Please try again later."), category="error")
            return render_title_template('register.html', title=_(u"register"))
        flash("Your account has been created. Please login.", category="success")
        return redirect(url_for('login'))

    return render_title_template('register.html', title=_(u"register"))
|
2015-10-13 00:30:55 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in by nickname and password.

    Redirects to the basic configuration when the calibre DB is not set
    up yet, and straight to the index for already-authenticated users.
    """
    if not config.db_configured:
        return redirect(url_for('basic_configuration'))
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('index'))
    if request.method == "POST":
        form = request.form.to_dict()
        user = ub.session.query(ub.User).filter(ub.User.nickname == form['username'].strip()).first()

        if user and check_password_hash(user.password, form['password']):
            login_user(user, remember=True)
            flash(_(u"you are now logged in as: '%(nickname)s'", nickname=user.nickname), category="success")
            return redirect(url_for("index"))
        else:
            # Lazy %-style args instead of eager string concatenation.
            app.logger.info('Login failed for user "%s"', form['username'])
            flash(_(u"Wrong Username or Password"), category="error")

    return render_title_template('login.html', title=_(u"login"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
|
|
|
@app.route('/logout')
@login_required
def logout():
    """End the current session and return to the login page."""
    if current_user is not None and current_user.is_authenticated:
        logout_user()
    return redirect(url_for('login'))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.route('/send/<int:book_id>')
@login_required
@download_required
def send_to_kindle(book_id):
    """Mail a book to the current user's kindle address.

    Flashes an error when SMTP is unconfigured (still at its example
    default) or no kindle address is set; records the download on
    success. Always returns to the referring page.
    """
    settings = ub.get_mail_settings()
    if settings.get("mail_server", "mail.example.com") == "mail.example.com":
        # SMTP settings were never changed from the placeholder.
        flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    elif current_user.kindle_mail:
        # send_mail returns None on success, or an error description.
        result = helper.send_mail(book_id, current_user.kindle_mail, config.config_calibre_dir)
        if result is None:
            flash(_(u"Book successfully send to %(kindlemail)s", kindlemail=current_user.kindle_mail),
                  category="success")
            helper.update_download(book_id, int(current_user.id))
        else:
            flash(_(u"There was an error sending this book: %(res)s", res=result), category="error")
    else:
        flash(_(u"Please configure your kindle email address first..."), category="error")
    return redirect(request.environ["HTTP_REFERER"])
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/shelf/add/<int:shelf_id>/<int:book_id>")
@login_required
def add_to_shelf(shelf_id, book_id):
    """Append a book to a shelf, placing it after the current last entry.

    Only the shelf owner or anyone (for public shelves) may add books.
    Redirects back to the referring page on success.
    """
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
    # Fix: guard against an unknown shelf id; previously shelf.is_public
    # raised AttributeError on None.
    if shelf is None:
        flash(_(u"Invalid shelf specified"), category="error")
        return redirect(url_for('index'))
    if not shelf.is_public and not shelf.user_id == int(current_user.id):
        # Fix: message typo ("to the the shelf")
        flash("Sorry you are not allowed to add a book to the shelf: %s" % shelf.name)
        return redirect(url_for('index'))
    # Determine the highest existing order value so the new book goes last.
    maxOrder = ub.session.query(func.max(ub.BookShelf.order)).filter(ub.BookShelf.shelf == shelf_id).first()
    if maxOrder[0] is None:
        # empty shelf
        maxOrder = 0
    else:
        maxOrder = maxOrder[0]
    ins = ub.BookShelf(shelf=shelf.id, book_id=book_id, order=maxOrder + 1)
    ub.session.add(ins)
    ub.session.commit()
    flash(_(u"Book has been added to shelf: %(sname)s", sname=shelf.name), category="success")
    return redirect(request.environ["HTTP_REFERER"])
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-10-12 01:09:56 +00:00
|
|
|
@app.route("/shelf/remove/<int:shelf_id>/<int:book_id>")
@login_required
def remove_from_shelf(shelf_id, book_id):
    """Remove a single book from a shelf.

    Only the shelf owner or anyone (for public shelves) may remove books.
    Redirects back to the referring page on success.
    """
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
    # Fix: guard against an unknown shelf id; previously shelf.is_public
    # raised AttributeError on None.
    if shelf is None:
        flash(_(u"Invalid shelf specified"), category="error")
        return redirect(url_for('index'))
    if not shelf.is_public and not shelf.user_id == int(current_user.id):
        flash("Sorry you are not allowed to remove a book from this shelf: %s" % shelf.name)
        return redirect(url_for('index'))

    book_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
                                                       ub.BookShelf.book_id == book_id).first()
    # Fix: the book may not actually be on the shelf; session.delete(None)
    # raised an error before.
    if book_shelf is not None:
        ub.session.delete(book_shelf)
        ub.session.commit()
        flash(_(u"Book has been removed from shelf: %(sname)s", sname=shelf.name), category="success")

    return redirect(request.environ["HTTP_REFERER"])
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/shelf/create", methods=["GET", "POST"])
@login_required
def create_shelf():
    """Create a new shelf for the current user.

    GET renders an empty form; POST validates that no public shelf or
    shelf of this user already carries the chosen name, then persists it.
    """
    shelf = ub.Shelf()
    if request.method == "POST":
        to_save = request.form.to_dict()
        if "is_public" in to_save:
            shelf.is_public = 1
        shelf.name = to_save["title"]
        shelf.user_id = int(current_user.id)
        # A name clashes if it belongs to any public shelf, or to another
        # shelf of the same user.
        existing_shelf = ub.session.query(ub.Shelf).filter(
            or_((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1),
                (ub.Shelf.name == to_save["title"]) & (ub.Shelf.user_id == int(current_user.id)))).first()
        if existing_shelf:
            flash(_(u"A shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
        else:
            try:
                ub.session.add(shelf)
                ub.session.commit()
                flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
            except Exception:
                # Fix: roll back so a failed commit does not leave the
                # session in a broken state for later requests.
                ub.session.rollback()
                flash(_(u"There was an error"), category="error")
    # Both the GET form and the POST result render the same template.
    return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"create a shelf"))
|
2016-12-26 10:33:32 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
|
2016-12-26 10:33:32 +00:00
|
|
|
@app.route("/shelf/edit/<int:shelf_id>", methods=["GET", "POST"])
@login_required
def edit_shelf(shelf_id):
    """Rename a shelf and toggle its public flag.

    POST rejects names that clash with another public shelf or another
    shelf of the same user (the shelf being edited is excluded from the
    clash check).
    """
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
    # Fix: guard against an unknown shelf id; previously the template got
    # shelf=None and POST crashed on attribute assignment.
    if shelf is None:
        flash(_(u"Invalid shelf specified"), category="error")
        return redirect(url_for('index'))
    if request.method == "POST":
        to_save = request.form.to_dict()
        # Clash check mirrors create_shelf, but excludes this shelf itself.
        existing_shelf = ub.session.query(ub.Shelf).filter(
            or_((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 1),
                (ub.Shelf.name == to_save["title"]) & (ub.Shelf.user_id == int(current_user.id)))).filter(
            ub.Shelf.id != shelf_id).first()
        if existing_shelf:
            flash(_(u"A shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
        else:
            shelf.name = to_save["title"]
            shelf.is_public = 1 if "is_public" in to_save else 0
            try:
                ub.session.commit()
                flash(_(u"Shelf %(title)s changed", title=to_save["title"]), category="success")
            except Exception:
                # Fix: roll back so a failed commit does not leave the
                # session in a broken state for later requests.
                ub.session.rollback()
                flash(_(u"There was an error"), category="error")
    # Both the GET form and the POST result render the same template.
    return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"))
|
2016-12-26 10:33:32 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-11-09 18:24:33 +00:00
|
|
|
@app.route("/shelf/delete/<int:shelf_id>")
@login_required
def delete_shelf(shelf_id):
    """Delete a shelf and all of its book associations.

    Admins may delete any shelf; other users may delete their own shelves
    or public shelves. Redirects to the index page.
    """
    cur_shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
    # Fix: user roles are a bitmask; an exact equality test against
    # ub.ROLE_ADMIN missed admins who also hold other role bits. Use the
    # role_admin() helper, consistent with the rest of the file.
    if current_user.role_admin():
        deleted = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).delete()
    else:
        deleted = ub.session.query(ub.Shelf).filter(ub.or_(ub.and_(ub.Shelf.user_id == int(current_user.id),
                                                                   ub.Shelf.id == shelf_id),
                                                           ub.and_(ub.Shelf.is_public == 1,
                                                                   ub.Shelf.id == shelf_id))).delete()

    if deleted:
        # cascade: drop every book-to-shelf link for the removed shelf
        ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
        ub.session.commit()
        # Fix: category="success" was previously passed to the gettext
        # call _() instead of to flash(), so the message lost its category.
        flash(_(u"successfully deleted shelf %(name)s", name=cur_shelf.name), category="success")
    return redirect(url_for('index'))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
@app.route("/shelf/<int:shelf_id>")
@login_required_if_no_ano
def show_shelf(shelf_id):
    """Render a shelf's books in shelf order.

    Anonymous users see only public shelves; logged-in users see their
    own shelves plus public ones.
    """
    if current_user.is_anonymous():
        shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
    else:
        shelf = ub.session.query(ub.Shelf).filter(ub.or_(ub.and_(ub.Shelf.user_id == int(current_user.id),
                                                                 ub.Shelf.id == shelf_id),
                                                         ub.and_(ub.Shelf.is_public == 1,
                                                                 ub.Shelf.id == shelf_id))).first()
    # Fix: the final render dereferenced shelf.name unconditionally, which
    # raised AttributeError for an unknown/inaccessible shelf id.
    if shelf is None:
        flash(_(u"Invalid shelf specified"), category="error")
        return redirect(url_for('index'))
    result = list()
    books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).order_by(
        ub.BookShelf.order.asc()).all()
    for book in books_in_shelf:
        cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
        result.append(cur_book)

    return render_title_template('shelf.html', entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name),
                                 shelf=shelf)
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-26 10:33:32 +00:00
|
|
|
@app.route("/shelf/order/<int:shelf_id>", methods=["GET", "POST"])
@login_required
def order_shelf(shelf_id):
    """Reorder the books on a shelf.

    POST expects form keys named after each book id, whose values are the
    new order positions; GET (and POST, after saving) renders the current
    ordering for editing.
    """
    if request.method == "POST":
        to_save = request.form.to_dict()
        books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).order_by(
            ub.BookShelf.order.asc()).all()
        # Fix: removed a dead `counter` variable that was incremented but
        # never read.
        for book in books_in_shelf:
            setattr(book, 'order', to_save[str(book.book_id)])
        ub.session.commit()
    if current_user.is_anonymous():
        shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
    else:
        shelf = ub.session.query(ub.Shelf).filter(ub.or_(ub.and_(ub.Shelf.user_id == int(current_user.id),
                                                                 ub.Shelf.id == shelf_id),
                                                         ub.and_(ub.Shelf.is_public == 1,
                                                                 ub.Shelf.id == shelf_id))).first()
    # Fix: the final render dereferenced shelf.name unconditionally, which
    # raised AttributeError for an unknown/inaccessible shelf id.
    if shelf is None:
        flash(_(u"Invalid shelf specified"), category="error")
        return redirect(url_for('index'))
    result = list()
    books_in_shelf2 = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id) \
        .order_by(ub.BookShelf.order.asc()).all()
    for book in books_in_shelf2:
        cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
        result.append(cur_book)
    return render_title_template('shelf_order.html', entries=result,
                                 title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name), shelf=shelf)
|
2016-12-26 10:33:32 +00:00
|
|
|
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
@app.route("/me", methods=["GET", "POST"])
@login_required
def profile():
    """Show and update the current user's profile.

    Builds the language / translation lists for the form, collects the
    user's download history (pruning entries whose book no longer exists),
    and on POST applies password, e-mail, locale and sidebar-view changes.
    """
    content = ub.session.query(ub.User).filter(ub.User.id == int(current_user.id)).first()
    downloads = list()
    languages = db.session.query(db.Languages).all()
    for lang in languages:
        try:
            cur_l = LC.parse(lang.lang_code)
            lang.name = cur_l.get_language_name(get_locale())
        except Exception:
            # babel cannot parse the code; fall back to the ISO name
            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
    translations = babel.list_translations() + [LC('en')]
    for book in content.downloads:
        downloadBook = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
        if downloadBook:
            # Fix: reuse the row already fetched instead of issuing the
            # identical query a second time.
            downloads.append(downloadBook)
        else:
            # book vanished from the library: prune the stale download record
            ub.session.query(ub.Downloads).filter(book.book_id == ub.Downloads.book_id).delete()
            ub.session.commit()
    if request.method == "POST":
        to_save = request.form.to_dict()
        content.random_books = 0
        # only users with the password (or admin) role may change passwords
        if current_user.role_passwd() or current_user.role_admin():
            if to_save["password"]:
                content.password = generate_password_hash(to_save["password"])
        if "kindle_mail" in to_save and to_save["kindle_mail"] != content.kindle_mail:
            content.kindle_mail = to_save["kindle_mail"]
        if to_save["email"] and to_save["email"] != content.email:
            content.email = to_save["email"]
        if "show_random" in to_save and to_save["show_random"] == "on":
            content.random_books = 1
        # Fix: default_language was assigned twice from the same form key;
        # the duplicate assignment has been removed.
        if "default_language" in to_save:
            content.default_language = to_save["default_language"]
        if to_save["locale"]:
            content.locale = to_save["locale"]
        # rebuild the sidebar bitmask from the submitted checkboxes
        content.sidebar_view = 0
        if "show_random" in to_save:
            content.sidebar_view += ub.SIDEBAR_RANDOM
        if "show_language" in to_save:
            content.sidebar_view += ub.SIDEBAR_LANGUAGE
        if "show_series" in to_save:
            content.sidebar_view += ub.SIDEBAR_SERIES
        if "show_category" in to_save:
            content.sidebar_view += ub.SIDEBAR_CATEGORY
        if "show_hot" in to_save:
            content.sidebar_view += ub.SIDEBAR_HOT
        if "show_best_rated" in to_save:
            content.sidebar_view += ub.SIDEBAR_BEST_RATED
        if "show_author" in to_save:
            content.sidebar_view += ub.SIDEBAR_AUTHOR
        if "show_read_and_unread" in to_save:
            content.sidebar_view += ub.SIDEBAR_READ_AND_UNREAD
        if "show_detail_random" in to_save:
            content.sidebar_view += ub.DETAIL_RANDOM
        try:
            ub.session.commit()
        except IntegrityError:
            # e-mail column is unique; another account already uses it
            ub.session.rollback()
            flash(_(u"Found an existing account for this email address."), category="error")
            return render_title_template("user_edit.html", content=content, downloads=downloads,
                                         title=_(u"%(name)s's profile", name=current_user.nickname))
        flash(_(u"Profile updated"), category="success")
    return render_title_template("user_edit.html", translations=translations, profile=1, languages=languages,
                                 content=content,
                                 downloads=downloads, title=_(u"%(name)s's profile", name=current_user.nickname))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-01-02 17:52:33 +00:00
|
|
|
@app.route("/admin/view")
@login_required
@admin_required
def admin():
    """Render the admin overview page (users, mail settings, app config)."""
    # placeholder substituted by `git archive` with the commit timestamp
    commit = '$Format:%cI$'
    all_users = ub.session.query(ub.User).all()
    mail_settings = ub.session.query(ub.Settings).first()
    return render_title_template("admin.html", content=all_users, email=mail_settings, config=config, commit=commit,
                                 development=ub.DEVELOPMENT, title=_(u"Admin page"))
|
|
|
|
|
2017-01-02 17:52:33 +00:00
|
|
|
|
2017-01-22 20:30:36 +00:00
|
|
|
@app.route("/admin/config", methods=["GET", "POST"])
@login_required
@admin_required
def configuration():
    """Admin-facing configuration page (origin flag 0)."""
    return configuration_helper(0)
|
|
|
|
|
2017-01-22 15:44:37 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
@app.route("/config", methods=["GET", "POST"])
@unconfigured
def basic_configuration():
    """First-run configuration page, reachable only while unconfigured (origin flag 1)."""
    return configuration_helper(1)
|
|
|
|
|
2017-01-22 20:30:36 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
def configuration_helper(origin):
    """Shared worker behind /config and /admin/config.

    Applies posted settings to the ub.Settings row, regenerates the
    Google-Drive settings.yaml when its credentials change, reloads the
    Calibre database when its location changes, and schedules a server
    restart when port or title-regex change.

    :param origin: 1 when called from the first-run wizard, 0 from admin.
    """
    # global global_task
    reboot_required = False
    db_change = False
    success = False
    if request.method == "POST":
        to_save = request.form.to_dict()
        content = ub.session.query(ub.Settings).first()
        if "config_calibre_dir" in to_save:
            if content.config_calibre_dir != to_save["config_calibre_dir"]:
                content.config_calibre_dir = to_save["config_calibre_dir"]
                db_change = True
        # Google drive setup: regenerate settings.yaml only when one of the
        # credential fields actually changed.
        create_new_yaml = False
        if "config_google_drive_client_id" in to_save:
            if content.config_google_drive_client_id != to_save["config_google_drive_client_id"]:
                content.config_google_drive_client_id = to_save["config_google_drive_client_id"]
                create_new_yaml = True
        if "config_google_drive_client_secret" in to_save:
            if content.config_google_drive_client_secret != to_save["config_google_drive_client_secret"]:
                content.config_google_drive_client_secret = to_save["config_google_drive_client_secret"]
                create_new_yaml = True
        if "config_google_drive_calibre_url_base" in to_save:
            if content.config_google_drive_calibre_url_base != to_save["config_google_drive_calibre_url_base"]:
                content.config_google_drive_calibre_url_base = to_save["config_google_drive_calibre_url_base"]
                create_new_yaml = True
        # toggle detection: checkbox present XOR currently enabled
        if ("config_use_google_drive" in to_save and not content.config_use_google_drive) or \
                ("config_use_google_drive" not in to_save and content.config_use_google_drive):
            content.config_use_google_drive = "config_use_google_drive" in to_save
            db_change = True
            if not content.config_use_google_drive:
                create_new_yaml = False
        if create_new_yaml:
            with open('settings.yaml', 'w') as f:
                with open('gdrive_template.yaml', 'r') as t:
                    f.write(t.read() % {'client_id': content.config_google_drive_client_id,
                                        'client_secret': content.config_google_drive_client_secret,
                                        "redirect_uri": content.config_google_drive_calibre_url_base + 'gdrive/callback'})
        if "config_google_drive_folder" in to_save:
            if content.config_google_drive_folder != to_save["config_google_drive_folder"]:
                content.config_google_drive_folder = to_save["config_google_drive_folder"]
                db_change = True
        # end Google drive setup
        if "config_port" in to_save:
            if content.config_port != int(to_save["config_port"]):
                content.config_port = int(to_save["config_port"])
                reboot_required = True
        if "config_calibre_web_title" in to_save:
            content.config_calibre_web_title = to_save["config_calibre_web_title"]
        if "config_columns_to_ignore" in to_save:
            content.config_columns_to_ignore = to_save["config_columns_to_ignore"]
        if "config_title_regex" in to_save:
            if content.config_title_regex != to_save["config_title_regex"]:
                content.config_title_regex = to_save["config_title_regex"]
                reboot_required = True
        if "config_log_level" in to_save:
            content.config_log_level = int(to_save["config_log_level"])
        if "config_random_books" in to_save:
            content.config_random_books = int(to_save["config_random_books"])
        if "config_books_per_page" in to_save:
            content.config_books_per_page = int(to_save["config_books_per_page"])
        # checkboxes: absent means off, so reset before re-applying
        content.config_uploading = 0
        content.config_anonbrowse = 0
        content.config_public_reg = 0
        if "config_uploading" in to_save and to_save["config_uploading"] == "on":
            content.config_uploading = 1
        if "config_anonbrowse" in to_save and to_save["config_anonbrowse"] == "on":
            content.config_anonbrowse = 1
        if "config_public_reg" in to_save and to_save["config_public_reg"] == "on":
            content.config_public_reg = 1

        # rebuild the default-role bitmask for newly registered users
        content.config_default_role = 0
        if "admin_role" in to_save:
            content.config_default_role = content.config_default_role + ub.ROLE_ADMIN
        if "download_role" in to_save:
            content.config_default_role = content.config_default_role + ub.ROLE_DOWNLOAD
        if "upload_role" in to_save:
            content.config_default_role = content.config_default_role + ub.ROLE_UPLOAD
        if "edit_role" in to_save:
            content.config_default_role = content.config_default_role + ub.ROLE_EDIT
        if "passwd_role" in to_save:
            content.config_default_role = content.config_default_role + ub.ROLE_PASSWD
        try:
            # fetch metadata.db from Google Drive if it is missing locally
            if content.config_use_google_drive and is_gdrive_ready() and not os.path.exists(
                    config.config_calibre_dir + "/metadata.db"):
                gdriveutils.downloadFile(Gdrive.Instance().drive, None, "metadata.db",
                                         config.config_calibre_dir + "/metadata.db")
            if db_change:
                if config.db_configured:
                    db.session.close()
                    db.engine.dispose()
            ub.session.commit()
            flash(_(u"Calibre-web configuration updated"), category="success")
            config.loadSettings()
            app.logger.setLevel(config.config_log_level)
            logging.getLogger("book_formats").setLevel(config.config_log_level)
        except Exception as e:
            # Fix: `except e:` was a NameError (e undefined) and never
            # caught anything. Also roll back the failed commit and flash
            # the message text rather than the exception object.
            ub.session.rollback()
            flash(str(e), category="error")
            return render_title_template("config_edit.html", content=config, origin=origin,
                                         title=_(u"Basic Configuration"))
        if db_change:
            reload(db)
            if not db.setup_db():
                flash(_(u'DB location is not valid, please enter correct path'), category="error")
                return render_title_template("config_edit.html", content=config, origin=origin,
                                             title=_(u"Basic Configuration"))
        if reboot_required:
            # db.engine.dispose() # ToDo verify correct
            ub.session.close()
            ub.engine.dispose()
            # stop tornado server
            server = IOLoop.instance()
            server.add_callback(server.stop)
            helper.global_task = 0
            app.logger.info('Reboot required, restarting')
        if origin:
            success = True
    return render_title_template("config_edit.html", origin=origin, success=success, content=config,
                                 show_authenticate_google_drive=not is_gdrive_ready(),
                                 title=_(u"Basic Configuration"))
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
@app.route("/admin/user/new", methods=["GET", "POST"])
@login_required
@admin_required
def new_user():
    """Admin page to create a new user account.

    GET pre-fills the form with the configured default role; POST
    validates required fields and persists the account, rejecting
    duplicate e-mail addresses or nicknames.
    """
    content = ub.User()
    languages = db.session.query(db.Languages).all()
    for lang in languages:
        try:
            cur_l = LC.parse(lang.lang_code)
            lang.name = cur_l.get_language_name(get_locale())
        except Exception:
            # babel cannot parse the code; fall back to the ISO name
            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
    translations = [LC('en')] + babel.list_translations()
    if request.method == "POST":
        to_save = request.form.to_dict()
        if not to_save["nickname"] or not to_save["email"] or not to_save["password"]:
            flash(_(u"Please fill out all fields!"), category="error")
            return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,
                                         title=_(u"Add new user"))
        content.password = generate_password_hash(to_save["password"])
        content.nickname = to_save["nickname"]
        content.email = to_save["email"]
        content.default_language = to_save["default_language"]
        if "locale" in to_save:
            content.locale = to_save["locale"]
        # rebuild the sidebar bitmask from the submitted checkboxes
        content.sidebar_view = 0
        if "show_random" in to_save:
            content.sidebar_view += ub.SIDEBAR_RANDOM
        if "show_language" in to_save:
            content.sidebar_view += ub.SIDEBAR_LANGUAGE
        if "show_series" in to_save:
            content.sidebar_view += ub.SIDEBAR_SERIES
        if "show_category" in to_save:
            content.sidebar_view += ub.SIDEBAR_CATEGORY
        if "show_hot" in to_save:
            content.sidebar_view += ub.SIDEBAR_HOT
        if "show_read_and_unread" in to_save:
            content.sidebar_view += ub.SIDEBAR_READ_AND_UNREAD
        if "show_best_rated" in to_save:
            content.sidebar_view += ub.SIDEBAR_BEST_RATED
        if "show_author" in to_save:
            content.sidebar_view += ub.SIDEBAR_AUTHOR
        if "show_detail_random" in to_save:
            content.sidebar_view += ub.DETAIL_RANDOM
        # rebuild the role bitmask from the submitted checkboxes
        # (normalized to += for consistency with the sidebar handling)
        content.role = 0
        if "admin_role" in to_save:
            content.role += ub.ROLE_ADMIN
        if "download_role" in to_save:
            content.role += ub.ROLE_DOWNLOAD
        if "upload_role" in to_save:
            content.role += ub.ROLE_UPLOAD
        if "edit_role" in to_save:
            content.role += ub.ROLE_EDIT
        if "passwd_role" in to_save:
            content.role += ub.ROLE_PASSWD
        try:
            ub.session.add(content)
            ub.session.commit()
            flash(_(u"User '%(user)s' created", user=content.nickname), category="success")
            return redirect(url_for('admin'))
        except IntegrityError:
            # nickname / e-mail columns are unique
            ub.session.rollback()
            flash(_(u"Found an existing account for this email address or nickname."), category="error")
    else:
        # GET: pre-select the configured default role for the form
        content.role = config.config_default_role
    return render_title_template("user_edit.html", new_user=1, content=content, translations=translations,
                                 languages=languages, title=_(u"Add new user"))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2017-01-02 17:52:33 +00:00
|
|
|
@app.route("/admin/mailsettings", methods=["GET", "POST"])
@login_required
@admin_required
def edit_mailsettings():
    """Admin page to edit SMTP settings, optionally sending a test mail."""
    content = ub.session.query(ub.Settings).first()
    if request.method == "POST":
        to_save = request.form.to_dict()
        content.mail_server = to_save["mail_server"]
        content.mail_port = int(to_save["mail_port"])
        content.mail_login = to_save["mail_login"]
        content.mail_password = to_save["mail_password"]
        content.mail_from = to_save["mail_from"]
        content.mail_use_ssl = int(to_save["mail_use_ssl"])
        try:
            ub.session.commit()
            flash(_(u"Mail settings updated"), category="success")
        except Exception as e:
            # Fix: `except e:` was a NameError (e undefined) and never
            # caught anything. Also roll back the failed commit and flash
            # the message text rather than the exception object.
            ub.session.rollback()
            flash(str(e), category="error")
        if "test" in to_save and to_save["test"]:
            # send a test e-mail to the current user's kindle address;
            # send_test_mail returns None on success
            result = helper.send_test_mail(current_user.kindle_mail)
            if result is None:
                flash(_(u"Test E-Mail successfully send to %(kindlemail)s", kindlemail=current_user.kindle_mail),
                      category="success")
            else:
                flash(_(u"There was an error sending the Test E-Mail: %(res)s", res=result), category="error")
        else:
            flash(_(u"E-Mail settings updated"), category="success")
    return render_title_template("email_edit.html", content=content, title=_(u"Edit mail settings"))
|
2015-08-02 19:23:24 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
@app.route("/admin/user/<int:user_id>", methods=["GET", "POST"])
@login_required
@admin_required
def edit_user(user_id):
    """Admin view to edit or delete a single user account.

    GET renders the edit form with the user's data, download history and
    the selectable content/UI languages. POST either deletes the user
    (when "delete" is in the form) or applies the submitted password,
    role-bit, sidebar-view, language and e-mail changes and commits them.

    :param user_id: primary key of the ``ub.User`` row to edit
    :return: rendered ``user_edit.html`` or a redirect to the admin page
    """
    content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
    downloads = list()
    languages = db.session.query(db.Languages).all()
    # Resolve display names for the language codes; fall back to the
    # iso639 table for codes Babel cannot parse.
    for lang in languages:
        try:
            cur_l = LC.parse(lang.lang_code)
            lang.name = cur_l.get_language_name(get_locale())
        except Exception:
            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
    translations = babel.list_translations() + [LC('en')]
    # Collect the books this user downloaded; purge download records whose
    # book no longer exists in the Calibre database.
    for book in content.downloads:
        downloadBook = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
        if downloadBook:
            # reuse the row already fetched instead of querying again
            downloads.append(downloadBook)
        else:
            ub.session.query(ub.Downloads).filter(book.book_id == ub.Downloads.book_id).delete()
            ub.session.commit()
    if request.method == "POST":
        to_save = request.form.to_dict()
        if "delete" in to_save:
            ub.session.delete(content)
            flash(_(u"User '%(nick)s' deleted", nick=content.nickname), category="success")
            return redirect(url_for('admin'))
        else:
            if "password" in to_save and to_save["password"]:
                content.password = generate_password_hash(to_save["password"])

            # Role bits: a checkbox is present in the form only when it is
            # checked, so each bit is toggled only when the form state and
            # the stored state differ.
            if "admin_role" in to_save and not content.role_admin():
                content.role = content.role + ub.ROLE_ADMIN
            elif "admin_role" not in to_save and content.role_admin():
                content.role = content.role - ub.ROLE_ADMIN

            if "download_role" in to_save and not content.role_download():
                content.role = content.role + ub.ROLE_DOWNLOAD
            elif "download_role" not in to_save and content.role_download():
                content.role = content.role - ub.ROLE_DOWNLOAD

            if "upload_role" in to_save and not content.role_upload():
                content.role = content.role + ub.ROLE_UPLOAD
            elif "upload_role" not in to_save and content.role_upload():
                content.role = content.role - ub.ROLE_UPLOAD

            if "edit_role" in to_save and not content.role_edit():
                content.role = content.role + ub.ROLE_EDIT
            elif "edit_role" not in to_save and content.role_edit():
                content.role = content.role - ub.ROLE_EDIT

            if "passwd_role" in to_save and not content.role_passwd():
                content.role = content.role + ub.ROLE_PASSWD
            elif "passwd_role" not in to_save and content.role_passwd():
                content.role = content.role - ub.ROLE_PASSWD

            # Sidebar view bits, same present-only-when-checked scheme.
            if "show_random" in to_save and not content.show_random_books():
                content.sidebar_view += ub.SIDEBAR_RANDOM
            elif "show_random" not in to_save and content.show_random_books():
                content.sidebar_view -= ub.SIDEBAR_RANDOM

            if "show_language" in to_save and not content.show_language():
                content.sidebar_view += ub.SIDEBAR_LANGUAGE
            elif "show_language" not in to_save and content.show_language():
                content.sidebar_view -= ub.SIDEBAR_LANGUAGE

            if "show_series" in to_save and not content.show_series():
                content.sidebar_view += ub.SIDEBAR_SERIES
            elif "show_series" not in to_save and content.show_series():
                content.sidebar_view -= ub.SIDEBAR_SERIES

            if "show_category" in to_save and not content.show_category():
                content.sidebar_view += ub.SIDEBAR_CATEGORY
            elif "show_category" not in to_save and content.show_category():
                content.sidebar_view -= ub.SIDEBAR_CATEGORY

            if "show_hot" in to_save and not content.show_hot_books():
                content.sidebar_view += ub.SIDEBAR_HOT
            elif "show_hot" not in to_save and content.show_hot_books():
                content.sidebar_view -= ub.SIDEBAR_HOT

            if "show_best_rated" in to_save and not content.show_best_rated_books():
                content.sidebar_view += ub.SIDEBAR_BEST_RATED
            elif "show_best_rated" not in to_save and content.show_best_rated_books():
                content.sidebar_view -= ub.SIDEBAR_BEST_RATED

            # BUG FIX: the original added SIDEBAR_READ_AND_UNREAD in BOTH
            # branches (and unconditionally whenever the box was checked),
            # so the bit grew on every save and could never be cleared.
            # Guard the add and subtract on removal, like every other flag.
            if "show_read_and_unread" in to_save and not content.show_read_and_unread():
                content.sidebar_view += ub.SIDEBAR_READ_AND_UNREAD
            elif "show_read_and_unread" not in to_save and content.show_read_and_unread():
                content.sidebar_view -= ub.SIDEBAR_READ_AND_UNREAD

            if "show_author" in to_save and not content.show_author():
                content.sidebar_view += ub.SIDEBAR_AUTHOR
            elif "show_author" not in to_save and content.show_author():
                content.sidebar_view -= ub.SIDEBAR_AUTHOR

            if "show_detail_random" in to_save and not content.show_detail_random():
                content.sidebar_view += ub.DETAIL_RANDOM
            elif "show_detail_random" not in to_save and content.show_detail_random():
                content.sidebar_view -= ub.DETAIL_RANDOM

            if "default_language" in to_save:
                content.default_language = to_save["default_language"]
            if "locale" in to_save and to_save["locale"]:
                content.locale = to_save["locale"]
            if to_save["email"] and to_save["email"] != content.email:
                content.email = to_save["email"]
            if "kindle_mail" in to_save and to_save["kindle_mail"] != content.kindle_mail:
                content.kindle_mail = to_save["kindle_mail"]
        try:
            ub.session.commit()
            flash(_(u"User '%(nick)s' updated", nick=content.nickname), category="success")
        except IntegrityError:
            ub.session.rollback()
            flash(_(u"An unknown error occured."), category="error")
    return render_title_template("user_edit.html", translations=translations, languages=languages, new_user=0,
                                 content=content, downloads=downloads,
                                 title=_(u"Edit User %(nick)s", nick=content.nickname))
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
|
|
|
|
@app.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
@login_required_if_no_ano
@edit_required
def edit_book(book_id):
    """Edit the metadata of a single book.

    GET renders the edit form for the book (respecting the current user's
    language filter). POST reconciles title, authors, cover, series,
    description, tags, languages, rating and every visible custom column
    against the submitted form, commits, and updates the on-disk (or
    Google Drive) directory structure for any book whose title/author
    changed.

    :param book_id: id of the book in the Calibre database
    :return: rendered ``book_edit.html``, or a redirect when the book is
             missing or "detail_view" was requested
    """
    # create the function for sorting... (Calibre's title_sort is a SQL
    # function the schema expects; register it on the raw sqlite handle)
    db.session.connection().connection.connection.create_function("title_sort", 1, db.title_sort)
    cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    # Honor the user's per-language content filter ("all" disables it).
    if current_user.filter_language() != "all":
        filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
    else:
        filter = True
    book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(filter).first()
    author_names = []
    if book:
        # Resolve display names for the book's language codes; fall back
        # to the iso639 table for codes Babel cannot parse.
        for index in range(0, len(book.languages)):
            try:
                book.languages[index].language_name = LC.parse(book.languages[index].lang_code).get_language_name(
                    get_locale())
            except Exception as e:
                book.languages[index].language_name = _(isoLanguages.get(part3=book.languages[index].lang_code).name)
        for author in book.authors:
            author_names.append(author.name)
        if request.method == 'POST':
            # ids of books whose on-disk folder must be renamed afterwards
            edited_books_id = set()
            to_save = request.form.to_dict()
            if book.title != to_save["book_title"]:
                book.title = to_save["book_title"]
                edited_books_id.add(book.id)
            # Authors come in as a single '&'-separated field.
            input_authors = to_save["author_name"].split('&')
            # NOTE(review): map() result is indexed below — assumes
            # Python 2 list semantics; under Python 3 this would break.
            input_authors = map(lambda it: it.strip(), input_authors)
            # we have all author names now
            author0_before_edit = book.authors[0].name
            modify_database_object(input_authors, book.authors, db.Authors, db.session, 'author')
            if author0_before_edit != book.authors[0].name:
                edited_books_id.add(book.id)
            book.author_sort=helper.get_sorted_author(input_authors[0])

            # Download a replacement cover when a .jpg URL was supplied.
            if to_save["cover_url"] and os.path.splitext(to_save["cover_url"])[1].lower() == ".jpg":
                img = requests.get(to_save["cover_url"])
                f = open(os.path.join(config.config_calibre_dir, book.path, "cover.jpg"), "wb")
                f.write(img.content)
                f.close()

            if book.series_index != to_save["series_index"]:
                book.series_index = to_save["series_index"]

            # Replace the first comment, or create one if none exists.
            if len(book.comments):
                book.comments[0].text = to_save["description"]
            else:
                book.comments.append(db.Comments(text=to_save["description"], book=book.id))

            input_tags = to_save["tags"].split(',')
            input_tags = map(lambda it: it.strip(), input_tags)
            modify_database_object(input_tags, book.tags, db.Tags, db.session, 'tags')

            input_series = [to_save["series"].strip()]
            input_series = [x for x in input_series if x != '']
            modify_database_object(input_series, book.series, db.Series, db.session, 'series')

            input_languages = to_save["languages"].split(',')
            input_languages = map(lambda it: it.strip().lower(), input_languages)

            # retranslate displayed text to language codes
            languages = db.session.query(db.Languages).all()
            input_l = []
            for lang in languages:
                try:
                    lang.name = LC.parse(lang.lang_code).get_language_name(get_locale()).lower()
                except Exception as e:
                    lang.name = _(isoLanguages.get(part3=lang.lang_code).name).lower()
                for inp_lang in input_languages:
                    if inp_lang == lang.name:
                        input_l.append(lang.lang_code)
            modify_database_object(input_l, book.languages, db.Languages, db.session, 'languages')

            # Ratings are stored doubled (half-star granularity).
            if to_save["rating"].strip():
                old_rating = False
                if len(book.ratings) > 0:
                    old_rating = book.ratings[0].rating
                ratingx2 = int(float(to_save["rating"]) * 2)
                if ratingx2 != old_rating:
                    is_rating = db.session.query(db.Ratings).filter(db.Ratings.rating == ratingx2).first()
                    if is_rating:
                        book.ratings.append(is_rating)
                    else:
                        new_rating = db.Ratings(rating=ratingx2)
                        book.ratings.append(new_rating)
                    if old_rating:
                        book.ratings.remove(book.ratings[0])
            else:
                # empty rating field clears the book's rating
                if len(book.ratings) > 0:
                    book.ratings.remove(book.ratings[0])

            # Reconcile each visible custom column with the form value.
            for c in cc:
                cc_string = "custom_column_" + str(c.id)
                if not c.is_multiple:
                    # single-valued column: at most one linked value row
                    if len(getattr(book, cc_string)) > 0:
                        cc_db_value = getattr(book, cc_string)[0].value
                    else:
                        cc_db_value = None
                    if to_save[cc_string].strip():
                        if c.datatype == 'bool':
                            # form sends 'None'/'True'/'False' as strings
                            if to_save[cc_string] == 'None':
                                to_save[cc_string] = None
                            else:
                                to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
                            if to_save[cc_string] != cc_db_value:
                                if cc_db_value is not None:
                                    if to_save[cc_string] is not None:
                                        setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
                                    else:
                                        del_cc = getattr(book, cc_string)[0]
                                        getattr(book, cc_string).remove(del_cc)
                                        db.session.delete(del_cc)
                                else:
                                    cc_class = db.cc_classes[c.id]
                                    new_cc = cc_class(value=to_save[cc_string], book=book_id)
                                    db.session.add(new_cc)
                        else:
                            if c.datatype == 'rating':
                                # stored doubled, like the book rating above
                                to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
                            if to_save[cc_string].strip() != cc_db_value:
                                if cc_db_value is not None:
                                    # remove old cc_val
                                    del_cc = getattr(book, cc_string)[0]
                                    getattr(book, cc_string).remove(del_cc)
                                    if len(del_cc.books) == 0:
                                        db.session.delete(del_cc)
                                cc_class = db.cc_classes[c.id]
                                new_cc = db.session.query(cc_class).filter(
                                    cc_class.value == to_save[cc_string].strip()).first()
                                # if no cc val is found add it
                                if new_cc is None:
                                    new_cc = cc_class(value=to_save[cc_string].strip())
                                    db.session.add(new_cc)
                                    new_cc = db.session.query(cc_class).filter(
                                        cc_class.value == to_save[cc_string].strip()).first()
                                # add cc value to book
                                getattr(book, cc_string).append(new_cc)
                    else:
                        # empty form field clears the single value
                        if cc_db_value is not None:
                            # remove old cc_val
                            del_cc = getattr(book, cc_string)[0]
                            getattr(book, cc_string).remove(del_cc)
                            if len(del_cc.books) == 0:
                                db.session.delete(del_cc)
                else:
                    # multi-valued column: treated like tags (csv field)
                    input_tags = to_save[cc_string].split(',')
                    input_tags = map(lambda it: it.strip(), input_tags)
                    input_tags = [x for x in input_tags if x != '']
                    # we have all values now
                    # 1. search for tags to remove
                    del_tags = []
                    for c_tag in getattr(book, cc_string):
                        found = False
                        for inp_tag in input_tags:
                            if inp_tag == c_tag.value:
                                found = True
                                break
                        # if the tag was not found in the new list, add it to the remove list
                        if not found:
                            del_tags.append(c_tag)
                    # 2. search for tags that need to be added
                    add_tags = []
                    for inp_tag in input_tags:
                        found = False
                        for c_tag in getattr(book, cc_string):
                            if inp_tag == c_tag.value:
                                found = True
                                break
                        if not found:
                            add_tags.append(inp_tag)
                    # if there are tags to remove, we remove them now
                    if len(del_tags) > 0:
                        for del_tag in del_tags:
                            getattr(book, cc_string).remove(del_tag)
                            if len(del_tag.books) == 0:
                                db.session.delete(del_tag)
                    # if there are tags to add, we add them now!
                    if len(add_tags) > 0:
                        for add_tag in add_tags:
                            # check if a tag with that name exists
                            new_tag = db.session.query(db.cc_classes[c.id]).filter(
                                db.cc_classes[c.id].value == add_tag).first()
                            # if no tag is found add it
                            if new_tag is None:
                                new_tag = db.cc_classes[c.id](value=add_tag)
                                db.session.add(new_tag)
                                new_tag = db.session.query(db.cc_classes[c.id]).filter(
                                    db.cc_classes[c.id].value == add_tag).first()
                            # add tag to book
                            getattr(book, cc_string).append(new_tag)

            db.session.commit()
            # refresh the author list after the reconciliation above
            author_names = []
            for author in book.authors:
                author_names.append(author.name)
            # rename the on-disk / Drive folders for changed books
            for b in edited_books_id:
                if config.config_use_google_drive:
                    helper.update_dir_structure_gdrive(b)
                else:
                    helper.update_dir_stucture(b, config.config_calibre_dir)
            if config.config_use_google_drive:
                updateGdriveCalibreFromLocal()
            if "detail_view" in to_save:
                return redirect(url_for('show_book', id=book.id))
            else:
                return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
                                             title=_(u"edit metadata"))
        else:
            return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
                                         title=_(u"edit metadata"))
    else:
        # book not found (or filtered out by the user's language filter)
        flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")
        return redirect(url_for("index"))
|
2016-04-03 21:52:32 +00:00
|
|
|
|
2016-06-05 15:41:47 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
@app.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@upload_required
def upload():
    """Accept an uploaded e-book file and register it in the library.

    Validates the file extension, copies the file (and cover) into the
    Calibre directory layout (<author>/<title>/), creates the Books /
    Authors / Languages / Data / Comments rows, and finally renders the
    edit-metadata or detail page for the new book.

    NOTE(review): on a plain GET (or POST without 'btn-upload') the body
    below the first `if` never runs, so the function falls through and
    implicitly returns None — presumably the route is only ever hit via
    the upload form; verify against the templates.
    """
    if not config.config_uploading:
        abort(404)
    # create the function for sorting... (SQL helpers the Calibre schema
    # expects, registered on the raw sqlite connection)
    db.session.connection().connection.connection.create_function("title_sort", 1, db.title_sort)
    db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
    if request.method == 'POST' and 'btn-upload' in request.files:
        file = request.files['btn-upload']
        # Reject files without an extension or with a disallowed one.
        if '.' in file.filename:
            file_ext = file.filename.rsplit('.', 1)[-1].lower()
            if file_ext not in ALLOWED_EXTENSIONS:
                flash(
                    _('File extension "%s" is not allowed to be uploaded to this server' %
                      file_ext),
                    category="error"
                )
                return redirect(url_for('index'))
        else:
            flash(_('File to be uploaded must have an extension'), category="error")
            return redirect(url_for('index'))
        # Extract metadata (title, author, cover, ...) from the upload.
        meta = uploader.upload(file)

        title = meta.title
        author = meta.author

        # Sanitize author/title into file-system-safe directory names.
        title_dir = helper.get_valid_filename(title, False)
        author_dir = helper.get_valid_filename(author, False)
        data_name = title_dir
        filepath = config.config_calibre_dir + os.sep + author_dir + os.sep + title_dir
        saved_filename = filepath + os.sep + data_name + meta.extension

        if not os.path.exists(filepath):
            try:
                os.makedirs(filepath)
            except OSError:
                flash(_(u"Failed to create path %s (Permission denied)." % filepath), category="error")
                return redirect(url_for('index'))
        # Move the uploaded file into the library (copy, then unlink).
        try:
            copyfile(meta.file_path, saved_filename)
        except OSError as e:
            flash(_(u"Failed to store file %s (Permission denied)." % saved_filename), category="error")
            return redirect(url_for('index'))
        try:
            os.unlink(meta.file_path)
        except OSError as e:
            # non-fatal: the book is already in place, only the temp file lingers
            flash(_(u"Failed to delete file %s (Permission denied)." % meta.file_path), category="warning")

        file_size = os.path.getsize(saved_filename)
        # Use the extracted cover, or fall back to the bundled generic one.
        if meta.cover is None:
            has_cover = 0
            basedir = os.path.dirname(__file__)
            copyfile(os.path.join(basedir, "static/generic_cover.jpg"), os.path.join(filepath, "cover.jpg"))
        else:
            has_cover = 1
            move(meta.cover, os.path.join(filepath, "cover.jpg"))

        # Reuse an existing author row when one matches by name.
        is_author = db.session.query(db.Authors).filter(db.Authors.name == author).first()
        if is_author:
            db_author = is_author
        else:
            db_author = db.Authors(author, helper.get_sorted_author(author), "")
            db.session.add(db_author)

        # add language; actually one value in a list
        input_language = meta.languages
        db_language = None
        if input_language != "":
            input_language = isoLanguages.get(name=input_language).part3
            hasLanguage = db.session.query(db.Languages).filter(db.Languages.lang_code == input_language).first()
            if hasLanguage:
                db_language = hasLanguage
            else:
                db_language = db.Languages(input_language)
                db.session.add(db_language)
        # combine path and normalize path from windows systems
        path = os.path.join(author_dir, title_dir).replace('\\','/')
        db_book = db.Books(title, "", db_author.sort, datetime.datetime.now(), datetime.datetime(101, 1, 1), 1,
                           datetime.datetime.now(), path, has_cover, db_author, [], db_language)
        db_book.authors.append(db_author)
        if db_language is not None:
            db_book.languages.append(db_language)
        db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, data_name)
        db_book.data.append(db_data)

        db.session.add(db_book)
        db.session.flush()  # flush so db_book.id becomes available below
        # add comment (unescape HTML entities from the extracted description)
        upload_comment = Markup(meta.description).unescape()
        db_comment = None
        if upload_comment != "":
            db_comment = db.Comments(upload_comment, db_book.id)
            db.session.add(db_comment)
        db.session.commit()
        if db_language is not None:  # display full name instead of iso639.part3
            db_book.languages[0].language_name = _(meta.languages)
        author_names = []
        for author in db_book.authors:
            author_names.append(author.name)
        if config.config_use_google_drive:
            updateGdriveCalibreFromLocal()
        cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
        # Editors go straight to the metadata edit form; everyone else
        # sees the detail page of the freshly added book.
        if current_user.role_edit() or current_user.role_admin():
            return render_title_template('book_edit.html', book=db_book, authors=author_names, cc=cc,
                                         title=_(u"edit metadata"))
        book_in_shelfs = []
        return render_title_template('detail.html', entry=db_book, cc=cc, title=db_book.title,
                                     books_shelfs=book_in_shelfs, )
|