2015-08-02 18:59:11 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2019-01-20 18:37:45 +00:00
|
|
|
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
|
|
|
# Copyright (C) 2012-2019 cervinko, idalin, SiphonSquirrel, ouzklcn, akushsky,
|
|
|
|
# OzzieIsaacs, bodybybuddha, jkrehm, matthazinski, janeczku
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2019-07-13 18:45:48 +00:00
|
|
|
from __future__ import division, print_function, unicode_literals
|
2015-08-02 18:59:11 +00:00
|
|
|
import sys
|
|
|
|
import os
|
2019-07-13 18:45:48 +00:00
|
|
|
import io
|
|
|
|
import mimetypes
|
2016-03-26 15:12:29 +00:00
|
|
|
import re
|
2019-07-13 18:45:48 +00:00
|
|
|
import shutil
|
2018-08-04 08:56:42 +00:00
|
|
|
import time
|
2019-07-13 18:45:48 +00:00
|
|
|
import unicodedata
|
|
|
|
from datetime import datetime, timedelta
|
|
|
|
from tempfile import gettempdir
|
|
|
|
|
|
|
|
import requests
|
|
|
|
from babel.dates import format_datetime
|
|
|
|
from babel.units import format_unit
|
2020-12-07 07:44:49 +00:00
|
|
|
from flask import send_from_directory, make_response, redirect, abort, url_for, g
|
2016-11-09 18:24:33 +00:00
|
|
|
from flask_babel import gettext as _
|
2018-11-03 12:43:38 +00:00
|
|
|
from flask_login import current_user
|
2020-10-27 10:06:43 +00:00
|
|
|
from sqlalchemy.sql.expression import true, false, and_, text
|
2019-07-13 18:45:48 +00:00
|
|
|
from werkzeug.datastructures import Headers
|
2019-12-15 16:08:17 +00:00
|
|
|
from werkzeug.security import generate_password_hash
|
2019-07-13 18:45:48 +00:00
|
|
|
|
2017-03-16 23:36:37 +00:00
|
|
|
try:
|
2019-07-13 18:45:48 +00:00
|
|
|
from urllib.parse import quote
|
2017-03-16 23:36:37 +00:00
|
|
|
except ImportError:
|
2019-07-13 18:45:48 +00:00
|
|
|
from urllib import quote
|
2017-02-20 18:52:00 +00:00
|
|
|
|
2017-02-15 17:09:17 +00:00
|
|
|
try:
|
|
|
|
import unidecode
|
2017-03-31 14:52:25 +00:00
|
|
|
use_unidecode = True
|
2017-11-30 15:49:46 +00:00
|
|
|
except ImportError:
|
2017-03-31 14:52:25 +00:00
|
|
|
use_unidecode = False
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2019-04-17 18:45:08 +00:00
|
|
|
try:
|
2019-11-03 08:49:54 +00:00
|
|
|
from PIL import Image as PILImage
|
2020-05-29 04:59:40 +00:00
|
|
|
from PIL import UnidentifiedImageError
|
2019-04-17 18:45:08 +00:00
|
|
|
use_PIL = True
|
|
|
|
except ImportError:
|
|
|
|
use_PIL = False
|
|
|
|
|
2020-11-15 13:19:25 +00:00
|
|
|
from . import calibre_db
|
|
|
|
from .tasks.convert import TaskConvert
|
2020-08-24 01:38:56 +00:00
|
|
|
from . import logger, config, get_locale, db, ub
|
2019-07-13 18:45:48 +00:00
|
|
|
from . import gdriveutils as gd
|
|
|
|
from .constants import STATIC_DIR as _STATIC_DIR
|
|
|
|
from .subproc_wrapper import process_wait
|
2020-08-23 03:35:48 +00:00
|
|
|
from .services.worker import WorkerThread, STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS
|
2020-08-30 06:49:53 +00:00
|
|
|
from .tasks.mail import TaskEmail
|
2019-07-13 18:45:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
log = logger.create()
|
2017-05-19 19:30:39 +00:00
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2018-08-31 13:19:48 +00:00
|
|
|
# Convert existing book entry to new format
|
|
|
|
def convert_book_format(book_id, calibrepath, old_book_format, new_book_format, user_id, kindle_mail=None):
    """
    Queue a conversion task for an existing book entry.

    :param book_id: id of the book in the calibre database
    :param calibrepath: root directory of the local calibre library
    :param old_book_format: source format, upper case (e.g. 'EPUB')
    :param new_book_format: target format, upper case
    :param user_id: user the task is queued for
    :param kindle_mail: optional recipient address; when set the converted
                        file is e-mailed after conversion
    :return: None on success, otherwise a translated error message string
    """
    book = calibre_db.get_book(book_id)
    data = calibre_db.get_book_format(book.id, old_book_format)
    if not data:
        # BUGFIX: this check must run before data.name is accessed; previously
        # file_path was computed first, raising AttributeError on a missing format.
        error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id)
        log.error("convert_book_format: %s", error_message)
        return error_message
    file_path = os.path.join(calibrepath, book.path, data.name)
    if config.config_use_google_drive:
        # verify the source file actually exists on Google Drive
        if not gd.getFileFromEbooksFolder(book.path, data.name + "." + old_book_format.lower()):
            error_message = _(u"%(format)s not found on Google Drive: %(fn)s",
                              format=old_book_format, fn=data.name + "." + old_book_format.lower())
            return error_message
    else:
        # verify the source file actually exists on the local filesystem
        if not os.path.exists(file_path + "." + old_book_format.lower()):
            error_message = _(u"%(format)s not found: %(fn)s",
                              format=old_book_format, fn=data.name + "." + old_book_format.lower())
            return error_message
    # read settings and append converter task to queue
    if kindle_mail:
        settings = config.get_mail_settings()
        settings['subject'] = _('Send to Kindle')  # pretranslate Subject for e-mail
        settings['body'] = _(u'This e-mail has been sent via Calibre-Web.')
    else:
        settings = dict()
    txt = (u"%s -> %s: %s" % (
        old_book_format,
        new_book_format,
        "<a href=\"" + url_for('web.show_book', book_id=book.id) + "\">" + book.title + "</a>"))
    settings['old_book_format'] = old_book_format
    settings['new_book_format'] = new_book_format
    WorkerThread.add(user_id, TaskConvert(file_path, book.id, txt, settings, kindle_mail, user_id))
    return None
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2018-07-30 18:12:41 +00:00
|
|
|
def send_test_mail(kindle_mail, user_name):
    """Queue a test e-mail to *kindle_mail* on behalf of *user_name*."""
    task = TaskEmail(_(u'Calibre-Web test e-mail'), None, None,
                     config.get_mail_settings(), kindle_mail, _(u"Test e-mail"),
                     _(u'This e-mail has been sent via Calibre-Web.'))
    WorkerThread.add(user_name, task)
    return
|
|
|
|
|
2018-08-24 13:48:09 +00:00
|
|
|
|
|
|
|
# Send registration email or password reset email, depending on parameter resend (False means welcome email)
|
|
|
|
def send_registration_mail(e_mail, user_name, default_password, resend=False):
    """Queue a registration or password-reset e-mail.

    When *resend* is False a welcome message is sent; otherwise only the
    credentials part is included (password reset).
    """
    lines = ["Hello %s!\r\n" % user_name]
    if not resend:
        lines.append("Your new account at Calibre-Web has been created. Thanks for joining us!\r\n")
    lines.append("Please log in to your account using the following informations:\r\n")
    lines.append("User name: %s\r\n" % user_name)
    lines.append("Password: %s\r\n" % default_password)
    lines.append("Don't forget to change your password after first login.\r\n")
    lines.append("Sincerely\r\n\r\n")
    lines.append("Your Calibre-Web team")
    WorkerThread.add(None, TaskEmail(
        subject=_(u'Get Started with Calibre-Web'),
        filepath=None,
        attachment=None,
        settings=config.get_mail_settings(),
        recipient=e_mail,
        taskMessage=_(u"Registration e-mail for user: %(name)s", name=user_name),
        text="".join(lines)
    ))

    return
|
|
|
|
|
2019-07-13 18:45:48 +00:00
|
|
|
|
2018-11-18 16:09:13 +00:00
|
|
|
def check_send_to_kindle(entry):
    """
    Return all available book formats of *entry* for sending to a Kindle.

    Each list item is a dict with keys 'format' (display name), 'convert'
    (0 = send as-is, 1 = convert from Epub first, 2 = convert from Azw3
    first) and 'text' (translated button caption).  Returns None when the
    book entry has no file data at all.
    """
    if len(entry.data):
        bookformats = list()
        if not config.config_converterpath:
            # no converter - only for mobi and pdf formats
            for ele in iter(entry.data):
                # skip files larger than the configured e-mail attachment limit
                if ele.uncompressed_size < config.mail_size:
                    if 'MOBI' in ele.format:
                        bookformats.append({'format': 'Mobi',
                                            'convert': 0,
                                            'text': _('Send %(format)s to Kindle', format='Mobi')})
                    if 'PDF' in ele.format:
                        bookformats.append({'format': 'Pdf',
                                            'convert': 0,
                                            'text': _('Send %(format)s to Kindle', format='Pdf')})
                    if 'AZW' in ele.format:
                        bookformats.append({'format': 'Azw',
                                            'convert': 0,
                                            'text': _('Send %(format)s to Kindle', format='Azw')})
        else:
            # converter configured: collect mailable formats first, then
            # build the send/convert options from that list
            formats = list()
            for ele in iter(entry.data):
                if ele.uncompressed_size < config.mail_size:
                    formats.append(ele.format)
            if 'MOBI' in formats:
                bookformats.append({'format': 'Mobi',
                                    'convert': 0,
                                    'text': _('Send %(format)s to Kindle', format='Mobi')})
            if 'AZW' in formats:
                bookformats.append({'format': 'Azw',
                                    'convert': 0,
                                    'text': _('Send %(format)s to Kindle', format='Azw')})
            if 'PDF' in formats:
                bookformats.append({'format': 'Pdf',
                                    'convert': 0,
                                    'text': _('Send %(format)s to Kindle', format='Pdf')})
            if config.config_converterpath:
                # offer conversion only when no ready-made MOBI exists
                if 'EPUB' in formats and not 'MOBI' in formats:
                    bookformats.append({'format': 'Mobi',
                                        'convert':1,
                                        'text': _('Convert %(orig)s to %(format)s and send to Kindle',
                                                  orig='Epub',
                                                  format='Mobi')})
                if 'AZW3' in formats and not 'MOBI' in formats:
                    bookformats.append({'format': 'Mobi',
                                        'convert': 2,
                                        'text': _('Convert %(orig)s to %(format)s and send to Kindle',
                                                  orig='Azw3',
                                                  format='Mobi')})
        return bookformats
    else:
        log.error(u'Cannot find book entry %d', entry.id)
        return None
|
2018-10-01 18:19:29 +00:00
|
|
|
|
2018-08-24 13:48:09 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
# Check if a reader is existing for any of the book formats, if not, return empty list, otherwise return
|
|
|
|
# list with supported formats
|
|
|
|
def check_read_formats(entry):
    """Return the lower-cased formats of *entry* that the in-browser reader
    supports; an empty list when the book has no readable format."""
    readable = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR'}
    if not len(entry.data):
        return []
    return [item.format.lower() for item in entry.data if item.format.upper() in readable]
|
|
|
|
|
|
|
|
|
2018-08-12 07:29:57 +00:00
|
|
|
# Files are processed in the following order/priority:
|
2018-11-18 16:09:13 +00:00
|
|
|
# 1: If Mobi file is existing, it's directly send to kindle email,
|
|
|
|
# 2: If Epub file is existing, it's converted and send to kindle email,
|
|
|
|
# 3: If Pdf file is existing, it's directly send to kindle email
|
|
|
|
def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id):
    """Send email with attachments.

    convert == 1 converts from Epub first, convert == 2 converts from Azw3
    first; otherwise the already existing file of *book_format* is attached
    directly.  Returns None when a task was queued, otherwise a translated
    error message string.
    """
    book = calibre_db.get_book(book_id)

    if convert == 1:
        # returns None if success, otherwise errormessage
        return convert_book_format(book_id, calibrepath, u'epub', book_format.lower(), user_id, kindle_mail)
    if convert == 2:
        # returns None if success, otherwise errormessage
        return convert_book_format(book_id, calibrepath, u'azw3', book_format.lower(), user_id, kindle_mail)

    # no conversion requested: attach the matching existing file directly
    for entry in iter(book.data):
        if entry.format.upper() == book_format.upper():
            converted_file_name = entry.name + '.' + book_format.lower()
            WorkerThread.add(user_id, TaskEmail(_(u"Send to Kindle"), book.path, converted_file_name,
                                                config.get_mail_settings(), kindle_mail,
                                                _(u"E-mail: %(book)s", book=book.title), _(u'This e-mail has been sent via Calibre-Web.')))
            return
    return _(u"The requested file could not be read. Maybe wrong permissions?")
|
2016-03-26 15:12:29 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-04-03 21:52:32 +00:00
|
|
|
def get_valid_filename(value, replace_whitespace=True):
    """
    Return *value* converted to a string usable as a clean filename,
    limited to 128 characters.
    """
    # a trailing dot is replaced, some filesystems strip or reject it
    if value.endswith(u'.'):
        value = value[:-1] + u'_'
    value = value.replace("/", "_").replace(":", "_").strip('\0')
    if use_unidecode:
        value = unidecode.unidecode(value)
    else:
        value = value.replace(u'§', u'SS').replace(u'ß', u'ss')
        value = unicodedata.normalize('NFKD', value)
        slug_pattern = re.compile(r'[\W\s-]', re.UNICODE)
        if isinstance(value, str):  # Python3 str, Python2 unicode
            value = slug_pattern.sub('', value)
        else:
            value = unicode(slug_pattern.sub('', value))
    if replace_whitespace:
        value = re.sub(r'[*+:\\\"/<>?]+', u'_', value, flags=re.U)  # *+:\"/<>? are replaced by _
        value = re.sub(r'[|]+', u',', value, flags=re.U)            # pipe has to be replaced with comma
    value = value[:128].strip()
    if not value:
        raise ValueError("Filename cannot be empty")
    return value if sys.version_info.major == 3 else value.decode('utf-8')
|
2016-03-26 15:12:29 +00:00
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2020-08-23 07:44:42 +00:00
|
|
|
def split_authors(values):
    """Split raw author strings into a flat list of individual author names.

    Entries are split on '&' and ';'; a single comma is interpreted as
    "Lastname, Firstname" and flipped, while multiple commas are treated
    as a plain separator list.
    """
    result = []
    for raw in values:
        for candidate in re.split('[&;]', raw):
            comma_count = candidate.count(',')
            if comma_count == 1:
                surname, forename = candidate.split(',')
                result.append(forename.strip() + ' ' + surname.strip())
            elif comma_count > 1:
                result.extend(part.strip() for part in candidate.split(','))
            else:
                result.append(candidate.strip())
    return result
|
|
|
|
|
|
|
|
|
2017-02-15 17:09:17 +00:00
|
|
|
def get_sorted_author(value):
    """
    Return the sort form "Lastname, Firstname(s)" of author name *value*.

    A name already containing a comma is returned unchanged.  Generation
    suffixes (Jr/Sr and roman numerals I-IV) stay attached after the given
    names.  On any unexpected error the input is returned best-effort.
    """
    try:
        if ',' not in value:
            regexes = [r"^(JR|SR)\.?$", r"^I{1,3}\.?$", r"^IV\.?$"]
            combined = "(" + ")|(".join(regexes) + ")"
            value = value.split(" ")
            if re.match(combined, value[-1].upper()):
                # last token is a suffix: sort on the token before it
                if len(value) > 1:
                    value2 = value[-2] + ", " + " ".join(value[:-2]) + " " + value[-1]
                else:
                    value2 = value[0]
            elif len(value) == 1:
                value2 = value[0]
            else:
                value2 = value[-1] + ", " + " ".join(value[:-1])
        else:
            value2 = value
    except Exception as ex:
        log.error("Sorting author %s failed: %s", value, ex)
        # BUGFIX: original read `isinstance(list, value2)` -- arguments
        # swapped, which raised TypeError, and value2 could be unbound here.
        # Test `value` (possibly already split into a list) instead.
        if isinstance(value, list):
            value2 = value[0]
        else:
            value2 = value
    return value2
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2018-08-04 16:22:43 +00:00
|
|
|
|
2018-07-14 17:40:59 +00:00
|
|
|
# Deletes a book from the local filestorage, returns True if deleting is successful, otherwise false
|
2018-08-04 16:22:43 +00:00
|
|
|
def delete_book_file(book, calibrepath, book_format=None):
    """
    Delete a book (or a single format of it) from the local file storage.

    :param book: book database object (path is 'author/title (id)')
    :param calibrepath: root of the local calibre library
    :param book_format: when set, only files of this (upper case) format
                        are removed and the book folder is kept
    :return: (success, error_message) tuple; error_message is None on success
    """
    # check that path is 2 elements deep, check that target path has no subfolders
    if book.path.count('/') == 1:
        path = os.path.join(calibrepath, book.path)
        if book_format:
            # remove only files of the requested format, keep the folder
            for file in os.listdir(path):
                if file.upper().endswith("."+book_format):
                    os.remove(os.path.join(path, file))
            return True, None
        else:
            if os.path.isdir(path):
                try:
                    # delete all plain files first; abort (without rmtree) if
                    # unexpected subfolders are found in the book folder
                    for root, folders, files in os.walk(path):
                        for f in files:
                            os.unlink(os.path.join(root, f))
                        if len(folders):
                            log.warning("Deleting book {} failed, path {} has subfolders: {}".format(book.id,
                                                                                                     book.path, folders))
                            return True, _("Deleting bookfolder for book %(id)s failed, path has subfolders: %(path)s",
                                           id=book.id,
                                           path=book.path)
                    shutil.rmtree(path)
                except (IOError, OSError) as e:
                    log.error("Deleting book %s failed: %s", book.id, e)
                    return False, _("Deleting book %(id)s failed: %(message)s", id=book.id, message=e)
                # remove the author folder too if it became empty
                authorpath = os.path.join(calibrepath, os.path.split(book.path)[0])
                if not os.listdir(authorpath):
                    try:
                        shutil.rmtree(authorpath)
                    except (IOError, OSError) as e:
                        # best effort only: failure here is logged, not reported
                        log.error("Deleting authorpath for book %s failed: %s", book.id, e)
                return True, None
    else:
        log.error("Deleting book %s failed, book path not valid: %s", book.id, book.path)
        return True, _("Deleting book %(id)s, book path not valid: %(path)s",
                       id=book.id,
                       path=book.path)
|
2017-11-30 15:49:46 +00:00
|
|
|
|
|
|
|
|
2020-09-07 19:26:59 +00:00
|
|
|
# Moves files in file storage during author/title rename, or from temp dir to file storage
|
|
|
|
# Moves files in file storage during author/title rename, or from temp dir to file storage
def update_dir_structure_file(book_id, calibrepath, first_author, orignal_filepath, db_filename):
    """
    Move/rename a book's folder and files in the local file storage.

    Used both for author/title renames and for moving an uploaded file from
    the temp dir (*orignal_filepath*/*db_filename* set) into the library.
    Returns False on success, otherwise a translated error message.
    """
    # get book database entry from id, if original path overwrite source with original_filepath
    localbook = calibre_db.get_book(book_id)
    if orignal_filepath:
        path = orignal_filepath
    else:
        path = os.path.join(calibrepath, localbook.path)

    # Create (current) authordir and titledir from database
    authordir = localbook.path.split('/')[0]
    titledir = localbook.path.split('/')[1]

    # Create new_authordir from parameter or from database
    # Create new titledir from database and add id
    if first_author:
        new_authordir = get_valid_filename(first_author)
    else:
        new_authordir = get_valid_filename(localbook.authors[0].name)
    new_titledir = get_valid_filename(localbook.title) + " (" + str(book_id) + ")"

    if titledir != new_titledir or authordir != new_authordir or orignal_filepath:
        new_path = os.path.join(calibrepath, new_authordir, new_titledir)
        new_name = get_valid_filename(localbook.title) + ' - ' + get_valid_filename(new_authordir)
        try:
            if orignal_filepath:
                # upload case: move the single source file into the new folder
                if not os.path.isdir(new_path):
                    os.makedirs(new_path)
                shutil.move(os.path.normcase(path), os.path.normcase(os.path.join(new_path, db_filename)))
                log.debug("Moving title: %s to %s/%s", path, new_path, new_name)
            # Check new path is not valid path
            else:
                if not os.path.exists(new_path):
                    # move original path to new path
                    log.debug("Moving title: %s to %s", path, new_path)
                    shutil.move(os.path.normcase(path), os.path.normcase(new_path))
                else: # path is valid copy only files to new location (merge)
                    log.info("Moving title: %s into existing: %s", path, new_path)
                    # Take all files and subfolder from old path (strange command)
                    for dir_name, __, file_list in os.walk(path):
                        for file in file_list:
                            shutil.move(os.path.normcase(os.path.join(dir_name, file)),
                                        os.path.normcase(os.path.join(new_path + dir_name[len(path):], file)))
                            # os.unlink(os.path.normcase(os.path.join(dir_name, file)))
            # change location in database to new author/title path
            localbook.path = os.path.join(new_authordir, new_titledir).replace('\\','/')
        except OSError as ex:
            log.error("Rename title from: %s to %s: %s", path, new_path, ex)
            log.debug(ex, exc_info=True)
            return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
                     src=path, dest=new_path, error=str(ex))

        # Rename all files from old names to new names
        try:
            for file_format in localbook.data:
                shutil.move(os.path.normcase(
                    os.path.join(new_path, file_format.name + '.' + file_format.format.lower())),
                    os.path.normcase(os.path.join(new_path, new_name + '.' + file_format.format.lower())))
                file_format.name = new_name
            # remove the now-empty old title folder (rename case only)
            if not orignal_filepath and len(os.listdir(os.path.dirname(path))) == 0:
                shutil.rmtree(os.path.dirname(path))
        except OSError as ex:
            log.error("Rename file in path %s to %s: %s", new_path, new_name, ex)
            log.debug(ex, exc_info=True)
            return _("Rename file in path '%(src)s' to '%(dest)s' failed with error: %(error)s",
                     src=new_path, dest=new_name, error=str(ex))
    return False
|
2017-01-30 17:58:36 +00:00
|
|
|
|
2020-09-12 10:11:33 +00:00
|
|
|
def update_dir_structure_gdrive(book_id, first_author):
    """
    Rename a book's title/author folders on Google Drive after an edit.

    Mirrors update_dir_structure_file for Google-Drive-backed libraries.
    Returns False on success, otherwise a translated error message.
    """
    error = False
    book = calibre_db.get_book(book_id)
    path = book.path

    authordir = book.path.split('/')[0]
    if first_author:
        new_authordir = get_valid_filename(first_author)
    else:
        new_authordir = get_valid_filename(book.authors[0].name)
    titledir = book.path.split('/')[1]
    new_titledir = get_valid_filename(book.title) + u" (" + str(book_id) + u")"

    if titledir != new_titledir:
        # rename the title folder in place
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
        if gFile:
            gFile['title'] = new_titledir
            gFile.Upload()
            book.path = book.path.split('/')[0] + u'/' + new_titledir
            path = book.path
            gd.updateDatabaseOnEdit(gFile['id'], book.path)     # only child folder affected
        else:
            error = _(u'File %(file)s not found on Google Drive', file=book.path)  # file not found

    if authordir != new_authordir:
        # move the title folder under the new author folder
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
        if gFile:
            gd.moveGdriveFolderRemote(gFile, new_authordir)
            book.path = new_authordir + u'/' + book.path.split('/')[1]
            path = book.path
            gd.updateDatabaseOnEdit(gFile['id'], book.path)
        else:
            error = _(u'File %(file)s not found on Google Drive', file=authordir)  # file not found
    # Rename all files from old names to new names

    if authordir != new_authordir or titledir != new_titledir:
        new_name = get_valid_filename(book.title) + u' - ' + get_valid_filename(new_authordir)
        for file_format in book.data:
            gFile = gd.getFileFromEbooksFolder(path, file_format.name + u'.' + file_format.format.lower())
            if not gFile:
                error = _(u'File %(file)s not found on Google Drive', file=file_format.name)  # file not found
                break
            gd.moveGdriveFileRemote(gFile, new_name + u'.' + file_format.format.lower())
            file_format.name = new_name
    return error
|
2017-03-01 22:38:03 +00:00
|
|
|
|
2018-07-14 06:31:52 +00:00
|
|
|
|
2018-08-04 16:22:43 +00:00
|
|
|
def delete_book_gdrive(book, book_format):
    """
    Delete a book (or one format of it) from Google Drive.

    :param book: book database object
    :param book_format: when set, only the file of this (upper case) format
                        is trashed; otherwise the whole title folder
    :return: (success, error_message) tuple; error_message is None on success
    """
    error = None
    if book_format:
        # locate the single file of the requested format
        name = ''
        for entry in book.data:
            if entry.format.upper() == book_format:
                name = entry.name + '.' + book_format
        gFile = gd.getFileFromEbooksFolder(book.path, name)
    else:
        # locate the whole title folder
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
    if gFile:
        gd.deleteDatabaseEntry(gFile['id'])
        gFile.Trash()
    else:
        error = _(u'Book path %(path)s not found on Google Drive', path=book.path)  # file not found

    return error is None, error
|
2018-07-14 06:31:52 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2019-12-15 16:08:17 +00:00
|
|
|
def reset_password(user_id):
    """
    Reset the password of user *user_id* to a random one and mail it.

    :return: (status, username) where status is 0 = failure / unknown user,
             1 = success, 2 = no mail server configured
    """
    existing_user = g.ubsession.query(ub.User).filter(ub.User.id == user_id).first()
    if not existing_user:
        return 0, None
    if not config.get_mail_server_configured():
        return 2, None
    try:
        password = generate_random_password()
        existing_user.password = generate_password_hash(password)
        g.ubsession.commit()
        send_registration_mail(existing_user.email, existing_user.nickname, password, True)
        return 1, existing_user.nickname
    except Exception as ex:
        # Robustness fix: the failure used to be swallowed silently; log it
        # before rolling back so mail/database errors can be diagnosed.
        log.error("Password reset for user %s failed: %s", user_id, ex)
        g.ubsession.rollback()
        return 0, None
|
2019-12-15 16:08:17 +00:00
|
|
|
|
|
|
|
|
2018-08-24 13:48:09 +00:00
|
|
|
def generate_random_password():
    """Return an 8-character random password drawn from a fixed alphabet,
    using os.urandom as the entropy source (Python 2 and 3 compatible)."""
    alphabet = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"
    length = 8
    raw = os.urandom(length)
    if sys.version_info < (3, 0):
        # Python 2: os.urandom yields a str, convert each char to its ordinal
        return "".join(alphabet[ord(ch) % len(alphabet)] for ch in raw)
    # Python 3: bytes iterate as ints directly
    return "".join(alphabet[byte % len(alphabet)] for byte in raw)
|
2018-08-24 13:48:09 +00:00
|
|
|
|
2020-06-22 17:11:03 +00:00
|
|
|
|
2020-08-23 07:44:42 +00:00
|
|
|
def uniq(inpt):
    """Return the elements of *inpt* with duplicates removed, preserving
    first-seen order (works for unhashable elements too)."""
    deduped = []
    for item in inpt:
        if item not in deduped:
            deduped.append(item)
    return deduped
|
2020-06-22 17:11:03 +00:00
|
|
|
|
2020-08-23 07:44:42 +00:00
|
|
|
# ################################# External interface #################################
|
2018-07-14 06:31:52 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2020-09-07 19:26:59 +00:00
|
|
|
def update_dir_stucture(book_id, calibrepath, first_author=None, orignal_filepath=None, db_filename=None):
    """Dispatch a book folder rename/move to the Google Drive or the local
    filesystem implementation depending on configuration."""
    if not config.config_use_google_drive:
        return update_dir_structure_file(book_id, calibrepath, first_author, orignal_filepath, db_filename)
    return update_dir_structure_gdrive(book_id, first_author)
|
2018-07-14 06:31:52 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2018-08-04 16:22:43 +00:00
|
|
|
def delete_book(book, calibrepath, book_format):
    """Dispatch a book deletion to the Google Drive or the local filesystem
    implementation depending on configuration."""
    if not config.config_use_google_drive:
        return delete_book_file(book, calibrepath, book_format)
    return delete_book_gdrive(book, book_format)
|
2018-08-04 08:56:42 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2019-12-17 17:00:35 +00:00
|
|
|
def get_cover_on_failure(use_generic_cover):
    """Return the bundled generic cover image response, or None when no
    fallback cover is wanted."""
    if not use_generic_cover:
        return None
    return send_from_directory(_STATIC_DIR, "generic_cover.jpg")
|
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2019-07-13 18:45:48 +00:00
|
|
|
def get_book_cover(book_id):
    """Return the cover response for *book_id*, falling back to the generic
    cover when the book or its cover is missing."""
    return get_book_cover_internal(calibre_db.get_filtered_book(book_id, allow_show_archived=True),
                                   use_generic_cover_on_failure=True)
|
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2019-12-17 17:00:35 +00:00
|
|
|
def get_book_cover_with_uuid(book_uuid,
                             use_generic_cover_on_failure=True):
    """Return the cover response for the book identified by *book_uuid*."""
    return get_book_cover_internal(calibre_db.get_book_by_uuid(book_uuid),
                                   use_generic_cover_on_failure)
|
2019-07-13 18:45:48 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2020-05-23 08:16:29 +00:00
|
|
|
def get_book_cover_internal(book, use_generic_cover_on_failure):
    """Build the HTTP response delivering *book*'s cover image.

    Depending on configuration the cover is either redirected to a Google
    Drive URL or served from the local Calibre library directory.  Every
    failure path funnels through get_cover_on_failure(), which returns the
    generic placeholder or None according to *use_generic_cover_on_failure*.
    """
    if book and book.has_cover:
        if config.config_use_google_drive:
            try:
                # Drive credentials may not be configured yet
                if not gd.is_gdrive_ready():
                    return get_cover_on_failure(use_generic_cover_on_failure)
                path = gd.get_cover_via_gdrive(book.path)
                if path:
                    # client fetches the image straight from Google Drive
                    return redirect(path)
                else:
                    log.error('%s/cover.jpg not found on Google Drive', book.path)
                    return get_cover_on_failure(use_generic_cover_on_failure)
            except Exception as e:
                # deliberately broad: any Drive/API error degrades to the placeholder
                log.exception(e)
                # traceback.print_exc()
                return get_cover_on_failure(use_generic_cover_on_failure)
        else:
            # local library: covers are stored as <calibre_dir>/<book.path>/cover.jpg
            cover_file_path = os.path.join(config.config_calibre_dir, book.path)
            if os.path.isfile(os.path.join(cover_file_path, "cover.jpg")):
                return send_from_directory(cover_file_path, "cover.jpg")
            else:
                return get_cover_on_failure(use_generic_cover_on_failure)
    else:
        # no book record, or the record says it has no cover
        return get_cover_on_failure(use_generic_cover_on_failure)
|
2018-08-04 08:56:42 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2019-04-15 18:57:25 +00:00
|
|
|
# saves book cover from url
|
|
|
|
def save_cover_from_url(url, book_path):
    """Download a cover image from *url* and store it for the book at
    *book_path*.

    Returns the (success, message) tuple produced by save_cover(), or
    (False, translated error) on download/format problems.
    """
    try:
        # (connect timeout, read timeout) in seconds
        img = requests.get(url, timeout=(10, 200))  # ToDo: Error Handling
        img.raise_for_status()
        return save_cover(img, book_path)
    except (requests.exceptions.HTTPError,
            requests.exceptions.ConnectionError,
            requests.exceptions.Timeout) as ex:
        log.info(u'Cover Download Error %s', ex)
        return False, _("Error Downloading Cover")
    except UnidentifiedImageError as ex:
        # raised by Pillow when the downloaded payload is not a readable image
        log.info(u'File Format Error %s', ex)
        return False, _("Cover Format Error")
|
|
|
|
|
2019-04-15 18:57:25 +00:00
|
|
|
|
|
|
|
def save_cover_from_filestorage(filepath, saved_filename, img):
    """Write a cover image to ``filepath/saved_filename``.

    *img* is either an object carrying raw bytes in ``_content`` (a
    requests.Response whose payload was possibly rewritten by save_cover),
    or a file-storage-like object exposing ``save()``.

    Returns a (success, message) tuple: (True, None) on success, otherwise
    (False, translated error message).
    """
    if hasattr(img, '_content'):
        # raw bytes path: use a context manager so the handle is closed
        # even if the write raises (the old open/close pair leaked it)
        with open(os.path.join(filepath, saved_filename), "wb") as f:
            f.write(img._content)
    else:
        # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
        if not os.path.exists(filepath):
            try:
                os.makedirs(filepath)
            except OSError:
                log.error(u"Failed to create path for cover")
                return False, _(u"Failed to create path for cover")
        try:
            img.save(os.path.join(filepath, saved_filename))
        except (IOError, OSError):
            log.error(u"Cover-file is not a valid image file, or could not be stored")
            return False, _(u"Cover-file is not a valid image file, or could not be stored")
    return True, None
|
2019-04-15 18:57:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
# saves book cover to gdrive or locally
|
|
|
|
def save_cover(img, book_path):
    """Validate, convert and persist a book cover.

    *img* is a requests.Response or an uploaded file-storage object.  With
    Pillow available, png/webp covers are converted to JPEG (Calibre only
    stores cover.jpg); without Pillow, only jpeg input is accepted.  The
    result is uploaded to Google Drive or written into the local library,
    and a (success, message) tuple is returned.
    """
    content_type = img.headers.get('content-type')

    if use_PIL:
        if content_type not in ('image/jpeg', 'image/png', 'image/webp'):
            log.error("Only jpg/jpeg/png/webp files are supported as coverfile")
            return False, _("Only jpg/jpeg/png/webp files are supported as coverfile")
        # convert to jpg because calibre only supports jpg
        if content_type in ('image/png', 'image/webp'):
            if hasattr(img, 'stream'):
                imgc = PILImage.open(img.stream)
            else:
                imgc = PILImage.open(io.BytesIO(img.content))
            im = imgc.convert('RGB')
            tmp_bytesio = io.BytesIO()
            im.save(tmp_bytesio, format='JPEG')
            # stash the converted bytes so save_cover_from_filestorage writes JPEG
            img._content = tmp_bytesio.getvalue()
    else:
        # BUGFIX: was "content_type not in 'image/jpeg'", i.e. a substring
        # test that accepted any substring of 'image/jpeg' (e.g. 'image')
        # and raised TypeError for a missing content-type header
        if content_type != 'image/jpeg':
            log.error("Only jpg/jpeg files are supported as coverfile")
            return False, _("Only jpg/jpeg files are supported as coverfile")

    if config.config_use_google_drive:
        # stage the file locally, then push it to the Drive ebooks folder
        tmpDir = gettempdir()
        ret, message = save_cover_from_filestorage(tmpDir, "uploaded_cover.jpg", img)
        if ret is True:
            gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'),
                                        os.path.join(tmpDir, "uploaded_cover.jpg"))
            log.info("Cover is saved on Google Drive")
            return True, None
        else:
            return False, message
    else:
        return save_cover_from_filestorage(os.path.join(config.config_calibre_dir, book_path), "cover.jpg", img)
|
2019-04-15 18:57:25 +00:00
|
|
|
|
2018-08-04 08:56:42 +00:00
|
|
|
|
2019-08-18 19:44:19 +00:00
|
|
|
def do_download_file(book, book_format, client, data, headers):
    """Stream one book file (``data.name + '.' + book_format``) to the client,
    either proxied from Google Drive or served from the local library.

    *headers* is an iterable of (name, value) pairs that are copied onto the
    response; *client* currently only distinguishes Kobo devices.
    """
    if config.config_use_google_drive:
        startTime = time.time()
        df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
        # how long the Drive lookup took
        log.debug('%s', time.time() - startTime)
        if df:
            return gd.do_gdrive_download(df, headers)
        else:
            abort(404)
    else:
        filename = os.path.join(config.config_calibre_dir, book.path)
        if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)):
            # ToDo: improve error handling
            # NOTE(review): only logs and falls through; send_from_directory
            # below is what actually produces the error response
            log.error('File not found: %s', os.path.join(filename, data.name + "." + book_format))

        # Kobo expects kepub files to be named .kepub.epub
        if client == "kobo" and book_format == "kepub":
            headers["Content-Disposition"] = headers["Content-Disposition"].replace(".kepub", ".kepub.epub")

        response = make_response(send_from_directory(filename, data.name + "." + book_format))
        # ToDo Check headers parameter
        for element in headers:
            response.headers[element[0]] = element[1]
        return response
|
|
|
|
|
2018-07-14 06:31:52 +00:00
|
|
|
##################################
|
|
|
|
|
2017-03-01 22:38:03 +00:00
|
|
|
|
2017-11-19 17:08:55 +00:00
|
|
|
def check_unrar(unrarLocation):
    """Verify that the configured UnRar binary exists and runs.

    Returns None when the binary is usable (or no location is configured),
    otherwise a translated error message.  The detected version is only
    logged, not returned.
    """
    if not unrarLocation:
        # no unrar configured: nothing to validate
        return

    if not os.path.exists(unrarLocation):
        return _('Unrar binary file not found')

    try:
        if sys.version_info < (3, 0):
            # Python 2 subprocess needs a byte path in the fs encoding
            unrarLocation = unrarLocation.encode(sys.getfilesystemencoding())
        unrarLocation = [unrarLocation]
        # run unrar without arguments and scan its banner for the version
        for lines in process_wait(unrarLocation):
            value = re.search(r'UNRAR (.*) freeware', lines, re.IGNORECASE)
            if value:
                version = value.group(1)
                log.debug("unrar version %s", version)
                break
    except (OSError, UnicodeDecodeError) as err:
        log.exception(err)
        # BUGFIX: message typo corrected ("excecuting" -> "executing")
        return _('Error executing UnRar')
|
2018-08-31 08:47:58 +00:00
|
|
|
|
2018-09-10 08:42:28 +00:00
|
|
|
|
2018-11-18 16:09:13 +00:00
|
|
|
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code"""

    if isinstance(obj, datetime):
        return obj.isoformat()
    if isinstance(obj, timedelta):
        # encode a timedelta as a tagged dict so it can be reconstructed
        return dict(
            __type__='timedelta',
            days=obj.days,
            seconds=obj.seconds,
            microseconds=obj.microseconds,
        )
    # return obj.isoformat()
    raise TypeError("Type %s not serializable" % type(obj))
|
2018-10-03 19:58:37 +00:00
|
|
|
|
2019-06-30 09:20:36 +00:00
|
|
|
|
|
|
|
# helper function for displaying the runtime of tasks
|
|
|
|
def format_runtime(runtime):
    """Render a task's timedelta as a short human-readable duration string."""
    mins_total, secs = divmod(runtime.seconds, 60)
    hrs, mins = divmod(mins_total, 60)
    prefix = ""
    if runtime.days:
        prefix = format_unit(runtime.days, 'duration-day', length="long", locale=get_locale()) + ', '
    # ToDo: locale.number_symbols._data['timeSeparator'] -> localize time separator ?
    if hrs:
        clock = '{:d}:{:02d}:{:02d}s'.format(hrs, mins, secs)
    elif mins:
        clock = '{:2d}:{:02d}s'.format(mins, secs)
    else:
        clock = '{:2d}s'.format(secs)
    return prefix + clock
|
|
|
|
|
|
|
|
|
2019-05-11 11:03:38 +00:00
|
|
|
# helper function to apply localize status information in tasklist entries
|
2018-09-19 15:26:52 +00:00
|
|
|
def render_task_status(tasklist):
    """Turn raw (num, user, added, task) tuples into display-ready dicts,
    localizing status text and formatting times for the current locale.
    Non-admin users only see their own tasks."""
    rendered = list()
    for num, user, added, task in tasklist:
        # guard: skip entries the current user is not allowed to see
        if user != current_user.nickname and not current_user.role_admin():
            continue
        entry = {}
        if task.start_time:
            entry['starttime'] = format_datetime(task.start_time, format='short', locale=get_locale())
            entry['runtime'] = format_runtime(task.runtime)

        # localize the task status
        if isinstance(task.stat, int):
            if task.stat == STAT_WAITING:
                entry['status'] = _(u'Waiting')
            elif task.stat == STAT_FAIL:
                entry['status'] = _(u'Failed')
            elif task.stat == STAT_STARTED:
                entry['status'] = _(u'Started')
            elif task.stat == STAT_FINISH_SUCCESS:
                entry['status'] = _(u'Finished')
            else:
                entry['status'] = _(u'Unknown Status')

        entry['taskMessage'] = "{}: {}".format(_(task.name), task.message)
        entry['progress'] = "{} %".format(int(task.progress * 100))
        entry['user'] = user
        rendered.append(entry)

    return rendered
|
2019-07-13 18:45:48 +00:00
|
|
|
|
|
|
|
|
2019-07-25 19:42:46 +00:00
|
|
|
def tags_filters():
    """Build the SQLAlchemy filter expression combining the current user's
    allowed and denied tag lists (allowed AND NOT denied)."""
    denied = current_user.list_denied_tags()
    allowed = current_user.list_allowed_tags()
    # an empty restriction list is stored as [''] -> no filtering on that side
    if denied == ['']:
        denied_filter = false()
    else:
        denied_filter = db.Tags.name.in_(denied)
    if allowed == ['']:
        allowed_filter = true()
    else:
        allowed_filter = db.Tags.name.in_(allowed)
    return and_(allowed_filter, ~denied_filter)
|
2019-07-13 18:45:48 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2019-07-13 18:45:48 +00:00
|
|
|
# checks if domain is in database (including wildcards)
|
|
|
|
# example SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
|
|
|
|
# from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/
|
|
|
|
def check_valid_domain(domain_text):
    """Return True when *domain_text* matches an allow-listed registration
    domain pattern and no deny-listed pattern (SQL LIKE, wildcards allowed)."""
    # domain_text = domain_text.split('@', 1)[-1].lower()
    allow_sql = "SELECT * FROM registration WHERE (:domain LIKE domain and allow = 1);"
    whitelisted = g.ubsession.query(ub.Registration).from_statement(text(allow_sql)).params(domain=domain_text).all()
    if not whitelisted:
        return False
    deny_sql = "SELECT * FROM registration WHERE (:domain LIKE domain and allow = 0);"
    blacklisted = g.ubsession.query(ub.Registration).from_statement(text(deny_sql)).params(domain=domain_text).all()
    return not blacklisted
|
2019-07-13 18:45:48 +00:00
|
|
|
|
|
|
|
|
2020-04-19 10:50:58 +00:00
|
|
|
def get_cc_columns(filter_config_custom_read=False):
    """Return the Calibre custom columns to display, dropping unsupported
    datatypes, the configured ignore pattern, and (optionally) the column
    backing the "read" flag."""
    all_columns = (calibre_db.session.query(db.Custom_Columns)
                   .filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all())
    ignore_pattern = None
    if config.config_columns_to_ignore:
        ignore_pattern = re.compile(config.config_columns_to_ignore)

    result = []
    for column in all_columns:
        # hide the read-state column when the caller requests it
        if filter_config_custom_read and config.config_read_column and config.config_read_column == column.id:
            continue
        if ignore_pattern and ignore_pattern.match(column.name):
            continue
        result.append(column)

    return result
|
|
|
|
|
2020-08-23 07:44:42 +00:00
|
|
|
|
2019-08-18 19:44:19 +00:00
|
|
|
def get_download_link(book_id, book_format, client):
    """Prepare and serve a download of *book_id* in *book_format*.

    Records the download for authenticated users, builds Content-Type /
    Content-Disposition headers from the book's author and title, and
    delegates the actual transfer to do_download_file().  Aborts with 404
    when the book or the requested format does not exist.
    """
    # strip a possible extension suffix, e.g. "epub.kepub" -> "epub"
    book_format = book_format.split(".")[0]
    book = calibre_db.get_filtered_book(book_id)
    if book:
        data1 = calibre_db.get_book_format(book.id, book_format.upper())
    else:
        abort(404)
    if data1:
        # collect downloaded books only for registered user and not for anonymous user
        if current_user.is_authenticated:
            ub.update_download(book_id, int(current_user.id))
        file_name = book.title
        if len(book.authors) > 0:
            file_name = book.authors[0].name + '_' + file_name
        file_name = get_valid_filename(file_name)
        headers = Headers()
        headers["Content-Type"] = mimetypes.types_map.get('.' + book_format, "application/octet-stream")
        # both plain and RFC 5987 encoded filename variants for browser compatibility
        headers["Content-Disposition"] = "attachment; filename=%s.%s; filename*=UTF-8''%s.%s" % (
            quote(file_name.encode('utf-8')), book_format, quote(file_name.encode('utf-8')), book_format)
        return do_download_file(book, book_format, client, data1, headers)
    else:
        abort(404)
|
2020-11-15 13:19:25 +00:00
|
|
|
|