2015-08-02 18:59:11 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2019-01-20 18:37:45 +00:00
|
|
|
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
|
|
|
# Copyright (C) 2012-2019 cervinko, idalin, SiphonSquirrel, ouzklcn, akushsky,
|
|
|
|
# OzzieIsaacs, bodybybuddha, jkrehm, matthazinski, janeczku
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
import os
|
2023-02-16 15:23:06 +00:00
|
|
|
import random
|
2019-07-13 18:45:48 +00:00
|
|
|
import io
|
|
|
|
import mimetypes
|
2016-03-26 15:12:29 +00:00
|
|
|
import re
|
2024-02-29 07:23:18 +00:00
|
|
|
import regex
|
2019-07-13 18:45:48 +00:00
|
|
|
import shutil
|
2022-01-23 12:11:02 +00:00
|
|
|
import socket
|
2024-08-10 19:05:29 +00:00
|
|
|
from datetime import datetime, timedelta, timezone
|
2019-07-13 18:45:48 +00:00
|
|
|
import requests
|
2022-03-13 11:34:21 +00:00
|
|
|
import unidecode
|
2023-11-11 13:48:59 +00:00
|
|
|
from uuid import uuid4
|
2022-01-23 12:11:02 +00:00
|
|
|
|
2024-07-02 18:53:08 +00:00
|
|
|
from flask import send_from_directory, make_response, abort, url_for, Response
|
2016-11-09 18:24:33 +00:00
|
|
|
from flask_babel import gettext as _
|
2022-04-24 16:40:50 +00:00
|
|
|
from flask_babel import lazy_gettext as N_
|
2022-09-19 16:56:22 +00:00
|
|
|
from flask_babel import get_locale
|
2024-07-14 14:24:07 +00:00
|
|
|
from .cw_login import current_user
|
2021-09-24 08:11:14 +00:00
|
|
|
from sqlalchemy.sql.expression import true, false, and_, or_, text, func
|
2022-02-05 14:36:18 +00:00
|
|
|
from sqlalchemy.exc import InvalidRequestError, OperationalError
|
2019-07-13 18:45:48 +00:00
|
|
|
from werkzeug.datastructures import Headers
|
2019-12-15 16:08:17 +00:00
|
|
|
from werkzeug.security import generate_password_hash
|
2021-08-27 12:27:35 +00:00
|
|
|
from markupsafe import escape
|
2021-12-26 09:31:04 +00:00
|
|
|
from urllib.parse import quote
|
2017-02-20 18:52:00 +00:00
|
|
|
|
2017-02-15 17:09:17 +00:00
|
|
|
try:
|
2022-03-13 11:34:21 +00:00
|
|
|
import advocate
|
|
|
|
from advocate.exceptions import UnacceptableAddressException
|
|
|
|
use_advocate = True
|
2017-11-30 15:49:46 +00:00
|
|
|
except ImportError:
|
2022-03-13 11:34:21 +00:00
|
|
|
use_advocate = False
|
|
|
|
advocate = requests
|
|
|
|
UnacceptableAddressException = MissingSchema = BaseException
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2022-04-26 12:44:55 +00:00
|
|
|
from . import calibre_db, cli_param
|
2020-11-15 13:19:25 +00:00
|
|
|
from .tasks.convert import TaskConvert
|
2022-04-26 09:28:20 +00:00
|
|
|
from . import logger, config, db, ub, fs
|
2019-07-13 18:45:48 +00:00
|
|
|
from . import gdriveutils as gd
|
2024-02-29 07:23:18 +00:00
|
|
|
from .constants import (STATIC_DIR as _STATIC_DIR, CACHE_TYPE_THUMBNAILS, THUMBNAIL_TYPE_COVER, THUMBNAIL_TYPE_SERIES,
|
|
|
|
SUPPORTED_CALIBRE_BINARIES)
|
2024-02-24 09:57:10 +00:00
|
|
|
from .subproc_wrapper import process_wait
|
2022-04-26 12:44:55 +00:00
|
|
|
from .services.worker import WorkerThread
|
2020-08-30 06:49:53 +00:00
|
|
|
from .tasks.mail import TaskEmail
|
2022-04-22 14:13:51 +00:00
|
|
|
from .tasks.thumbnail import TaskClearCoverThumbnailCache, TaskGenerateCoverThumbnails
|
2022-09-19 16:56:22 +00:00
|
|
|
from .tasks.metadata_backup import TaskBackupMetadata
|
2023-11-11 13:48:59 +00:00
|
|
|
from .file_helper import get_temp_dir
|
2024-01-13 09:53:46 +00:00
|
|
|
from .epub_helper import get_content_opf, create_new_metadata_backup, updateEpub, replace_metadata
|
2024-02-24 09:57:10 +00:00
|
|
|
from .embed_helper import do_calibre_export
|
2019-07-13 18:45:48 +00:00
|
|
|
|
|
|
|
log = logger.create()
|
2017-05-19 19:30:39 +00:00
|
|
|
|
2020-12-09 13:18:39 +00:00
|
|
|
try:
|
|
|
|
from wand.image import Image
|
2021-11-20 12:45:41 +00:00
|
|
|
from wand.exceptions import MissingDelegateError, BlobError
|
2020-12-09 13:18:39 +00:00
|
|
|
use_IM = True
|
|
|
|
except (ImportError, RuntimeError) as e:
|
|
|
|
log.debug('Cannot import Image, generating covers from non jpg files will not work: %s', e)
|
|
|
|
use_IM = False
|
2021-01-28 19:05:58 +00:00
|
|
|
MissingDelegateError = BaseException
|
2020-12-09 13:18:39 +00:00
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2018-08-31 13:19:48 +00:00
|
|
|
# Convert existing book entry to new format
|
2022-05-08 10:55:54 +00:00
|
|
|
def convert_book_format(book_id, calibre_path, old_book_format, new_book_format, user_id, ereader_mail=None):
    """Queue conversion of a book from old_book_format to new_book_format.

    Verifies the source file exists (locally or on Google Drive, depending on
    configuration) and then enqueues a TaskConvert for the worker thread.
    If ereader_mail is set, mail settings are attached so the converted file
    is additionally sent to that address after conversion.

    Returns None when the task was queued, otherwise a translated error
    message string.
    """
    book = calibre_db.get_book(book_id)
    data = calibre_db.get_book_format(book.id, old_book_format)
    if not data:
        error_message = _("%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id)
        log.error("convert_book_format: %s", error_message)
        return error_message
    file_path = os.path.join(calibre_path, book.path, data.name)
    if config.config_use_google_drive:
        # source format file must be present on Google Drive
        if not gd.getFileFromEbooksFolder(book.path, data.name + "." + old_book_format.lower()):
            error_message = _("%(format)s not found on Google Drive: %(fn)s",
                              format=old_book_format, fn=data.name + "." + old_book_format.lower())
            return error_message
    else:
        # source format file must be present in the local library folder
        if not os.path.exists(file_path + "." + old_book_format.lower()):
            error_message = _("%(format)s not found: %(fn)s",
                              format=old_book_format, fn=data.name + "." + old_book_format.lower())
            return error_message
    # read settings and append converter task to queue
    if ereader_mail:
        settings = config.get_mail_settings()
        settings['subject'] = _('Send to eReader')  # pretranslate Subject for Email
        settings['body'] = _('This Email has been sent via Calibre-Web.')
    else:
        settings = dict()
    link = '<a href="{}">{}</a>'.format(url_for('web.show_book', book_id=book.id), escape(book.title))  # prevent xss
    txt = "{} -> {}: {}".format(
        old_book_format.upper(),
        new_book_format.upper(),
        link)
    settings['old_book_format'] = old_book_format
    settings['new_book_format'] = new_book_format
    WorkerThread.add(user_id, TaskConvert(file_path, book.id, txt, settings, ereader_mail, user_id))
    return None
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2022-04-24 16:40:50 +00:00
|
|
|
# Texts are not lazy translated as they are supposed to get send out as is
|
2022-05-08 10:55:54 +00:00
|
|
|
def send_test_mail(ereader_mail, user_name):
    """Queue one test email per recipient in the comma-separated address list."""
    recipients = [address.strip() for address in ereader_mail.split(',')]
    for recipient in recipients:
        task = TaskEmail(_('Calibre-Web Test Email'), None, None,
                         config.get_mail_settings(), recipient, N_("Test Email"),
                         _('This Email has been sent via Calibre-Web.'))
        WorkerThread.add(user_name, task)
    return
|
|
|
|
|
2018-08-24 13:48:09 +00:00
|
|
|
|
|
|
|
# Send registration email or password reset email, depending on parameter resend (False means welcome email)
|
|
|
|
def send_registration_mail(e_mail, user_name, default_password, resend=False):
    """Queue a registration (welcome) or password-reset email.

    resend=False sends the welcome email (account created); resend=True only
    re-sends the credentials. The message body is plain text with CRLF line
    endings.
    """
    txt = "Hi %s!\r\n" % user_name
    if not resend:
        txt += "Your account at Calibre-Web has been created.\r\n"
    txt += "Please log in using the following information:\r\n"
    txt += "Username: %s\r\n" % user_name
    txt += "Password: %s\r\n" % default_password
    txt += "Don't forget to change your password after your first login.\r\n"
    txt += "Regards,\r\n\r\n"
    txt += "Calibre-Web"
    WorkerThread.add(None, TaskEmail(
        subject=_('Get Started with Calibre-Web'),
        filepath=None,
        attachment=None,
        settings=config.get_mail_settings(),
        recipient=e_mail,
        task_message=N_("Registration Email for user: %(name)s", name=user_name),
        text=txt
    ))
    return
|
|
|
|
|
2019-07-13 18:45:48 +00:00
|
|
|
|
2022-05-08 10:55:54 +00:00
|
|
|
def check_send_to_ereader_with_converter(formats):
    """Return conversion options (source format -> EPUB) for sending to an eReader.

    Only offered when no EPUB already exists for the book; MOBI maps to
    convert id 1 and AZW3 to convert id 2.
    """
    options = []
    if 'EPUB' not in formats:
        for source_format, source_label, convert_id in (('MOBI', 'Mobi', 1), ('AZW3', 'Azw3', 2)):
            if source_format in formats:
                options.append({'format': 'Epub',
                                'convert': convert_id,
                                'text': _('Convert %(orig)s to %(format)s and send to eReader',
                                          orig=source_label,
                                          format='Epub')})
    return options
|
2021-03-14 12:28:52 +00:00
|
|
|
|
|
|
|
|
2022-05-08 10:55:54 +00:00
|
|
|
def check_send_to_ereader(entry):
    """Return all available book formats for sending to an eReader.

    Formats whose uncompressed size exceeds the configured mail size limit
    are skipped. Returns None (and logs an error) when the book entry carries
    no format data at all.
    """
    if not len(entry.data):
        log.error('Cannot find book entry %d', entry.id)
        return None
    # formats small enough to be mailed
    formats = [item.format for item in entry.data if item.uncompressed_size < config.mail_size]
    book_formats = []
    for direct_format in ('Epub', 'Pdf', 'Azw'):
        if direct_format.upper() in formats:
            book_formats.append({'format': direct_format,
                                 'convert': 0,
                                 'text': _('Send %(format)s to eReader', format=direct_format)})
    if config.config_converterpath:
        book_formats.extend(check_send_to_ereader_with_converter(formats))
    return book_formats
|
2018-10-01 18:19:29 +00:00
|
|
|
|
2018-08-24 13:48:09 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
# Check if a reader is existing for any of the book formats, if not, return empty list, otherwise return
|
|
|
|
# list with supported formats
|
|
|
|
def check_read_formats(entry):
    """Return the lower-cased formats of *entry* that the built-in viewer can display."""
    readable_extensions = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR', 'DJVU', 'DJV'}
    book_formats = []
    if len(entry.data):
        book_formats = [item.format.lower()
                        for item in entry.data
                        if item.format.upper() in readable_extensions]
    return book_formats
|
2018-11-25 10:25:20 +00:00
|
|
|
|
|
|
|
|
2018-08-12 07:29:57 +00:00
|
|
|
# Files are processed in the following order/priority:
|
2023-03-25 11:34:16 +00:00
|
|
|
# 1: If epub file is existing, it's directly send to eReader email,
|
|
|
|
# 2: If mobi file is existing, it's converted and send to eReader email,
|
2023-01-04 02:30:13 +00:00
|
|
|
# 3: If Pdf file is existing, it's directly send to eReader email
|
2022-05-08 10:55:54 +00:00
|
|
|
def send_mail(book_id, book_format, convert, ereader_mail, calibrepath, user_id):
    """Send email with attachments.

    convert == 1 converts from MOBI, convert == 2 from AZW3 (both delegate to
    convert_book_format, which also mails the result). Otherwise the existing
    file in book_format is mailed directly to every comma-separated recipient.

    Returns None when a task was queued, otherwise an error message string.
    """
    book = calibre_db.get_book(book_id)

    if convert == 1:
        # returns None if success, otherwise errormessage
        return convert_book_format(book_id, calibrepath, 'mobi', book_format.lower(), user_id, ereader_mail)
    if convert == 2:
        # returns None if success, otherwise errormessage
        return convert_book_format(book_id, calibrepath, 'azw3', book_format.lower(), user_id, ereader_mail)

    for entry in iter(book.data):
        if entry.format.upper() == book_format.upper():
            converted_file_name = entry.name + '.' + book_format.lower()
            link = '<a href="{}">{}</a>'.format(url_for('web.show_book', book_id=book_id), escape(book.title))
            email_text = N_("%(book)s send to eReader", book=link)
            for email in ereader_mail.split(','):
                email = email.strip()
                WorkerThread.add(user_id, TaskEmail(_("Send to eReader"), book.path, converted_file_name,
                                                    config.get_mail_settings(), email,
                                                    email_text, _('This Email has been sent via Calibre-Web.'), book.id))
            return
    # no matching format entry found for this book
    return _("The requested file could not be read. Maybe wrong permissions?")
|
2016-03-26 15:12:29 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2022-01-30 14:16:42 +00:00
|
|
|
def get_valid_filename(value, replace_whitespace=True, chars=128):
    """Sanitize *value* into a string usable as a clean filename.

    Replaces characters that are unsafe on common filesystems and truncates
    the result to at most *chars* bytes of UTF-8 (default 128).

    Raises ValueError when nothing remains after sanitizing.
    """
    if value.endswith('.'):
        value = value[:-1] + '_'
    value = value.replace("/", "_").replace(":", "_").strip('\0')
    if config.config_unicode_filename:
        # presumably transliterates non-ASCII characters to ASCII equivalents
        value = (unidecode.unidecode(value))
    if replace_whitespace:
        # *+:\"/<>? are replaced by _
        value = re.sub(r'[*+:\\\"/<>?]+', '_', value, flags=re.U)
        # pipe has to be replaced with comma
        value = re.sub(r'[|]+', ',', value, flags=re.U)
    # byte-limit (not character-limit); errors='ignore' drops a trailing
    # partial UTF-8 sequence created by the slice
    value = value.encode('utf-8')[:chars].decode('utf-8', errors='ignore').strip()
    if not value:
        raise ValueError("Filename cannot be empty")
    return value
|
2016-03-26 15:12:29 +00:00
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2020-08-23 07:44:42 +00:00
|
|
|
def split_authors(values):
    """Split raw author strings into a flat list of individual author names.

    Each entry is first split on '&' and ';'. A fragment with exactly one
    comma is treated as "Last, First" and flipped to "First Last"; a fragment
    with several commas is treated as a plain comma-separated list of names.
    """
    result = []
    for raw in values:
        for candidate in re.split('[&;]', raw):
            parts = [part.strip() for part in candidate.split(',')]
            if len(parts) == 2:
                # "Last, First" -> "First Last"
                result.append(parts[1] + ' ' + parts[0])
            elif len(parts) > 2:
                result.extend(parts)
            else:
                result.append(parts[0])
    return result
|
|
|
|
|
|
|
|
|
2017-02-15 17:09:17 +00:00
|
|
|
def get_sorted_author(value):
    """Return *value* converted to "Last, First" sort order.

    Names already containing a comma are returned unchanged. Generational
    suffixes (Jr/Sr and roman numerals I-IV, optionally dotted) are kept
    after the last name, e.g. "Martin Luther King Jr." -> "King, Martin
    Luther Jr.". On any unexpected failure the error is logged and the input
    is returned as unsorted fallback.
    """
    value2 = None
    try:
        if ',' not in value:
            regexes = [r"^(JR|SR)\.?$", r"^I{1,3}\.?$", r"^IV\.?$"]
            combined = "(" + ")|(".join(regexes) + ")"
            value = value.split(" ")
            if re.match(combined, value[-1].upper()):
                # suffix present: sort key is second-to-last word
                if len(value) > 1:
                    value2 = value[-2] + ", " + " ".join(value[:-2]) + " " + value[-1]
                else:
                    value2 = value[0]
            elif len(value) == 1:
                value2 = value[0]
            else:
                value2 = value[-1] + ", " + " ".join(value[:-1])
        else:
            value2 = value
    except Exception as ex:
        log.error("Sorting author %s failed: %s", value, ex)
        # Fix: the original checked isinstance(list, value2) — arguments
        # swapped — which itself raises TypeError. Recover based on whether
        # value was already split into a word list before the failure.
        if isinstance(value, list):
            value2 = value[0]
        else:
            value2 = value
    return value2
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2022-03-13 11:34:21 +00:00
|
|
|
|
2022-02-05 14:36:18 +00:00
|
|
|
def edit_book_read_status(book_id, read_status=None):
    """Set or toggle the read status of a book for the current user.

    read_status: True/False sets the status explicitly; None toggles it.
    Returns "" on success, otherwise an error message string.

    Without a configured custom "read" column the status lives in the app
    database (ub.ReadBook, plus Kobo reading-state rows); otherwise it is
    stored in the configured custom column of the Calibre database.
    """
    if not config.config_read_column:
        # status tracked in the app database per user
        book = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(current_user.id),
                                                         ub.ReadBook.book_id == book_id)).first()
        if book:
            if read_status is None:
                # toggle between finished and unread
                if book.read_status == ub.ReadBook.STATUS_FINISHED:
                    book.read_status = ub.ReadBook.STATUS_UNREAD
                else:
                    book.read_status = ub.ReadBook.STATUS_FINISHED
            else:
                book.read_status = ub.ReadBook.STATUS_FINISHED if read_status else ub.ReadBook.STATUS_UNREAD
        else:
            # no row yet: first interaction marks the book finished
            read_book = ub.ReadBook(user_id=current_user.id, book_id=book_id)
            read_book.read_status = ub.ReadBook.STATUS_FINISHED
            book = read_book
        if not book.kobo_reading_state:
            # create missing Kobo reading-state rows for this book/user
            kobo_reading_state = ub.KoboReadingState(user_id=current_user.id, book_id=book_id)
            kobo_reading_state.current_bookmark = ub.KoboBookmark()
            kobo_reading_state.statistics = ub.KoboStatistics()
            book.kobo_reading_state = kobo_reading_state
        ub.session.merge(book)
        ub.session_commit("Book {} readbit toggled".format(book_id))
    else:
        # status tracked in a custom column of the Calibre database
        try:
            calibre_db.update_title_sort(config)
            book = calibre_db.get_filtered_book(book_id)
            book_read_status = getattr(book, 'custom_column_' + str(config.config_read_column))
            if len(book_read_status):
                if read_status is None:
                    book_read_status[0].value = not book_read_status[0].value
                else:
                    book_read_status[0].value = read_status is True
                calibre_db.session.commit()
            else:
                # no value stored yet: create one in the custom-column table
                cc_class = db.cc_classes[config.config_read_column]
                new_cc = cc_class(value=read_status or 1, book=book_id)
                calibre_db.session.add(new_cc)
                calibre_db.session.commit()
        except (KeyError, AttributeError, IndexError):
            log.error(
                "Custom Column No.{} does not exist in calibre database".format(config.config_read_column))
            return "Custom Column No.{} does not exist in calibre database".format(config.config_read_column)
        except (OperationalError, InvalidRequestError) as ex:
            calibre_db.session.rollback()
            log.error("Read status could not set: {}".format(ex))
            return _("Read status could not set: {}".format(ex.orig))
    return ""
|
2018-08-04 16:22:43 +00:00
|
|
|
|
2022-03-13 11:34:21 +00:00
|
|
|
|
2022-04-28 18:57:09 +00:00
|
|
|
# Deletes a book from the local filestorage, returns True if deleting is successful, otherwise false
|
2018-08-04 16:22:43 +00:00
|
|
|
def delete_book_file(book, calibrepath, book_format=None):
    """Delete a book (or one of its formats) from the local file storage.

    When book_format is given, only files with that extension are removed;
    otherwise the whole book folder (and an emptied author folder) is
    deleted. Returns a tuple (success, warning_or_error_message).
    """
    # check that path is 2 elements deep, check that target path has no sub folders
    if book.path.count('/') == 1:
        path = os.path.join(calibrepath, book.path)
        if book_format:
            # remove only the files of the given format
            for file in os.listdir(path):
                if file.upper().endswith("."+book_format):
                    os.remove(os.path.join(path, file))
            return True, None
        else:
            if os.path.isdir(path):
                try:
                    for root, folders, files in os.walk(path):
                        for f in files:
                            os.unlink(os.path.join(root, f))
                        # subfolders are unexpected: files were already
                        # removed above, so report but treat as success
                        if len(folders):
                            log.warning("Deleting book {} failed, path {} has subfolders: {}".format(book.id,
                                                                                                     book.path, folders))
                            return True, _("Deleting bookfolder for book %(id)s failed, path has subfolders: %(path)s",
                                           id=book.id,
                                           path=book.path)
                    shutil.rmtree(path)
                except (IOError, OSError) as ex:
                    log.error("Deleting book %s failed: %s", book.id, ex)
                    return False, _("Deleting book %(id)s failed: %(message)s", id=book.id, message=ex)
                # remove the author folder too when it became empty
                authorpath = os.path.join(calibrepath, os.path.split(book.path)[0])
                if not os.listdir(authorpath):
                    try:
                        shutil.rmtree(authorpath)
                    except (IOError, OSError) as ex:
                        log.error("Deleting authorpath for book %s failed: %s", book.id, ex)
                return True, None

    # invalid path stored in the database: only the database entry can go
    log.error("Deleting book %s from database only, book path in database not valid: %s",
              book.id, book.path)
    return True, _("Deleting book %(id)s from database only, book path in database not valid: %(path)s",
                   id=book.id,
                   path=book.path)
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2024-06-28 17:38:01 +00:00
|
|
|
def rename_all_files_on_change(one_book, new_path, old_path, all_new_name, gdrive=False):
    """Rename every format file of *one_book* to *all_new_name* (extension kept).

    Moves files from *old_path* to *new_path* locally, or renames them
    remotely when *gdrive* is True, then updates each format entry's name on
    the database objects.
    """
    for file_format in one_book.data:
        if not gdrive:
            if not os.path.exists(new_path):
                os.makedirs(new_path)
            shutil.move(os.path.join(old_path, file_format.name + '.' + file_format.format.lower()),
                        os.path.join(new_path, all_new_name + '.' + file_format.format.lower()))
        else:
            g_file = gd.getFileFromEbooksFolder(old_path,
                                                file_format.name + '.' + file_format.format.lower())
            if g_file:
                gd.moveGdriveFileRemote(g_file, all_new_name + '.' + file_format.format.lower())
                gd.updateDatabaseOnEdit(g_file['id'], all_new_name + '.' + file_format.format.lower())
            else:
                # Fix: the original format string had one placeholder but two
                # arguments, so the file name was silently dropped from the log.
                log.error("File {}/{} not found on gdrive"
                          .format(old_path, file_format.name + '.' + file_format.format.lower()))

        # change name in Database
        file_format.name = all_new_name
|
2021-11-13 13:57:01 +00:00
|
|
|
|
|
|
|
|
2024-06-28 17:38:01 +00:00
|
|
|
def rename_author_path(first_author, old_author_dir, renamed_author, calibre_path="", gdrive=False):
    """Rename an author's folder after the author was renamed.

    Returns the sanitized directory name for *first_author*. Raises Exception
    when the local rename/move fails.
    """
    # Create new_author_dir from parameter or from database
    # Create new title_dir from database and add id
    new_authordir = get_valid_filename(first_author, chars=96)
    # new_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == renamed_author).first()
    # old_author_dir = get_valid_filename(old_author_name, chars=96)
    new_author_rename_dir = get_valid_filename(renamed_author, chars=96)
    if gdrive:
        g_file = gd.getFileFromEbooksFolder(None, old_author_dir)
        if g_file:
            gd.moveGdriveFolderRemote(g_file, new_author_rename_dir)
    else:
        if os.path.isdir(os.path.join(calibre_path, old_author_dir)):
            old_author_path = os.path.join(calibre_path, old_author_dir)
            new_author_path = os.path.join(calibre_path, new_author_rename_dir)
            try:
                os.rename(old_author_path, new_author_path)
            except OSError:
                # os.rename can fail (e.g. across filesystems) — fall back to move
                try:
                    shutil.move(old_author_path, new_author_path)
                except OSError as ex:
                    log.error("Rename author from: %s to %s: %s", old_author_path, new_author_path, ex)
                    log.error_or_exception(ex)
                    raise Exception(_("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
                                      src=old_author_path, dest=new_author_path, error=str(ex)))
    return new_authordir
|
|
|
|
|
2020-09-07 19:26:59 +00:00
|
|
|
# Moves files in file storage during author/title rename, or from temp dir to file storage
|
2024-06-29 08:50:44 +00:00
|
|
|
def update_dir_structure_file(book_id, calibre_path, original_filepath, new_author, db_filename):
    """Move a book's folder/files after an author or title change.

    When original_filepath is set the file is moved from there (e.g. a temp
    upload dir) into the library. Returns False on success, otherwise the
    error message produced by move_files_on_change().
    """
    # get book database entry from id, if original path overwrite source with original_filepath
    local_book = calibre_db.get_book(book_id)
    if original_filepath:
        path = original_filepath
    else:
        path = os.path.join(calibre_path, local_book.path)

    # Create (current) author_dir and title_dir from database
    author_dir = local_book.path.split('/')[0]
    title_dir = local_book.path.split('/')[1]

    new_title_dir = get_valid_filename(local_book.title, chars=96) + " (" + str(book_id) + ")"
    if new_author:
        new_author_dir = get_valid_filename(new_author, chars=96)
    else:
        # author unchanged: keep the current directory name
        new_author = new_author_dir = author_dir

    if title_dir != new_title_dir or author_dir != new_author_dir or original_filepath:
        error = move_files_on_change(calibre_path,
                                     new_author_dir,
                                     new_title_dir,
                                     local_book,
                                     db_filename,
                                     original_filepath,
                                     path)
        # old_path = os.path.join(calibre_path, author_dir, new_title_dir).replace('\\', '/')
        new_path = os.path.join(calibre_path, new_author_dir, new_title_dir).replace('\\', '/')
        all_new_name = get_valid_filename(local_book.title, chars=42) + ' - ' \
            + get_valid_filename(new_author, chars=42)
        # Book folder already moved, only files need to be renamed
        rename_all_files_on_change(local_book, new_path, new_path, all_new_name)

        if error:
            return error

    # Rename all files from old names to new names
    return False
|
2022-02-05 08:06:14 +00:00
|
|
|
|
|
|
|
|
2024-06-28 17:38:01 +00:00
|
|
|
def upload_new_file_gdrive(book_id, first_author, title, title_dir, original_filepath, filename_ext):
    """Upload a newly added file to Google Drive and update the book's path."""
    book = calibre_db.get_book(book_id)
    title_part = get_valid_filename(title, chars=42)
    author_part = get_valid_filename(first_author, chars=42)
    file_name = title_part + ' - ' + author_part + filename_ext
    folder_name = title_dir + " (" + str(book_id) + ")"
    gdrive_path = os.path.join(get_valid_filename(first_author, chars=96), folder_name)
    book.path = gdrive_path.replace("\\", "/")
    remote_file = os.path.join(gdrive_path, file_name).replace("\\", "/")
    gd.uploadFileToEbooksFolder(remote_file, original_filepath)
    return False  # rename_files_on_change(first_author, renamed_author, local_book=book, gdrive=True)
|
2022-02-05 12:21:06 +00:00
|
|
|
|
|
|
|
|
2024-06-29 08:50:44 +00:00
|
|
|
def update_dir_structure_gdrive(book_id, first_author):
    """Rename a book's title/author folders on Google Drive after an edit.

    Moves the title folder first, then the author folder, keeping book.path
    in sync after each successful remote move. Returns False on success,
    otherwise a translated error message.
    """
    book = calibre_db.get_book(book_id)

    authordir = book.path.split('/')[0]
    titledir = book.path.split('/')[1]
    # new_authordir = rename_all_authors(first_author, renamed_author, gdrive=True)
    new_authordir = get_valid_filename(first_author, chars=96)
    new_titledir = get_valid_filename(book.title, chars=96) + " (" + str(book_id) + ")"

    if titledir != new_titledir:
        g_file = gd.getFileFromEbooksFolder(authordir, titledir)
        if g_file:
            gd.moveGdriveFileRemote(g_file, new_titledir)
            book.path = book.path.split('/')[0] + '/' + new_titledir
            gd.updateDatabaseOnEdit(g_file['id'], book.path)  # only child folder affected
        else:
            return _('File %(file)s not found on Google Drive', file=book.path)  # file not found

    if authordir != new_authordir:
        # title folder was possibly renamed above, so look it up by new name
        g_file = gd.getFileFromEbooksFolder(authordir, new_titledir)
        if g_file:
            gd.moveGdriveFolderRemote(g_file, new_authordir, single_book=True)
            book.path = new_authordir + '/' + book.path.split('/')[1]
            gd.updateDatabaseOnEdit(g_file['id'], book.path)
        else:
            return _('File %(file)s not found on Google Drive', file=authordir)  # file not found'''
    if titledir != new_titledir or authordir != new_authordir :
        all_new_name = get_valid_filename(book.title, chars=42) + ' - ' \
                       + get_valid_filename(new_authordir, chars=42)
        rename_all_files_on_change(book, book.path, book.path, all_new_name, gdrive=True)  # todo: Move filenames on gdrive
    # change location in database to new author/title path
    # book.path = os.path.join(authordir, new_titledir).replace('\\', '/')
    return False
|
2022-02-05 12:21:06 +00:00
|
|
|
|
|
|
|
|
2024-06-29 08:50:44 +00:00
|
|
|
def move_files_on_change(calibre_path, new_author_dir, new_titledir, localbook, db_filename, original_filepath, path):
    """Move a book's files on disk after its author/title changed.

    Moves either a single uploaded file (``original_filepath``) or the whole
    existing book folder (``path``) into ``calibre_path/new_author_dir/new_titledir``
    and updates ``localbook.path`` accordingly (not committed here).

    Returns False on success, or a translated error message string on failure.
    """
    new_path = os.path.join(calibre_path, new_author_dir, new_titledir)
    # new_name = get_valid_filename(localbook.title, chars=96) + ' - ' + new_author_dir
    try:
        if original_filepath:
            # Single-file case: ensure the target folder exists, then move the file in.
            if not os.path.isdir(new_path):
                os.makedirs(new_path)
            shutil.move(original_filepath, os.path.join(new_path, db_filename))
            log.debug("Moving title: %s to %s", original_filepath, new_path)
        else:
            # Check new path is not valid path
            if not os.path.exists(new_path):
                # move original path to new path
                log.debug("Moving title: %s to %s", path, new_path)
                shutil.move(path, new_path)
            else:  # path is valid copy only files to new location (merge)
                log.info("Moving title: %s into existing: %s", path, new_path)
                # Take all files and subfolder from old path (strange command)
                # Re-root each file under new_path by stripping the old prefix.
                for dir_name, __, file_list in os.walk(path):
                    for file in file_list:
                        shutil.move(os.path.join(dir_name, file), os.path.join(new_path + dir_name[len(path):], file))
            # Remove the now-empty author directory, best effort only.
            if not os.listdir(os.path.split(path)[0]):
                try:
                    shutil.rmtree(os.path.split(path)[0])
                except (IOError, OSError) as ex:
                    log.error("Deleting authorpath for book %s failed: %s", localbook.id, ex)
        # change location in database to new author/title path
        localbook.path = os.path.join(new_author_dir, new_titledir).replace('\\', '/')
    except OSError as ex:
        log.error_or_exception("Rename title from {} to {} failed with error: {}".format(path, new_path, ex))
        return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
                 src=path, dest=new_path, error=str(ex))
    return False
|
|
|
|
|
|
|
|
|
|
|
|
def rename_files_on_change(first_author,
                           renamed_author,
                           local_book,
                           original_filepath="",
                           path="",
                           calibre_path="",
                           gdrive=False):
    """Rename all files from old names to new names.

    NOTE(review): the entire implementation is currently commented out, so
    this function is a no-op that always reports success (False = no error).
    The disabled code cleaned up author database entries and removed empty
    directories — confirm whether it is intentionally retired or pending rework.
    """
    #try:
    #clean_author_database(renamed_author, calibre_path, gdrive=gdrive)
    #if first_author and first_author not in renamed_author:
    #    clean_author_database([first_author], calibre_path, local_book, gdrive)
    #if not gdrive and not renamed_author and not original_filepath and len(os.listdir(os.path.dirname(path))) == 0:
    #    shutil.rmtree(os.path.dirname(path))
    #except (OSError, FileNotFoundError) as ex:
    #    log.error_or_exception("Error in rename file in path {}".format(ex))
    #    return _("Error in rename file in path: {}".format(str(ex)))
    return False
|
2017-01-30 17:58:36 +00:00
|
|
|
|
2022-02-05 08:06:14 +00:00
|
|
|
|
2018-08-04 16:22:43 +00:00
|
|
|
def delete_book_gdrive(book, book_format):
    """Delete a book (or one format of it) from Google Drive.

    With ``book_format`` set, only the matching format file is trashed;
    otherwise the whole book folder is trashed. The gdrive cache database
    entry is removed in both cases.

    Returns a tuple ``(success, error_message_or_None)``.
    """
    error = None
    if book_format:
        name = ''
        # Find the stored filename for the requested format.
        # NOTE(review): if no entry matches, name stays '' and the lookup
        # below runs with an empty filename — confirm gd handles that.
        for entry in book.data:
            if entry.format.upper() == book_format:
                name = entry.name + '.' + book_format
        g_file = gd.getFileFromEbooksFolder(book.path, name, nocase=True)
    else:
        # No format given: locate the title folder itself (author/title).
        g_file = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
    if g_file:
        gd.deleteDatabaseEntry(g_file['id'])
        g_file.Trash()
    else:
        error = _('Book path %(path)s not found on Google Drive', path=book.path)  # file not found

    return error is None, error
|
2018-07-14 06:31:52 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2019-12-15 16:08:17 +00:00
|
|
|
def reset_password(user_id):
    """Reset a user's password to a newly generated one and mail it to them.

    Returns a tuple ``(status, username)`` where status is:
      0 - user not found or database error,
      1 - success (username of the affected user is returned),
      2 - no mail server configured.
    """
    existing_user = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
    if not existing_user:
        return 0, None
    if not config.get_mail_server_configured():
        return 2, None
    try:
        password = generate_random_password(config.config_password_min_length)
        existing_user.password = generate_password_hash(password)
        ub.session.commit()
        send_registration_mail(existing_user.email, existing_user.name, password, True)
        return 1, existing_user.name
    except Exception as ex:
        # Previously this swallowed the error silently; log it so failed
        # resets (e.g. mail or DB problems) are diagnosable.
        log.error_or_exception(ex)
        ub.session.rollback()
        return 0, None
|
2019-12-15 16:08:17 +00:00
|
|
|
|
2024-06-18 18:13:26 +00:00
|
|
|
|
2023-02-15 18:53:35 +00:00
|
|
|
def generate_random_password(min_length):
    """Generate a random password of length ``max(8, min_length)``.

    The password is guaranteed to contain at least one lowercase letter,
    one uppercase letter, one digit and one special character; the
    remaining characters are drawn from the full pool, then the whole
    password is shuffled.

    Uses ``random.SystemRandom`` (OS entropy) throughout. The previous
    implementation indexed character sets with ``os.urandom() % len(s)``,
    which introduces modulo bias, and its digit string contained a
    duplicate "0", further skewing the distribution.
    """
    lowercase = "abcdefghijklmnopqrstuvwxyz"
    uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    digits = "0123456789"
    special = "!@#$%&*()?"
    pool = lowercase + uppercase + digits + special

    rng = random.SystemRandom()
    total_length = max(8, min_length)

    # One mandatory character from each class...
    password = [
        rng.choice(lowercase),
        rng.choice(uppercase),
        rng.choice(digits),
        rng.choice(special),
    ]
    # ...then fill up to the requested length from the full pool.
    password.extend(rng.choice(pool) for _ in range(total_length - 4))

    # Shuffle so the mandatory characters are not always at the front.
    rng.shuffle(password)
    return ''.join(password)
|
|
|
|
|
|
|
|
|
2020-08-23 07:44:42 +00:00
|
|
|
def uniq(inpt):
    """Collapse runs of whitespace in each entry and drop duplicates,
    preserving first-seen order."""
    normalized = (" ".join(entry.split()) for entry in inpt)
    # dict preserves insertion order, so this deduplicates stably.
    return list(dict.fromkeys(normalized))
|
2020-06-22 17:11:03 +00:00
|
|
|
|
2021-09-20 03:45:19 +00:00
|
|
|
|
2021-04-04 17:40:34 +00:00
|
|
|
def check_email(email):
    """Validate an email address and ensure no existing account uses it.

    Raises Exception (with a translated message) if the address is invalid
    or already registered; returns the validated address otherwise.
    """
    email = valid_email(email)
    # Case-insensitive lookup against existing accounts.
    if ub.session.query(ub.User).filter(func.lower(ub.User.email) == email.lower()).first():
        log.error("Found an existing account for this Email address")
        raise Exception(_("Found an existing account for this Email address"))
    return email
|
|
|
|
|
|
|
|
|
|
|
|
def check_username(username):
    """Return the stripped username, raising if it is already taken
    (case-insensitive comparison)."""
    username = username.strip()
    taken = ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).scalar()
    if taken:
        log.error("This username is already taken")
        raise Exception(_("This username is already taken"))
    return username
|
|
|
|
|
|
|
|
|
2022-03-28 23:59:59 +00:00
|
|
|
def valid_email(emails):
    """Validate a comma-separated list of email addresses.

    Each non-empty entry is checked against the HTML email-input pattern;
    an invalid entry raises Exception. Returns the last entry processed.
    """
    # Regex according to https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/email#validation
    pattern = r"^[\w.!#$%&'*+\\/=?^_`{|}~-]+@[\w](?:[\w-]{0,61}[\w])?(?:\.[\w](?:[\w-]{0,61}[\w])?)*$"
    for email in emails.split(','):
        email = email.strip()
        # skip entries that were deleted (empty after stripping)
        if email:
            if not re.search(pattern, email):
                log.error("Invalid Email address format")
                raise Exception(_("Invalid Email address format"))
    return email
|
|
|
|
|
2024-06-18 18:13:26 +00:00
|
|
|
|
2022-06-16 12:16:00 +00:00
|
|
|
def valid_password(check_password):
    """Validate a password against the configured password policy.

    Builds a regex of lookahead assertions from the enabled policy flags
    (minimum length, digit, lowercase, uppercase, letter, special char)
    and matches it with the ``regex`` module (needed for the Unicode
    ``\\p{...}`` property classes). Raises Exception if the password does
    not comply; returns the password unchanged otherwise.
    """
    if config.config_password_policy:
        verify = ""
        if config.config_password_min_length > 0:
            # Anchor the minimum length as a lookahead on the whole string.
            verify += r"^(?=.{" + str(config.config_password_min_length) + ",}$)"
        if config.config_password_number:
            verify += r"(?=.*?\d)"
        if config.config_password_lower:
            verify += r"(?=.*?[\p{Ll}])"
        if config.config_password_upper:
            verify += r"(?=.*?[\p{Lu}])"
        if config.config_password_character:
            verify += r"(?=.*?[\p{Letter}])"
        if config.config_password_special:
            # Anything that is not a letter, whitespace or digit.
            verify += r"(?=.*?[^\p{Letter}\s0-9])"
        match = regex.match(verify, check_password)
        if not match:
            raise Exception(_("Password doesn't comply with password validation rules"))
    return check_password
|
2020-08-23 07:44:42 +00:00
|
|
|
# ################################# External interface #################################
|
2018-07-14 06:31:52 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2021-11-11 14:46:32 +00:00
|
|
|
def update_dir_structure(book_id,
                         calibre_path,
                         first_author=None,  # change author of book to this author
                         original_filepath=None,
                         db_filename=None):
    """Dispatch a directory-structure update to the Google Drive or the
    local-filesystem implementation, depending on configuration."""
    if config.config_use_google_drive:
        return update_dir_structure_gdrive(book_id, first_author)
    return update_dir_structure_file(book_id,
                                     calibre_path,
                                     original_filepath,
                                     first_author,
                                     db_filename)
|
2018-07-14 06:31:52 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2018-08-04 16:22:43 +00:00
|
|
|
def delete_book(book, calibrepath, book_format):
    """Delete a book (or a single format) from storage.

    When the whole book is removed (no format given), the cover thumbnail
    cache and dirty-metadata entries are cleaned up first. Delegates to the
    Google Drive or local-file backend depending on configuration.
    """
    if not book_format:
        clear_cover_thumbnail_cache(book.id)  # here it breaks
        calibre_db.delete_dirty_metadata(book.id)
    if config.config_use_google_drive:
        return delete_book_gdrive(book, book_format)
    return delete_book_file(book, calibrepath, book_format)
|
2018-08-04 08:56:42 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2023-03-28 14:13:10 +00:00
|
|
|
def get_cover_on_failure():
    """Serve the bundled generic cover image as a fallback response.

    Aborts with 403 if the static file cannot be read due to permissions.
    """
    try:
        return send_from_directory(_STATIC_DIR, "generic_cover.jpg")
    except PermissionError:
        log.error("No permission to access generic_cover.jpg file.")
        abort(403)
|
2019-12-17 17:00:35 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2021-09-24 08:11:14 +00:00
|
|
|
def get_book_cover(book_id, resolution=None):
    """Serve the cover for a book id, respecting content filters
    (archived books are allowed)."""
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    return get_book_cover_internal(book, resolution=resolution)
|
2019-12-17 17:00:35 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2022-04-22 14:13:51 +00:00
|
|
|
def get_book_cover_with_uuid(book_uuid, resolution=None):
    """Serve the cover for a book identified by its calibre UUID.

    Returns None for an unknown UUID so the caller can proxy elsewhere.
    """
    book = calibre_db.get_book_by_uuid(book_uuid)
    if not book:
        return  # allows kobo.HandleCoverImageRequest to proxy request
    return get_book_cover_internal(book, resolution=resolution)
|
2019-07-13 18:45:48 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2023-03-28 14:13:10 +00:00
|
|
|
def get_book_cover_internal(book, resolution=None):
    """Build the HTTP response for a book's cover image.

    Resolution order: cached thumbnail (if ``resolution`` given and cached),
    then Google Drive (if configured), then ``cover.jpg`` in the local
    Calibre directory. Falls back to the generic cover on any failure.
    """
    if book and book.has_cover:

        # Send the book cover thumbnail if it exists in cache
        if resolution:
            thumbnail = get_book_cover_thumbnail(book, resolution)
            if thumbnail:
                cache = fs.FileSystem()
                if cache.get_cache_file_exists(thumbnail.filename, CACHE_TYPE_THUMBNAILS):
                    return send_from_directory(cache.get_cache_file_dir(thumbnail.filename, CACHE_TYPE_THUMBNAILS),
                                               thumbnail.filename)

        # Send the book cover from Google Drive if configured
        if config.config_use_google_drive:
            try:
                if not gd.is_gdrive_ready():
                    return get_cover_on_failure()
                cover_file = gd.get_cover_via_gdrive(book.path)
                if cover_file:
                    return Response(cover_file, mimetype='image/jpeg')
                else:
                    log.error('{}/cover.jpg not found on Google Drive'.format(book.path))
                    return get_cover_on_failure()
            except Exception as ex:
                log.error_or_exception(ex)
                return get_cover_on_failure()

        # Send the book cover from the Calibre directory
        else:
            cover_file_path = os.path.join(config.get_book_path(), book.path)
            if os.path.isfile(os.path.join(cover_file_path, "cover.jpg")):
                return send_from_directory(cover_file_path, "cover.jpg")
            else:
                return get_cover_on_failure()
    else:
        return get_cover_on_failure()
|
2018-08-04 08:56:42 +00:00
|
|
|
|
2018-11-25 10:25:20 +00:00
|
|
|
|
2021-03-20 10:32:50 +00:00
|
|
|
def get_book_cover_thumbnail(book, resolution):
    """Look up a non-expired cached cover thumbnail for a book at the given
    resolution.

    Returns the Thumbnail row, or None if the book has no cover or no
    matching (unexpired) thumbnail exists.
    """
    if book and book.has_cover:
        return (ub.session
                .query(ub.Thumbnail)
                .filter(ub.Thumbnail.type == THUMBNAIL_TYPE_COVER)
                .filter(ub.Thumbnail.entity_id == book.id)
                .filter(ub.Thumbnail.resolution == resolution)
                # NULL expiration means the thumbnail never expires.
                .filter(or_(ub.Thumbnail.expiration.is_(None), ub.Thumbnail.expiration > datetime.now(timezone.utc)))
                .first())
|
2021-03-20 10:32:50 +00:00
|
|
|
|
|
|
|
|
2021-09-25 08:04:38 +00:00
|
|
|
def get_series_thumbnail_on_failure(series_id, resolution):
    """Fall back to the cover of the first book in the series that has one."""
    book = (calibre_db.session
            .query(db.Books)
            .join(db.books_series_link)
            .join(db.Series)
            .filter(db.Series.id == series_id)
            .filter(db.Books.has_cover == 1)
            .first())
    return get_book_cover_internal(book, resolution=resolution)
|
2021-09-25 08:04:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_series_cover_thumbnail(series_id, resolution=None):
    """Serve the cover thumbnail for a series."""
    return get_series_cover_internal(series_id, resolution)
|
|
|
|
|
|
|
|
|
|
|
|
def get_series_cover_internal(series_id, resolution=None):
    """Serve a series cover: cached thumbnail first, otherwise fall back to
    the cover of a book in the series."""
    # Send the series thumbnail if it exists in cache
    if resolution:
        thumbnail = get_series_thumbnail(series_id, resolution)
        if thumbnail:
            cache = fs.FileSystem()
            if cache.get_cache_file_exists(thumbnail.filename, CACHE_TYPE_THUMBNAILS):
                return send_from_directory(cache.get_cache_file_dir(thumbnail.filename, CACHE_TYPE_THUMBNAILS),
                                           thumbnail.filename)

    return get_series_thumbnail_on_failure(series_id, resolution)
|
|
|
|
|
|
|
|
|
|
|
|
def get_series_thumbnail(series_id, resolution):
    """Look up a non-expired cached series thumbnail at the given resolution.

    Returns the Thumbnail row or None.
    """
    return (ub.session
            .query(ub.Thumbnail)
            .filter(ub.Thumbnail.type == THUMBNAIL_TYPE_SERIES)
            .filter(ub.Thumbnail.entity_id == series_id)
            .filter(ub.Thumbnail.resolution == resolution)
            # NULL expiration means the thumbnail never expires.
            .filter(or_(ub.Thumbnail.expiration.is_(None), ub.Thumbnail.expiration > datetime.now(timezone.utc)))
            .first())
|
2021-09-25 08:04:38 +00:00
|
|
|
|
|
|
|
|
2019-04-15 18:57:25 +00:00
|
|
|
# saves book cover from url
|
|
|
|
def save_cover_from_url(url, book_path):
    """Download a cover image from ``url`` and store it for the book.

    Uses ``advocate`` (SSRF-safe requests) unless localhost access is
    explicitly allowed via CLI parameter. Returns ``(success, message)``.
    """
    try:
        if cli_param.allow_localhost:
            img = requests.get(url, timeout=(10, 200), allow_redirects=False)  # ToDo: Error Handling
        elif use_advocate:
            img = advocate.get(url, timeout=(10, 200), allow_redirects=False)  # ToDo: Error Handling
        else:
            log.error("python module advocate is not installed but is needed")
            return False, _("Python module 'advocate' is not installed but is needed for cover uploads")
        img.raise_for_status()
        return save_cover(img, book_path)
    except (socket.gaierror,
            requests.exceptions.HTTPError,
            requests.exceptions.InvalidURL,
            requests.exceptions.ConnectionError,
            requests.exceptions.Timeout) as ex:
        # "Invalid host" can be the result of a redirect response
        log.error('Cover Download Error %s', ex)
        return False, _("Error Downloading Cover")
    except MissingDelegateError as ex:
        log.info('File Format Error %s', ex)
        return False, _("Cover Format Error")
    except UnacceptableAddressException:
        # advocate blocked a localhost / private-network target;
        # no exception binding needed since the log message is static.
        log.error("Localhost or local network was accessed for cover upload")
        return False, _("You are not allowed to access localhost or the local network for cover uploads")
|
2020-05-29 04:59:40 +00:00
|
|
|
|
2019-04-15 18:57:25 +00:00
|
|
|
|
|
|
|
def save_cover_from_filestorage(filepath, saved_filename, img):
    """Write a cover image object to ``filepath/saved_filename``.

    ``img`` may be a ``requests.Response`` (raw download), a wand Image
    (has ``metadata``), or a werkzeug FileStorage. Returns
    ``(success, error_message_or_None)``.
    """
    # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
    if not os.path.exists(filepath):
        try:
            os.makedirs(filepath)
        except OSError:
            log.error("Failed to create path for cover")
            return False, _("Failed to create path for cover")
    try:
        # upload of jpg file without wand
        if isinstance(img, requests.Response):
            with open(os.path.join(filepath, saved_filename), 'wb') as f:
                f.write(img.content)
        else:
            if hasattr(img, "metadata"):
                # upload of jpg/png... via url
                img.save(filename=os.path.join(filepath, saved_filename))
                img.close()
            else:
                # upload of jpg/png... from hdd
                img.save(os.path.join(filepath, saved_filename))
    except (IOError, OSError):
        log.error("Cover-file is not a valid image file, or could not be stored")
        return False, _("Cover-file is not a valid image file, or could not be stored")
    return True, None
|
2019-04-15 18:57:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
# saves book cover to gdrive or locally
|
|
|
|
def save_cover(img, book_path):
    """Validate, convert and store a book cover to Google Drive or locally.

    With ImageMagick (wand) available, jpg/png/webp/bmp input is accepted
    and converted to jpeg; otherwise only jpg is accepted as-is.
    Returns ``(success, error_message_or_None)``.
    """
    content_type = img.headers.get('content-type')

    if use_IM:
        if content_type not in ('image/jpeg', 'image/jpg', 'image/png', 'image/webp', 'image/bmp'):
            log.error("Only jpg/jpeg/png/webp/bmp files are supported as coverfile")
            return False, _("Only jpg/jpeg/png/webp/bmp files are supported as coverfile")
        # convert to jpg because calibre only supports jpg
        try:
            # FileStorage objects expose .stream; downloads expose .content.
            if hasattr(img, 'stream'):
                imgc = Image(blob=img.stream)
            else:
                imgc = Image(blob=io.BytesIO(img.content))
            imgc.format = 'jpeg'
            imgc.transform_colorspace("rgb")
            img = imgc
        except (BlobError, MissingDelegateError):
            log.error("Invalid cover file content")
            return False, _("Invalid cover file content")
    else:
        if content_type not in ['image/jpeg', 'image/jpg']:
            log.error("Only jpg/jpeg files are supported as coverfile")
            return False, _("Only jpg/jpeg files are supported as coverfile")

    if config.config_use_google_drive:
        # Stage the cover in a temp dir, then upload it to the book folder.
        tmp_dir = get_temp_dir()
        ret, message = save_cover_from_filestorage(tmp_dir, "uploaded_cover.jpg", img)
        if ret is True:
            gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg').replace("\\", "/"),
                                        os.path.join(tmp_dir, "uploaded_cover.jpg"))
            log.info("Cover is saved on Google Drive")
            return True, None
        else:
            return False, message
    else:
        return save_cover_from_filestorage(os.path.join(config.get_book_path(), book_path), "cover.jpg", img)
|
2019-04-15 18:57:25 +00:00
|
|
|
|
2018-08-04 08:56:42 +00:00
|
|
|
|
2019-08-18 19:44:19 +00:00
|
|
|
def do_download_file(book, book_format, client, data, headers):
    """Build the download response for one book format.

    Handles both Google Drive and local storage, optional metadata embedding
    (kepubify for kepub, calibre export otherwise), and the kobo client's
    ``.kepub.epub`` naming quirk. Aborts with 404 if the gdrive file is
    missing.
    """
    book_name = data.name
    download_name = filename = None
    if config.config_use_google_drive:
        # startTime = time.time()
        df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
        # log.debug('%s', time.time() - startTime)
        if df:
            if config.config_embed_metadata and (
                    (book_format == "kepub" and config.config_kepubifypath) or
                    (book_format != "kepub" and config.config_binariesdir)):
                # Metadata embedding needs a local copy: download from gdrive
                # into the calibre dir first, then post-process.
                output_path = os.path.join(config.config_calibre_dir, book.path)
                if not os.path.exists(output_path):
                    os.makedirs(output_path)
                output = os.path.join(config.config_calibre_dir, book.path, book_name + "." + book_format)
                gd.downloadFile(book.path, book_name + "." + book_format, output)
                if book_format == "kepub" and config.config_kepubifypath:
                    filename, download_name = do_kepubify_metadata_replace(book, output)
                elif book_format != "kepub" and config.config_binariesdir:
                    filename, download_name = do_calibre_export(book.id, book_format)
            else:
                # No embedding requested: stream straight from Google Drive.
                return gd.do_gdrive_download(df, headers)
        else:
            abort(404)
    else:
        filename = os.path.join(config.get_book_path(), book.path)
        if not os.path.isfile(os.path.join(filename, book_name + "." + book_format)):
            # ToDo: improve error handling
            # NOTE(review): execution continues after logging, so the
            # send_from_directory below will fail for the missing file —
            # confirm whether a 404 abort was intended here.
            log.error('File not found: %s', os.path.join(filename, book_name + "." + book_format))

        if client == "kobo" and book_format == "kepub":
            headers["Content-Disposition"] = headers["Content-Disposition"].replace(".kepub", ".kepub.epub")

        if book_format == "kepub" and config.config_kepubifypath and config.config_embed_metadata:
            filename, download_name = do_kepubify_metadata_replace(book, os.path.join(filename,
                                                                                     book_name + "." + book_format))
        elif book_format != "kepub" and config.config_binariesdir and config.config_embed_metadata:
            filename, download_name = do_calibre_export(book.id, book_format)
        else:
            download_name = book_name

    response = make_response(send_from_directory(filename, download_name + "." + book_format))
    # ToDo Check headers parameter
    for element in headers:
        response.headers[element[0]] = element[1]
    log.info('Downloading file: {}'.format(os.path.join(filename, book_name + "." + book_format)))
    return response
|
2023-11-02 16:05:02 +00:00
|
|
|
|
2018-08-04 08:56:42 +00:00
|
|
|
|
2024-01-13 09:53:46 +00:00
|
|
|
def do_kepubify_metadata_replace(book, file_path):
    """Rewrite the metadata block of a kepub file with current book data.

    Builds a fresh metadata package (including custom columns), splices it
    into the epub's content.opf, and writes the result to a uniquely named
    file in the temp dir. Returns ``(tmp_dir, temp_file_name)`` (name
    without extension).
    """
    custom_columns = (calibre_db.session.query(db.CustomColumns)
                      .filter(db.CustomColumns.mark_for_delete == 0)
                      .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions))
                      .order_by(db.CustomColumns.label).all())

    tree, cf_name = get_content_opf(file_path)
    package = create_new_metadata_backup(book, custom_columns, current_user.locale, _("Cover"), lang_type=2)
    content = replace_metadata(tree, package)
    tmp_dir = get_temp_dir()
    temp_file_name = str(uuid4())
    # open zipfile and replace metadata block in content.opf
    updateEpub(file_path, os.path.join(tmp_dir, temp_file_name + ".kepub"), cf_name, content)
    return tmp_dir, temp_file_name
|
|
|
|
|
|
|
|
|
2018-07-14 06:31:52 +00:00
|
|
|
##################################
|
|
|
|
|
2017-03-01 22:38:03 +00:00
|
|
|
|
2022-03-13 11:34:21 +00:00
|
|
|
def check_unrar(unrar_location):
    """Sanity-check the configured UnRar binary.

    Returns None when the location is empty or the binary responds;
    returns a translated error message string on failure.
    """
    if not unrar_location:
        return

    if not os.path.exists(unrar_location):
        return _('UnRar binary file not found')

    try:
        result = process_wait([unrar_location], pattern='UNRAR (.*) freeware')
        if result:
            log.debug("UnRar version %s", result.group(1))
    except (OSError, UnicodeDecodeError) as err:
        log.error_or_exception(err)
        return _('Error executing UnRar')
|
2018-08-31 08:47:58 +00:00
|
|
|
|
2018-09-10 08:42:28 +00:00
|
|
|
|
2023-11-02 16:05:02 +00:00
|
|
|
def check_calibre(calibre_location):
    """Sanity-check the configured Calibre binaries directory.

    Verifies the directory exists, that every supported binary is present
    and executable, and that each reports a version. Returns None when the
    location is empty or everything checks out; otherwise a translated
    error message string.
    """
    if not calibre_location:
        return

    if not os.path.exists(calibre_location):
        return _('Could not find the specified directory')

    if not os.path.isdir(calibre_location):
        return _('Please specify a directory, not a file')

    try:
        supported_binary_paths = [os.path.join(calibre_location, binary)
                                  for binary in SUPPORTED_CALIBRE_BINARIES.values()]
        binaries_available = [os.path.isfile(binary_path) for binary_path in supported_binary_paths]
        binaries_executable = [os.access(binary_path, os.X_OK) for binary_path in supported_binary_paths]
        if all(binaries_available) and all(binaries_executable):
            # Run every binary with --version and parse the calibre version.
            values = [process_wait([binary_path, "--version"], pattern=r'\(calibre (.*)\)')
                      for binary_path in supported_binary_paths]
            if all(values):
                version = values[0].group(1)
                log.debug("calibre version %s", version)
            else:
                return _('Calibre binaries not viable')
        else:
            # Collect a combined report of missing files / missing exec bits.
            ret_val = []
            missing_binaries = [path for path, available in
                                zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_available) if not available]

            missing_perms = [path for path, available in
                             zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_executable) if not available]
            if missing_binaries:
                ret_val.append(_('Missing calibre binaries: %(missing)s', missing=", ".join(missing_binaries)))
            if missing_perms:
                ret_val.append(_('Missing executable permissions: %(missing)s', missing=", ".join(missing_perms)))
            return ", ".join(ret_val)

    except (OSError, UnicodeDecodeError) as err:
        log.error_or_exception(err)
        return _('Error executing Calibre')
|
2023-11-02 16:05:02 +00:00
|
|
|
|
|
|
|
|
2018-11-18 16:09:13 +00:00
|
|
|
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code"""
    if isinstance(obj, datetime):
        return obj.isoformat()
    if isinstance(obj, timedelta):
        return {'__type__': 'timedelta',
                'days': obj.days,
                'seconds': obj.seconds,
                'microseconds': obj.microseconds}
    raise TypeError("Type %s not serializable" % type(obj))
|
2018-10-03 19:58:37 +00:00
|
|
|
|
2019-06-30 09:20:36 +00:00
|
|
|
|
2019-07-25 19:42:46 +00:00
|
|
|
def tags_filters():
    """Build a SQLAlchemy filter expression enforcing the current user's
    allowed/denied tag lists.

    An empty list (the single-empty-string sentinel) means "deny nothing" /
    "allow everything" respectively.
    """
    negtags_list = current_user.list_denied_tags()
    postags_list = current_user.list_allowed_tags()
    neg_content_tags_filter = false() if negtags_list == [''] else db.Tags.name.in_(negtags_list)
    pos_content_tags_filter = true() if postags_list == [''] else db.Tags.name.in_(postags_list)
    return and_(pos_content_tags_filter, ~neg_content_tags_filter)
|
2019-07-13 18:45:48 +00:00
|
|
|
|
2020-04-19 17:08:58 +00:00
|
|
|
|
2019-07-13 18:45:48 +00:00
|
|
|
# checks if domain is in database (including wildcards)
|
2024-01-13 09:53:46 +00:00
|
|
|
# example SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
|
2019-07-13 18:45:48 +00:00
|
|
|
# from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/
|
2021-04-10 09:32:11 +00:00
|
|
|
# in all calls the email address is checked for validity
|
2019-07-13 18:45:48 +00:00
|
|
|
def check_valid_domain(domain_text):
    """Check whether a domain is permitted for registration.

    The registration table stores SQL LIKE patterns (wildcards allowed);
    the domain must match at least one allow rule and no deny rule.
    """
    allow_sql = "SELECT * FROM registration WHERE (:domain LIKE domain and allow = 1);"
    allowed = ub.session.query(ub.Registration).from_statement(text(allow_sql)).params(domain=domain_text).all()
    if not len(allowed):
        return False
    deny_sql = "SELECT * FROM registration WHERE (:domain LIKE domain and allow = 0);"
    denied = ub.session.query(ub.Registration).from_statement(text(deny_sql)).params(domain=domain_text).all()
    return not len(denied)
|
2019-07-13 18:45:48 +00:00
|
|
|
|
|
|
|
|
2019-08-18 19:44:19 +00:00
|
|
|
def get_download_link(book_id, book_format, client):
    """Serve a download of *book_id* in *book_format* for *client*.

    Looks up the (possibly archived) book, records the download for
    authenticated users, builds Content-Type / Content-Disposition
    headers from the book title and first author, and delegates the
    actual file transfer to ``do_download_file``. Aborts with 404 when
    the book or the requested format is missing.
    """
    book_format = book_format.split(".")[0]
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    if not book:
        log.error("Book id {} not found for downloading".format(book_id))
        abort(404)
    data1 = calibre_db.get_book_format(book.id, book_format.upper())
    if not data1:
        # Book exists but the requested format does not.
        abort(404)
    # collect downloaded books only for registered user and not for anonymous user
    if current_user.is_authenticated:
        ub.update_download(book_id, int(current_user.id))
    file_name = book.title
    if book.authors:
        file_name = file_name + ' - ' + book.authors[0].name
    file_name = get_valid_filename(file_name, replace_whitespace=False)
    headers = Headers()
    headers["Content-Type"] = mimetypes.types_map.get('.' + book_format, "application/octet-stream")
    # Provide both the plain and the RFC 5987 (filename*) forms so old and
    # new clients each pick the variant they understand.
    headers["Content-Disposition"] = "attachment; filename=%s.%s; filename*=UTF-8''%s.%s" % (
        quote(file_name), book_format, quote(file_name), book_format)
    return do_download_file(book, book_format, client, data1, headers)
|
2021-03-20 10:32:50 +00:00
|
|
|
|
|
|
|
|
2021-09-29 07:40:12 +00:00
|
|
|
def clear_cover_thumbnail_cache(book_id):
    """Queue a hidden background task clearing the cached cover
    thumbnails for *book_id* (no-op when thumbnail generation is
    disabled in the configuration)."""
    if not config.schedule_generate_book_covers:
        return
    WorkerThread.add(None, TaskClearCoverThumbnailCache(book_id), hidden=True)
|
2022-04-23 17:56:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
def replace_cover_thumbnail_cache(book_id):
    """Queue hidden background tasks that first clear and then
    regenerate the cover thumbnails for *book_id* (no-op when
    thumbnail generation is disabled in the configuration)."""
    if not config.schedule_generate_book_covers:
        return
    # Order matters: drop the stale thumbnails before regenerating.
    WorkerThread.add(None, TaskClearCoverThumbnailCache(book_id), hidden=True)
    WorkerThread.add(None, TaskGenerateCoverThumbnails(book_id), hidden=True)
|
2022-03-16 19:31:25 +00:00
|
|
|
|
2022-04-22 14:13:51 +00:00
|
|
|
|
2022-03-16 19:31:25 +00:00
|
|
|
def delete_thumbnail_cache():
    """Queue a visible background task wiping the cover thumbnail cache.

    The -1 book id presumably selects every book — matches the
    "delete all" intent of this helper; confirm against
    TaskClearCoverThumbnailCache.
    """
    wipe_task = TaskClearCoverThumbnailCache(-1)
    WorkerThread.add(None, wipe_task)
|
2022-04-22 14:13:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
def add_book_to_thumbnail_cache(book_id):
    """Queue a hidden background task generating cover thumbnails for
    the newly added *book_id* (no-op when thumbnail generation is
    disabled in the configuration)."""
    if not config.schedule_generate_book_covers:
        return
    WorkerThread.add(None, TaskGenerateCoverThumbnails(book_id), hidden=True)
|
2022-04-25 06:24:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
def update_thumbnail_cache():
    """Queue a background task regenerating cover thumbnails for the
    whole library (no book id is passed; no-op when thumbnail
    generation is disabled in the configuration)."""
    if not config.schedule_generate_book_covers:
        return
    WorkerThread.add(None, TaskGenerateCoverThumbnails())
|
2022-09-19 16:56:22 +00:00
|
|
|
|
|
|
|
|
|
|
|
def set_all_metadata_dirty():
    """Queue a visible background task that flags every book's metadata
    as dirty so the next metadata-backup run rewrites it."""
    backup_task = TaskBackupMetadata(export_language=get_locale(),
                                     translated_title=_("Cover"),
                                     set_dirty=True,
                                     task_message=N_("Queue all books for metadata backup"))
    WorkerThread.add(None, backup_task, hidden=False)
|