mirror of https://github.com/janeczku/calibre-web
synced 2024-12-25 17:40:31 +00:00

commit 80f1276624

Merge remote-tracking branch 'gitignore/fix/syntax-python3-20170305'

# Conflicts:
#	cps/db.py
#	cps/web.py
#	requirements.txt
cps.py (4 changes)

@@ -6,7 +6,9 @@ import sys

 base_path = os.path.dirname(os.path.abspath(__file__))
 # Insert local directories into path
-sys.path.insert(0, os.path.join(base_path, 'vendor'))
+sys.path.append(base_path)
+sys.path.append(os.path.join(base_path, 'cps'))
+sys.path.append(os.path.join(base_path, 'vendor'))

 from cps import web
 from tornado.wsgi import WSGIContainer
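The launcher now appends the project root, the cps package directory, and the bundled vendor tree to sys.path before importing the web module, so the application resolves its modules whether dependencies are vendored or installed system-wide. A minimal sketch of the same bootstrap pattern (the paths are illustrative):

    import os
    import sys

    # Directory that contains this launcher script.
    base_path = os.path.dirname(os.path.abspath(__file__))

    # Make the application importable from a source checkout:
    # the project root, the 'cps' package directory, and the bundled 'vendor' tree.
    sys.path.append(base_path)
    sys.path.append(os.path.join(base_path, 'cps'))
    sys.path.append(os.path.join(base_path, 'vendor'))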
cps/uploader.py

@@ -14,28 +14,28 @@ try:
     from wand.image import Image
     from wand import version as ImageVersion
     use_generic_pdf_cover = False
-except ImportError, e:
+except ImportError as e:
     logger.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e)
     use_generic_pdf_cover = True
 try:
     from PyPDF2 import PdfFileReader
     from PyPDF2 import __version__ as PyPdfVersion
     use_pdf_meta = True
-except ImportError, e:
+except ImportError as e:
     logger.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e)
     use_pdf_meta = False

 try:
     import epub
     use_epub_meta = True
-except ImportError, e:
+except ImportError as e:
     logger.warning('cannot import epub, extracting epub metadata will not work: %s', e)
     use_epub_meta = False

 try:
     import fb2
     use_fb2_meta = True
-except ImportError, e:
+except ImportError as e:
     logger.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e)
     use_fb2_meta = False

@@ -48,7 +48,7 @@ def process(tmp_file_path, original_file_name, original_file_extension):
             return epub.get_epub_info(tmp_file_path, original_file_name, original_file_extension)
         if ".FB2" == original_file_extension.upper() and use_fb2_meta is True:
             return fb2.get_fb2_info(tmp_file_path, original_file_extension)
-    except Exception, e:
+    except Exception as e:
         logger.warning('cannot parse metadata, using default: %s', e)
     return default_meta(tmp_file_path, original_file_name, original_file_extension)

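The recurring change in this file is the exception syntax: the Python 2 form 'except ImportError, e:' is a SyntaxError on Python 3, while 'except ImportError as e:' parses on Python 2.6+ and on Python 3. A small sketch of the guarded optional-import pattern used here, with PyPDF2 as the example dependency and a plain print standing in for the module's logger:

    try:
        from PyPDF2 import PdfFileReader   # optional dependency
        use_pdf_meta = True
    except ImportError as e:               # 'as' spelling works on Python 2.6+ and 3.x
        print('PyPDF2 not available, pdf metadata disabled: %s' % e)
        use_pdf_meta = False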
cps/db.py

@@ -289,10 +289,11 @@ def setup_db():
         return False

     dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
-    engine = create_engine('sqlite:///{0}'.format(dbpath.encode('utf-8')), echo=False, isolation_level="SERIALIZABLE")
+    #engine = create_engine('sqlite:///{0}'.format(dbpath.encode('utf-8')), echo=False, isolation_level="SERIALIZABLE")
+    engine = create_engine('sqlite:///'+ dbpath, echo=False, isolation_level="SERIALIZABLE")
     try:
         conn = engine.connect()
-    except:
+    except Exception as e:
         content = ub.session.query(ub.Settings).first()
         content.config_calibre_dir = None
         content.db_configured = False
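The replaced engine line stops encoding the database path to bytes before building the SQLAlchemy URL: on Python 3, str.encode('utf-8') returns a bytes object, and formatting that into a string embeds its repr (b'...') in the URL. A short sketch of the corrected construction, assuming an illustrative local path:

    from sqlalchemy import create_engine

    dbpath = '/tmp/metadata.db'   # illustrative path

    # Under Python 3, '{0}'.format(dbpath.encode('utf-8')) would interpolate the
    # repr "b'/tmp/metadata.db'" into the URL, so the plain text path is used instead.
    engine = create_engine('sqlite:///' + dbpath, echo=False, isolation_level="SERIALIZABLE")
    conn = engine.connect()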
cps/fb2.py (10 changes)

@@ -4,9 +4,11 @@
 from lxml import etree
 import os
 import uploader
+try:
+    from io import StringIO
+except ImportError as e:
     import StringIO


 def get_fb2_info(tmp_file_path, original_file_extension):

     ns = {

@@ -37,16 +39,16 @@ def get_fb2_info(tmp_file_path, original_file_extension):
             first_name = u''
         return first_name + ' ' + middle_name + ' ' + last_name

-    author = unicode(", ".join(map(get_author, authors)))
+    author = str(", ".join(map(get_author, authors)))

     title = tree.xpath('/fb:FictionBook/fb:description/fb:title-info/fb:book-title/text()', namespaces=ns)
     if len(title):
-        title = unicode(title[0])
+        title = str(title[0])
     else:
         title = u''
     description = tree.xpath('/fb:FictionBook/fb:description/fb:publish-info/fb:book-name/text()', namespaces=ns)
     if len(description):
-        description = unicode(description[0])
+        description = str(description[0])
     else:
         description = u''

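Two Python 3 issues are handled in fb2.py: the StringIO module moved into io, and the unicode builtin no longer exists, so text is cast with str instead. A minimal sketch of the import fallback, assuming Python 2 compatibility is still wanted:

    try:
        from io import StringIO           # Python 3 (also available in Python 2.6+)
    except ImportError:
        from StringIO import StringIO     # Python 2 fallback

    buf = StringIO()
    buf.write(u'unicode text')
    print(buf.getvalue())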
cps/helper.py

@@ -13,11 +13,17 @@ import os
 import traceback
 import re
 import unicodedata
-from StringIO import StringIO
-from email import encoders
-from email.MIMEBase import MIMEBase
-from email.MIMEMultipart import MIMEMultipart
-from email.MIMEText import MIMEText
+try:
+    from io import StringIO
+    from email.mime.base import MIMEBase
+    from email.mime.multipart import MIMEMultipart
+    from email.mime.text import MIMEText
+except ImportError as e:
+    from StringIO import StringIO
+    from email.MIMEBase import MIMEBase
+    from email.MIMEMultipart import MIMEMultipart
+    from email.MIMEText import MIMEText
+from email import encoders
 from email.generator import Generator
 from email.utils import formatdate
 from email.utils import make_msgid

@@ -32,7 +38,7 @@ from tornado.ioloop import IOLoop
 try:
     import unidecode
     use_unidecode=True
-except:
+except Exception as e:
     use_unidecode=False

 # Global variables

@@ -147,7 +153,7 @@ def send_raw_email(kindle_mail, msg):

         smtplib.stderr = org_stderr

-    except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException), e:
+    except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException) as e:
         app.logger.error(traceback.print_exc())
         return _("Failed to send mail: %s" % str(e))

@@ -238,7 +244,10 @@ def get_valid_filename(value, replace_whitespace=True):
     value=value.replace(u'§',u'SS')
     value=value.replace(u'ß',u'ss')
     value = unicodedata.normalize('NFKD', value)
-    re_slugify = re.compile('[^\w\s-]', re.UNICODE)
-    value = unicode(re_slugify.sub('', value).strip())
+    re_slugify = re.compile('[\W\s-]', re.UNICODE)
+    if type(value) is str: #Python3 str, Python2 unicode
+        value = re_slugify.sub('', value).strip()
+    else:
+        value = unicode(re_slugify.sub('', value).strip())
     if replace_whitespace:
         #*+:\"/<>? werden durch _ ersetzt

@@ -379,7 +388,7 @@ class Updater(threading.Thread):
                 try:
                     os.chown(dst_file, permission.st_uid, permission.st_uid)
                     # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_uid))
-                except:
+                except Exception as e:
                     e = sys.exc_info()
                     logging.getLogger('cps.web').debug('Fail '+str(dst_file)+' error: '+str(e))
                 return

@@ -421,7 +430,7 @@ class Updater(threading.Thread):
                     logging.getLogger('cps.web').debug("Delete file " + item_path)
                     log_from_thread("Delete file " + item_path)
                     os.remove(item_path)
-                except:
+                except Exception as e:
                     logging.getLogger('cps.web').debug("Could not remove:" + item_path)
         shutil.rmtree(source, ignore_errors=True)

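The email package renamed its MIME submodules between Python 2 and 3 (email.MIMEText became email.mime.text, and so on), which is why the imports are wrapped in the same try/except fallback. A short sketch of composing a message that works under either layout:

    try:
        # Python 3 module layout
        from email.mime.multipart import MIMEMultipart
        from email.mime.text import MIMEText
    except ImportError:
        # Python 2 layout
        from email.MIMEMultipart import MIMEMultipart
        from email.MIMEText import MIMEText

    msg = MIMEMultipart()
    msg['Subject'] = 'Test'
    msg.attach(MIMEText('hello', 'plain'))
    print(msg.as_string())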
cps/ub.py

@@ -10,6 +10,7 @@ import os
 import logging
 from werkzeug.security import generate_password_hash
 from flask_babel import gettext as _
+#from builtins import str

 dbpath = os.path.join(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + os.sep + ".." + os.sep), "app.db")
 engine = create_engine('sqlite:///{0}'.format(dbpath), echo=False)

@@ -90,7 +91,7 @@ class UserBase:
         return False

     def get_id(self):
-        return unicode(self.id)
+        return str(self.id)

     def filter_language(self):
         return self.default_language

@@ -438,7 +439,7 @@ def create_anonymous_user():
     session.add(user)
     try:
         session.commit()
-    except:
+    except Exception as e:
         session.rollback()
         pass

@@ -456,7 +457,7 @@ def create_admin_user():
     session.add(user)
     try:
         session.commit()
-    except:
+    except Exception as e:
         session.rollback()
         pass

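Flask-Login expects get_id() to return a text string; on Python 3 the unicode builtin is gone and str is already text, so the cast becomes str(self.id). The commented-out 'from builtins import str' line points at the future compatibility package, which would give Python 2 the same str semantics. A minimal sketch (the constructor here is illustrative, not the project's actual model):

    class UserBase(object):
        def __init__(self, user_id):
            self.id = user_id

        def get_id(self):
            # Flask-Login wants a text id; on Python 3, str() already is unicode text.
            return str(self.id)

    print(UserBase(42).get_id())  # '42'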
cps/web.py (55 changes)

@@ -47,6 +47,13 @@ from shutil import move, copyfile
 from tornado.ioloop import IOLoop
 from tornado import version as tornadoVersion

+try:
+    from urllib.parse import quote
+    from imp import reload
+    from past.builtins import xrange
+except ImportError as e:
+    from urllib import quote
+
 try:
     from flask_login import __version__ as flask_loginVersion
 except ImportError, e:

@@ -56,7 +63,7 @@ try:
     from wand.image import Image

     use_generic_pdf_cover = False
-except ImportError, e:
+except ImportError as e:
     use_generic_pdf_cover = True
 from cgi import escape

@@ -292,7 +299,7 @@ def shortentitle_filter(s):
 def mimetype_filter(val):
     try:
         s = mimetypes.types_map['.' + val]
-    except:
+    except Exception as e:
         s = 'application/octet-stream'
     return s

@@ -773,7 +780,7 @@ def get_updater_status():
     elif request.method == "GET":
         try:
             status['status']=helper.updater_thread.get_update_status()
-        except:
+        except Exception as e:
             status['status'] = 7
     return json.dumps(status)

@@ -788,7 +795,7 @@ def get_languages_json():
         try:
             cur_l = LC.parse(lang.lang_code)
             lang.name = cur_l.get_language_name(get_locale())
-        except:
+        except Exception as e:
             lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
     entries = [s for s in languages if query in s.name.lower()]
     json_dumps = json.dumps([dict(name=r.name) for r in entries])

@@ -954,13 +961,13 @@ def language_overview():
             try:
                 cur_l = LC.parse(lang.lang_code)
                 lang.name = cur_l.get_language_name(get_locale())
-            except:
+            except Exception as e:
                 lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
     else:
         try:
             langfound = 1
             cur_l = LC.parse(current_user.filter_language())
-        except:
+        except Exception as e:
             langfound = 0
         languages = db.session.query(db.Languages).filter(
             db.Languages.lang_code == current_user.filter_language()).all()

@@ -984,7 +991,7 @@ def language(name, page):
     try:
         cur_l = LC.parse(name)
         name = cur_l.get_language_name(get_locale())
-    except:
+    except Exception as e:
         name = _(isoLanguages.get(part3=name).name)
     return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                  title=_(u"Language: %(name)s", name=name))

@@ -1028,7 +1035,7 @@ def show_book(id):
         try:
             entries.languages[index].language_name = LC.parse(entries.languages[index].lang_code).get_language_name(
                 get_locale())
-        except:
+        except Exception as e:
             entries.languages[index].language_name = _(
                 isoLanguages.get(part3=entries.languages[index].lang_code).name)
     cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()

@@ -1069,6 +1076,8 @@ def stats():
                          stdin=subprocess.PIPE)
     p.wait()
     for lines in p.stdout.readlines():
+        if type(lines) is bytes:
+            lines = lines.decode('utf-8')
         if re.search('Amazon kindlegen\(', lines):
             versions['KindlegenVersion'] = lines
     versions['PythonVersion'] = sys.version
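In the stats view the output of the kindlegen subprocess arrives as bytes on Python 3, so each line is decoded before the regular-expression search. A small sketch of the same guard, using an illustrative echo command instead of kindlegen:

    import re
    import subprocess

    p = subprocess.Popen(['echo', 'Amazon kindlegen(Linux) V2.9'], stdout=subprocess.PIPE)
    p.wait()
    for line in p.stdout.readlines():
        if type(line) is bytes:          # Python 3: pipe output is bytes
            line = line.decode('utf-8')  # make it text before re.search
        if re.search(r'Amazon kindlegen\(', line):
            print(line.strip())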
@@ -1177,7 +1186,7 @@ def advanced_search():
             try:
                 cur_l = LC.parse(lang.lang_code)
                 lang.name = cur_l.get_language_name(get_locale())
-            except:
+            except Exception as e:
                 lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
             searchterm.extend(language.name for language in language_names)
         searchterm = " + ".join(filter(None, searchterm))

@@ -1209,7 +1218,7 @@ def advanced_search():
             try:
                 cur_l = LC.parse(lang.lang_code)
                 lang.name = cur_l.get_language_name(get_locale())
-            except:
+            except Exception as e:
                 lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
     else:
         languages = None

@@ -1311,9 +1320,9 @@ def get_download_link(book_id, format):
                 send_from_directory(os.path.join(config.config_calibre_dir, book.path), data.name + "." + format))
             try:
                 response.headers["Content-Type"] = mimetypes.types_map['.' + format]
-            except:
+            except Exception as e:
                 pass
-            response.headers["Content-Disposition"] = "attachment; filename*=UTF-8''%s.%s" % (urllib.quote(file_name.encode('utf-8')), format)
+            response.headers["Content-Disposition"] = "attachment; filename*=UTF-8''%s.%s" % (quote(file_name.encode('utf-8')), format)
             return response
         else:
             abort(404)

@@ -1343,7 +1352,7 @@ def register():
         try:
             ub.session.add(content)
             ub.session.commit()
-        except:
+        except Exception as e:
             ub.session.rollback()
             flash(_(u"An unknown error occured. Please try again later."), category="error")
             return render_title_template('register.html', title=_(u"register"))

@@ -1468,7 +1477,7 @@ def create_shelf():
             ub.session.add(shelf)
             ub.session.commit()
             flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
-        except:
+        except Exception as e:
             flash(_(u"There was an error"), category="error")
         return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"create a shelf"))
     else:

@@ -1496,7 +1505,7 @@ def edit_shelf(shelf_id):
         try:
             ub.session.commit()
             flash(_(u"Shelf %(title)s changed", title=to_save["title"]), category="success")
-        except:
+        except Exception as e:
             flash(_(u"There was an error"), category="error")
         return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"))
     else:

@@ -1584,7 +1593,7 @@ def profile():
         try:
             cur_l = LC.parse(lang.lang_code)
             lang.name = cur_l.get_language_name(get_locale())
-        except:
+        except Exception as e:
             lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
     translations = babel.list_translations() + [LC('en')]
     for book in content.downloads:

@@ -1760,7 +1769,7 @@ def new_user():
         try:
             cur_l = LC.parse(lang.lang_code)
             lang.name = cur_l.get_language_name(get_locale())
-        except:
+        except Exception as e:
             lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
     translations = [LC('en')] + babel.list_translations()
     if request.method == "POST":

@@ -1858,7 +1867,7 @@ def edit_user(user_id):
         try:
             cur_l = LC.parse(lang.lang_code)
             lang.name = cur_l.get_language_name(get_locale())
-        except:
+        except Exception as e:
             lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
     translations = babel.list_translations() + [LC('en')]
     for book in content.downloads:

@@ -1980,7 +1989,7 @@ def edit_book(book_id):
         try:
             book.languages[index].language_name = LC.parse(book.languages[index].lang_code).get_language_name(
                 get_locale())
-        except:
+        except Exception as e:
             book.languages[index].language_name = _(isoLanguages.get(part3=book.languages[index].lang_code).name)
     for author in book.authors:
         author_names.append(author.name)

@@ -2030,7 +2039,7 @@ def edit_book(book_id):
         for lang in languages:
             try:
                 lang.name = LC.parse(lang.lang_code).get_language_name(get_locale()).lower()
-            except:
+            except Exception as e:
                 lang.name = _(isoLanguages.get(part3=lang.lang_code).name).lower()
             for inp_lang in input_languages:
                 if inp_lang == lang.name:

@@ -2216,12 +2225,12 @@ def upload():
             return redirect(url_for('index'))
         try:
             copyfile(meta.file_path, saved_filename)
-        except OSError, e:
+        except OSError as e:
             flash(_(u"Failed to store file %s (Permission denied)." % saved_filename), category="error")
             return redirect(url_for('index'))
         try:
             os.unlink(meta.file_path)
-        except OSError, e:
+        except OSError as e:
             flash(_(u"Failed to delete file %s (Permission denied)." % meta.file_path), category="warning")

         file_size = os.path.getsize(saved_filename)

@@ -2253,7 +2262,7 @@ def upload():
             db.session.add(db_language)
         # combine path and normalize path from windows systems
         path = os.path.join(author_dir, title_dir).replace('\\','/')
-        db_book = db.Books(title, "", db_author.sort, datetime.datetime.now(), datetime.datetime(101, 01, 01), 1,
+        db_book = db.Books(title, "", db_author.sort, datetime.datetime.now(), datetime.datetime(101, 1, 1), 1,
                            datetime.datetime.now(), path, has_cover, db_author, [], db_language)
         db_book.authors.append(db_author)
         if db_language is not None:
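urllib.quote moved to urllib.parse.quote in Python 3, which is why the download handler imports quote through a try/except and drops the urllib. prefix at the call site; the upload handler's datetime.datetime(101, 01, 01) likewise had to become (101, 1, 1), since integer literals with a leading zero are a syntax error on Python 3. A sketch of the import fallback and the header it is used for (the filename is illustrative):

    try:
        from urllib.parse import quote   # Python 3
    except ImportError:
        from urllib import quote         # Python 2

    # Percent-encode a filename for a Content-Disposition header.
    file_name = u'Überraschung'
    print("attachment; filename*=UTF-8''%s.%s" % (quote(file_name.encode('utf-8')), 'epub'))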
getVendor.sh (new executable file, 3 lines)

@@ -0,0 +1,3 @@
+#!/bin/bash -e
+
+pip install --target ./vendor -r requirements.txt
requirements.txt

@@ -10,3 +10,5 @@ requests>=2.11.1
 SQLAlchemy>=0.8.4
 tornado>=4.1
 Wand>=0.4.4
+#future
+
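The commented-out #future requirement lines up with the new 'from past.builtins import xrange' import in cps/web.py: past.builtins is provided by the future compatibility package, so that import only works once the package is installed (or the requirement is uncommented). A hedged sketch of its use, assuming the package is available:

    # pip install future  (provides the 'past' and 'builtins' compatibility modules)
    from past.builtins import xrange

    # On Python 3 this maps to range; on Python 2 it is the builtin xrange.
    print(list(xrange(3)))  # [0, 1, 2]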