Merge branch 'master' into Develop

# Conflicts:
#   test/Calibre-Web TestSummary_Linux.html

commit f4ecfe4aca
@@ -184,11 +184,11 @@ def migrate():
                 sql=sql[0].replace(currUniqueConstraint, 'UNIQUE (gdrive_id, path)')
                 sql=sql.replace(GdriveId.__tablename__, GdriveId.__tablename__ + '2')
                 session.execute(sql)
-                session.execute("INSERT INTO gdrive_ids2 (id, gdrive_id, path) SELECT id, "
-                                "gdrive_id, path FROM gdrive_ids;")
+                session.execute(text("INSERT INTO gdrive_ids2 (id, gdrive_id, path) SELECT id, "
+                                     "gdrive_id, path FROM gdrive_ids;"))
                 session.commit()
-                session.execute('DROP TABLE %s' % 'gdrive_ids')
-                session.execute('ALTER TABLE gdrive_ids2 RENAME to gdrive_ids')
+                session.execute(text('DROP TABLE %s' % 'gdrive_ids'))
+                session.execute(text('ALTER TABLE gdrive_ids2 RENAME to gdrive_ids'))
             break

     if not os.path.exists(cli_param.gd_path):
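The hunk above follows the usual SQLite table-rebuild recipe: SQLite cannot change a UNIQUE constraint in place, so the migration creates gdrive_ids2 with the new constraint, copies the rows across, drops the old table, and renames the replacement. A minimal standalone sketch of that recipe, assuming a local SQLite file; the CREATE TABLE column types are illustrative, not taken from the project:

from sqlalchemy import create_engine, text

# Sketch only: rebuild a SQLite table to widen its UNIQUE constraint.
engine = create_engine("sqlite:///gdrive.db")
with engine.begin() as conn:  # begin() commits on exit
    # 1. create the replacement table with the new constraint (assumed schema)
    conn.execute(text(
        "CREATE TABLE gdrive_ids2 (id INTEGER PRIMARY KEY, gdrive_id INTEGER, "
        "path VARCHAR, UNIQUE (gdrive_id, path))"))
    # 2. copy the existing rows over
    conn.execute(text(
        "INSERT INTO gdrive_ids2 (id, gdrive_id, path) "
        "SELECT id, gdrive_id, path FROM gdrive_ids"))
    # 3. drop the old table and move the new one into place
    conn.execute(text("DROP TABLE gdrive_ids"))
    conn.execute(text("ALTER TABLE gdrive_ids2 RENAME TO gdrive_ids"))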
cps/ub.py (52 changed lines)
@@ -565,15 +565,15 @@ def migrate_registration_table(engine, _session):
         _session.commit()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE registration ADD column 'allow' INTEGER")
-            conn.execute("update registration set 'allow' = 1")
+            conn.execute(text("ALTER TABLE registration ADD column 'allow' INTEGER"))
+            conn.execute(text("update registration set 'allow' = 1"))
         _session.commit()
     try:
         # Handle table exists, but no content
         cnt = _session.query(Registration).count()
         if not cnt:
             with engine.connect() as conn:
-                conn.execute("insert into registration (domain, allow) values('%.%',1)")
+                conn.execute(text("insert into registration (domain, allow) values('%.%',1)"))
             _session.commit()
     except exc.OperationalError:  # Database is not writeable
         print('Settings database is not writeable. Exiting...')
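The recurring change throughout this file is wrapping every raw SQL string in text(): plain strings passed to Connection.execute() are deprecated in SQLAlchemy 1.4 and rejected in 2.0, so converting them keeps these probe-and-alter migrations working as the pinned SQLAlchemy range widens (see the requirements changes below). A minimal sketch of the same pattern against an assumed local database, reusing the registration table and allow column named above:

from sqlalchemy import create_engine, exc, text

# Sketch only: probe for a missing column, then add and backfill it with
# raw DDL/DML wrapped in text().
engine = create_engine("sqlite:///app.db")
try:
    with engine.connect() as conn:
        conn.execute(text("SELECT allow FROM registration"))
except exc.OperationalError:  # column does not exist yet
    with engine.begin() as conn:  # begin() commits the DDL/UPDATE on exit
        conn.execute(text("ALTER TABLE registration ADD COLUMN allow INTEGER"))
        conn.execute(text("UPDATE registration SET allow = 1"))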
@@ -597,11 +597,11 @@ def migrate_shelfs(engine, _session):
         _session.query(exists().where(Shelf.uuid)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE shelf ADD column 'uuid' STRING")
-            conn.execute("ALTER TABLE shelf ADD column 'created' DATETIME")
-            conn.execute("ALTER TABLE shelf ADD column 'last_modified' DATETIME")
-            conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
-            conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
+            conn.execute(text("ALTER TABLE shelf ADD column 'uuid' STRING"))
+            conn.execute(text("ALTER TABLE shelf ADD column 'created' DATETIME"))
+            conn.execute(text("ALTER TABLE shelf ADD column 'last_modified' DATETIME"))
+            conn.execute(text("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME"))
+            conn.execute(text("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false"))
         for shelf in _session.query(Shelf).all():
             shelf.uuid = str(uuid.uuid4())
             shelf.created = datetime.datetime.now()
@@ -615,14 +615,14 @@ def migrate_shelfs(engine, _session):
     except exc.OperationalError:
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
+            conn.execute(text("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false"))
         _session.commit()

     try:
         _session.query(exists().where(BookShelf.order)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1")
+            conn.execute(text("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1"))
         _session.commit()

@@ -631,11 +631,11 @@ def migrate_readBook(engine, _session):
         _session.query(exists().where(ReadBook.read_status)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")
-            conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
-            conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
-            conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
-            conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
+            conn.execute(text("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0"))
+            conn.execute(text("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read"))
+            conn.execute(text("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME"))
+            conn.execute(text("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME"))
+            conn.execute(text("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0"))
         _session.commit()
         test = _session.query(ReadBook).filter(ReadBook.last_modified == None).all()
         for book in test:
@@ -649,8 +649,8 @@ def migrate_remoteAuthToken(engine, _session):
         _session.commit()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE remote_auth_token ADD column 'token_type' INTEGER DEFAULT 0")
-            conn.execute("update remote_auth_token set 'token_type' = 0")
+            conn.execute(text("ALTER TABLE remote_auth_token ADD column 'token_type' INTEGER DEFAULT 0"))
+            conn.execute(text("update remote_auth_token set 'token_type' = 0"))
         _session.commit()

 # Migrate database to current version, has to be updated after every database change. Currently migration from
@@ -668,19 +668,19 @@ def migrate_Database(_session):
         _session.query(exists().where(User.sidebar_view)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE user ADD column `sidebar_view` Integer DEFAULT 1")
+            conn.execute(text("ALTER TABLE user ADD column `sidebar_view` Integer DEFAULT 1"))
         _session.commit()
         create = True
     try:
         if create:
             with engine.connect() as conn:
-                conn.execute("SELECT language_books FROM user")
+                conn.execute(text("SELECT language_books FROM user"))
             _session.commit()
     except exc.OperationalError:
         with engine.connect() as conn:
-            conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
+            conn.execute(text("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
                               "+ series_books * :side_series + category_books * :side_category + hot_books * "
-                              ":side_hot + :side_autor + :detail_random)",
+                              ":side_hot + :side_autor + :detail_random)"),
                          {'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
                           'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
                           'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
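The UPDATE in this hunk also shows how bound parameters travel with text(): the :side_random, :side_lang, … placeholders inside the SQL string are filled from the dict passed as the second argument to conn.execute(), and only the closing of the text() call changes. A minimal sketch with an assumed database and a single placeholder, reusing the user table and sidebar_view column from the diff:

from sqlalchemy import create_engine, text

# Sketch only: a named bind parameter (:side_random) is resolved from the
# dict passed alongside the text() construct.
engine = create_engine("sqlite:///app.db")
with engine.begin() as conn:
    conn.execute(
        text("UPDATE user SET sidebar_view = sidebar_view + :side_random"),
        {"side_random": 2})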
@@ -690,16 +690,16 @@ def migrate_Database(_session):
         _session.query(exists().where(User.denied_tags)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE user ADD column `denied_tags` String DEFAULT ''")
-            conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
-            conn.execute("ALTER TABLE user ADD column `denied_column_value` String DEFAULT ''")
-            conn.execute("ALTER TABLE user ADD column `allowed_column_value` String DEFAULT ''")
+            conn.execute(text("ALTER TABLE user ADD column `denied_tags` String DEFAULT ''"))
+            conn.execute(text("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''"))
+            conn.execute(text("ALTER TABLE user ADD column `denied_column_value` String DEFAULT ''"))
+            conn.execute(text("ALTER TABLE user ADD column `allowed_column_value` String DEFAULT ''"))
         _session.commit()
     try:
         _session.query(exists().where(User.view_settings)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
-            conn.execute("ALTER TABLE user ADD column `view_settings` VARCHAR(10) DEFAULT '{}'")
+            conn.execute(text("ALTER TABLE user ADD column `view_settings` VARCHAR(10) DEFAULT '{}'"))
         _session.commit()
     try:
         _session.query(exists().where(User.kobo_only_shelves_sync)).scalar()

@@ -1,5 +1,5 @@
 # GDrive Integration
-google-api-python-client>=1.7.11,<2.75.0
+google-api-python-client>=1.7.11,<2.78.0
 gevent>20.6.0,<23.0.0
 greenlet>=0.4.17,<2.1.0
 httplib2>=0.9.2,<0.22.0
@@ -13,7 +13,7 @@ rsa>=3.4.2,<4.10.0

 # Gmail
 google-auth-oauthlib>=0.4.3,<0.9.0
-google-api-python-client>=1.7.11,<2.75.0
+google-api-python-client>=1.7.11,<2.78.0

 # goodreads
 goodreads>=0.3.2,<0.4.0

@@ -7,10 +7,10 @@ Flask-Principal>=0.3.2,<0.5.1
 backports_abc>=0.4
 Flask>=1.0.2,<2.3.0
 iso-639>=0.4.5,<0.5.0
-PyPDF>=3.0.0,<3.3.0
+PyPDF==3.4.0
 pytz>=2016.10
 requests>=2.11.1,<2.28.0
-SQLAlchemy>=1.3.0,<1.5.0
+SQLAlchemy>=1.3.0,<2.0.0
 tornado>=4.1,<6.3
 Wand>=0.4.4,<0.7.0
 unidecode>=0.04.19,<1.4.0

@@ -47,10 +47,10 @@ install_requires =
     backports_abc>=0.4
     Flask>=1.0.2,<2.3.0
     iso-639>=0.4.5,<0.5.0
-    PyPDF>=3.0.0,<3.3.0
+    PyPDF==3.4.0
     pytz>=2016.10
     requests>=2.11.1,<2.28.0
-    SQLAlchemy>=1.3.0,<1.5.0
+    SQLAlchemy>=1.3.0,<2.0.0
     tornado>=4.1,<6.3
     Wand>=0.4.4,<0.7.0
     unidecode>=0.04.19,<1.4.0
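The same version bumps are applied in each place the dependencies are listed (the GDrive/Gmail extras, the core requirements, and the install_requires block), so the effective range for each package stays consistent across install paths. A small sketch for checking an installed environment against the updated specifiers, assuming the third-party packaging library is available:

from importlib.metadata import version
from packaging.specifiers import SpecifierSet

# Sketch only: compare installed versions against the updated pins.
pins = {
    "SQLAlchemy": ">=1.3.0,<2.0.0",
    "google-api-python-client": ">=1.7.11,<2.78.0",
}
for name, spec in pins.items():
    installed = version(name)
    status = "ok" if installed in SpecifierSet(spec) else "outside " + spec
    print(f"{name} {installed}: {status}")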