# -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018 idalin, OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import json
import shutil
import chardet
import ssl
from flask import Response, stream_with_context
from sqlalchemy import create_engine
from sqlalchemy import Column, UniqueConstraint
from sqlalchemy import String, Integer
from sqlalchemy.orm import sessionmaker, scoped_session

try:
    # Compatibility with sqlalchemy 2.0
    from sqlalchemy.orm import declarative_base
except ImportError:
    from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.exc import OperationalError, InvalidRequestError, IntegrityError
from sqlalchemy.orm.exc import StaleDataError
from sqlalchemy.sql.expression import text

try:
    from httplib2 import __version__ as httplib2_version
except ImportError:
    httplib2_version = "not installed"

try:
    from apiclient import errors
    from httplib2 import ServerNotFoundError
    importError = None
    gdrive_support = True
except ImportError as e:
    importError = e
    gdrive_support = False

try:
    from pydrive2.auth import GoogleAuth
    from pydrive2.drive import GoogleDrive
    from pydrive2.auth import RefreshError
    from pydrive2.files import ApiRequestError
except ImportError as err:
    try:
        from pydrive.auth import GoogleAuth
        from pydrive.drive import GoogleDrive
        from pydrive.auth import RefreshError
        from pydrive.files import ApiRequestError
    except ImportError as err:
        importError = err
        gdrive_support = False

from . import logger, cli_param, config
from .constants import CONFIG_DIR as _CONFIG_DIR

SETTINGS_YAML = os.path.join(_CONFIG_DIR, 'settings.yaml')
CREDENTIALS = os.path.join(_CONFIG_DIR, 'gdrive_credentials')
CLIENT_SECRETS = os.path.join(_CONFIG_DIR, 'client_secrets.json')

log = logger.create()

if gdrive_support:
    logger.get('googleapiclient.discovery_cache').setLevel(logger.logging.ERROR)
    if not logger.is_debug_enabled():
        logger.get('googleapiclient.discovery').setLevel(logger.logging.ERROR)
else:
    log.debug("Cannot import pydrive, httplib2, using gdrive will not work: {}".format(importError))


class Singleton:
    """
    A non-thread-safe helper class to ease implementing singletons.
    This should be used as a decorator -- not a metaclass -- to the
    class that should be a singleton.

    The decorated class can define one `__init__` function that
    takes only the `self` argument. Also, the decorated class cannot be
    inherited from. Other than that, there are no restrictions that apply
    to the decorated class.

    To get the singleton instance, use the `Instance` method. Trying
    to use `__call__` will result in a `TypeError` being raised.
    """

    def __init__(self, decorated):
        self._decorated = decorated

    def Instance(self):
        """
        Returns the singleton instance. Upon its first call, it creates a
        new instance of the decorated class and calls its `__init__` method.
        On all subsequent calls, the already created instance is returned.
        """
        try:
            return self._instance
        except AttributeError:
            self._instance = self._decorated()
            return self._instance
        except (ImportError, NameError) as e:
            log.debug(e)
            return None

    def __call__(self):
        raise TypeError('Singletons must be accessed through `Instance()`.')

    def __instancecheck__(self, inst):
        return isinstance(inst, self._decorated)
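
# Illustrative usage of the decorator (the class name below is a made-up example; the real
# singletons Gauth and Gdrive follow directly after):
#
#   @Singleton
#   class Settings:
#       def __init__(self):
#           self.value = 42
#
#   settings = Settings.Instance()   # first call creates the instance, later calls reuse it
#   Settings()                       # raises TypeError: use Instance() instead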


@Singleton
class Gauth:
    def __init__(self):
        try:
            self.auth = GoogleAuth(settings_file=SETTINGS_YAML)
        except NameError as error:
            log.error(error)
            self.auth = None


@Singleton
class Gdrive:
    def __init__(self):
        self.drive = getDrive(gauth=Gauth.Instance().auth)


def is_gdrive_ready():
    return os.path.exists(SETTINGS_YAML) and os.path.exists(CREDENTIALS)

engine = create_engine('sqlite:///{0}'.format(cli_param.gd_path), echo=False)
Base = declarative_base()
# Open session for database connection
Session = sessionmaker(autoflush=False)
Session.configure(bind=engine)
session = scoped_session(Session)


class GdriveId(Base):
    __tablename__ = 'gdrive_ids'
    id = Column(Integer, primary_key=True)
    gdrive_id = Column(Integer, unique=True)
    path = Column(String)
    __table_args__ = (UniqueConstraint('gdrive_id', 'path', name='_gdrive_path_uc'),)

    def __repr__(self):
        return str(self.path)


class PermissionAdded(Base):
    __tablename__ = 'permissions_added'
    id = Column(Integer, primary_key=True)
    gdrive_id = Column(Integer, unique=True)

    def __repr__(self):
        return str(self.gdrive_id)


if not os.path.exists(cli_param.gd_path):
    try:
        Base.metadata.create_all(engine)
    except Exception as ex:
        log.error("Error connecting to database: {} - {}".format(cli_param.gd_path, ex))
        raise


def getDrive(drive=None, gauth=None):
    if not drive:
        if not gauth:
            gauth = GoogleAuth(settings_file=SETTINGS_YAML)
        # Try to load saved client credentials
        gauth.LoadCredentialsFile(CREDENTIALS)
        if gauth.access_token_expired:
            # Refresh them if expired
            try:
                gauth.Refresh()
            except RefreshError as e:
                log.error("Google Drive error: {}".format(e))
            except Exception as ex:
                log.error_or_exception(ex)
        else:
            # Initialize the saved creds
            gauth.Authorize()
        # Save the current credentials to a file
        return GoogleDrive(gauth)
    if drive.auth.access_token_expired:
        try:
            drive.auth.Refresh()
        except RefreshError as e:
            log.error("Google Drive error: {}".format(e))
    return drive


def listRootFolders():
    try:
        drive = getDrive(Gdrive.Instance().drive)
        folder = "'root' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed = false"
        fileList = drive.ListFile({'q': folder}).GetList()
    except (ServerNotFoundError, ssl.SSLError, RefreshError) as e:
        log.info("GDrive Error {}".format(e))
        fileList = []
    return fileList


def getEbooksFolder(drive):
    return getFolderInFolder('root', config.config_google_drive_folder, drive)


def getFolderInFolder(parentId, folderName, drive):
    # drive = getDrive(drive)
    query = ""
    if folderName:
        query = "title = '%s' and " % folderName.replace("'", r"\'")
    folder = query + "'%s' in parents and mimeType = 'application/vnd.google-apps.folder'" \
                     " and trashed = false" % parentId
    fileList = drive.ListFile({'q': folder}).GetList()
    if fileList.__len__() == 0:
        return None
    else:
        return fileList[0]


# Search for the id of the root folder in the gdrive database; if not found, request it from
# Google Drive and store it in the internal database
def getEbooksFolderId(drive=None):
    storedPathName = session.query(GdriveId).filter(GdriveId.path == '/').first()
    if storedPathName:
        return storedPathName.gdrive_id
    else:
        gDriveId = GdriveId()
        try:
            gDriveId.gdrive_id = getEbooksFolder(drive)['id']
        except Exception:
            log.error('Error gDrive, root ID not found')
        gDriveId.path = '/'
        session.merge(gDriveId)
        try:
            session.commit()
        except OperationalError as ex:
            log.error_or_exception('Database error: {}'.format(ex))
            session.rollback()
        return gDriveId.gdrive_id


def getFile(pathId, fileName, drive):
    metaDataFile = "'%s' in parents and trashed = false and title = '%s'" % (pathId, fileName.replace("'", r"\'"))
    fileList = drive.ListFile({'q': metaDataFile}).GetList()
    if fileList.__len__() == 0:
        return None
    else:
        return fileList[0]


def getFolderId(path, drive):
    # drive = getDrive(drive)
    currentFolderId = None
    try:
        currentFolderId = getEbooksFolderId(drive)
        sqlCheckPath = path if path[-1] == '/' else path + '/'
        storedPathName = session.query(GdriveId).filter(GdriveId.path == sqlCheckPath).first()
        if not storedPathName:
            dbChange = False
            s = path.split('/')
            for i, x in enumerate(s):
                if len(x) > 0:
                    currentPath = "/".join(s[:i+1])
                    if currentPath[-1] != '/':
                        currentPath = currentPath + '/'
                    storedPathName = session.query(GdriveId).filter(GdriveId.path == currentPath).first()
                    if storedPathName:
                        currentFolderId = storedPathName.gdrive_id
                    else:
                        currentFolder = getFolderInFolder(currentFolderId, x, drive)
                        if currentFolder:
                            gDriveId = GdriveId()
                            gDriveId.gdrive_id = currentFolder['id']
                            gDriveId.path = currentPath
                            session.merge(gDriveId)
                            dbChange = True
                            currentFolderId = currentFolder['id']
                        else:
                            currentFolderId = None
                            break
            if dbChange:
                session.commit()
        else:
            currentFolderId = storedPathName.gdrive_id
    except (OperationalError, IntegrityError, StaleDataError) as ex:
        log.error_or_exception('Database error: {}'.format(ex))
        session.rollback()
    except ApiRequestError as ex:
        log.error('{} {}'.format(ex.error['message'], path))
        session.rollback()
    except RefreshError as ex:
        log.error(ex)
    return currentFolderId
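
# Illustrative walk-through (the path is a made-up example): for a Calibre-style book path
# such as 'John Doe/Some Book (42)', getFolderId() resolves one segment at a time:
#
#   getFolderId('John Doe/Some Book (42)', drive)
#     1. looks up 'John Doe/' in gdrive.db; if missing, queries Drive via getFolderInFolder()
#        and caches the returned folder id
#     2. repeats the lookup for 'John Doe/Some Book (42)/'
#     3. returns the Drive id of the innermost folder, or None if any segment does not exist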


def getFileFromEbooksFolder(path, fileName):
    drive = getDrive(Gdrive.Instance().drive)
    if path:
        # sqlCheckPath=path if path[-1] =='/' else path + '/'
        folderId = getFolderId(path, drive)
    else:
        folderId = getEbooksFolderId(drive)
    if folderId:
        return getFile(folderId, fileName, drive)
    else:
        return None


def moveGdriveFileRemote(origin_file_id, new_title):
    origin_file_id['title'] = new_title
    origin_file_id.Upload()


# Download metadata.db from gdrive
def downloadFile(path, filename, output):
    f = getFileFromEbooksFolder(path, filename)
    f.GetContentFile(output)


def moveGdriveFolderRemote(origin_file, target_folder):
    drive = getDrive(Gdrive.Instance().drive)
    previous_parents = ",".join([parent["id"] for parent in origin_file.get('parents')])
    children = drive.auth.service.children().list(folderId=previous_parents).execute()
    gFileTargetDir = getFileFromEbooksFolder(None, target_folder)
    if not gFileTargetDir:
        # Target folder does not exist yet: create it and move the file there
        gFileTargetDir = drive.CreateFile(
            {'title': target_folder, 'parents': [{"kind": "drive#fileLink", 'id': getEbooksFolderId()}],
             "mimeType": "application/vnd.google-apps.folder"})
        gFileTargetDir.Upload()
        # Move the file to the new folder
        drive.auth.service.files().update(fileId=origin_file['id'],
                                          addParents=gFileTargetDir['id'],
                                          removeParents=previous_parents,
                                          fields='id, parents').execute()
    elif gFileTargetDir['title'] != target_folder:
        # The found folder's title does not match exactly (e.g. different case):
        # update the title instead of moving
        drive.auth.service.files().patch(fileId=origin_file['id'],
                                         body={'title': target_folder},
                                         fields='title').execute()
    else:
        # Move the file to the new folder
        drive.auth.service.files().update(fileId=origin_file['id'],
                                          addParents=gFileTargetDir['id'],
                                          removeParents=previous_parents,
                                          fields='id, parents').execute()
    # if previous_parents has no children anymore, delete original fileparent
    if len(children['items']) == 1:
        deleteDatabaseEntry(previous_parents)
        drive.auth.service.files().delete(fileId=previous_parents).execute()


def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
                ignoreFiles=None,
                parent=None, prevDir=''):
    ignoreFiles = ignoreFiles or []
    drive = getDrive(drive)
    isInitial = not bool(parent)
    if not parent:
        parent = getEbooksFolder(drive)
    if os.path.isdir(os.path.join(prevDir, uploadFile)):
        existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
                                              (os.path.basename(uploadFile).replace("'", r"\'"), parent['id'])}).GetList()
        if len(existingFolder) == 0 and (not isInitial or createRoot):
            parent = drive.CreateFile({'title': os.path.basename(uploadFile),
                                       'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                       "mimeType": "application/vnd.google-apps.folder"})
            parent.Upload()
        else:
            if (not isInitial or createRoot) and len(existingFolder) > 0:
                parent = existingFolder[0]
        for f in os.listdir(os.path.join(prevDir, uploadFile)):
            if f not in ignoreFiles:
                copyToDrive(drive, f, True, replaceFiles, ignoreFiles, parent, os.path.join(prevDir, uploadFile))
    else:
        if os.path.basename(uploadFile) not in ignoreFiles:
            existingFiles = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
                                                 (os.path.basename(uploadFile).replace("'", r"\'"), parent['id'])}).GetList()
            if len(existingFiles) > 0:
                driveFile = existingFiles[0]
            else:
                driveFile = drive.CreateFile({'title': os.path.basename(uploadFile).replace("'", r"\'"),
                                              'parents': [{"kind": "drive#fileLink", 'id': parent['id']}], })
            driveFile.SetContentFile(os.path.join(prevDir, uploadFile))
            driveFile.Upload()


def uploadFileToEbooksFolder(destFile, f, string=False):
    drive = getDrive(Gdrive.Instance().drive)
    parent = getEbooksFolder(drive)
    splitDir = destFile.split('/')
    for i, x in enumerate(splitDir):
        if i == len(splitDir) - 1:
            existing_Files = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
                                                  (x.replace("'", r"\'"), parent['id'])}).GetList()
            if len(existing_Files) > 0:
                driveFile = existing_Files[0]
            else:
                driveFile = drive.CreateFile({'title': x,
                                              'parents': [{"kind": "drive#fileLink", 'id': parent['id']}], })
            if not string:
                driveFile.SetContentFile(f)
            else:
                driveFile.SetContentString(f)
            driveFile.Upload()
        else:
            existing_Folder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
                                                   (x.replace("'", r"\'"), parent['id'])}).GetList()
            if len(existing_Folder) == 0:
                parent = drive.CreateFile({'title': x, 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                           "mimeType": "application/vnd.google-apps.folder"})
                parent.Upload()
            else:
                parent = existing_Folder[0]


def watchChange(drive, channel_id, channel_type, channel_address,
                channel_token=None, expiration=None):
    # Watch for all changes to a user's Drive.
    # Args:
    #   drive: Drive API service instance.
    #   channel_id: Unique string that identifies this channel.
    #   channel_type: Type of delivery mechanism used for this channel.
    #   channel_address: Address where notifications are delivered.
    #   channel_token: An arbitrary string delivered to the target address with
    #                  each notification delivered over this channel. Optional.
    #   expiration: Requested expiration time of the channel. Optional.
    # Returns:
    #   The created channel if successful.
    # Raises:
    #   apiclient.errors.HttpError: if the http request to create the channel fails.
    body = {
        'id': channel_id,
        'type': channel_type,
        'address': channel_address
    }
    if channel_token:
        body['token'] = channel_token
    if expiration:
        body['expiration'] = expiration
    return drive.auth.service.changes().watch(body=body).execute()
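
# Illustrative call (all values are placeholders, not part of this module):
#
#   drive = Gdrive.Instance().drive
#   channel = watchChange(drive, channel_id='some-unique-id', channel_type='web_hook',
#                         channel_address='https://example.com/gdrive/watch/callback')
#   # channel['resourceId'] is needed later by stopChannel() to cancel this watch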


def watchFile(drive, file_id, channel_id, channel_type, channel_address,
              channel_token=None, expiration=None):
    """Watch for any changes to a specific file.
    Args:
        drive: Drive API service instance.
        file_id: ID of the file to watch.
        channel_id: Unique string that identifies this channel.
        channel_type: Type of delivery mechanism used for this channel.
        channel_address: Address where notifications are delivered.
        channel_token: An arbitrary string delivered to the target address with
            each notification delivered over this channel. Optional.
        expiration: Requested expiration time of the channel. Optional.
    Returns:
        The created channel if successful.
    Raises:
        apiclient.errors.HttpError: if the http request to create the channel fails.
    """
    body = {
        'id': channel_id,
        'type': channel_type,
        'address': channel_address
    }
    if channel_token:
        body['token'] = channel_token
    if expiration:
        body['expiration'] = expiration
    return drive.auth.service.files().watch(fileId=file_id, body=body).execute()


def stopChannel(drive, channel_id, resource_id):
    """Stop watching a specific channel.
    Args:
        drive: Drive API service instance.
        channel_id: ID of the channel to stop.
        resource_id: Resource ID of the channel to stop.
    Raises:
        apiclient.errors.HttpError: if the http request to stop the channel fails.
    """
    body = {
        'id': channel_id,
        'resourceId': resource_id
    }
    return drive.auth.service.channels().stop(body=body).execute()
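
# Illustrative watch/stop lifecycle (values are placeholders): watch a single file, e.g. the
# library's metadata.db, and cancel the notification channel again later.
#
#   drive = Gdrive.Instance().drive
#   meta = getFileFromEbooksFolder(None, 'metadata.db')
#   channel = watchFile(drive, meta['id'], 'some-unique-id', 'web_hook',
#                       'https://example.com/gdrive/watch/callback')
#   ...
#   stopChannel(drive, channel['id'], channel['resourceId'])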


def getChangeById(drive, change_id):
    # Retrieve a single Change resource.
    #
    # Args:
    #   drive: Drive API service instance.
    #   change_id: ID of the Change resource to retrieve.
    try:
        change = drive.auth.service.changes().get(changeId=change_id).execute()
        return change
    except errors.HttpError as error:
        log.error(error)
        return None
    except Exception as ex:
        log.error(ex)
        return None


# Deletes the local hashes database to force a fresh search for folder names
def deleteDatabaseOnChange():
    try:
        session.query(GdriveId).delete()
        session.commit()
    except (OperationalError, InvalidRequestError) as ex:
        session.rollback()
        log.error_or_exception('Database error: {}'.format(ex))
        session.rollback()


def updateGdriveCalibreFromLocal():
    copyToDrive(Gdrive.Instance().drive, config.config_calibre_dir, False, True)
    for x in os.listdir(config.config_calibre_dir):
        if os.path.isdir(os.path.join(config.config_calibre_dir, x)):
            shutil.rmtree(os.path.join(config.config_calibre_dir, x))


# update gdrive.db on edit of a book's title
def updateDatabaseOnEdit(ID, newPath):
    sqlCheckPath = newPath if newPath[-1] == '/' else newPath + '/'
    storedPathName = session.query(GdriveId).filter(GdriveId.gdrive_id == ID).first()
    if storedPathName:
        storedPathName.path = sqlCheckPath
        try:
            session.commit()
        except OperationalError as ex:
            log.error_or_exception('Database error: {}'.format(ex))
            session.rollback()


# Deletes the hashes in the database for a deleted book
def deleteDatabaseEntry(ID):
    session.query(GdriveId).filter(GdriveId.gdrive_id == ID).delete()
    try:
        session.commit()
    except OperationalError as ex:
        log.error_or_exception('Database error: {}'.format(ex))
        session.rollback()


# Gets cover file from gdrive
# ToDo: Check if it is right that everyone gets read permission on cover files
def get_cover_via_gdrive(cover_path):
    df = getFileFromEbooksFolder(cover_path, 'cover.jpg')
    if df:
        if not session.query(PermissionAdded).filter(PermissionAdded.gdrive_id == df['id']).first():
            df.GetPermissions()
            df.InsertPermission({
                'type': 'anyone',
                'value': 'anyone',
                'role': 'reader',
                'withLink': True})
            permissionAdded = PermissionAdded()
            permissionAdded.gdrive_id = df['id']
            session.add(permissionAdded)
            try:
                session.commit()
            except OperationalError as ex:
                log.error_or_exception('Database error: {}'.format(ex))
                session.rollback()
        return df.metadata.get('webContentLink')
    else:
        return None


# Gets the metadata backup file (metadata.opf) from gdrive
def get_metadata_backup_via_gdrive(metadata_path):
    df = getFileFromEbooksFolder(metadata_path, 'metadata.opf')
    if df:
        if not session.query(PermissionAdded).filter(PermissionAdded.gdrive_id == df['id']).first():
            df.GetPermissions()
            df.InsertPermission({
                'type': 'anyone',
                'value': 'anyone',
                'role': 'writer',  # ToDo needs write access
                'withLink': True})
            permissionAdded = PermissionAdded()
            permissionAdded.gdrive_id = df['id']
            session.add(permissionAdded)
            try:
                session.commit()
            except OperationalError as ex:
                log.error_or_exception('Database error: {}'.format(ex))
                session.rollback()
        return df.metadata.get('webContentLink')
    else:
        return None


# Creates chunks for downloading big files
def partial(total_byte_len, part_size_limit):
    s = []
    for p in range(0, total_byte_len, part_size_limit):
        last = min(total_byte_len - 1, p + part_size_limit - 1)
        s.append([p, last])
    return s
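
# For example, partial(10, 4) returns the inclusive byte ranges [[0, 3], [4, 7], [8, 9]];
# do_gdrive_download() below turns each pair into an HTTP "Range: bytes=<start>-<end>" header.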


# downloads files in chunks from gdrive
def do_gdrive_download(df, headers, convert_encoding=False):
    total_size = int(df.metadata.get('fileSize'))
    download_url = df.metadata.get('downloadUrl')
    s = partial(total_size, 1024 * 1024)  # download in 1 MiB chunks

    def stream(convert_encoding):
        for byte in s:
            headers = {"Range": 'bytes={}-{}'.format(byte[0], byte[1])}
            resp, content = df.auth.Get_Http_Object().request(download_url, headers=headers)
            if resp.status == 206:
                if convert_encoding:
                    result = chardet.detect(content)
                    content = content.decode(result['encoding']).encode('utf-8')
                yield content
            else:
                log.warning('An error occurred: {}'.format(resp))
                return
    return Response(stream_with_context(stream(convert_encoding)), headers=headers)
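
# Illustrative use in a Flask view (file name and headers are placeholders):
#
#   df = getFileFromEbooksFolder('John Doe/Some Book (42)', 'Some Book.epub')
#   headers = {'Content-Disposition': 'attachment; filename="Some Book.epub"'}
#   return do_gdrive_download(df, headers)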


_SETTINGS_YAML_TEMPLATE = """
client_config_backend: settings
client_config_file: %(client_file)s
client_config:
  client_id: %(client_id)s
  client_secret: %(client_secret)s
  redirect_uri: %(redirect_uri)s

save_credentials: True
save_credentials_backend: file
save_credentials_file: %(credential)s

get_refresh_token: True

oauth_scope:
  - https://www.googleapis.com/auth/drive
"""


def update_settings(client_id, client_secret, redirect_uri):
    if redirect_uri.endswith('/'):
        redirect_uri = redirect_uri[:-1]
    config_params = {
        'client_file': CLIENT_SECRETS,
        'client_id': client_id,
        'client_secret': client_secret,
        'redirect_uri': redirect_uri,
        'credential': CREDENTIALS
    }
    with open(SETTINGS_YAML, 'w') as f:
        f.write(_SETTINGS_YAML_TEMPLATE % config_params)
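
# With placeholder values, update_settings('my-id.apps.googleusercontent.com', 'my-secret',
# 'https://example.com/gdrive/callback/') writes a settings.yaml along the lines of:
#
#   client_config_backend: settings
#   client_config_file: <CONFIG_DIR>/client_secrets.json
#   client_config:
#     client_id: my-id.apps.googleusercontent.com
#     client_secret: my-secret
#     redirect_uri: https://example.com/gdrive/callback
#   save_credentials: True
#   save_credentials_backend: file
#   save_credentials_file: <CONFIG_DIR>/gdrive_credentials
#   ...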


def get_error_text(client_secrets=None):
    if not gdrive_support:
        return 'Import of optional Google Drive requirements missing'
    if not os.path.isfile(CLIENT_SECRETS):
        return 'client_secrets.json is missing or not readable'
    try:
        with open(CLIENT_SECRETS, 'r') as settings:
            filedata = json.load(settings)
    except PermissionError:
        return 'client_secrets.json is missing or not readable'
    if 'web' not in filedata:
        return 'client_secrets.json is not configured for web application'
    if 'redirect_uris' not in filedata['web']:
        return 'Callback url (redirect url) is missing in client_secrets.json'
    if client_secrets:
        client_secrets.update(filedata['web'])