try:
    from pydrive.auth import GoogleAuth
    from pydrive.drive import GoogleDrive
    from pydrive.auth import RefreshError
    from apiclient import errors
except ImportError:
    pass
import os
from ub import config
import cli
import shutil
from flask import Response, stream_with_context
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import *

import web


class Singleton:
    """
    A non-thread-safe helper class to ease implementing singletons.
    This should be used as a decorator -- not a metaclass -- to the
    class that should be a singleton.

    The decorated class can define one `__init__` function that
    takes only the `self` argument. Also, the decorated class cannot be
    inherited from. Other than that, there are no restrictions that apply
    to the decorated class.

    To get the singleton instance, use the `Instance` method. Trying
    to use `__call__` will result in a `TypeError` being raised.

    """

    def __init__(self, decorated):
        self._decorated = decorated

    def Instance(self):
        """
        Returns the singleton instance. Upon its first call, it creates a
        new instance of the decorated class and calls its `__init__` method.
        On all subsequent calls, the already created instance is returned.

        """
        try:
            return self._instance
        except AttributeError:
            self._instance = self._decorated()
            return self._instance

    def __call__(self):
        raise TypeError('Singletons must be accessed through `Instance()`.')

    def __instancecheck__(self, inst):
        return isinstance(inst, self._decorated)

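# Illustrative sketch (not part of the original module): how the Singleton
# decorator is used, per its docstring. `AppState` is a hypothetical class.
#
#   @Singleton
#   class AppState:
#       def __init__(self):
#           self.loaded = True
#
#   a = AppState.Instance()   # first call creates the instance
#   b = AppState.Instance()   # later calls return the same object
#   assert a is b
#   AppState()                # raises TypeError: use Instance() instead
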
@Singleton
class Gauth:
    def __init__(self):
        self.auth = GoogleAuth(settings_file=os.path.join(config.get_main_dir, 'settings.yaml'))


@Singleton
class Gdrive:
    def __init__(self):
        self.drive = getDrive(gauth=Gauth.Instance().auth)

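# Illustrative note (not part of the original module): the rest of this file
# obtains the shared Drive handle through the singleton, e.g.
#
#   drive = Gdrive.Instance().drive
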
engine = create_engine('sqlite:///{0}'.format(cli.gdpath), echo=False)
Base = declarative_base()

# Open session for database connection
Session = sessionmaker()
Session.configure(bind=engine)
session = scoped_session(Session)


class GdriveId(Base):
    __tablename__ = 'gdrive_ids'

    id = Column(Integer, primary_key=True)
    gdrive_id = Column(Integer, unique=True)
    path = Column(String)
    __table_args__ = (UniqueConstraint('gdrive_id', 'path', name='_gdrive_path_uc'),)

    def __repr__(self):
        return str(self.path)


class PermissionAdded(Base):
    __tablename__ = 'permissions_added'

    id = Column(Integer, primary_key=True)
    gdrive_id = Column(Integer, unique=True)

    def __repr__(self):
        return str(self.gdrive_id)


def migrate():
    if not engine.dialect.has_table(engine.connect(), "permissions_added"):
        PermissionAdded.__table__.create(bind=engine)
    # Rebuild gdrive_ids if it still carries the old single-column unique
    # constraint on gdrive_id instead of the composite (gdrive_id, path) one
    for sql in session.execute("select sql from sqlite_master where type='table'"):
        if 'CREATE TABLE gdrive_ids' in sql[0]:
            currUniqueConstraint = 'UNIQUE (gdrive_id)'
            if currUniqueConstraint in sql[0]:
                sql = sql[0].replace(currUniqueConstraint, 'UNIQUE (gdrive_id, path)')
                sql = sql.replace(GdriveId.__tablename__, GdriveId.__tablename__ + '2')
                session.execute(sql)
                session.execute('INSERT INTO gdrive_ids2 (id, gdrive_id, path) SELECT id, gdrive_id, path FROM gdrive_ids;')
                session.commit()
                session.execute('DROP TABLE %s' % 'gdrive_ids')
                session.execute('ALTER TABLE gdrive_ids2 RENAME to gdrive_ids')
            break


if not os.path.exists(cli.gdpath):
    try:
        Base.metadata.create_all(engine)
    except Exception:
        raise
migrate()


def getDrive(drive=None, gauth=None):
    if not drive:
        if not gauth:
            gauth = GoogleAuth(settings_file=os.path.join(config.get_main_dir, 'settings.yaml'))
        # Try to load saved client credentials
        gauth.LoadCredentialsFile(os.path.join(config.get_main_dir, 'gdrive_credentials'))
        if gauth.access_token_expired:
            # Refresh them if expired
            try:
                gauth.Refresh()
            except RefreshError as e:
                web.app.logger.error("Google Drive error: " + str(e))
            except Exception as e:
                web.app.logger.exception(e)
        else:
            # Initialize the saved creds
            gauth.Authorize()
        # Save the current credentials to a file
        return GoogleDrive(gauth)
    if drive.auth.access_token_expired:
        drive.auth.Refresh()
    return drive

def listRootFolders(drive=None):
    drive = getDrive(drive)
    folder = "'root' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed = false"
    fileList = drive.ListFile({'q': folder}).GetList()
    return fileList

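# Illustrative sketch (not part of the original module): listRootFolders
# returns PyDrive file objects, so a caller could inspect them like so.
#
#   for f in listRootFolders():
#       print(f['title'], f['id'])
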
def getEbooksFolder(drive=None):
    return getFolderInFolder('root', config.config_google_drive_folder, drive)


def getFolderInFolder(parentId, folderName, drive=None):
    drive = getDrive(drive)
    query = ""
    if folderName:
        query = "title = '%s' and " % folderName.replace("'", "\\'")
    folder = query + "'%s' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed = false" % parentId
    fileList = drive.ListFile({'q': folder}).GetList()
    if len(fileList) == 0:
        return None
    else:
        return fileList[0]


# Search for id of root folder in gdrive database; if not found, request it
# from gdrive and store it in the internal database
def getEbooksFolderId(drive=None):
    storedPathName = session.query(GdriveId).filter(GdriveId.path == '/').first()
    if storedPathName:
        return storedPathName.gdrive_id
    else:
        gDriveId = GdriveId()
        try:
            gDriveId.gdrive_id = getEbooksFolder(drive)['id']
        except Exception:
            # ToDo: handle a non-existing path
            pass
        gDriveId.path = '/'
        session.merge(gDriveId)
        session.commit()
        return gDriveId.gdrive_id


def getFile(pathId, fileName, drive=None):
    # Note: the passed-in drive handle is superseded by the shared singleton drive
    drive = getDrive(Gdrive.Instance().drive)
    metaDataFile = "'%s' in parents and trashed = false and title = '%s'" % (pathId, fileName.replace("'", "\\'"))

    fileList = drive.ListFile({'q': metaDataFile}).GetList()
    if len(fileList) == 0:
        return None
    else:
        return fileList[0]


def getFolderId(path, drive=None):
    drive = getDrive(drive)
    currentFolderId = getEbooksFolderId(drive)
    sqlCheckPath = path if path[-1] == '/' else path + '/'
    storedPathName = session.query(GdriveId).filter(GdriveId.path == sqlCheckPath).first()

    if not storedPathName:
        dbChange = False
        s = path.split('/')
        for i, x in enumerate(s):
            if len(x) > 0:
                currentPath = "/".join(s[:i+1])
                if currentPath[-1] != '/':
                    currentPath = currentPath + '/'
                storedPathName = session.query(GdriveId).filter(GdriveId.path == currentPath).first()
                if storedPathName:
                    currentFolderId = storedPathName.gdrive_id
                else:
                    currentFolder = getFolderInFolder(currentFolderId, x, drive)
                    if currentFolder:
                        gDriveId = GdriveId()
                        gDriveId.gdrive_id = currentFolder['id']
                        gDriveId.path = currentPath
                        session.merge(gDriveId)
                        dbChange = True
                        currentFolderId = currentFolder['id']
                    else:
                        currentFolderId = None
                        break
        if dbChange:
            session.commit()
    else:
        currentFolderId = storedPathName.gdrive_id
    return currentFolderId

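# Illustrative note (not part of the original module): getFolderId resolves a
# library path one segment at a time and caches each segment's Drive id in
# gdrive_ids, so a repeated lookup of e.g. 'Author/Book Title/' is answered
# from the local SQLite cache without touching the Drive API.
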
def getFileFromEbooksFolder(path, fileName):
    drive = getDrive(Gdrive.Instance().drive)
    if path:
        # sqlCheckPath = path if path[-1] == '/' else path + '/'
        folderId = getFolderId(path, drive)
    else:
        folderId = getEbooksFolderId(drive)
    if folderId:
        return getFile(folderId, fileName, drive)
    else:
        return None


def copyDriveFileRemote(drive, origin_file_id, copy_title):
    drive = getDrive(drive)
    copied_file = {'title': copy_title}
    try:
        file_data = drive.auth.service.files().copy(
            fileId=origin_file_id, body=copied_file).execute()
        return drive.CreateFile({'id': file_data['id']})
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
        return None


def moveGdriveFolderRemote(origin_file, target_folder):
    drive = getDrive(Gdrive.Instance().drive)
    previous_parents = ",".join([parent["id"] for parent in origin_file.get('parents')])
    gFileTargetDir = getFileFromEbooksFolder(None, target_folder)
    if not gFileTargetDir:
        # Folder does not exist yet: create it, then move the file there
        gFileTargetDir = drive.CreateFile(
            {'title': target_folder, 'parents': [{"kind": "drive#fileLink", 'id': getEbooksFolderId()}],
             "mimeType": "application/vnd.google-apps.folder"})
        gFileTargetDir.Upload()
    # Move the file to the new folder
    drive.auth.service.files().update(fileId=origin_file['id'],
                                      addParents=gFileTargetDir['id'],
                                      removeParents=previous_parents,
                                      fields='id, parents').execute()
    # If previous_parents has no children anymore, the original parent folder
    # could be deleted here. This does not work reliably because of slow
    # updates on gdrive (a nonexistent folder still has an id), which could
    # cause trouble in gdrive.db:
    # children = drive.auth.service.children().list(folderId=previous_parents).execute()
    # if not len(children['items']):
    #     drive.auth.service.files().delete(fileId=previous_parents).execute()


# def downloadFile(path, filename, output):
#     f = getFileFromEbooksFolder(path, filename)
#     return f.GetContentFile(output)


# ToDo: check the purpose of parameter f and of this function
def backupCalibreDbAndOptionalDownload(drive):
    drive = getDrive(drive)
    metaDataFile = "'%s' in parents and title = 'metadata.db' and trashed = false" % getEbooksFolderId()
    fileList = drive.ListFile({'q': metaDataFile}).GetList()
    # databaseFile = fileList[0]
    # if f:
    #     databaseFile.GetContentFile(f)


def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
                ignoreFiles=None,
                parent=None, prevDir=''):
    ignoreFiles = ignoreFiles or []
    drive = getDrive(drive)
    isInitial = not bool(parent)
    if not parent:
        parent = getEbooksFolder(drive)
    if os.path.isdir(os.path.join(prevDir, uploadFile)):
        existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (os.path.basename(uploadFile), parent['id'])}).GetList()
        if len(existingFolder) == 0 and (not isInitial or createRoot):
            parent = drive.CreateFile({'title': os.path.basename(uploadFile), 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                       "mimeType": "application/vnd.google-apps.folder"})
            parent.Upload()
        else:
            if (not isInitial or createRoot) and len(existingFolder) > 0:
                parent = existingFolder[0]
        for f in os.listdir(os.path.join(prevDir, uploadFile)):
            if f not in ignoreFiles:
                copyToDrive(drive, f, True, replaceFiles, ignoreFiles, parent, os.path.join(prevDir, uploadFile))
    else:
        if os.path.basename(uploadFile) not in ignoreFiles:
            existingFiles = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (os.path.basename(uploadFile), parent['id'])}).GetList()
            if len(existingFiles) > 0:
                driveFile = existingFiles[0]
            else:
                driveFile = drive.CreateFile({'title': os.path.basename(uploadFile), 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}], })
            driveFile.SetContentFile(os.path.join(prevDir, uploadFile))
            driveFile.Upload()

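# Illustrative note (not part of the original module): copyToDrive mirrors a
# local directory tree into Drive recursively; directories become Drive
# folders, and files are uploaded, re-using an existing Drive file of the
# same title when one is found.
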
def uploadFileToEbooksFolder(destFile, f):
    drive = getDrive(Gdrive.Instance().drive)
    parent = getEbooksFolder(drive)
    splitDir = destFile.split('/')
    for i, x in enumerate(splitDir):
        if i == len(splitDir) - 1:
            existingFiles = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (x, parent['id'])}).GetList()
            if len(existingFiles) > 0:
                driveFile = existingFiles[0]
            else:
                driveFile = drive.CreateFile({'title': x, 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}], })
            driveFile.SetContentFile(f)
            driveFile.Upload()
        else:
            existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (x, parent['id'])}).GetList()
            if len(existingFolder) == 0:
                parent = drive.CreateFile({'title': x, 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                           "mimeType": "application/vnd.google-apps.folder"})
                parent.Upload()
            else:
                parent = existingFolder[0]


|
|
|
def watchChange(drive, channel_id, channel_type, channel_address,
|
|
|
|
channel_token=None, expiration=None):
|
2017-09-17 05:15:08 +00:00
|
|
|
drive = getDrive(drive)
|
2017-03-29 19:43:55 +00:00
|
|
|
# Watch for all changes to a user's Drive.
|
|
|
|
# Args:
|
|
|
|
# service: Drive API service instance.
|
|
|
|
# channel_id: Unique string that identifies this channel.
|
|
|
|
# channel_type: Type of delivery mechanism used for this channel.
|
|
|
|
# channel_address: Address where notifications are delivered.
|
|
|
|
# channel_token: An arbitrary string delivered to the target address with
|
|
|
|
# each notification delivered over this channel. Optional.
|
|
|
|
# channel_address: Address where notifications are delivered. Optional.
|
|
|
|
# Returns:
|
|
|
|
# The created channel if successful
|
|
|
|
# Raises:
|
|
|
|
# apiclient.errors.HttpError: if http request to create channel fails.
|
2017-02-20 18:34:37 +00:00
|
|
|
body = {
|
2017-04-14 18:29:11 +00:00
|
|
|
'id': channel_id,
|
|
|
|
'type': channel_type,
|
|
|
|
'address': channel_address
|
2017-02-20 18:34:37 +00:00
|
|
|
}
|
|
|
|
if channel_token:
|
|
|
|
body['token'] = channel_token
|
|
|
|
if expiration:
|
|
|
|
body['expiration'] = expiration
|
2017-04-14 18:29:11 +00:00
|
|
|
return drive.auth.service.changes().watch(body=body).execute()
|
|
|
|
|
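# Illustrative sketch (not part of the original module): registering a
# hypothetical webhook endpoint for change notifications.
#
#   import uuid
#   channel = watchChange(Gdrive.Instance().drive, str(uuid.uuid4()),
#                         'web_hook', 'https://example.com/gdrive/watch')
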
def watchFile(drive, file_id, channel_id, channel_type, channel_address,
              channel_token=None, expiration=None):
    """Watch for any changes to a specific file.
    Args:
      drive: Drive API service instance.
      file_id: ID of the file to watch.
      channel_id: Unique string that identifies this channel.
      channel_type: Type of delivery mechanism used for this channel.
      channel_address: Address where notifications are delivered.
      channel_token: An arbitrary string delivered to the target address with
          each notification delivered over this channel. Optional.
      expiration: Requested time at which the channel should expire, in
          milliseconds since the epoch. Optional.
    Returns:
      The created channel if successful
    Raises:
      apiclient.errors.HttpError: if http request to create channel fails.
    """
    drive = getDrive(drive)

    body = {
        'id': channel_id,
        'type': channel_type,
        'address': channel_address
    }
    if channel_token:
        body['token'] = channel_token
    if expiration:
        body['expiration'] = expiration
    return drive.auth.service.files().watch(fileId=file_id, body=body).execute()


def stopChannel(drive, channel_id, resource_id):
    """Stop watching a specific channel.
    Args:
      drive: Drive API service instance.
      channel_id: ID of the channel to stop.
      resource_id: Resource ID of the channel to stop.
    Raises:
      apiclient.errors.HttpError: if http request to stop channel fails.
    """
    drive = getDrive(drive)
    # service = drive.auth.service
    body = {
        'id': channel_id,
        'resourceId': resource_id
    }
    return drive.auth.service.channels().stop(body=body).execute()


def getChangeById(drive, change_id):
    """Retrieve a single Change resource.
    Args:
      drive: Drive API service instance.
      change_id: ID of the Change resource to retrieve.
    """
    drive = getDrive(drive)
    try:
        change = drive.auth.service.changes().get(changeId=change_id).execute()
        return change
    except errors.HttpError as error:
        web.app.logger.info(str(error))
        return None


# Deletes the local hashes database to force search for new folder names
def deleteDatabaseOnChange():
    session.query(GdriveId).delete()
    session.commit()


def updateGdriveCalibreFromLocal():
    # backupCalibreDbAndOptionalDownload(Gdrive.Instance().drive)
    copyToDrive(Gdrive.Instance().drive, config.config_calibre_dir, False, True)
    for x in os.listdir(config.config_calibre_dir):
        if os.path.isdir(os.path.join(config.config_calibre_dir, x)):
            shutil.rmtree(os.path.join(config.config_calibre_dir, x))


# Update gdrive.db on edit of a book's title
def updateDatabaseOnEdit(ID, newPath):
    storedPathName = session.query(GdriveId).filter(GdriveId.gdrive_id == ID).first()
    if storedPathName:
        storedPathName.path = newPath
        session.commit()


# Deletes the cached id of a deleted book from the database
def deleteDatabaseEntry(ID):
    session.query(GdriveId).filter(GdriveId.gdrive_id == ID).delete()
    session.commit()


# Gets cover file from gdrive
def get_cover_via_gdrive(cover_path):
    df = getFileFromEbooksFolder(cover_path, 'cover.jpg')
    if df:
        if not session.query(PermissionAdded).filter(PermissionAdded.gdrive_id == df['id']).first():
            df.GetPermissions()
            df.InsertPermission({
                'type': 'anyone',
                'value': 'anyone',
                'role': 'reader',
                'withLink': True})
            permissionAdded = PermissionAdded()
            permissionAdded.gdrive_id = df['id']
            session.add(permissionAdded)
            session.commit()
        return df.metadata.get('webContentLink')
    else:
        return None


# Creates chunks for downloading big files
def partial(total_byte_len, part_size_limit):
    s = []
    for p in range(0, total_byte_len, part_size_limit):
        last = min(total_byte_len - 1, p + part_size_limit - 1)
        s.append([p, last])
    return s

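# Illustrative example (not part of the original module): partial() yields
# inclusive byte ranges, e.g. partial(10, 4) == [[0, 3], [4, 7], [8, 9]].
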

# Downloads files in chunks from gdrive
def do_gdrive_download(df, headers):
    total_size = int(df.metadata.get('fileSize'))
    download_url = df.metadata.get('downloadUrl')
    s = partial(total_size, 1024 * 1024)  # 1 MiB chunk size

    def stream():
        for byte in s:
            # Request each chunk with an HTTP Range header; named range_headers
            # to avoid shadowing the response headers passed to this function
            range_headers = {"Range": 'bytes=%s-%s' % (byte[0], byte[1])}
            resp, content = df.auth.Get_Http_Object().request(download_url, headers=range_headers)
            if resp.status == 206:
                yield content
            else:
                web.app.logger.info('An error occurred: %s' % resp)
                return
    return Response(stream_with_context(stream()), headers=headers)
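
# Illustrative sketch (not part of the original module): how a hypothetical
# Flask route could stream a book file through do_gdrive_download.
#
#   @web.app.route('/gdrive/serve/<book_path>/<filename>')
#   def serve_from_gdrive(book_path, filename):
#       df = getFileFromEbooksFolder(book_path, filename)
#       headers = {'Content-Type': 'application/octet-stream',
#                  'Content-Length': df.metadata.get('fileSize')}
#       return do_gdrive_download(df, headers)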