Skip to content

Commit

Permalink
Merge pull request #70 from rxsegrxup/dev
Browse files Browse the repository at this point in the history
Version 0.7.1
  • Loading branch information
Michael committed Nov 18, 2014
2 parents 679ebd9 + b2e2da8 commit a53198c
Show file tree
Hide file tree
Showing 11 changed files with 178 additions and 110 deletions.
6 changes: 4 additions & 2 deletions BitcasaFileFetcher/__main__.py
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
insert_path = os.path.abspath("./includes/lib/")
sys.path.append(insert_path)
from helpers import logger
from bitcasa import BitcasaClient
from bitcasa import BitcasaException
from lib.bitcasa import BitcasaClient
from lib.bitcasa import BitcasaException

should_exit = threading.Event()
bitc = None
Expand Down Expand Up @@ -148,6 +148,8 @@ def parse(self):
def run_download(self, upload=False):
"""Run the main program checks"""
self.run_level = Args.RUN_LEVEL_MAIN
self.args.src = self.args.src.decode("utf-8")
self.args.dst = self.args.dst.decode("utf-8")
self.args.upload = upload
if not upload:
self.args.local = False
Expand Down
4 changes: 2 additions & 2 deletions BitcasaFileFetcher/bitcasadownload.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def process(self, base=None):
base = BitcasaFolder(None, "root", self.basefolder)
else:
remainingtries = 3
apiratecount = 0
apiratecount = 1
while base is None and remainingtries > 0 and not self.should_exit.is_set():
try:
base = self.client.get_folder(self.basefolder)
Expand Down Expand Up @@ -157,7 +157,7 @@ def process_single(self):
fold = BitcasaFolder(None, "root", "", items=[myfile])
else:
remainingtries = 3
apiratecount = 0
apiratecount = 1
while myfile is None and remainingtries > 0 and not self.should_exit.is_set():
try:
myfile = self.client.get_file_meta(self.basefolder)
Expand Down
10 changes: 5 additions & 5 deletions BitcasaFileFetcher/threads/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ def download(status, should_exit, session, results, command_args):
continue

random.seed(item["filepath"])
sleeptime = random.randint(10, 45)
filename = item["filename"]
size_bytes = item["filesize"]
size_str = utils.convert_size(size_bytes)
Expand All @@ -61,9 +60,10 @@ def download(status, should_exit, session, results, command_args):

log.debug("Downloading file to %s", temp_file)
retriesleft = 10
apiratecount = 0
apiratecount = 1
down_failed = True
while retriesleft > 0 and not should_exit.is_set():
sleeptime = random.randint(10, 70)
if apiratecount > 5:
apiratecount = 5
try:
Expand Down Expand Up @@ -131,7 +131,7 @@ def download(status, should_exit, session, results, command_args):
retriesleft -= 1
log.exception("%s File size mismatch. Will retry %s more times", filename, retriesleft)
if retriesleft > 0:
time.sleep(10)
time.sleep(sleeptime)
else:
cleanUpAfterError("Error downloading %s Maximum retries reached" % filename, item, results)
except IOError as e:
Expand All @@ -146,14 +146,14 @@ def download(status, should_exit, session, results, command_args):
retriesleft -= 1
if retriesleft > 0:
log.exception("Error downloading %s. Will retry %s more times", filename, retriesleft)
time.sleep(10)
time.sleep(sleeptime)
else:
cleanUpAfterError("An unknown error occurred", item, results)
except:
retriesleft -= 1
if retriesleft > 0:
log.exception("Error downloading %s. Will retry %s more times", filename, retriesleft)
time.sleep(10)
time.sleep(sleeptime)
else:
cleanUpAfterError("An unknown error occurred", item, results)
else:
Expand Down
115 changes: 75 additions & 40 deletions BitcasaFileFetcher/threads/folder_traverse.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from lib.bitcasa import BitcasaClient, BitcasaFolder, BitcasaFile
from lib.bitcasa.exception import BitcasaException
from lib.gdrive import GoogleDrive
from googleapiclient.errors import HttpError
from lib.googleapiclient.errors import HttpError
from helpers import utils
log = logging.getLogger("BitcasaFileFetcher")

Expand Down Expand Up @@ -39,7 +39,7 @@ def folder_traverse(status, results, args, should_exit):

def get_folder_items(fold, should_exit):
remainingtries = 5
apiratecount = 0
apiratecount = 1
folderitems = None
while remainingtries > 0 and not should_exit.is_set():
if apiratecount > 5:
Expand Down Expand Up @@ -86,15 +86,15 @@ def get_local_items(fold, should_exit, results):
fullpath = os.path.join(fold.path, item)
if should_exit.is_set():
break
filesize = 0
filesize = None
try:
if not os.path.isdir(fullpath):
filesize = os.path.getsize(fullpath)
except OSError:
log.exception("Error getting file info")
results.writeError(item, fullpath, "", "Error listing file %s" % item)
continue
if filesize:
if filesize is not None:
bitem = BitcasaFile(None, fullpath, item, None, filesize)
else:
bitem = BitcasaFolder(None, item, fullpath)
Expand Down Expand Up @@ -126,7 +126,7 @@ def folder_list_gdrive(folder, status, results, args, should_exit, g):
folderitems = get_folder_items(fold, should_exit)
if folderitems is None:
log.error("Error downloading at folder %s", path)
if args.local:
if not args.local:
results.writeError(folder["folder"].name, path, folder_id, "")
else:
results.writeError(folder["folder"].name, path, folder["folder"].path, "")
Expand All @@ -149,43 +149,44 @@ def folder_list_gdrive(folder, status, results, args, should_exit, g):
if isinstance(item, BitcasaFile):
filesize = item.size
retriesleft = 10
apiratecount = 0
apiratecount = 1
while not should_exit.is_set() and retriesleft > 0:
try:
needtoupload = g.need_to_upload(nm, folder_id, filesize)
if args.dryrun and not folder_id:
needtoupload = True
else:
needtoupload = g.need_to_upload(nm, folder_id, filesize)
except HttpError as e:
retriesleft -= 1
if e.resp.status == 403:
apiratecount += 1
retriesleft += 1
log.warn("Google API rate limit reached. Will retry")
else:
log.exception("Error checking is file exists will retry %s more times", retriesleft)
log.exception("Error checking if %s exists will retry %s more times", nm, retriesleft)

if retriesleft > 0:
time.sleep(10 * apiratecount)
else:
results.writeError(nm, tfd, base64_path, "Error queuing file %s" % filename)
results.writeError(nm, tfd, base64_path, "Error queuing file %s" % nm)
except:
retriesleft -= 1
log.exception("Error checking is file exists will retry %s more times", retriesleft)
log.exception("Error checking if %s exists will retry %s more times", nm, retriesleft)
if retriesleft > 0:
time.sleep(10 * apiratecount)
else:
results.writeError(nm, tfd, base64_path, "Error queuing file %s" % filename)
results.writeError(nm, tfd, base64_path, "Error queuing file %s" % nm)
else:
retriesleft = 0
if should_exit.is_set():
log.debug("Stopping folder list")
return
elif needtoupload:
if needtoupload:
if args.dryrun:
if not args.silentqueuer:
log.debug("%s %s", nm, filesize)
results.writeSuccess(tfd, base64_path)
results.writeSuccess(tfd, base64_path)
else:
if not args.silentqueuer:
log.debug("Queuing file download for %s", nm)
filedownload = {
"filename": nm,
"filepath": base64_path,
Expand All @@ -194,9 +195,13 @@ def folder_list_gdrive(folder, status, results, args, should_exit, g):
"filedir": folder_id
}
if args.local:
if not args.silentqueuer:
log.debug("Queuing file upload for %s", nm)
filedownload["temppath"] = base64_path
status.queue_up(filedownload)
else:
if not args.silentqueuer:
log.debug("Queuing file download for %s", nm)
status.queue_down(filedownload)
else:
results.writeSkipped(tfd, base64_path, nm)
Expand All @@ -206,30 +211,60 @@ def folder_list_gdrive(folder, status, results, args, should_exit, g):
if should_exit.is_set():
log.debug("Stopping folder list")
return
elif args.rec and (not args.depth or args.depth > depth):
g_fold = g.get_folder_byname(nm, parent=folder_id, createnotfound=cnf)
remainingtries = 5
while not should_exit.is_set() and g_fold is None and remainingtries > 0:
remainingtries -= 1
log.error("Will retry to get/create %s %s more times", nm, remainingtries)
time.sleep(5)
g_fold = g.get_folder_byname(nm, parent=folder_id, createnotfound=cnf)
if should_exit.is_set():
log.debug("Stopping folder list")
return
elif g_fold is None:
log.error("Failed to get/create folder")
return
if not args.silentqueuer:
log.debug("Queuing folder listing for %s", nm)
folder = {
"folder": item,
"depth": (depth+1),
"path": tfd,
"folder_id": g_fold["id"]
}
status.queue(folder)
except: #Hopefully this won't get called
if not args.rec or ( args.depth and args.depth <= depth ):
continue
retriesleft = 10
apiratecount = 1
while not should_exit.is_set() and retriesleft > 0:
try:
if args.dryrun and not folder_id:
g_fold = False
else:
g_fold = g.get_folder_byname(nm, parent=folder_id, createnotfound=cnf)
except HttpError as e:
retriesleft -= 1
if e.resp.status == 403:
apiratecount += 1
retriesleft += 1
log.warn("Google API rate limit reached. Will retry")
else:
log.exception("Will retry to get/create %s %s more times", nm, retriesleft)

if retriesleft > 0:
time.sleep(10 * apiratecount)
else:
results.writeError(nm, tfd, base64_path, "Failed to get/create folder %s" % nm)
continue
except:
retriesleft -= 1
log.error("Will retry to get/create %s %s more times", nm, retriesleft)
if retriesleft > 0:
time.sleep(10 * apiratecount)
else:
results.writeError(nm, tfd, base64_path, "Failed to get/create folder %s" % nm)
continue
else:
retriesleft = 0

if should_exit.is_set():
log.debug("Stopping folder list")
return
folder = {
"folder": item,
"depth": (depth+1),
"path": tfd
}
if args.dryrun and not g_fold:
folder["folder_id"] = None
elif not g_fold:
results.writeError(nm, tfd, base64_path, "Failed to get/create folder %s" % nm)
continue
else:
folder["folder_id"] = g_fold["id"]
if not args.silentqueuer:
log.debug("Queuing folder listing for %s", nm)
status.queue(folder)
except:
results.writeError(nm, tfd, base64_path, traceback.format_exc())

def folder_list(folder, status, results, args, should_exit):
Expand Down Expand Up @@ -310,7 +345,7 @@ def folder_list(folder, status, results, args, should_exit):
if args.dryrun:
if not args.silentqueuer:
log.debug("%s %s", nm, filesize)
results.writeSuccess(tfd, base64_path)
results.writeSuccess(tfd, base64_path)
else:
if not args.silentqueuer:
log.debug("Queuing file download for %s", nm)
Expand Down
3 changes: 1 addition & 2 deletions BitcasaFileFetcher/threads/upload.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import time, os, logging
from helpers import utils
from lib.gdrive import GoogleDrive
from googleapiclient.errors import HttpError
import requests
from lib.googleapiclient.errors import HttpError
from Queue import Empty as EmptyException

log = logging.getLogger("BitcasaFileFetcher")
Expand Down
7 changes: 5 additions & 2 deletions BitcasaFileLister/server.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import sys, logging
import sys, logging, json
from lib import bottle, cherrypy, BitcasaUtils
from lib.bottle import route, run, request, get, post, template, response, view, static_file, redirect
from lib.bitcasa import BitcasaException, BitcasaFile
Expand Down Expand Up @@ -93,8 +93,11 @@ def do_bitcasa_auth():
error_msg = "Storing permanent token %s" % client.access_token
log.info(error_msg)
try:
with open(utils.BITCASA_TOKEN, "r") as tokenfile:
json_token = json.loads(tokenfile.read())
with open(utils.BITCASA_TOKEN, "w") as tokenfile:
tokenfile.write(client.access_token)
json_token["bitcasa"]["TOKEN"] = client.access_token
tokenfile.write(json.dumps(json_token, indent=4))
except Exception as e:
auth_name="Login"
auth_url="/bitcasafilelister/auth"
Expand Down
10 changes: 9 additions & 1 deletion CHANGELOG
Original file line number Diff line number Diff line change
Expand Up @@ -80,4 +80,12 @@
* Upload local files
* Better handling of exit
* Prevent bug that would improperly cause threads to shutdown
* Better handling of google drive api limit errors
* Better handling of google drive api limit errors

## Version 0.7.1
* Fixed issues with `--dryrun`
* Proper logging when running locally
* Put custom API creds in bitcasa.ini. Fixes #56
* Fix encoding issues. Fixes #62 Fixes #55
* Fix some api rate limit issues where it wouldn't sleep properly
* Fix a bug where the wrong variable was used
7 changes: 7 additions & 0 deletions bitcasa_sample.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{
"bitcasa": {
"CLIENTID": "758ab3de",
"CLIENTSECRET": "5669c999ac340185a7c80c28d12a4319",
"TOKEN": ""
}
}
3 changes: 1 addition & 2 deletions includes/helpers/utils.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
CLIENTID = "758ab3de"
CLIENTSECRET = "5669c999ac340185a7c80c28d12a4319"
SERVER_HOST = "localhost"
SERVER_PORT = 1115
SERVER_URL = "http://%s:%s/bitcasafilelister" % (SERVER_HOST, SERVER_PORT)
REDIRECT_URI = "%s/auth" % SERVER_URL

import math, os, hashlib, logging, tempfile
BITCASA_TOKEN = os.path.abspath("bitcasa.ini")
BITCASA_SAMPLE_TOKEN = os.path.abspath("bitcasa_sample.ini")
GDRIVE_CREDS = os.path.abspath("gdrive.ini")
GDRIVE_SECRETS = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../lib", "gdrive_secrets.ini"))

Expand Down
Loading

0 comments on commit a53198c

Please sign in to comment.