X-Git-Url: https://glassweightruler.freedombox.rocks/gitweb/waydroid.git/blobdiff_plain/1f0393876d891d0e32a995d0bf7259cc6505afdc..eebdcec852186e21a4c44ab383380e3a0453464a:/tools/helpers/http.py diff --git a/tools/helpers/http.py b/tools/helpers/http.py index d05522b..68031c3 100644 --- a/tools/helpers/http.py +++ b/tools/helpers/http.py @@ -1,13 +1,14 @@ # Copyright 2021 Oliver Smith # SPDX-License-Identifier: GPL-3.0-or-later import hashlib -import json import logging import os import shutil +import threading import urllib.request import tools.helpers.run +import time def download(args, url, prefix, cache=True, loglevel=logging.INFO, @@ -26,6 +27,70 @@ def download(args, url, prefix, cache=True, loglevel=logging.INFO, with a 404 Not Found error. Only display a warning on stdout (no matter if loglevel is changed). :returns: path to the downloaded file in the cache or None on 404 """ + + # helper functions for progress + def fromBytesToMB(numBytes, decimalPlaces=2): + return round(int(numBytes)/1000000, decimalPlaces) + + def getDownloadSpeed(lastSize, currentSize, timeTaken, decimalPlaces=2): + # sizes are in mb and timeTaken in seconds + speedUnit = "mbps" + sizeDifference = currentSize-lastSize + + if sizeDifference < 1: + # sizeDifference is less than 1 mb + # convert sizeDifference to kb and speedUnit to kbps, + # for better readability + sizeDifference*=1000 + speedUnit = "kbps" + + # sizeDifference mb(or kb) was downloaded in timeTaken seconds + # so downloadSpeed = sizeDifference/timeTaken mbps(or kbps) + return (round(sizeDifference/timeTaken, decimalPlaces), speedUnit) + + # Show progress while downloading + downloadEnded = False + def progress(totalSize, destinationPath): + # convert totalSize to mb before hand, + # it's value won't change inside while loop and + # will be unnecessarily calculated every .01 seconds + totalSize = fromBytesToMB(totalSize) + + # this value will be used to figure out maximum chars + # required to denote downloaded size later 
on + totalSizeStrLen = len(str(totalSize)) + + # lastSize and lastSizeChangeAt is used to calculate speed + lastSize = 0 + lastSizeChangeAt = time.time() + + downloadSpeed = 0, "mbps" + + while not downloadEnded: + currentSize = fromBytesToMB(os.path.getsize(destinationPath)) + + if currentSize != lastSize: + sizeChangeAt = time.time() + downloadSpeed = getDownloadSpeed( + lastSize, currentSize, + timeTaken=sizeChangeAt-lastSizeChangeAt + ) + + lastSize = currentSize + lastSizeChangeAt = sizeChangeAt + + # make currentSize and downloadSpeed of a fix max len, + # to avoid previously printed chars to appear while \ + # printing recursively + # currentSize is not going to exceed totalSize + currentSize = str(currentSize).rjust(totalSizeStrLen) + # assuming max downloadSpeed to be 9999.99 mbps + downloadSpeed = f"{str(downloadSpeed[0]).rjust(7)} {downloadSpeed[1]}" + + # print progress bar + print(f"\r[Downloading] {currentSize} MB/{totalSize} MB {downloadSpeed}(approx.)", end=" ") + time.sleep(.01) + # Create cache folder if not os.path.exists(args.work + "/cache_http"): tools.helpers.run.user(args, ["mkdir", "-p", args.work + "/cache_http"]) @@ -44,6 +109,9 @@ def download(args, url, prefix, cache=True, loglevel=logging.INFO, try: with urllib.request.urlopen(url) as response: with open(path, "wb") as handle: + # adding daemon=True will kill this thread if main thread is killed + # else progress_bar will continue to show even if user cancels download by ctrl+c + threading.Thread(target=progress, args=(response.headers.get('content-length'), path), daemon=True).start() shutil.copyfileobj(response, handle) # Handle 404 except urllib.error.HTTPError as e: @@ -51,19 +119,18 @@ def download(args, url, prefix, cache=True, loglevel=logging.INFO, logging.warning("WARNING: file not found: " + url) return None raise + downloadEnded = True # Return path in cache return path -def retrieve(url, headers=None, allow_404=False): +def retrieve(url, headers=None): """ Fetch the content 
of a URL and returns it as string. :param url: the http(s) address of to the resource to fetch :param headers: dict of HTTP headers to use - :param allow_404: do not raise an exception when the server responds - with a 404 Not Found error. Only display a warning - :returns: str with the content of the response + :returns: the HTTP status code and the content of the response """ # Download the file logging.verbose("Retrieving " + url) @@ -74,16 +141,7 @@ def download(args, url, prefix, cache=True, loglevel=logging.INFO, req = urllib.request.Request(url, headers=headers) try: with urllib.request.urlopen(req) as response: - return response.read() + return 200, response.read() # Handle 404 except urllib.error.HTTPError as e: - if e.code == 404 and allow_404: - logging.warning("WARNING: failed to retrieve content from: " + url) - return None - raise - - - def retrieve_json(*args, **kwargs): - """ Fetch the contents of a URL, parse it as JSON and return it. See - retrieve() for the list of all parameters. """ - return json.loads(retrieve(*args, **kwargs)) + return e.code, ""