glassweightruler.freedombox.rocks Git - waydroid.git/blobdiff - tools/helpers/http.py
arch: Separately identify arm64_only CPUs without AArch32 support
[waydroid.git] / tools / helpers / http.py
index c63d665719c93cb14a0b81f8297a3ba4c4b6cae2..2aac2b52f6282f4c183ee2e5d9855239a6dc768c 100644 (file)
@@ -4,9 +4,11 @@ import hashlib
 import logging
 import os
 import shutil
+import threading
 import urllib.request
 
 import tools.helpers.run
+import time
 
 
 def download(args, url, prefix, cache=True, loglevel=logging.INFO,
@@ -25,6 +27,70 @@ def download(args, url, prefix, cache=True, loglevel=logging.INFO,
                           with a 404 Not Found error. Only display a warning on
                           stdout (no matter if loglevel is changed).
         :returns: path to the downloaded file in the cache or None on 404 """
+    
+    # helper functions for progress
+    def fromBytesToMB(numBytes, decimalPlaces=2):
+        return round(int(numBytes)/1000000, decimalPlaces)
+    
+    def getDownloadSpeed(lastSize, currentSize, timeTaken, decimalPlaces=2):
+        # sizes are in MB and timeTaken is in seconds
+        speedUnit = "mbps"
+        sizeDifference = currentSize-lastSize
+
+        if sizeDifference < 1:
+            # sizeDifference is less than 1 mb
+            # convert sizeDifference to kb and speedUnit to kbps,
+            # for better readability
+            sizeDifference*=1000
+            speedUnit = "kbps"
+        
+        # sizeDifference mb(or kb) was downloaded in timeTaken seconds
+        # so downloadSpeed = sizeDifference/timeTaken mbps(or kbps)
+        return (round(sizeDifference/timeTaken, decimalPlaces), speedUnit)
+
+    # Show progress while downloading
+    downloadEnded = False
+    def progress(totalSize, destinationPath):
+        # convert totalSize to MB beforehand:
+        # its value won't change inside the while loop, so it
+        # would otherwise be recalculated needlessly every .01 seconds
+        totalSize = fromBytesToMB(totalSize)
+
+        # this value will be used to figure out maximum chars
+        # required to denote downloaded size later on
+        totalSizeStrLen = len(str(totalSize))
+
+        # lastSize and lastSizeChangeAt is used to calculate speed
+        lastSize = 0
+        lastSizeChangeAt = time.time()
+
+        downloadSpeed = 0, "mbps"
+
+        while not downloadEnded:
+            currentSize = fromBytesToMB(os.path.getsize(destinationPath))
+            
+            if currentSize != lastSize:
+                sizeChangeAt = time.time()
+                downloadSpeed = getDownloadSpeed(
+                    lastSize, currentSize,
+                    timeTaken=sizeChangeAt-lastSizeChangeAt
+                )
+
+                lastSize = currentSize
+                lastSizeChangeAt = sizeChangeAt
+
+                # pad currentSize and downloadSpeed to a fixed width,
+                # so that leftover characters from a longer previous
+                # line are overwritten when the line is reprinted
+                # currentSize is not going to exceed totalSize
+                currentSize = str(currentSize).rjust(totalSizeStrLen)
+                # assuming max downloadSpeed to be 9999.99 mbps
+                downloadSpeed = f"{str(downloadSpeed[0]).rjust(7)} {downloadSpeed[1]}"
+                
+                # print progress bar
+                print(f"\r[Downloading] {currentSize} MB/{totalSize} MB    {downloadSpeed}(approx.)", end=" ")
+            time.sleep(.01)
+
     # Create cache folder
     if not os.path.exists(args.work + "/cache_http"):
         tools.helpers.run.user(args, ["mkdir", "-p", args.work + "/cache_http"])
@@ -39,10 +105,13 @@ def download(args, url, prefix, cache=True, loglevel=logging.INFO,
         tools.helpers.run.user(args, ["rm", path])
 
     # Download the file
-    logging.log(loglevel, "Download " + url)
+    logging.log(loglevel, "Downloading " + url)
     try:
         with urllib.request.urlopen(url) as response:
             with open(path, "wb") as handle:
+                # daemon=True lets the interpreter exit without waiting for this thread;
+                # otherwise the progress loop would keep printing after the user cancels with Ctrl+C
+                threading.Thread(target=progress, args=(response.headers.get('content-length'), path), daemon=True).start()
                 shutil.copyfileobj(response, handle)
     # Handle 404
     except urllib.error.HTTPError as e:
@@ -50,6 +119,7 @@ def download(args, url, prefix, cache=True, loglevel=logging.INFO,
             logging.warning("WARNING: file not found: " + url)
             return None
         raise
+    downloadEnded = True
 
     # Return path in cache
     return path
@@ -68,10 +138,13 @@ def retrieve(url, headers=None):
     if headers is None:
         headers = {}
 
-    req = urllib.request.Request(url, headers=headers)
     try:
+        req = urllib.request.Request(url, headers=headers)
         with urllib.request.urlopen(req) as response:
             return 200, response.read()
+    # Handle malformed URL
+    except ValueError as e:
+        return -1, ""
     # Handle 404
     except urllib.error.HTTPError as e:
         return e.code, ""