# waydroid tools/helpers/http.py
# Copyright 2021 Oliver Smith
# SPDX-License-Identifier: GPL-3.0-or-later
import hashlib
import logging
import os
import shutil
import urllib.error
import urllib.request

import tools.helpers.run
12 def download(args
, url
, prefix
, cache
=True, loglevel
=logging
.INFO
,
14 """ Download a file to disk.
16 :param url: the http(s) address of to the file to download
17 :param prefix: for the cache, to make it easier to find (cache files
18 get a hash of the URL after the prefix)
19 :param cache: if True, and url is cached, do not download it again
20 :param loglevel: change to logging.DEBUG to only display the download
21 message in 'waydroid log', not in stdout. We use
22 this when downloading many APKINDEX files at once, no
23 point in showing a dozen messages.
24 :param allow_404: do not raise an exception when the server responds
25 with a 404 Not Found error. Only display a warning on
26 stdout (no matter if loglevel is changed).
27 :returns: path to the downloaded file in the cache or None on 404 """
29 if not os
.path
.exists(args
.work
+ "/cache_http"):
30 tools
.helpers
.run
.user(args
, ["mkdir", "-p", args
.work
+ "/cache_http"])
32 # Check if file exists in cache
33 prefix
= prefix
.replace("/", "_")
34 path
= (args
.work
+ "/cache_http/" + prefix
+ "_" +
35 hashlib
.sha256(url
.encode("utf-8")).hexdigest())
36 if os
.path
.exists(path
):
39 tools
.helpers
.run
.user(args
, ["rm", path
])
42 logging
.log(loglevel
, "Download " + url
)
44 with urllib
.request
.urlopen(url
) as response
:
45 with open(path
, "wb") as handle
:
46 shutil
.copyfileobj(response
, handle
)
48 except urllib
.error
.HTTPError
as e
:
49 if e
.code
== 404 and allow_404
:
50 logging
.warning("WARNING: file not found: " + url
)
54 # Return path in cache
58 def retrieve(url
, headers
=None):
59 """ Fetch the content of a URL and returns it as string.
61 :param url: the http(s) address of to the resource to fetch
62 :param headers: dict of HTTP headers to use
63 :returns: status and str with the content of the response
66 logging
.verbose("Retrieving " + url
)
71 req
= urllib
.request
.Request(url
, headers
=headers
)
73 with urllib
.request
.urlopen(req
) as response
:
74 return 200, response
.read()
76 except urllib
.error
.HTTPError
as e
: