# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import division
from future import standard_library
standard_library.install_aliases()

import calendar
import datetime
import email.utils
import logging
import re
import time
import urllib.parse

import requests

import arvados.collection
# Module-level logger shared by all helpers below.
logger = logging.getLogger('arvados.cwl-runner')
def my_formatdate(dt):
    """Format a naive UTC datetime as an RFC 2822 date string.

    Used to synthesize a Date header when the server didn't send one.
    """
    # calendar.timegm interprets the struct_time as UTC (no local-tz
    # shift); usegmt=True renders the trailing "GMT".
    return email.utils.formatdate(timeval=calendar.timegm(dt.timetuple()),
                                  localtime=False, usegmt=True)
def my_parsedate(text):
    """Parse an RFC 2822 date string into a naive UTC datetime.

    Returns the Unix epoch (1970-01-01) when the string is unparseable,
    so callers treat garbage dates as maximally stale.
    """
    parsed = email.utils.parsedate_tz(text)
    if parsed:
        if parsed[9]:
            # Adjust to UTC: subtract the timezone offset (seconds east
            # of UTC), matching email.utils.mktime_tz semantics.
            return datetime.datetime(*parsed[:6]) - datetime.timedelta(seconds=parsed[9])
        # TZ is zero or missing, assume UTC.
        return datetime.datetime(*parsed[:6])
    return datetime.datetime(1970, 1, 1)
def fresh_cache(url, properties, now):
    """Return True if the headers cached for `url` say it is still fresh at `now`.

    `properties` maps url -> dict of remembered response headers (see
    remember_headers).  Freshness is decided, in order, by:
    Cache-Control "immutable", Cache-Control max-age/s-maxage relative
    to the Date header, the Expires header, and finally a 24-hour default.
    """
    pr = properties[url]
    expires = None

    logger.debug("Checking cache freshness for %s using %s", url, pr)

    if "Cache-Control" in pr:
        if re.match(r"immutable", pr["Cache-Control"]):
            # Immutable content never goes stale.
            return True

        g = re.match(r"(s-maxage|max-age)=(\d+)", pr["Cache-Control"])
        if g:
            expires = my_parsedate(pr["Date"]) + datetime.timedelta(seconds=int(g.group(2)))

    if expires is None and "Expires" in pr:
        expires = my_parsedate(pr["Expires"])

    if expires is None:
        # Use a default cache time of 24 hours if upstream didn't set
        # any cache headers, to reduce redundant downloads.
        expires = my_parsedate(pr["Date"]) + datetime.timedelta(hours=24)

    return (now < expires)
def remember_headers(url, properties, headers, now):
    """Record caching-relevant response headers for `url` into `properties`.

    Only a fixed allowlist of headers is kept.  When the server omitted
    Date, synthesize one from `now` so later freshness math has a baseline.
    """
    properties.setdefault(url, {})
    for h in ("Cache-Control", "ETag", "Expires", "Date", "Content-Length"):
        if h in headers:
            properties[url][h] = headers[h]
    if "Date" not in headers:
        properties[url]["Date"] = my_formatdate(now)
def changed(url, properties, now):
    """HEAD `url` and report whether its content appears to have changed.

    Side effect: refreshes the remembered headers for `url` in `properties`.
    Returns False only when the server's ETag matches the one we had
    cached before this check; any other outcome is treated as "changed".
    """
    # Capture the previously-cached ETag *before* remember_headers
    # overwrites it with the fresh response headers, otherwise the
    # comparison below would always trivially match.
    old_etag = properties.get(url, {}).get("ETag")

    req = requests.head(url, allow_redirects=True)
    remember_headers(url, properties, req.headers, now)

    if req.status_code != 200:
        # Sometimes endpoints are misconfigured and will deny HEAD but
        # allow GET so instead of failing here, we'll try GET If-None-Match
        return True

    if old_etag is not None and "ETag" in req.headers:
        if old_etag == req.headers["ETag"]:
            # Same ETag as before: content unchanged.
            return False

    # No usable ETag comparison; assume the content changed.
    return True
def etag_quote(etag):
    """Return `etag` wrapped in double quotes, as If-None-Match requires."""
    # if it already has leading and trailing quotes, do nothing
    if etag[0] == '"' and etag[-1] == '"':
        return etag
    return '"' + etag + '"'
def http_to_keep(api, project_uuid, url, utcnow=datetime.datetime.utcnow):
    """Download `url` into a Keep collection and return a "keep:<pdh>/<name>" reference.

    Reuses a previously-created collection when possible: a collection
    whose cached headers are still fresh is returned without any network
    traffic; one whose ETag the server confirms (via If-None-Match /
    304) only gets its stored headers refreshed.  Otherwise the content
    is streamed into a new collection owned by `project_uuid`.

    `utcnow` is injectable for testing.
    Raises Exception when the GET returns anything other than 200/304.
    """
    # Find existing collections previously created for this url.
    r = api.collections().list(filters=[["properties", "exists", url]]).execute()

    now = utcnow()

    etags = {}

    for item in r["items"]:
        properties = item["properties"]
        if fresh_cache(url, properties, now):
            # Cache headers say the stored copy is still valid; reuse it.
            cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
            return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])

        if not changed(url, properties, now):
            # ETag didn't change, same content, just update headers
            api.collections().update(uuid=item["uuid"], body={"collection":{"properties": properties}}).execute()
            cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
            return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])

        # Remember candidate ETags so the GET below can send If-None-Match.
        if "ETag" in properties and len(properties["ETag"]) > 2:
            etags[properties["ETag"]] = item

    logger.debug("Found ETags %s", etags)

    properties = {}
    headers = {}
    if etags:
        headers['If-None-Match'] = ', '.join([etag_quote(k) for k, v in etags.items()])
    logger.debug("Sending GET request with headers %s", headers)
    req = requests.get(url, stream=True, allow_redirects=True, headers=headers)

    if req.status_code not in (200, 304):
        raise Exception("Failed to download '%s' got status %s " % (url, req.status_code))

    remember_headers(url, properties, req.headers, now)

    if req.status_code == 304 and "ETag" in req.headers and req.headers["ETag"] in etags:
        # Server confirmed our cached copy is current; refresh headers only.
        item = etags[req.headers["ETag"]]
        item["properties"].update(properties)
        api.collections().update(uuid=item["uuid"], body={"collection":{"properties": item["properties"]}}).execute()
        cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
        return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])

    if "Content-Length" in properties[url]:
        cl = int(properties[url]["Content-Length"])
        logger.info("Downloading %s (%s bytes)", url, cl)
    else:
        cl = None
        logger.info("Downloading %s (unknown size)", url)

    c = arvados.collection.Collection()

    # Prefer the filename from Content-Disposition, else the URL path tail.
    if req.headers.get("Content-Disposition"):
        grp = re.search(r'filename=("((\"|[^"])+)"|([^][()<>@,;:\"/?={} ]+))', req.headers["Content-Disposition"])
        if grp.group(2):
            name = grp.group(2)
        else:
            name = grp.group(4)
    else:
        name = urllib.parse.urlparse(url).path.split("/")[-1]

    count = 0
    start = time.time()
    checkpoint = start
    with c.open(name, "wb") as f:
        for chunk in req.iter_content(chunk_size=1024):
            count += len(chunk)
            f.write(chunk)
            loopnow = time.time()
            # Log progress at most once every 20 seconds.
            if (loopnow - checkpoint) > 20:
                bps = count / (loopnow - start)
                if cl is not None:
                    # True division for the rate: floor division would
                    # report 0 MiB/s for any sub-MiB/s transfer.
                    logger.info("%2.1f%% complete, %3.2f MiB/s, %1.0f seconds left",
                                ((count * 100) / cl),
                                (bps / (1024*1024)),
                                ((cl - count) / bps))
                else:
                    logger.info("%d downloaded, %3.2f MiB/s", count, (bps / (1024*1024)))
                checkpoint = loopnow

    logger.info("Download complete")

    collectionname = "Downloaded from %s" % urllib.parse.quote(url, safe='')

    # max length - space to add a timestamp used by ensure_unique_name
    max_name_len = 254 - 28

    if len(collectionname) > max_name_len:
        # Trim from the middle so both ends of the URL stay recognizable.
        over = len(collectionname) - max_name_len
        split = int(max_name_len/2)
        collectionname = collectionname[0:split] + "…" + collectionname[split+over:]

    c.save_new(name=collectionname, owner_uuid=project_uuid, ensure_unique_name=True)

    api.collections().update(uuid=c.manifest_locator(), body={"collection":{"properties": properties}}).execute()

    return "keep:%s/%s" % (c.portable_data_hash(), name)