projects
/
arvados.git
/ blobdiff
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
Merge branch 'master' into 14885-ciso-and-conda-packaging-pr
[arvados.git]
/
sdk
/
python
/
arvados
/
commands
/
keepdocker.py
diff --git a/sdk/python/arvados/commands/keepdocker.py b/sdk/python/arvados/commands/keepdocker.py
index ff7201a75bdaf4aaf1b6086803fe8f1eaca83e30..ec2a9942a6794153ea69138ba467a20f8b1ae6a6 100644
(file)
--- a/sdk/python/arvados/commands/keepdocker.py
+++ b/sdk/python/arvados/commands/keepdocker.py
@@ -10,17 +10,20 @@
import errno
import json
import os
import re
import json
import os
import re
-import subprocess
import sys
import tarfile
import tempfile
import shutil
import _strptime
import fcntl
import sys
import tarfile
import tempfile
import shutil
import _strptime
import fcntl
-
from operator import itemgetter
from stat import *
from operator import itemgetter
from stat import *
+if os.name == "posix" and sys.version_info[0] < 3:
+ import subprocess32 as subprocess
+else:
+ import subprocess
+
import arvados
import arvados.util
import arvados.commands._util as arv_cmd
import arvados
import arvados.util
import arvados.commands._util as arv_cmd
@@ -133,6 +136,7 @@ def docker_images():
def docker_images():
next(list_output) # Ignore the header line
for line in list_output:
words = line.split()
next(list_output) # Ignore the header line
for line in list_output:
words = line.split()
+ words = [word.decode() for word in words]
size_index = len(words) - 2
repo, tag, imageid = words[:3]
ctime = ' '.join(words[3:size_index])
size_index = len(words) - 2
repo, tag, imageid = words[:3]
ctime = ' '.join(words[3:size_index])
@@ -226,12 +230,15 @@ def docker_link_sort_key(link):
def docker_link_sort_key(link):
Docker metadata links to sort them from least to most preferred.
"""
try:
Docker metadata links to sort them from least to most preferred.
"""
try:
-        image_timestamp = ciso8601.parse_datetime_unaware(
+        image_timestamp = ciso8601.parse_datetime_as_naive(
link['properties']['image_timestamp'])
except (KeyError, ValueError):
image_timestamp = EARLIEST_DATETIME
link['properties']['image_timestamp'])
except (KeyError, ValueError):
image_timestamp = EARLIEST_DATETIME
- return (image_timestamp,
- ciso8601.parse_datetime_unaware(link['created_at']))
+ try:
+ created_timestamp = ciso8601.parse_datetime_as_naive(link['created_at'])
+ except ValueError:
+ created_timestamp = None
+ return (image_timestamp, created_timestamp)
def _get_docker_links(api_client, num_retries, **kwargs):
links = arvados.util.list_all(api_client.links().list,
def _get_docker_links(api_client, num_retries, **kwargs):
links = arvados.util.list_all(api_client.links().list,
@@ -345,9 +352,10 @@ def _uuid2pdh(api, uuid):
def _uuid2pdh(api, uuid):
select=['portable_data_hash'],
).execute()['items'][0]['portable_data_hash']
select=['portable_data_hash'],
).execute()['items'][0]['portable_data_hash']
-def main(arguments=None, stdout=sys.stdout):
+def main(arguments=None, stdout=sys.stdout, install_sig_handlers=True, api=None):
args = arg_parser.parse_args(arguments)
args = arg_parser.parse_args(arguments)
- api = arvados.api('v1')
+ if api is None:
+ api = arvados.api('v1')
if args.image is None or args.image == 'images':
fmt = "{:30} {:10} {:12} {:29} {:20}\n"
if args.image is None or args.image == 'images':
fmt = "{:30} {:10} {:12} {:29} {:20}\n"
@@ -490,7 +498,8 @@ def main(arguments=None, stdout=sys.stdout):
def main(arguments=None, stdout=sys.stdout):
put_args += ['--name', collection_name]
coll_uuid = arv_put.main(
put_args += ['--name', collection_name]
coll_uuid = arv_put.main(
- put_args + ['--filename', outfile_name, image_file.name], stdout=stdout).strip()
+ put_args + ['--filename', outfile_name, image_file.name], stdout=stdout,
+ install_sig_handlers=install_sig_handlers).strip()
# Read the image metadata and make Arvados links from it.
image_file.seek(0)
# Read the image metadata and make Arvados links from it.
image_file.seek(0)
@@ -501,7 +510,7 @@ def main(arguments=None, stdout=sys.stdout):
def main(arguments=None, stdout=sys.stdout):
else:
json_filename = raw_image_hash + '/json'
json_file = image_tar.extractfile(image_tar.getmember(json_filename))
else:
json_filename = raw_image_hash + '/json'
json_file = image_tar.extractfile(image_tar.getmember(json_filename))
-        image_metadata = json.load(json_file)
+        image_metadata = json.loads(json_file.read().decode())
json_file.close()
image_tar.close()
link_base = {'head_uuid': coll_uuid, 'properties': {}}
json_file.close()
image_tar.close()
link_base = {'head_uuid': coll_uuid, 'properties': {}}