# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

import argparse
import json
import logging
import sys

import arvados
from arvados.events import subscribe
from arvados._version import __version__
from . import _util as arv_cmd
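
# arv-ws: stream the Arvados event log (websocket, with a polling fallback)
# to stdout, optionally following a specific pipeline instance or job.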

def main(arguments=None):
    logger = logging.getLogger('arvados.arv-ws')

    parser = argparse.ArgumentParser(parents=[arv_cmd.retry_opt])
    parser.add_argument('--version', action='version',
                        version="%s %s" % (sys.argv[0], __version__),
                        help='Print version and exit.')
    parser.add_argument('-u', '--uuid', type=str, default="", help="Filter events on object_uuid")
    parser.add_argument('-f', '--filters', type=str, default="", help="Arvados query filter to apply to log events (JSON encoded)")
    parser.add_argument('-s', '--start-time', type=str, default="", help="Arvados query filter to fetch log events created at or after this time. This will be server time in UTC. Allowed format: YYYY-MM-DD or YYYY-MM-DD hh:mm:ss")
    parser.add_argument('-i', '--id', type=int, default=None, help="Start from given log id.")

    group = parser.add_mutually_exclusive_group()
    group.add_argument('--poll-interval', default=15, type=int, help="If websockets are not available, specify the polling interval; default is every 15 seconds")
    group.add_argument('--no-poll', action='store_false', dest='poll_interval', help="Do not poll if websockets are not available, just fail")

    group = parser.add_mutually_exclusive_group()
    group.add_argument('-p', '--pipeline', type=str, default="", help="Supply pipeline uuid, print log output from pipeline and its jobs")
    group.add_argument('-j', '--job', type=str, default="", help="Supply job uuid, print log output from jobs")

    args = parser.parse_args(arguments)
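
    # Mutable state shared between main() and the event callbacks below.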
    global filters
    global known_component_jobs
    global ws

    filters = []
    known_component_jobs = set()
    ws = None
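
    # When following a pipeline, keep the websocket subscription in sync with
    # the jobs currently attached to the pipeline's components.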
    def update_subscribed_components(components):
        global known_component_jobs
        global filters
        pipeline_jobs = set()
        for c in components:
            if "job" in components[c]:
                pipeline_jobs.add(components[c]["job"]["uuid"])
        if known_component_jobs != pipeline_jobs:
            new_filters = [['object_uuid', 'in', [args.pipeline] + list(pipeline_jobs)]]
            ws.subscribe(new_filters)
            ws.unsubscribe(filters)
            filters = new_filters
            known_component_jobs = pipeline_jobs
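
    # Connect to the API server and build the initial log query filters from
    # the command-line options.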
    api = arvados.api('v1', num_retries=args.retries)

    if args.uuid:
        filters += [ ['object_uuid', '=', args.uuid] ]
    if args.filters:
        filters += json.loads(args.filters)
    if args.job:
        filters += [ ['object_uuid', '=', args.job] ]
    if args.pipeline:
        filters += [ ['object_uuid', '=', args.pipeline] ]

    if args.start_time:
        # Replay past events matching the filters, not just new ones.
        last_log_id = 1
        filters += [ ['created_at', '>=', args.start_time] ]
    else:
        last_log_id = None
    if args.id:
        last_log_id = args.id-1
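
    # Handle one event from the websocket (or from the polling fallback).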
    def on_message(ev):
        if 'event_type' in ev and (args.pipeline or args.job):
            if ev['event_type'] in ('stderr', 'stdout'):
                # Pass job/pipeline log text straight through to stdout.
                sys.stdout.write(ev["properties"]["text"])
            elif ev["event_type"] in ("create", "update"):
                if ev["object_kind"] == "arvados#pipelineInstance":
                    c = api.pipeline_instances().get(uuid=ev["object_uuid"]).execute()
                    update_subscribed_components(c["components"])
                if ev["object_kind"] == "arvados#pipelineInstance" and args.pipeline:
                    if ev["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Paused"):
                        ws.close()
                if ev["object_kind"] == "arvados#job" and args.job:
                    if ev["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled"):
                        ws.close()
        elif 'status' in ev and ev['status'] == 200:
            # Server acknowledgement; nothing to print.
            pass
        else:
            print(json.dumps(ev))
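
    # Subscribe and process events until interrupted or the followed
    # pipeline/job reaches a final state.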
    try:
        ws = subscribe(arvados.api('v1'), filters, on_message, poll_fallback=args.poll_interval, last_log_id=last_log_id)
        if ws:
            if args.pipeline:
                c = api.pipeline_instances().get(uuid=args.pipeline).execute()
                update_subscribed_components(c["components"])
                if c["state"] in ("Complete", "Failed", "Paused"):
                    ws.close()
            ws.run_forever()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        logger.error(e)
    finally:
        if ws:
            ws.close()