# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

from __future__ import print_function

import argparse
import json
import logging
import sys

import arvados
from arvados.events import subscribe
from arvados._version import __version__

def main(arguments=None):
    logger = logging.getLogger('arvados.arv-ws')

    parser = argparse.ArgumentParser()
    parser.add_argument('--version', action='version',
                        version="%s %s" % (sys.argv[0], __version__),
                        help='Print version and exit.')
    parser.add_argument('-u', '--uuid', type=str, default="", help="Filter events on object_uuid")
    parser.add_argument('-f', '--filters', type=str, default="", help="Arvados query filter to apply to log events (JSON encoded)")
    parser.add_argument('-s', '--start-time', type=str, default="", help="Arvados query filter to fetch log events created at or after this time. This will be server time in UTC. Allowed format: YYYY-MM-DD or YYYY-MM-DD hh:mm:ss")
    parser.add_argument('-i', '--id', type=int, default=None, help="Start from given log id.")

    group = parser.add_mutually_exclusive_group()
    group.add_argument('--poll-interval', default=15, type=int, help="If websockets are not available, specify the polling interval; the default is every 15 seconds")
    group.add_argument('--no-poll', action='store_false', dest='poll_interval', help="Do not poll if websockets are not available, just fail")

    group = parser.add_mutually_exclusive_group()
    group.add_argument('-p', '--pipeline', type=str, default="", help="Supply pipeline uuid, print log output from pipeline and its jobs")
    group.add_argument('-j', '--job', type=str, default="", help="Supply job uuid, print log output from jobs")

    args = parser.parse_args(arguments)
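
    # State shared with the on_message callback and update_subscribed_components below.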
    global filters
    global known_component_jobs
    global ws

    filters = []
    known_component_jobs = set()
    ws = None
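
    # While following a pipeline (-p), re-subscribe whenever the set of component
    # job UUIDs changes so that log events from newly started jobs are included.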
    def update_subscribed_components(components):
        global known_component_jobs
        global filters
        pipeline_jobs = set()
        for c in components:
            if "job" in components[c]:
                pipeline_jobs.add(components[c]["job"]["uuid"])
        if known_component_jobs != pipeline_jobs:
            new_filters = [['object_uuid', 'in', [args.pipeline] + list(pipeline_jobs)]]
            ws.subscribe(new_filters)
            ws.unsubscribe(filters)
            filters = new_filters
            known_component_jobs = pipeline_jobs

    api = arvados.api('v1')
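
    # Build the initial log filters from the command-line arguments.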
    if args.uuid:
        filters += [ ['object_uuid', '=', args.uuid] ]

    if args.filters:
        filters += json.loads(args.filters)

    if args.job:
        filters += [ ['object_uuid', '=', args.job] ]

    if args.pipeline:
        filters += [ ['object_uuid', '=', args.pipeline] ]

    if args.start_time:
        last_log_id = 1
        filters += [ ['created_at', '>=', args.start_time] ]
    else:
        last_log_id = None

    if args.id:
        last_log_id = args.id - 1
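
    # on_message is called once for each log event received over the websocket
    # connection (or via the polling fallback).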
    def on_message(ev):
        global filters
        global ws

        logger.debug(ev)
        if 'event_type' in ev and (args.pipeline or args.job):
            if ev['event_type'] in ('stderr', 'stdout'):
                sys.stdout.write(ev["properties"]["text"])
            elif ev["event_type"] in ("create", "update"):
                if ev["object_kind"] == "arvados#pipelineInstance":
                    c = api.pipeline_instances().get(uuid=ev["object_uuid"]).execute()
                    update_subscribed_components(c["components"])
                # Close the connection once the pipeline or job reaches a final state.
                if ev["object_kind"] == "arvados#pipelineInstance" and args.pipeline:
                    if ev["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Paused"):
                        ws.close()
                if ev["object_kind"] == "arvados#job" and args.job:
                    if ev["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled"):
                        ws.close()
        elif 'status' in ev and ev['status'] == 200:
            pass  # status/acknowledgement messages are not printed
        else:
            print(json.dumps(ev))
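
    # Subscribe to the event stream, falling back to polling if websockets are
    # unavailable (unless --no-poll was given).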
    try:
        ws = subscribe(arvados.api('v1'), filters, on_message, poll_fallback=args.poll_interval, last_log_id=last_log_id)
        if ws:
            if args.pipeline:
                c = api.pipeline_instances().get(uuid=args.pipeline).execute()
                update_subscribed_components(c["components"])
                if c["state"] in ("Complete", "Failed", "Paused"):
                    ws.close()
            ws.run_forever()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        logger.error(e)
    finally:
        if ws:
            ws.close()