logger.error "#{e.backtrace.join("\n\t")}"
case fallback
when :json
- render(partial: "pipeline_instances/show_components_json")
+ render(partial: "pipeline_instances/show_components_json",
+ locals: {error_name: e.inspect, backtrace: e.backtrace.join("\n\t")})
end
end
end
end
if c[:job] and c[:job][:uuid] and job[c[:job][:uuid]]
pj[:job] = job[c[:job][:uuid]]
- else
- pj[:job] = c[:job].is_a?(Hash) ? c[:job] : {}
-
+ elsif c[:job].is_a?(Hash)
+ pj[:job] = c[:job]
+ if pj[:job][:started_at].is_a? String
+ pj[:job][:started_at] = Time.parse(pj[:job][:started_at])
+ end
+ if pj[:job][:finished_at].is_a? String
+ pj[:job][:finished_at] = Time.parse(pj[:job][:finished_at])
+ end
# If necessary, figure out the state based on the other fields.
pj[:job][:state] ||= if pj[:job][:cancelled_at]
"Cancelled"
else
"Queued"
end
+ else
+ pj[:job] = {}
end
pj[:percent_done] = 0
pj[:percent_running] = 0
pj[:nondeterministic] = pj[:job][:nondeterministic] || c[:nondeterministic]
pj[:output] = pj[:job][:output]
pj[:output_uuid] = c[:output_uuid]
- pj[:finished_at] = (Time.parse(pj[:job][:finished_at]) rescue nil)
+ pj[:finished_at] = pj[:job][:finished_at]
ret << pj
end
ret
<div class="col-md-3">
<% if current_job[:started_at] %>
- <% walltime = ((if current_job.finished_at then current_job.finished_at else Time.now() end) - current_job.started_at) %>
+ <% walltime = ((if current_job[:finished_at] then current_job[:finished_at] else Time.now() end) - current_job[:started_at]) %>
<% cputime = tasks.map { |task|
- if task.started_at and task.job_uuid == current_job.uuid
+ if task.started_at and task.job_uuid == current_job[:uuid]
(if task.finished_at then task.finished_at else Time.now() end) - task.started_at
else
0
<div class="col-md-5 text-overflow-ellipsis">
<% if pj[:output_uuid] %>
<%= link_to_if_arvados_object pj[:output_uuid], friendly_name: true %>
- <% elsif current_job.andand[:output] %>
+ <% elsif current_job[:output] %>
<%= link_to_if_arvados_object current_job[:output], link_text: "Output of #{pj[:name]}" %>
<% else %>
No output.
<%= pj[:progress_bar] %>
</div>
<div class="col-md-1 pipeline-instance-spacing">
- <%= form_tag "/jobs/#{current_job.uuid}/cancel", style: "display:inline; padding-left: 1em" do |f| %>
+ <%= form_tag "/jobs/#{current_job[:uuid]}/cancel", style: "display:inline; padding-left: 1em" do |f| %>
<%= hidden_field_tag :return_to, url_for(@object) %>
<%= button_tag "Cancel", {class: 'btn btn-xs btn-danger', id: "cancel-job-button"} %>
</div>
<% queuetime = Time.now - current_job[:created_at] %>
Queued for <%= render_runtime(queuetime, true) %>.
<% begin %>
- <% if current_job.queue_position == 0 %>
+ <% if current_job[:queue_position] == 0 %>
This job is next in the queue to run.
- <% elsif current_job.queue_position == 1 %>
+ <% elsif current_job[:queue_position] == 1 %>
There is 1 job in the queue ahead of this one.
<% else %>
- There are <%= current_job.queue_position %> jobs in the queue ahead of this one.
+ There are <%= current_job[:queue_position] %> jobs in the queue ahead of this one.
<% end %>
<% rescue %>
<% end %>
<p>The components of this pipeline are in a format that Workbench does not recognize.</p>
+<p>Error encountered: <b><%= error_name %></b></p>
+
<div id="components-accordion" class="panel panel-default">
<div class="panel-heading">
<h4 class="panel-title">
<pre><%= Oj.dump(@object.components, indent: 2) %></pre>
</div>
</div>
+ <div class="panel-heading">
+ <h4 class="panel-title">
+ <a data-toggle="collapse" data-parent="#components-accordion" href="#components-backtrace">
+ Show backtrace
+ </a>
+ </h4>
+ </div>
+ <div id="components-backtrace" class="panel-collapse collapse">
+ <div class="panel-body">
+ <pre><%= backtrace %></pre>
+ </div>
+ </div>
</div>
session_for(:active))
assert_response :success
end
+
+ test "dates in JSON components are parsed" do
+ get(:show,
+ {id: api_fixture('pipeline_instances')['has_component_with_completed_jobs']['uuid']},
+ session_for(:active))
+ assert_response :success
+ assert_not_nil assigns(:object)
+ assert_not_nil assigns(:object).components[:foo][:job]
+ assert assigns(:object).components[:foo][:job][:started_at].is_a? Time
+ assert assigns(:object).components[:foo][:job][:finished_at].is_a? Time
+ end
end
script: foo
script_version: master
+has_component_with_completed_jobs:
+ # Test that the job "started_at" and "finished_at" fields are
+ # parsed into Time fields when rendering. This job must *not*
+ # have its own fixture; the point is to force the
+ # pipeline_instances_controller_test in Workbench to parse
+ # the "components" field.
+ state: Complete
+ uuid: zzzzz-d1hrv-i3e77t9z5y8j9cc
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ components:
+ foo:
+ script: foo
+ script_version: master
+ script_parameters: {}
+ job:
+ uuid: zzzzz-8i9sb-rft1xdewxkwgxnz
+ script_version: master
+      started_at: <%= 10.minutes.ago.to_s(:db) %>
+      finished_at: <%= 9.minutes.ago.to_s(:db) %>
+
has_job:
name: pipeline_with_job
state: Ready
so request handlers do not run concurrently unless the lock is explicitly released
using "with llfuse.lock_released:"'''
- def __init__(self, uid, gid):
+ def __init__(self, uid, gid, encoding="utf-8"):
super(Operations, self).__init__()
self.inodes = Inodes()
self.uid = uid
self.gid = gid
+ self.encoding = encoding
# dict of inode to filehandle
self._filehandles = {}
return entry
def lookup(self, parent_inode, name):
+ name = unicode(name, self.encoding)
_logger.debug("arv-mount lookup: parent_inode %i name %s",
parent_inode, name)
inode = None
e = off
while e < len(handle.entry):
if handle.entry[e][1].inode in self.inodes:
- yield (handle.entry[e][0], self.getattr(handle.entry[e][1].inode), e+1)
+ try:
+ yield (handle.entry[e][0].encode(self.encoding), self.getattr(handle.entry[e][1].inode), e+1)
+ except UnicodeEncodeError:
+ pass
e += 1
def releasedir(self, fh):
parser.add_argument('--debug', action='store_true', help="""Debug mode""")
parser.add_argument('--logfile', help="""Write debug logs and errors to the specified file (default stderr).""")
parser.add_argument('--foreground', action='store_true', help="""Run in foreground (default is to daemonize unless --exec specified)""", default=False)
+ parser.add_argument('--encoding', type=str, help="Character encoding to use for filesystem, default is utf-8 (see Python codec registry for list of available encodings)", default="utf-8")
parser.add_argument('--exec', type=str, nargs=argparse.REMAINDER,
dest="exec_args", metavar=('command', 'args', '...', '--'),
help="""Mount, run a command, then unmount and exit""")
try:
# Create the request handler
- operations = Operations(os.getuid(), os.getgid())
+ operations = Operations(os.getuid(), os.getgid(), args.encoding)
api = SafeApi(arvados.config)
usr = api.users().current().execute(num_retries=args.retries)