use Fcntl qw(F_GETFL F_SETFL O_NONBLOCK);
use Arvados;
use Getopt::Long;
-use Warehouse;
-use Warehouse::Stream;
-use IPC::System::Simple qw(capturex);
+use IPC::Open2;
+use IO::Select;
+use File::Temp;
$ENV{"TMPDIR"} ||= "/tmp";
unless (defined $ENV{"CRUNCH_TMP"}) {
}
}
$ENV{"JOB_WORK"} = $ENV{"CRUNCH_TMP"} . "/work";
+$ENV{"CRUNCH_INSTALL"} = "$ENV{CRUNCH_TMP}/opt";
$ENV{"CRUNCH_WORK"} = $ENV{"JOB_WORK"}; # deprecated
mkdir ($ENV{"JOB_WORK"});
-my $arv = Arvados->new;
+my $arv = Arvados->new('apiVersion' => 'v1');
my $metastream;
my $User = $arv->{'users'}->{'current'}->execute;
}
$job_id = $Job->{'uuid'};
-$metastream = Warehouse::Stream->new(whc => new Warehouse);
-$metastream->clear;
-$metastream->name('.');
-$metastream->write_start($job_id . '.log.txt');
-
+my $keep_logfile = $job_id . '.log.txt';
+my $local_logfile = File::Temp->new();
$Job->{'runtime_constraints'} ||= {};
$Job->{'runtime_constraints'}->{'max_tasks_per_node'} ||= 0;
# If --skip-install was given, run the job scripts straight out of the
# existing source tree instead of fetching/building into CRUNCH_TMP.
if ($skip_install)
{
  $ENV{"CRUNCH_SRC"} = $Job->{script_version};
+  # Even when skipping the main install step, build the bundled Python
+  # SDK into a virtualenv under $CRUNCH_TMP/opt so crunch scripts can
+  # import it.
+  for my $src_path ("$ENV{CRUNCH_SRC}/arvados/sdk/python") {
+    if (-d $src_path) {
+      system("virtualenv", "$ENV{CRUNCH_TMP}/opt") == 0
+          or croak ("virtualenv $ENV{CRUNCH_TMP}/opt failed: exit ".($?>>8));
+      # \$CRUNCH_TMP is backslash-escaped so the shell (not Perl)
+      # expands it when the command runs.
+      system ("cd $src_path && ./build.sh && \$CRUNCH_TMP/opt/bin/python setup.py install")
+      == 0
+          or croak ("setup.py in $src_path failed: exit ".($?>>8));
+    }
+  }
}
else
{
$ENV{"CRUNCH_SRC_COMMIT"} = $Job->{script_version};
$ENV{"CRUNCH_SRC"} = "$ENV{CRUNCH_TMP}/src";
- $ENV{"CRUNCH_INSTALL"} = "$ENV{CRUNCH_TMP}/opt";
my $commit;
my $git_archive;
$ENV{"TASK_SLOT_NODE"} = $slot[$childslot]->{node}->{name};
$ENV{"TASK_SLOT_NUMBER"} = $slot[$childslot]->{cpu};
$ENV{"TASK_WORK"} = $ENV{"JOB_WORK"}."/".$slot[$childslot]->{cpu};
+ $ENV{"TASK_KEEPMOUNT"} = $ENV{"TASK_WORK"}."/keep";
$ENV{"TASK_TMPDIR"} = $ENV{"TASK_WORK"}; # deprecated
$ENV{"CRUNCH_NODE_SLOTS"} = $slot[$childslot]->{node}->{ncpus};
+ $ENV{"PATH"} = $ENV{"CRUNCH_INSTALL"} . "/bin:" . $ENV{"PATH"};
$ENV{"GZIP"} = "-n";
my $build_script_to_send = "";
my $command =
"if [ -e $ENV{TASK_WORK} ]; then rm -rf $ENV{TASK_WORK}; fi; "
- ."mkdir -p $ENV{JOB_WORK} $ENV{CRUNCH_TMP} $ENV{TASK_WORK} "
+ ."mkdir -p $ENV{JOB_WORK} $ENV{CRUNCH_TMP} $ENV{TASK_WORK} $ENV{TASK_KEEPMOUNT} "
."&& cd $ENV{CRUNCH_TMP} ";
if ($build_script)
{
$command .=
"&& perl -";
}
- $ENV{"PYTHONPATH"} =~ s{^}{:} if $ENV{"PYTHONPATH"};
- $ENV{"PYTHONPATH"} =~ s{^}{$ENV{CRUNCH_SRC}/sdk/python}; # xxx hack
- $ENV{"PYTHONPATH"} =~ s{$}{:/usr/local/arvados/src/sdk/python}; # xxx hack
$command .=
- "&& exec $ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
+ "&& exec arv-mount $ENV{TASK_KEEPMOUNT} --exec $ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
my @execargs = ('bash', '-c', $command);
srun (\@srunargs, \@execargs, undef, $build_script_to_send);
exit (111);
if ($Job->{'output'})
{
eval {
- my $manifest_text = capturex("whget", $Job->{'output'});
+ my $manifest_text = `arv keep get $Job->{'output'}`;
$arv->{'collections'}->{'create'}->execute('collection' => {
'uuid' => $Job->{'output'},
'manifest_text' => $manifest_text,
} split ("\n", $jobstep[$job]->{stderr});
}
+# fetch_block($hash): read one Keep block, identified by locator $hash,
+# by running `arv keep get` and capturing its stdout.
+# Returns the block data, or undef if nothing could be read.
+sub fetch_block
+{
+  my $hash = shift;
+  my ($child_out, $child_in);
+
+  my $pid = open2($child_out, $child_in, 'arv', 'keep', 'get', $hash);
+  # We never write to the child; close its stdin so it cannot block on it.
+  close ($child_in);
+  # A single sysread() on a pipe returns at most one pipe buffer (often
+  # just 64 KiB), which would silently truncate larger blocks.  Loop
+  # until EOF, a read error, or the 64 MiB Keep block size limit.
+  my $output_block;
+  my $bytes_read = 0;
+  while ($bytes_read < 64 * 1024 * 1024) {
+    my $bytes = sysread($child_out, $output_block,
+                        64 * 1024 * 1024 - $bytes_read, $bytes_read);
+    last if !defined ($bytes) || $bytes == 0; # read error or EOF
+    $bytes_read += $bytes;
+  }
+  waitpid($pid, 0);
+  return $output_block;
+}
# collate_output: combine every task's output into one job output
# manifest by streaming the pieces through `arv keep put --raw`, which
# prints the locator of the stored manifest on its stdout.
sub collate_output
{
-  my $whc = Warehouse->new;
  Log (undef, "collate");
-  $whc->write_start (1);
+
+  my ($child_out, $child_in);
+  # Child reads manifest text on stdin and prints the locator on stdout.
+  my $pid = open2($child_out, $child_in, 'arv', 'keep', 'put', '--raw');
  my $joboutput;
  for (@jobstep)
  {
    # NOTE(review): $output is presumably set per-jobstep in context
    # lines omitted from this hunk — confirm against the full file.
    if ($output !~ /^[0-9a-f]{32}(\+\S+)*$/)
    {
      # Not a bare block locator: treat it as literal manifest text.
      $output_in_keep ||= $output =~ / [0-9a-f]{32}\S*\+K/;
-      $whc->write_data ($output);
+      # NOTE(review): writes to the pipe are unchecked; a dead child
+      # surfaces as SIGPIPE rather than as an error return here.
+      print $child_in $output;
    }
    elsif (@jobstep == 1)
    {
      # Only one task and its output is already a locator: use it as-is.
      $joboutput = $output;
-      $whc->write_finish;
+      last;
    }
-    elsif (defined (my $outblock = $whc->fetch_block ($output)))
+    elsif (defined (my $outblock = fetch_block ($output)))
    {
      # Locator for one of several tasks: inline the block's contents.
      $output_in_keep ||= $outblock =~ / [0-9a-f]{32}\S*\+K/;
-      $whc->write_data ($outblock);
+      print $child_in $outblock;
    }
    else
    {
      # Block could not be fetched: record a marker and fail the job.
-      my $errstr = $whc->errstr;
-      $whc->write_data ("XXX fetch_block($output) failed: $errstr XXX\n");
+      print $child_in "XXX fetch_block($output) failed XXX\n";
      $main::success = 0;
    }
  }
-  $joboutput = $whc->write_finish if !defined $joboutput;
+  if (!defined $joboutput) {
+    # Read the locator printed by `arv keep put`; locators are short,
+    # so one sysread suffices.  NOTE(review): if the child takes more
+    # than 5 seconds, $joboutput stays undef and no output is logged.
+    my $s = IO::Select->new($child_out);
+    sysread($child_out, $joboutput, 64 * 1024 * 1024) if $s->can_read(5);
+  }
+  $child_in->close;
+  waitpid($pid, 0);
+
  if ($joboutput)
  {
    Log (undef, "output $joboutput");
  }
print STDERR ((-t STDERR) ? ($datetime." ".$message) : $message);
- return if !$metastream;
- $metastream->write_data ($datetime . " " . $message);
+ if ($metastream) {
+ print $metastream $datetime . " " . $message;
+ }
}
# save_meta: store the job log in Keep and record its locator on the
# Job record.
sub save_meta
{
  my $justcheckpoint = shift; # false if this will be the last meta saved
-  my $m = $metastream;
-  $m = $m->copy if $justcheckpoint;
-  $m->write_finish;
-  my $whc = Warehouse->new;
-  my $loglocator = $whc->store_block ($m->as_string);
-  $arv->{'collections'}->{'create'}->execute('collection' => {
-    'uuid' => $loglocator,
-    'manifest_text' => $m->as_string,
-  });
-  undef $metastream if !$justcheckpoint; # otherwise Log() will try to use it
+  return if $justcheckpoint; # checkpointing is not relevant post-Warehouse.pm
+
+  $local_logfile->flush;
+  # NOTE(review): $cmd interpolates filenames into a shell command;
+  # a list-form system/open2 would avoid shell-metacharacter surprises.
+  my $cmd = "arv keep put --filename $keep_logfile ". $local_logfile->filename;
+  my $loglocator = `$cmd`;
+  die "system $cmd failed: $?" if $?;
+
+  # NOTE(review): backtick output keeps its trailing newline — consider
+  # chomp $loglocator before storing it on the Job record.
+  $local_logfile = undef; # the temp file is automatically deleted
+  # NOTE(review): Log() below still writes via $metastream after the
+  # temp-file handle is dropped — confirm this is intentional.
  Log (undef, "log manifest is $loglocator");
  $Job->{'log'} = $loglocator;
  $Job->update_attributes('log', $loglocator) if $job_has_uuid;
{
croak ("Thaw not implemented");
- my $whc;
- my $key = shift;
- Log (undef, "thaw from $key");
-
- @jobstep = ();
- @jobstep_done = ();
- @jobstep_todo = ();
- @jobstep_tomerge = ();
- $jobstep_tomerge_level = 0;
- my $frozenjob = {};
-
- my $stream = new Warehouse::Stream ( whc => $whc,
- hash => [split (",", $key)] );
- $stream->rewind;
- while (my $dataref = $stream->read_until (undef, "\n\n"))
- {
- if ($$dataref =~ /^job /)
- {
- foreach (split ("\n", $$dataref))
- {
- my ($k, $v) = split ("=", $_, 2);
- $frozenjob->{$k} = freezeunquote ($v);
- }
- next;
- }
-
- if ($$dataref =~ /^merge (\d+) (.*)/)
- {
- $jobstep_tomerge_level = $1;
- @jobstep_tomerge
- = map { freezeunquote ($_) } split ("\n", freezeunquote($2));
- next;
- }
-
- my $Jobstep = { };
- foreach (split ("\n", $$dataref))
- {
- my ($k, $v) = split ("=", $_, 2);
- $Jobstep->{$k} = freezeunquote ($v) if $k;
- }
- $Jobstep->{'failures'} = 0;
- push @jobstep, $Jobstep;
-
- if ($Jobstep->{exitcode} eq "0")
- {
- push @jobstep_done, $#jobstep;
- }
- else
- {
- push @jobstep_todo, $#jobstep;
- }
- }
-
- foreach (qw (script script_version script_parameters))
- {
- $Job->{$_} = $frozenjob->{$_};
- }
- $Job->save if $job_has_uuid;
+ # my $whc;
+ # my $key = shift;
+ # Log (undef, "thaw from $key");
+
+ # @jobstep = ();
+ # @jobstep_done = ();
+ # @jobstep_todo = ();
+ # @jobstep_tomerge = ();
+ # $jobstep_tomerge_level = 0;
+ # my $frozenjob = {};
+
+ # my $stream = new Warehouse::Stream ( whc => $whc,
+ # hash => [split (",", $key)] );
+ # $stream->rewind;
+ # while (my $dataref = $stream->read_until (undef, "\n\n"))
+ # {
+ # if ($$dataref =~ /^job /)
+ # {
+ # foreach (split ("\n", $$dataref))
+ # {
+ # my ($k, $v) = split ("=", $_, 2);
+ # $frozenjob->{$k} = freezeunquote ($v);
+ # }
+ # next;
+ # }
+
+ # if ($$dataref =~ /^merge (\d+) (.*)/)
+ # {
+ # $jobstep_tomerge_level = $1;
+ # @jobstep_tomerge
+ # = map { freezeunquote ($_) } split ("\n", freezeunquote($2));
+ # next;
+ # }
+
+ # my $Jobstep = { };
+ # foreach (split ("\n", $$dataref))
+ # {
+ # my ($k, $v) = split ("=", $_, 2);
+ # $Jobstep->{$k} = freezeunquote ($v) if $k;
+ # }
+ # $Jobstep->{'failures'} = 0;
+ # push @jobstep, $Jobstep;
+
+ # if ($Jobstep->{exitcode} eq "0")
+ # {
+ # push @jobstep_done, $#jobstep;
+ # }
+ # else
+ # {
+ # push @jobstep_todo, $#jobstep;
+ # }
+ # }
+
+ # foreach (qw (script script_version script_parameters))
+ # {
+ # $Job->{$_} = $frozenjob->{$_};
+ # }
+ # $Job->save if $job_has_uuid;
}
# Build-script prologue: merge stderr into stdout, then unpack the git
# archive (delivered on the DATA handle) into $destdir.
open STDERR, ">&STDOUT";
mkdir $destdir;
-open TARX, "|-", "tar", "-C", $destdir, "-xf", "-";
-print TARX <DATA>;
-if(!close(TARX)) {
-  die "'tar -C $destdir -xf -' exited $?: $!";
+# Slurp DATA first so tar is only invoked when an archive was actually
+# appended to the script; running tar on empty input would fail.
+my @git_archive_data = <DATA>;
+if (@git_archive_data) {
+  open TARX, "|-", "tar", "-C", $destdir, "-xf", "-";
+  print TARX @git_archive_data;
+  if(!close(TARX)) {
+    die "'tar -C $destdir -xf -' exited $?: $!";
+  }
}
# Determine the install prefix: $CRUNCH_INSTALL, or ./opt as a fallback.
my $pwd;
chomp ($pwd = `pwd`);
my $install_dir = $ENV{"CRUNCH_INSTALL"} || "$pwd/opt";
mkdir $install_dir;
+
+# If the unpacked source tree bundles the Arvados Python SDK, install
+# it into a virtualenv at $install_dir so crunch scripts can import it.
+for my $src_path ("$destdir/arvados/sdk/python") {
+  if (-d $src_path) {
+    shell_or_die ("virtualenv", $install_dir);
+    shell_or_die ("cd $src_path && ./build.sh && $install_dir/bin/python setup.py install");
+  }
+}
+
if (-e "$destdir/crunch_scripts/install") {
shell_or_die ("$destdir/crunch_scripts/install", $install_dir);
} elsif (!-e "./install.sh" && -e "./tests/autotests.sh") {