my $have_slurm = exists $ENV{SLURM_JOBID} && exists $ENV{SLURM_NODELIST};
my $job_has_uuid = $jobspec =~ /^[-a-z\d]+$/;
+my $local_job = !$job_has_uuid;
$SIG{'HUP'} = sub
my $arv = Arvados->new;
-my $metastream = Warehouse::Stream->new;
+my $metastream = Warehouse::Stream->new(whc => Warehouse->new);
$metastream->clear;
$metastream->write_start('log.txt');
-my $User = {};
+my $User = $arv->{'users'}->{'current'}->execute;
+
my $Job = {};
my $job_id;
my $dbh;
if ($job_has_uuid)
{
$Job = $arv->{'jobs'}->{'get'}->execute('uuid' => $jobspec);
- $User = $arv->{'users'}->{'current'}->execute;
if (!$force_unlock) {
if ($Job->{'is_locked_by'}) {
croak("Job is locked: " . $Job->{'is_locked_by'});
qw(script script_version script_parameters);
}
- if (!defined $Job->{'uuid'}) {
- chomp ($Job->{'uuid'} = sprintf ("%s-t%d-p%d", `hostname -s`, time, $$));
- }
+ $Job->{'is_locked_by'} = $User->{'uuid'};
+ $Job->{'started_at'} = gmtime;
+
+ $Job = $arv->{'jobs'}->{'create'}->execute('job' => $Job);
+
+ $job_has_uuid = 1;
}
$job_id = $Job->{'uuid'};
'todo' => 1,
'running' => 0,
'done' => 0 };
- unless ($Job->save() && $Job->{'is_locked_by'} == $User->{'uuid'}) {
- croak("Error while updating / locking job");
+ if ($job_has_uuid) {
+ unless ($Job->save() && $Job->{'is_locked_by'} eq $User->{'uuid'}) {
+ croak("Error while updating / locking job");
+ }
}
}
my $build_script;
-do {
- local $/ = undef;
- $build_script = <DATA>;
-};
-$ENV{"CRUNCH_SRC_COMMIT"} = $Job->{revision};
+$ENV{"CRUNCH_SRC_COMMIT"} = $Job->{script_version};
-my $skip_install = (!$have_slurm && $Job->{revision} =~ m{^/});
+my $skip_install = ($local_job && $Job->{script_version} =~ m{^/});
if ($skip_install)
{
- $ENV{"CRUNCH_SRC"} = $Job->{revision};
+ $ENV{"CRUNCH_SRC"} = $Job->{script_version};
}
else
{
- Log (undef, "Install revision ".$Job->{revision});
+ do {
+ local $/ = undef;
+ $build_script = <DATA>;
+ };
+ Log (undef, "Install revision ".$Job->{script_version});
my $nodelist = join(",", @node);
# Clean out crunch_tmp/work and crunch_tmp/opt
$ENV{"CRUNCH_INSTALL"} = "$ENV{CRUNCH_TMP}/opt";
my $commit;
+ my $git_archive;
my $treeish = $Job->{'script_version'};
my $repo = $git_dir || $ENV{'CRUNCH_DEFAULT_GIT_DIR'};
# Todo: let script_version specify repository instead of expecting
chomp $gitlog;
if ($gitlog =~ /^[a-f0-9]{40}$/) {
$commit = $gitlog;
- Log (undef, "Using commit $commit for revision $treeish");
+ Log (undef, "Using commit $commit for script_version $treeish");
}
}
Log (undef, "Using commit $commit for tree-ish $treeish");
if ($commit ne $treeish) {
$Job->{'script_version'} = $commit;
- $Job->save() or croak("Error while updating job");
+ !$job_has_uuid or $Job->save() or croak("Error while updating job");
}
}
}
$ENV{"CRUNCH_SRC_COMMIT"} = $commit;
@execargs = ("sh", "-c",
"mkdir -p $ENV{CRUNCH_INSTALL} && cd $ENV{CRUNCH_TMP} && perl -");
+ $git_archive = `cd $ENV{CRUNCH_SRC} && git archive $commit`;
}
else {
croak ("could not figure out commit id for $treeish");
my $installpid = fork();
if ($installpid == 0)
{
- srun (\@srunargs, \@execargs, {}, $build_script);
+ srun (\@srunargs, \@execargs, {}, $build_script . $git_archive);
exit (1);
}
while (1)
my @execargs = qw(sh);
my $build_script_to_send = "";
my $command =
- "mkdir -p $ENV{CRUNCH_TMP}/revision "
+ "mkdir -p $ENV{CRUNCH_WORK} $ENV{CRUNCH_TMP} "
."&& cd $ENV{CRUNCH_TMP} ";
if ($build_script)
{
$command .=
"&& perl -";
}
- elsif (!$skip_install)
- {
- $command .=
- "&& "
- ."( "
- ." [ -e '$ENV{CRUNCH_INSTALL}/.tested' ] "
- ."|| "
- ." ( svn export --quiet '$ENV{INSTALL_REPOS}/installrevision' "
- ." && ./installrevision "
- ." ) "
- .") ";
- }
$ENV{"PYTHONPATH"} = "$ENV{CRUNCH_SRC}/sdk/python"; # xxx hack
$command .=
"&& exec $ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
freeze();
$Job->{'output'} = &collate_output();
$Job->{'success'} = $Job->{'output'} && $success;
-$Job->save;
+$Job->save if $job_has_uuid;
if ($Job->{'output'})
{
$Job->{'tasks_summary'}->{'todo'} = $todo;
$Job->{'tasks_summary'}->{'done'} = $done;
$Job->{'tasks_summary'}->{'running'} = $running;
- $Job->save;
+ $Job->save if $job_has_uuid;
Log (undef, "status: $done done, $running running, $todo todo");
$progress_is_dirty = 0;
}
{
Log (undef, "output $joboutput");
$Job->{'output'} = $joboutput;
- $Job->save;
+ $Job->save if $job_has_uuid;
}
else
{
sub cleanup
{
return if !$job_has_uuid;
- $Job->reload;
+ $Job->reload if $job_has_uuid;
$Job->{'running'} = 0;
$Job->{'success'} = 0;
$Job->{'finished_at'} = gmtime;
- $Job->save;
+ $Job->save if $job_has_uuid;
}
undef $metastream if !$justcheckpoint; # otherwise Log() will try to use it
Log (undef, "meta key is $loglocator");
$Job->{'log'} = $loglocator;
- $Job->save;
+ $Job->save if $job_has_uuid;
}
{
$Job->{$_} = $frozenjob->{$_};
}
- $Job->save;
+ $Job->save if $job_has_uuid;
}
exit 0;
}
+unlink "$destdir.commit";
open STDOUT, ">", "$destdir.log";
open STDERR, ">&STDOUT";
-if (-d "$destdir/.git") {
- chdir $destdir or die "chdir $destdir: $!";
- if (0 != system (qw(git remote set-url origin), $repo)) {
- # awful... for old versions of git that don't know "remote set-url"
- shell_or_die (q(perl -pi~ -e '$_="\turl = ).$repo.q(\n" if /url = /' .git/config));
- }
-}
-elsif ($repo && $commit)
-{
- shell_or_die('git', 'clone', $repo, $destdir);
- chdir $destdir or die "chdir $destdir: $!";
- shell_or_die(qw(git config clean.requireForce false));
-}
-else {
- die "$destdir does not exist, and no repo/commit specified -- giving up";
-}
-
-if ($commit) {
- unlink "$destdir.commit";
- shell_or_die (qw(git stash));
- shell_or_die (qw(git clean -d -x));
- shell_or_die (qw(git fetch origin));
- shell_or_die (qw(git checkout), $commit);
+mkdir $destdir;
+open TARX, "|-", "tar", "-C", $destdir, "-xf", "-" or die "could not start 'tar -C $destdir -xf -': $!";
+print TARX <DATA>;
+if(!close(TARX)) {
+ die "'tar -C $destdir -xf -' exited $?: $!";
}
my $pwd;
system (@_) == 0
or die "@_ failed: $! exit 0x".sprintf("%x",$?);
}
+
+__DATA__