This repository has been archived by the owner on Apr 22, 2024. It is now read-only.

Merge pull request #122 from SciCatProject/hotfix/JobToDatasetFlagTransferBugfix

Now the flags are explicitly transferred to the dataset directly
lukegorman authored Jan 8, 2019
2 parents b4c6a2c + 4caf8a5 commit 1c15c66
Showing 4 changed files with 49 additions and 20 deletions.
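
Summary of the fix (a condensed sketch, not the committed code): a bulk updateAll on DatasetLifecycle does not trigger the per-instance 'after save' transfer of the status flags, so job.js now updates the linked Dataset documents explicitly after the lifecycle update succeeds. The model names, the pid selector and the flag fields are taken from the diff below; the DatasetLifecycle where clause is collapsed in the diff, so the id selector here is an assumption.

let DatasetLifecycle = app.models.DatasetLifecycle;
let Dataset = app.models.Dataset;

// 1. mark the lifecycle entries as scheduled for archiving
DatasetLifecycle.updateAll(
  { id: { inq: idList } },   // assumed selector, collapsed in the diff below
  { archivable: false, retrievable: false, archiveStatusMessage: "scheduledForArchiving" },
  ctx.options,
  function (err) {
    if (err) return next(err);
    // 2. updateAll bypasses the 'after save' observer, so copy the flags
    //    to the Dataset collection explicitly (this is the actual hotfix)
    Dataset.updateAll(
      { pid: { inq: idList } },
      { archivable: false, retrievable: false },
      ctx.options,
      function (err2) {
        if (err2) return next(err2);
        publishJob(job, ctx, next);
      });
  });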
3 changes: 2 additions & 1 deletion common/models/dataset-lifecycle.js
@@ -48,7 +48,8 @@ module.exports = function(Datasetlifecycle) {
utils.addOwnerGroup(ctx, next)
})

// transfer status flags to linked dataset
// transfer status flags to linked dataset.
// Warning: when using the updateAll API endpoint the context is missing !
Datasetlifecycle.observe('after save', (ctx, next) => {
var Dataset = app.models.Dataset
var instance = ctx.instance
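
For context, a minimal sketch (an assumed shape, not the committed observer) of why this transfer cannot run on bulk updates: LoopBack invokes 'after save' with ctx.instance only for single-instance saves, while updateAll only provides ctx.where and ctx.data, so there is no instance whose flags could be copied to the linked Dataset. The datasetId link field and the updateAttributes call are assumptions for illustration.

Datasetlifecycle.observe('after save', (ctx, next) => {
    var Dataset = app.models.Dataset
    var instance = ctx.instance
    if (instance) {
        // single-instance save: the linked dataset can be updated from the instance
        Dataset.findById(instance.datasetId, {}, ctx.options, function (err, dataset) {  // datasetId link is assumed
            if (err || !dataset) return next(err)
            dataset.updateAttributes({
                archivable: instance.archivable,
                retrievable: instance.retrievable
            }, ctx.options, next)
        })
    } else {
        // bulk updateAll: only ctx.where / ctx.data are present, no instance,
        // which is why job.js now updates Dataset itself (see the next file)
        next()
    }
})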
39 changes: 29 additions & 10 deletions common/models/job.js
@@ -48,7 +48,7 @@ function publishJob(job, ctx, next) {
}

function MarkDatasetsAsScheduled(job, ctx, idList, next) {
//console.log('Searching for datasetlifecycle with ids:', idList)

let DatasetLifecycle = app.models.DatasetLifecycle;
DatasetLifecycle.updateAll(
{
@@ -62,14 +62,35 @@ function MarkDatasetsAsScheduled(job, ctx, idList, next) {
archiveStatusMessage: "scheduledForArchiving"
}
, ctx.options, function (err, p) {
// console.log("============= DatasetLifecycle Result:", JSON.stringify(p))
if (err) {
var e = new Error();
e.statusCode = 400;
e.statusCode = 404;
e.message = 'Can not find all needed DatasetLifecycle entries - no archive job sent:\n' + JSON.stringify(err)
next(e);
} else {
publishJob(job, ctx, next)
// since updateAll does not send context and therefore the DatasetLifecycle updates are not copied over
// to Dataset one has to do the update here on Dataset in addition
let Dataset = app.models.Dataset;
Dataset.updateAll(
{
pid: {
inq: idList
}
},
{
archivable: false,
retrievable: false
}
, ctx.options, function (err, p) {
if (err) {
var e = new Error();
e.statusCode = 404;
e.message = 'Can not find all needed Dataset entries - no archive job sent:\n' + JSON.stringify(err)
next(e);
} else {
publishJob(job, ctx, next)
}
});
}
});
}
@@ -84,14 +105,14 @@ function TestArchiveJobs(job, ctx, idList, next) {
}
}
}, ctx.options, function (err, p) {
//console.log("============= Archive Result:", JSON.stringify(p))
if (p.length > 0) {
var e = new Error();
e.statusCode = 400;
e.statusCode = 409;
e.message = 'The following datasets are not in archivable state - no archive job sent:\n' + JSON.stringify(p)
next(e);
} else {
// mark all Datasets as in state scheduledForArchiving, archivable=false
// console.log("mark datasets as to be archived: ctx.options,idlist",ctx.options,idList)
MarkDatasetsAsScheduled(job, ctx, idList, next)
}
});
@@ -127,7 +148,7 @@ function TestRetrieveJobs(job, ctx, idList, next) {
return next(err2)
} else {
var e = new Error();
e.statusCode = 400;
e.statusCode = 409;
e.message = 'The following datasets are not in retrievable state - no retrieve job sent:\n' + JSON.stringify(pmiss)
return next(e);
}
@@ -139,7 +160,6 @@ function TestRetrieveJobs(job, ctx, idList, next) {
}

function TestAllDatasets(job, ctx, idList, next) {
//console.log(" ====== find datasets with id", idList)
let Dataset = app.models.Dataset;
Dataset.find({
where: {
@@ -151,7 +171,7 @@ function TestAllDatasets(job, ctx, idList, next) {
let to = ctx.instance.emailJobInitiator
if (err || (p.length != idList.length)) {
var e = new Error();
e.statusCode = 400;
e.statusCode = 404;
e.message = 'At least one of the datasets could not be found - no Job sent';
// TODO should I send an email here ? Only if triggered by autoarchive option ?
// subjectText =
@@ -184,7 +204,6 @@ module.exports = function (Job) {

Job.observe('before save', (ctx, next) => {
if (ctx.instance) {

// replace email with that from userIdentity
var UserIdentity = app.models.UserIdentity;
var userId = ctx.options.accessToken.userId;
25 changes: 17 additions & 8 deletions test/MessageHistory.js
@@ -255,19 +255,28 @@ describe('Test MessageHistory in jobs', () => {
});
});

it('Adds a new retrieve job request', function(done) {
it('Adds a new archive job request for same data which should fail', function(done) {
request(app)
.post('/api/v2/Jobs?access_token=' + accessTokenIngestor)
.send(testArchiveJob)
.set('Accept', 'application/json')
.expect(409)
.expect('Content-Type', /json/)
.end((err, res) => {
res.body.should.have.property('error');
done();
});
});

it('Adds a new retrieve job request on same dataset, which should fail as well because not yet retrievable', function(done) {
request(app)
.post('/api/v2/Jobs?access_token=' + accessTokenIngestor)
.send(testRetrieveJob)
.set('Accept', 'application/json')
.expect(200)
.expect(409)
.expect('Content-Type', /json/)
.end(function(err, res) {
if (err)
return done(err);
res.body.should.have.property('type').and.be.string;
idJob = res.body['id']
//console.log("Jobid:", idJob)
.end((err, res) => {
res.body.should.have.property('error');
done();
});
});
2 changes: 1 addition & 1 deletion test/config/tests.json
@@ -89,7 +89,7 @@
{
"method": "POST",
"route": "Jobs",
"expect": 400,
"expect": 404,
"authenticate": "admin",
"body": {
"emailJobInitiator": "[email protected]",
