Skip to content

Commit

Permalink
Fixes #29 changed IN-CORE Dataset DAO and FileStorage implementation …
Browse files Browse the repository at this point in the history
…with latest API (#30)

* Fixes #29 changed IN-CORE Dataset DAO and FileStorage implementation to use latest data service API
* Updated command line executor and kubernetes executor based on changes to IN-CORE Dataset DAO and file storage
* Updated how the IN-CORE token is fetched and verified that it exists before reading it
* Verified that the file being deleted is in the temp directory space before calling delete
  • Loading branch information
navarroc authored Sep 24, 2024
1 parent 2a7afce commit c841266
Show file tree
Hide file tree
Showing 7 changed files with 265 additions and 70 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).


### Changed
- IN-CORE Dataset DAO and FileStorage implementation to use latest API [#29](https://github.com/ncsa/datawolf/issues/29)
- Kubernetes executor prints exception [#23](https://github.com/ncsa/datawolf/issues/23)

## [4.6.0] - 2023-02-15
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -176,9 +176,14 @@ public void execute(File cwd) throws AbortException, FailedException {
throw (new FailedException("Could not get input file.", e));
}
} else {

// Create a folder for the datasets
File inputFolder = new File(filename);
if (inputFolder.exists() && inputFolder.getAbsolutePath().startsWith(System.getProperty("java.io.tmpdir"))) {
// For single file, a tmp file got created above; however in this case, we need
// a temporary folder to store the files
inputFolder.delete();
}

if (!inputFolder.mkdirs()) {
throw (new FailedException("Could not create folder for input files"));
}
Expand Down Expand Up @@ -257,6 +262,7 @@ public void execute(File cwd) throws AbortException, FailedException {
sb.append(" ");
}
println("Executing : " + sb.toString());
logger.debug("Executing : " + sb.toString());

// create the process builder
ProcessBuilder pb = new ProcessBuilder(command);
Expand Down Expand Up @@ -375,11 +381,11 @@ public void execute(File cwd) throws AbortException, FailedException {
ds.setTitle(step.getTool().getOutput(impl.getCaptureStdOut()).getTitle());
ds.setCreator(execution.getCreator());

ds = datasetDao.save(ds);

ByteArrayInputStream bais = new ByteArrayInputStream(stdout.toString().getBytes("UTF-8"));
FileDescriptor fd = fileStorage.storeFile(step.getTool().getOutput(impl.getCaptureStdOut()).getTitle(), bais, execution.getCreator(), ds);

ds = datasetDao.save(ds);

execution.setDataset(step.getOutputs().get(impl.getCaptureStdOut()), ds.getId());
saveExecution = true;
} catch (IOException exc) {
Expand All @@ -391,11 +397,11 @@ public void execute(File cwd) throws AbortException, FailedException {
Dataset ds = new Dataset();
ds.setTitle(step.getTool().getOutput(impl.getCaptureStdErr()).getTitle());
ds.setCreator(execution.getCreator());
ds = datasetDao.save(ds);

ByteArrayInputStream bais = new ByteArrayInputStream(stderr.toString().getBytes("UTF-8"));
FileDescriptor fd = fileStorage.storeFile(step.getTool().getOutput(impl.getCaptureStdErr()).getTitle(), bais, execution.getCreator(), ds);

ds = datasetDao.save(ds);

execution.setDataset(step.getOutputs().get(impl.getCaptureStdErr()), ds.getId());
saveExecution = true;
Expand Down Expand Up @@ -425,15 +431,15 @@ public boolean accept(File pathname) {
for (File file : files) {
logger.debug("adding files to a dataset: " + file);
FileInputStream fis = new FileInputStream(file);
fileStorage.storeFile(file.getName(), fis, ds.getCreator(), ds);
fileStorage.storeFile(file.getName(), fis, execution.getCreator(), ds);
fis.close();
}

} else {
FileInputStream fis = new FileInputStream(entry.getValue());
fileStorage.storeFile(new File(entry.getValue()).getName(), fis, ds.getCreator(), ds);
fileStorage.storeFile(new File(entry.getValue()).getName(), fis, execution.getCreator(), ds);
}
ds = datasetDao.save(ds);
// ds = datasetDao.save(ds);

execution.setDataset(step.getOutputs().get(entry.getKey()), ds.getId());
saveExecution = true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,10 +159,17 @@ public State submitRemoteJob(File cwd) throws AbortException, FailedException {

// Create a folder for the datasets
File inputFolder = new File(filename);
if (inputFolder.exists()) {
// For single file, a tmp file got created above; however in this case, we need
// a temporary folder to store the files
inputFolder.delete();
}

if (!inputFolder.mkdirs()) {
throw (new FailedException("Could not create folder for input files"));
}


int duplicate = 1;
for (FileDescriptor fd : ds.getFileDescriptors()) {
String localFileName = fd.getFilename();
Expand Down Expand Up @@ -401,12 +408,11 @@ public State checkRemoteJob() throws FailedException {
Dataset ds = new Dataset();
ds.setTitle(step.getTool().getOutput(impl.getCaptureStdOut()).getTitle());
ds.setCreator(execution.getCreator());
ds = datasetDao.save(ds);

ByteArrayInputStream bais = new ByteArrayInputStream(lastlog.getBytes("UTF-8"));
FileDescriptor fd = fileStorage.storeFile(step.getTool().getOutput(impl.getCaptureStdOut()).getTitle(), bais, execution.getCreator(), ds);

ds = datasetDao.save(ds);

execution.setDataset(step.getOutputs().get(impl.getCaptureStdOut()), ds.getId());
saveExecution = true;
}
Expand Down Expand Up @@ -436,15 +442,15 @@ public boolean accept(File pathname) {
for (File file : files) {
logger.debug("adding files to a dataset: " + file);
FileInputStream fis = new FileInputStream(file);
fileStorage.storeFile(file.getName(), fis, ds.getCreator(), ds);
fileStorage.storeFile(file.getName(), fis, execution.getCreator(), ds);
fis.close();
}

} else {
FileInputStream fis = new FileInputStream(entry.getValue());
fileStorage.storeFile(new File(entry.getValue()).getName(), fis, ds.getCreator(), ds);
fileStorage.storeFile(new File(entry.getValue()).getName(), fis, execution.getCreator(), ds);
}
ds = datasetDao.save(ds);
// ds = datasetDao.save(ds);

execution.setDataset(step.getOutputs().get(entry.getKey()), ds.getId());
saveExecution = true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@

public class IncoreDataset {

// Auth API
public static final String X_AUTH_USERINFO = "x-auth-userinfo";
public static final String X_AUTH_USERGROUP = "x-auth-usergroup";

// Datasets API
public static final String DATASETS_ENDPOINT = "data/api/datasets";
public static final String PARENT_DATASET = "parentdataset";
Expand Down Expand Up @@ -53,6 +57,7 @@ public static Dataset getDataset(JsonObject datasetProperties, Person creator) {
dataset.setDescription(description);
dataset.setTitle(title);


for (int index = 0; index < fileDescriptors.size(); index++) {
JsonObject fileDescriptor = fileDescriptors.get(index).getAsJsonObject();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,14 @@ public abstract class AbstractIncoreDao<T, ID extends Serializable> implements I
@Named("incore.server")
private String server;

@Inject
@Named("incore.group")
private String group;

@Inject
@Named("incore.user")
private String incoreUser;

/**
* IN-CORE Service endpoint
*
Expand All @@ -25,4 +33,20 @@ public String getServer() {
return this.server;
}

/**
* Primary IN-CORE user group
* @return
*/
public String getGroup() {
return this.group;
}

/**
* DataWolf User that can access data on IN-CORE services
* @return
*/
public String getIncoreUser() {
return this.incoreUser;
}

}
Loading

0 comments on commit c841266

Please sign in to comment.