diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceConfig.java b/src/main/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceConfig.java index cd88ac1aabf..9cdef9be1d1 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceConfig.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceConfig.java @@ -12,10 +12,6 @@ import org.springframework.context.annotation.Profile; import org.springframework.scheduling.annotation.EnableAsync; -import com.github.jmchilton.blend4j.galaxy.JobsClient; -import com.github.jmchilton.blend4j.galaxy.ToolsClient; -import com.google.common.collect.Lists; - import ca.corefacility.bioinformatics.irida.config.services.IridaPluginConfig; import ca.corefacility.bioinformatics.irida.pipeline.results.AnalysisSubmissionSampleProcessor; import ca.corefacility.bioinformatics.irida.pipeline.results.impl.AnalysisSubmissionSampleProcessorImpl; @@ -28,6 +24,7 @@ import ca.corefacility.bioinformatics.irida.repositories.sample.SampleRepository; import ca.corefacility.bioinformatics.irida.service.AnalysisService; import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; +import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.analysis.execution.AnalysisExecutionService; import ca.corefacility.bioinformatics.irida.service.analysis.execution.AnalysisExecutionServiceAspect; @@ -42,21 +39,23 @@ import ca.corefacility.bioinformatics.irida.service.sample.SampleService; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; +import com.github.jmchilton.blend4j.galaxy.JobsClient; +import com.github.jmchilton.blend4j.galaxy.ToolsClient; +import com.google.common.collect.Lists; + /** * 
Configuration for an AnalysisExecutionService class. - * - * */ @Configuration @EnableAsync(order = AnalysisExecutionServiceConfig.ASYNC_ORDER) @Profile({ "dev", "prod", "it", "analysis", "ncbi", "processing", "sync", "web" }) public class AnalysisExecutionServiceConfig { - + private static final Logger logger = LoggerFactory.getLogger(AnalysisExecutionServiceConfig.class); /** - * The order for asynchronous tasks. In particular, defines the order for - * methods in {@link AnalysisExecutionServiceGalaxyAsync}. + * The order for asynchronous tasks. In particular, defines the order for methods in + * {@link AnalysisExecutionServiceGalaxyAsync}. */ public static final int ASYNC_ORDER = AnalysisExecutionServiceAspect.ANALYSIS_EXECUTION_ASPECT_ORDER - 1; @@ -68,28 +67,31 @@ public class AnalysisExecutionServiceConfig { @Autowired private IridaWorkflowsService iridaWorkflowsService; - + @Autowired private AnalysisParameterServiceGalaxy analysisParameterServiceGalaxy; - + @Autowired private GalaxyHistoriesService galaxyHistoriesService; - + @Autowired private GalaxyLibrariesService galaxyLibrariesService; - + @Autowired private GalaxyWorkflowService galaxyWorkflowService; - + @Autowired private SequencingObjectService sequencingObjectService; - + + @Autowired + private GenomeAssemblyService genomeAssemblyService; + @Autowired private ToolsClient toolsClient; - + @Autowired private JobsClient jobsClient; - + @Autowired private IridaPluginConfig.IridaPluginList pipelinePlugins; @@ -131,7 +133,7 @@ public AnalysisSubmissionSampleProcessor analysisSubmissionSampleProcessor() { return new AnalysisSubmissionSampleProcessorImpl(sampleRepository, analysisSampleUpdaters); } - + @Lazy @Bean public AnalysisExecutionService analysisExecutionService() { @@ -143,23 +145,23 @@ public AnalysisExecutionService analysisExecutionService() { @Bean public AnalysisExecutionServiceGalaxyAsync analysisExecutionServiceGalaxyAsync() { return new 
AnalysisExecutionServiceGalaxyAsync(analysisSubmissionService, analysisService, - galaxyWorkflowService, analysisWorkspaceService(), iridaWorkflowsService, analysisSubmissionSampleProcessor()); + galaxyWorkflowService, analysisWorkspaceService(), iridaWorkflowsService, + analysisSubmissionSampleProcessor()); } - + @Lazy @Bean public AnalysisExecutionServiceGalaxyCleanupAsync analysisExecutionServiceGalaxyCleanupAsync() { - return new AnalysisExecutionServiceGalaxyCleanupAsync(analysisSubmissionService, - galaxyWorkflowService, galaxyHistoriesService, galaxyLibrariesService); + return new AnalysisExecutionServiceGalaxyCleanupAsync(analysisSubmissionService, galaxyWorkflowService, + galaxyHistoriesService, galaxyLibrariesService); } @Lazy @Bean public AnalysisWorkspaceServiceGalaxy analysisWorkspaceService() { - return new AnalysisWorkspaceServiceGalaxy(galaxyHistoriesService, galaxyWorkflowService, - galaxyLibrariesService, iridaWorkflowsService, analysisCollectionServiceGalaxy(), - analysisProvenanceService(), analysisParameterServiceGalaxy, - sequencingObjectService); + return new AnalysisWorkspaceServiceGalaxy(galaxyHistoriesService, galaxyWorkflowService, galaxyLibrariesService, + iridaWorkflowsService, analysisCollectionServiceGalaxy(), analysisProvenanceService(), + analysisParameterServiceGalaxy, sequencingObjectService, genomeAssemblyService); } @Lazy @@ -167,7 +169,7 @@ galaxyLibrariesService, iridaWorkflowsService, analysisCollectionServiceGalaxy() public AnalysisProvenanceServiceGalaxy analysisProvenanceService() { return new AnalysisProvenanceServiceGalaxy(galaxyHistoriesService, toolsClient, jobsClient); } - + @Lazy @Bean public AnalysisCollectionServiceGalaxy analysisCollectionServiceGalaxy() { diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/model/assembly/GenomeAssembly.java b/src/main/java/ca/corefacility/bioinformatics/irida/model/assembly/GenomeAssembly.java index dd41fe46a47..7408d6b25e3 100644 --- 
a/src/main/java/ca/corefacility/bioinformatics/irida/model/assembly/GenomeAssembly.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/model/assembly/GenomeAssembly.java @@ -21,6 +21,7 @@ import ca.corefacility.bioinformatics.irida.model.VersionedFileFields; import ca.corefacility.bioinformatics.irida.model.irida.IridaSequenceFile; import ca.corefacility.bioinformatics.irida.model.joins.impl.SampleGenomeAssemblyJoin; +import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.collect.Lists; @@ -51,6 +52,9 @@ public abstract class GenomeAssembly extends IridaRepresentationModel @OneToMany(fetch = FetchType.LAZY, mappedBy = "genomeAssembly") private List sampleGenomeAssemblies; + @ManyToMany(fetch = FetchType.LAZY, cascade = CascadeType.DETACH, mappedBy = "inputAssemblies") + private List analysisSubmissions; + protected GenomeAssembly() { this.id = null; this.createdDate = null; @@ -64,8 +68,7 @@ public GenomeAssembly(Date createdDate) { @Override public String getLabel() { - return getFile().getFileName() - .toString(); + return getFile().getFileName().toString(); } @Override @@ -128,8 +131,7 @@ public String getFileSize() { @Override public String getFileName() { - return getFile().getFileName() - .toString(); + return getFile().getFileName().toString(); } @Override diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowDescription.java b/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowDescription.java index 078022a07ff..71f37c24135 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowDescription.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowDescription.java @@ -1,16 +1,8 @@ package ca.corefacility.bioinformatics.irida.model.workflow.description; 
-import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.UUID; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlElementWrapper; -import javax.xml.bind.annotation.XmlRootElement; +import java.util.*; + +import javax.xml.bind.annotation.*; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.type.AnalysisType; @@ -19,13 +11,11 @@ /** * Class providing access to generic information about a workflow. - * - * */ @XmlRootElement(name = "iridaWorkflow") @XmlAccessorType(XmlAccessType.FIELD) public class IridaWorkflowDescription { - + @XmlElement(name = "id") private UUID id; @@ -44,7 +34,7 @@ public class IridaWorkflowDescription { @XmlElementWrapper(name = "outputs") @XmlElement(name = "output") private List outputs; - + @XmlElementWrapper(name = "parameters") @XmlElement(name = "parameter") private List parameters; @@ -57,28 +47,19 @@ public IridaWorkflowDescription() { } /** - * Generates a new {@link IridaWorkflowDescription} with the given - * information. + * Generates a new {@link IridaWorkflowDescription} with the given information. * - * @param id - * The {@link UUID} for a workflow. - * @param name - * The name of the workflow. - * @param version - * The version of the workflow. - * @param analysisType - * The class type of the {@link Analysis}. - * @param inputs - * The inputs to the workflow. - * @param outputs - * The outputs to the workflow. - * @param toolRepositories - * The list of tools repositories for this workflow. - * @param parameters - * The valid parameters that can be modified for this workflow. + * @param id The {@link UUID} for a workflow. + * @param name The name of the workflow. + * @param version The version of the workflow. 
+ * @param analysisType The class type of the {@link Analysis}. + * @param inputs The inputs to the workflow. + * @param outputs The outputs to the workflow. + * @param toolRepositories The list of tools repositories for this workflow. + * @param parameters The valid parameters that can be modified for this workflow. */ - public IridaWorkflowDescription(UUID id, String name, String version, - AnalysisType analysisType, IridaWorkflowInput inputs, List outputs, + public IridaWorkflowDescription(UUID id, String name, String version, AnalysisType analysisType, + IridaWorkflowInput inputs, List outputs, List toolRepositories, List parameters) { this.id = id; this.name = name; @@ -129,8 +110,7 @@ public boolean requiresDynamicSource() { /** * Whether or not this workflow accepts single sequence files as input. * - * @return True if this workflow accepts single sequence files, false - * otherwise. + * @return True if this workflow accepts single sequence files, false otherwise. */ public boolean acceptsSingleSequenceFiles() { return inputs.getSequenceReadsSingle().isPresent(); @@ -139,13 +119,21 @@ public boolean acceptsSingleSequenceFiles() { /** * Whether or not this workflow accepts paired sequence files as input. * - * @return True if this workflow accepts paired sequence files, false - * otherwise. + * @return True if this workflow accepts paired sequence files, false otherwise. */ public boolean acceptsPairedSequenceFiles() { return inputs.getSequenceReadsPaired().isPresent(); } + /** + * Whether or not this workflow accepts genome assemblies as input. + * + * @return True if this workflow accepts genome assemblies, false otherwise. 
+ */ + public boolean acceptsGenomeAssemblies() { + return inputs.getGenomeAssemblies().isPresent(); + } + public IridaWorkflowInput getInputs() { return inputs; } @@ -153,17 +141,16 @@ public IridaWorkflowInput getInputs() { public List getOutputs() { return outputs; } - + public List getParameters() { return parameters; } /** - * Gets a {@link Map} representation of the outputs of a workflow, linking - * the output name to the {@link IridaWorkflowOutput} entry. + * Gets a {@link Map} representation of the outputs of a workflow, linking the output name to the + * {@link IridaWorkflowOutput} entry. * - * @return A {@link Map} linking the output name to the - * {@link IridaWorkflowOutput} entry. + * @return A {@link Map} linking the output name to the {@link IridaWorkflowOutput} entry. */ public Map getOutputsMap() { Map outputsMap = new HashMap<>(); @@ -182,7 +169,7 @@ public List getToolRepositories() { public AnalysisType getAnalysisType() { return analysisType; } - + /** * Determines if this workflow accepts parameters. 
* @@ -215,7 +202,7 @@ else if (obj instanceof IridaWorkflowDescription) { @Override public String toString() { - return "IridaWorkflowDescription [id=" + id + ", name=" + name + ", version=" + version - + ", analysisType=" + analysisType + "]"; + return "IridaWorkflowDescription [id=" + id + ", name=" + name + ", version=" + version + ", analysisType=" + + analysisType + "]"; } } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowInput.java b/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowInput.java index 38d430f47ef..4249a29f8fe 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowInput.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/description/IridaWorkflowInput.java @@ -1,15 +1,14 @@ package ca.corefacility.bioinformatics.irida.model.workflow.description; import java.util.Objects; - -import javax.xml.bind.annotation.*; - import java.util.Optional; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; + /** * Defines the input labels for a workflow. 
- * - * */ @XmlAccessorType(XmlAccessType.FIELD) public class IridaWorkflowInput { @@ -17,38 +16,39 @@ public class IridaWorkflowInput { @XmlElement(name = "sequenceReadsSingle") private String sequenceReadsSingle; + @XmlElement(name = "sequenceReadsPaired") + private String sequenceReadsPaired; + + @XmlElement(name = "genomeAssemblies") + private String genomeAssemblies; + @XmlElement(name = "reference") private String reference; - @XmlElement(name = "sequenceReadsPaired") - private String sequenceReadsPaired; - - @XmlElement(name = "requiresSingleSample", defaultValue="false") + @XmlElement(name = "requiresSingleSample", defaultValue = "false") private boolean requiresSingleSample; public IridaWorkflowInput() { } /** - * Builds a new {@link IridaWorkflowInput} object with the given - * information. + * Builds a new {@link IridaWorkflowInput} object with the given information. * - * @param sequenceReadsSingle - * The label to use for a collection of single-end sequence - * reads. Null if no acceptance of single-end reads. - * @param sequenceReadsPaired - * The label to use for a collection of paired-end sequence - * reads. Null if no acceptance of paired-end reads. - * @param reference - * The label to use for a reference file. - * @param requiresSingleSample - * Whether or not this workflow requires a single sample, or can - * work with multiple samples. + * @param sequenceReadsSingle The label to use for a collection of single-end sequence reads. Null if no acceptance + * of single-end reads. + * @param sequenceReadsPaired The label to use for a collection of paired-end sequence reads. Null if no acceptance + * of paired-end reads. + * @param genomeAssemblies The label to use for a collection of sequence assemblies. Null if no acceptance of + * assemblies. + * @param reference The label to use for a reference file. + * @param requiresSingleSample Whether or not this workflow requires a single sample, or can work with multiple + * samples. 
*/ - public IridaWorkflowInput(String sequenceReadsSingle, String sequenceReadsPaired, String reference, - boolean requiresSingleSample) { + public IridaWorkflowInput(String sequenceReadsSingle, String sequenceReadsPaired, String genomeAssemblies, + String reference, boolean requiresSingleSample) { this.sequenceReadsSingle = sequenceReadsSingle; this.sequenceReadsPaired = sequenceReadsPaired; + this.genomeAssemblies = genomeAssemblies; this.reference = reference; this.requiresSingleSample = requiresSingleSample; } @@ -56,33 +56,39 @@ public IridaWorkflowInput(String sequenceReadsSingle, String sequenceReadsPaired /** * Gets the sequence reads single label. * - * @return The sequence reads single label, or {@link Optional#empty()} if no - * such label exists. + * @return The sequence reads single label, or {@link Optional#empty()} if no such label exists. */ public Optional getSequenceReadsSingle() { return Optional.ofNullable(sequenceReadsSingle); } /** - * Gets the reference label. + * Gets the sequence reads paired label. * - * @return The reference label, or {@link Optional#empty()} if no such label - * exists. + * @return The sequence reads paired label, or {@link Optional#empty()} if no such label exists. */ - public Optional getReference() { - return Optional.ofNullable(reference); + public Optional getSequenceReadsPaired() { + return Optional.ofNullable(sequenceReadsPaired); } /** - * Gets the sequence reads paired label. + * Gets the sequence assemblies label. * - * @return The sequence reads paired label, or {@link Optional#empty()} if no - * such label exists. + * @return The sequence assemblies label, or {@link Optional#empty()} if no such label exists. */ - public Optional getSequenceReadsPaired() { - return Optional.ofNullable(sequenceReadsPaired); + public Optional getGenomeAssemblies() { + return Optional.ofNullable(genomeAssemblies); } - + + /** + * Gets the reference label. 
+ * + * @return The reference label, or {@link Optional#empty()} if no such label exists. + */ + public Optional getReference() { + return Optional.ofNullable(reference); + } + /** * Whether or not this workflow requires a single sample. * @@ -94,7 +100,8 @@ public boolean requiresSingleSample() { @Override public int hashCode() { - return Objects.hash(sequenceReadsSingle, sequenceReadsPaired, reference, requiresSingleSample); + return Objects.hash(sequenceReadsSingle, sequenceReadsPaired, genomeAssemblies, reference, + requiresSingleSample); } @Override @@ -106,6 +113,7 @@ else if (obj instanceof IridaWorkflowInput) { return Objects.equals(sequenceReadsSingle, other.sequenceReadsSingle) && Objects.equals(sequenceReadsPaired, other.sequenceReadsPaired) + && Objects.equals(genomeAssemblies, other.genomeAssemblies) && Objects.equals(reference, other.reference) && Objects.equals(requiresSingleSample, other.requiresSingleSample); } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java b/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java index 695c572a3d7..903e6402ad0 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java @@ -1,6 +1,16 @@ package ca.corefacility.bioinformatics.irida.model.workflow.submission; +import java.util.*; + +import javax.persistence.*; +import javax.validation.constraints.NotNull; + +import org.hibernate.envers.Audited; +import org.hibernate.envers.NotAudited; +import org.springframework.data.jpa.domain.support.AuditingEntityListener; + import ca.corefacility.bioinformatics.irida.exceptions.AnalysisAlreadySetException; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisCleanedState; 
import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState; import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile; @@ -8,16 +18,10 @@ import ca.corefacility.bioinformatics.irida.model.user.User; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.JobError; + import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; -import org.hibernate.envers.Audited; -import org.hibernate.envers.NotAudited; -import org.springframework.data.jpa.domain.support.AuditingEntityListener; - -import javax.persistence.*; -import javax.validation.constraints.NotNull; -import java.util.*; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; @@ -60,16 +64,24 @@ public class AnalysisSubmission extends AbstractAnalysisSubmission implements Co // Analysis entity for this analysis submission. 
Cascading everything except // removals - @OneToOne(fetch = FetchType.EAGER, cascade = { CascadeType.DETACH, CascadeType.MERGE, CascadeType.PERSIST, - CascadeType.REFRESH }) + @OneToOne(fetch = FetchType.EAGER, + cascade = { CascadeType.DETACH, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH }) @JoinColumn(name = "analysis_id") @NotAudited private Analysis analysis; @ManyToMany(fetch = FetchType.LAZY, cascade = CascadeType.DETACH) - @JoinTable(name = "analysis_submission_sequencing_object", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "sequencing_object_id", nullable = false)) + @JoinTable(name = "analysis_submission_sequencing_object", + joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), + inverseJoinColumns = @JoinColumn(name = "sequencing_object_id", nullable = false)) protected Set inputFiles; + @ManyToMany(fetch = FetchType.LAZY, cascade = CascadeType.DETACH) + @JoinTable(name = "analysis_submission_genome_assembly", + joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), + inverseJoinColumns = @JoinColumn(name = "genome_assembly_id", nullable = false)) + protected Set inputAssemblies; + @NotNull @Enumerated(EnumType.STRING) protected AnalysisSubmission.Priority priority; @@ -100,11 +112,12 @@ public AnalysisSubmission(Builder builder) { this(); checkNotNull(builder.workflowId, "workflowId is null"); - checkArgument(builder.inputFiles != null, + checkArgument(builder.inputFiles != null || builder.inputAssemblies != null, "input file collection is null. You must supply at least one set of input files"); this.name = (builder.name != null) ? builder.name : "Unknown"; this.inputFiles = builder.inputFiles; + this.inputAssemblies = builder.inputAssemblies; this.inputParameters = (builder.inputParameters != null) ? 
ImmutableMap.copyOf(builder.inputParameters) : ImmutableMap.of(); @@ -219,9 +232,9 @@ public boolean isAutomated() { } /** - * Set the {@link Analysis} generated as a result of this submission. Note: {@link - * AnalysisSubmission#setAnalysis(Analysis)} can only be set **once**; if the current {@link Analysis} is non-null, - * then this method will throw a {@link AnalysisAlreadySetException}. + * Set the {@link Analysis} generated as a result of this submission. Note: + * {@link AnalysisSubmission#setAnalysis(Analysis)} can only be set **once**; if the current {@link Analysis} is + * non-null, then this method will throw a {@link AnalysisAlreadySetException}. * * @param analysis the analysis to set * @throws AnalysisAlreadySetException if the {@link Analysis} reference has already been created for this @@ -265,6 +278,7 @@ public void setAnalysisCleanedState(AnalysisCleanedState analysisCleanedState) { public static class Builder { private String name; private Set inputFiles; + private Set inputAssemblies; private ReferenceFile referenceFile; private UUID workflowId; private Map inputParameters; @@ -304,10 +318,8 @@ public Builder(AnalysisSubmissionTemplate template) { emailPipelineResultCompleted(template.getEmailPipelineResultCompleted()); emailPipelineResultError(template.getEmailPipelineResultError()); - if (template.getReferenceFile() - .isPresent()) { - referenceFile(template.getReferenceFile() - .get()); + if (template.getReferenceFile().isPresent()) { + referenceFile(template.getReferenceFile().get()); } //check if we have named params. If so, add them @@ -346,6 +358,20 @@ public Builder inputFiles(Set inputFiles) { return this; } + /** + * Sets the inputAssemblies for this submission. + * + * @param inputAssemblies The inputAssemblies for this submission. + * @return A {@link Builder}. 
+ */ + public Builder inputAssemblies(Set inputAssemblies) { + checkNotNull(inputAssemblies, "inputAssemblies is null"); + checkArgument(!inputAssemblies.isEmpty(), "inputAssemblies is empty"); + + this.inputAssemblies = inputAssemblies; + return this; + } + /** * Sets the referenceFile for this submission. * @@ -497,7 +523,7 @@ public Builder submitter(User submitter) { * @return the new AnalysisSubmission */ public AnalysisSubmission build() { - checkArgument(inputFiles != null, + checkArgument(inputFiles != null || inputAssemblies != null, "input file collection is null. You must supply at least one set of input files"); return new AnalysisSubmission(this); @@ -553,13 +579,15 @@ public boolean equals(Object other) { if (other instanceof AnalysisSubmission) { AnalysisSubmission p = (AnalysisSubmission) other; return Objects.equals(createdDate, p.createdDate) && Objects.equals(modifiedDate, p.modifiedDate) - && Objects.equals(name, p.name) && Objects.equals(workflowId, p.workflowId) && Objects.equals( - remoteAnalysisId, p.remoteAnalysisId) && Objects.equals(remoteInputDataId, p.remoteInputDataId) - && Objects.equals(remoteWorkflowId, p.remoteWorkflowId) && Objects.equals(analysisState, - p.analysisState) && Objects.equals(analysisCleanedState, p.analysisCleanedState) && Objects.equals( - referenceFile, p.referenceFile) && Objects.equals(analysis, p.analysis) && Objects.equals( - namedParameters, p.namedParameters) && Objects.equals(submitter, p.submitter) && Objects.equals( - priority, p.priority); + && Objects.equals(name, p.name) && Objects.equals(workflowId, p.workflowId) + && Objects.equals(remoteAnalysisId, p.remoteAnalysisId) + && Objects.equals(remoteInputDataId, p.remoteInputDataId) + && Objects.equals(remoteWorkflowId, p.remoteWorkflowId) + && Objects.equals(analysisState, p.analysisState) + && Objects.equals(analysisCleanedState, p.analysisCleanedState) + && Objects.equals(referenceFile, p.referenceFile) && Objects.equals(analysis, p.analysis) + && 
Objects.equals(namedParameters, p.namedParameters) && Objects.equals(submitter, p.submitter) + && Objects.equals(priority, p.priority); } return false; diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/GalaxyLibrariesService.java b/src/main/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/GalaxyLibrariesService.java index 1eaddcc31d3..431a721d062 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/GalaxyLibrariesService.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/GalaxyLibrariesService.java @@ -1,9 +1,5 @@ package ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy; -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkState; - import java.io.File; import java.io.IOException; import java.nio.file.Path; @@ -11,16 +7,12 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; +import java.util.concurrent.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; import ca.corefacility.bioinformatics.irida.exceptions.UploadErrorException; import ca.corefacility.bioinformatics.irida.exceptions.UploadException; @@ -33,54 +25,49 @@ import ca.corefacility.bioinformatics.irida.util.FileUtils; import com.github.jmchilton.blend4j.galaxy.LibrariesClient; -import com.github.jmchilton.blend4j.galaxy.beans.FilesystemPathsLibraryUpload; -import 
com.github.jmchilton.blend4j.galaxy.beans.GalaxyObject; -import com.github.jmchilton.blend4j.galaxy.beans.Library; -import com.github.jmchilton.blend4j.galaxy.beans.LibraryContent; -import com.github.jmchilton.blend4j.galaxy.beans.LibraryDataset; +import com.github.jmchilton.blend4j.galaxy.beans.*; import com.google.common.collect.Lists; import com.sun.jersey.api.client.ClientResponse; +import static com.google.common.base.Preconditions.*; + /** * A service class for dealing with Galaxy libraries. - * - * */ public class GalaxyLibrariesService { - private static final Logger logger = LoggerFactory - .getLogger(GalaxyLibrariesService.class); + private static final Logger logger = LoggerFactory.getLogger(GalaxyLibrariesService.class); private LibrariesClient librariesClient; - + private final ExecutorService executor; private final int libraryPollingTime; private final int libraryUploadTimeout; + @Autowired + @Qualifier("assemblyFileBaseDirectory") + private Path assemblyFileBaseDirectory; + /** * State a library dataset should be in on proper upload. */ private static final String LIBRARY_OK_STATE = "ok"; - + /** - * Failure states for a library dataset. Derived from + * Failure states for a library dataset. Derived from * https://github.com/galaxyproject/galaxy/blob/release_16.10/lib/galaxy/model/__init__.py#L1645 */ - private static List LIBRARY_FAIL_STATES = Lists.newArrayList("paused", "error", "failed_metadata", "discarded"); + private static List LIBRARY_FAIL_STATES = Lists.newArrayList("paused", "error", "failed_metadata", + "discarded"); /** * Builds a new GalaxyLibrariesService with the given LibrariesClient. * - * @param librariesClient - * The LibrariesClient used to interact with Galaxy libraries. - * @param libraryPollingTime - * The time (in seconds) for polling a Galaxy library. - * @param libraryUploadTimeout - * The timeout (in seconds) for waiting for files to be uploaded - * to a library. 
- * @param threadPoolSize - * The thread pool size for parallel polling of Galaxy to check if uploads are finished. + * @param librariesClient The LibrariesClient used to interact with Galaxy libraries. + * @param libraryPollingTime The time (in seconds) for polling a Galaxy library. + * @param libraryUploadTimeout The timeout (in seconds) for waiting for files to be uploaded to a library. + * @param threadPoolSize The thread pool size for parallel polling of Galaxy to check if uploads are finished. */ public GalaxyLibrariesService(LibrariesClient librariesClient, final int libraryPollingTime, final int libraryUploadTimeout, final int threadPoolSize) { @@ -90,28 +77,25 @@ public GalaxyLibrariesService(LibrariesClient librariesClient, final int library checkArgument(libraryUploadTimeout > libraryPollingTime, "libraryUploadTimeout=" + libraryUploadTimeout + " must be greater then libraryPollingTime=" + libraryPollingTime); checkArgument(threadPoolSize > 0, "threadPoolSize=" + threadPoolSize + " must be positive"); - - logger.debug("Setting libraryPollingTime=" + libraryPollingTime + ", libraryUploadTimeout=" + libraryUploadTimeout - + ", threadPoolSize=" + threadPoolSize); + + logger.debug("Setting libraryPollingTime=" + libraryPollingTime + ", libraryUploadTimeout=" + + libraryUploadTimeout + ", threadPoolSize=" + threadPoolSize); this.librariesClient = librariesClient; this.libraryPollingTime = libraryPollingTime; this.libraryUploadTimeout = libraryUploadTimeout; - + executor = Executors.newFixedThreadPool(threadPoolSize); } - + /** * Builds a new empty library with the given name. * - * @param libraryName - * The name of the new library. + * @param libraryName The name of the new library. * @return A Library object for the newly created library. - * @throws CreateLibraryException - * If no library could be created. + * @throws CreateLibraryException If no library could be created. 
*/ - public Library buildEmptyLibrary(GalaxyProjectName libraryName) - throws CreateLibraryException { + public Library buildEmptyLibrary(GalaxyProjectName libraryName) throws CreateLibraryException { checkNotNull(libraryName, "libraryName is null"); Library persistedLibrary; @@ -120,33 +104,26 @@ public Library buildEmptyLibrary(GalaxyProjectName libraryName) persistedLibrary = librariesClient.createLibrary(library); if (persistedLibrary != null) { - logger.debug("Created library=" + library.getName() + " libraryId=" - + persistedLibrary.getId()); + logger.debug("Created library=" + library.getName() + " libraryId=" + persistedLibrary.getId()); return persistedLibrary; } else { - throw new CreateLibraryException("Could not create library named " - + libraryName); + throw new CreateLibraryException("Could not create library named " + libraryName); } } /** * Uploads the given file to a library with the given information. * - * @param path - * The path of the file to upload. - * @param fileType - * The type of the file to upload. - * @param library - * The library to upload the file into. - * @param dataStorage - * The {@link DataStorage} method to apply to this dataset. + * @param path The path of the file to upload. + * @param fileType The type of the file to upload. + * @param library The library to upload the file into. + * @param dataStorage The {@link DataStorage} method to apply to this dataset. * @return A dataset id for the dataset in this library. - * @throws UploadException - * If there was an issue uploading the file to the library. + * @throws UploadException If there was an issue uploading the file to the library. 
*/ - public String fileToLibrary(Path path, InputFileType fileType, - Library library, DataStorage dataStorage) throws UploadException { + public String fileToLibrary(Path path, InputFileType fileType, Library library, DataStorage dataStorage) + throws UploadException { checkNotNull(path, "path is null"); checkNotNull(fileType, "fileType is null"); checkNotNull(library, "library is null"); @@ -156,8 +133,7 @@ public String fileToLibrary(Path path, InputFileType fileType, File file = path.toFile(); try { - LibraryContent rootContent = librariesClient.getRootFolder(library - .getId()); + LibraryContent rootContent = librariesClient.getRootFolder(library.getId()); FilesystemPathsLibraryUpload upload = new FilesystemPathsLibraryUpload(); upload.setFolderId(rootContent.getId()); @@ -166,8 +142,7 @@ public String fileToLibrary(Path path, InputFileType fileType, upload.setLinkData(DataStorage.LOCAL.equals(dataStorage)); upload.setFileType(fileType.toString()); - GalaxyObject uploadObject = librariesClient.uploadFilesystemPaths( - library.getId(), upload); + GalaxyObject uploadObject = librariesClient.uploadFilesystemPaths(library.getId(), upload); return uploadObject.getId(); } catch (RuntimeException e) { @@ -176,25 +151,18 @@ public String fileToLibrary(Path path, InputFileType fileType, } /** - * Uploads a set of files to a given library, waiting until all uploads are - * complete. + * Uploads a set of files to a given library, waiting until all uploads are complete. * - * @param paths - * The set of paths to upload. - * @param library - * The library to initially upload the file into. - * @param dataStorage - * The type of DataStorage strategy to use. - * @return An @{link Map} of paths and ids for each dataset object in this - * library. - * @throws UploadException - * If there was an issue uploading the file to Galaxy. + * @param paths The set of paths to upload. + * @param library The library to initially upload the file into. 
+ * @param dataStorage The type of DataStorage strategy to use. + * @return An {@link Map} of paths and ids for each dataset object in this library. + * @throws UploadException If there was an issue uploading the file to Galaxy. */ - public Map filesToLibraryWait(Set paths, - Library library, DataStorage dataStorage) + public Map filesToLibraryWait(Set paths, Library library, DataStorage dataStorage) throws UploadException { checkNotNull(paths, "paths is null"); - final int pollingTimeMillis = libraryPollingTime*1000; + final int pollingTimeMillis = libraryPollingTime * 1000; Map datasetLibraryIdsMap = new HashMap<>(); @@ -202,28 +170,23 @@ public Map filesToLibraryWait(Set paths, // upload all files to library first for (Path path : paths) { InputFileType fileType = getFileType(path); - String datasetLibraryId = fileToLibrary(path, fileType, - library, dataStorage); + String datasetLibraryId = fileToLibrary(path, fileType, library, dataStorage); datasetLibraryIdsMap.put(path, datasetLibraryId); } - Future waitForLibraries = executor.submit(new Callable(){ + Future waitForLibraries = executor.submit(new Callable() { @Override public Void call() throws Exception { // wait for uploads to finish for (Path path : paths) { String datasetLibraryId = datasetLibraryIdsMap.get(path); - LibraryDataset libraryDataset = librariesClient.showDataset( - library.getId(), datasetLibraryId); + LibraryDataset libraryDataset = librariesClient.showDataset(library.getId(), datasetLibraryId); while (!LIBRARY_OK_STATE.equals(libraryDataset.getState())) { - logger.trace("Waiting for library dataset " - + libraryDataset.getId() - + " to be finished processing, in state " - + libraryDataset.getState()); + logger.trace("Waiting for library dataset " + libraryDataset.getId() + + " to be finished processing, in state " + libraryDataset.getState()); Thread.sleep(pollingTimeMillis); - - libraryDataset = librariesClient.showDataset( - library.getId(), datasetLibraryId); + + libraryDataset = 
librariesClient.showDataset(library.getId(), datasetLibraryId); if (LIBRARY_FAIL_STATES.contains(libraryDataset.getState())) { throw new UploadErrorException("Error: upload to Galaxy library id=" + library.getId() @@ -233,19 +196,20 @@ public Void call() throws Exception { } } } - + return null; } }); - + waitForLibraries.get(libraryUploadTimeout, TimeUnit.SECONDS); } catch (RuntimeException | IOException e) { throw new UploadException(e); } catch (TimeoutException e) { - throw new UploadTimeoutException("Timeout while uploading, time limit = " + libraryUploadTimeout + " seconds", e); + throw new UploadTimeoutException( + "Timeout while uploading, time limit = " + libraryUploadTimeout + " seconds", e); } catch (ExecutionException e) { if (e.getCause() instanceof UploadErrorException) { - throw (UploadErrorException)e.getCause(); + throw (UploadErrorException) e.getCause(); } else { throw new UploadException(e); } @@ -255,34 +219,36 @@ public Void call() throws Exception { return datasetLibraryIdsMap; } - + /** * Given a {@link Path}, gets the {@link InputFileType} for the data type to upload to Galaxy. + * * @param path The path to upload. * @return The {@link InputFileType} for the data to upload to Galaxy. * @throws IOException If there was an error reading the file to determine the file type. */ private InputFileType getFileType(Path path) throws IOException { checkArgument(path.toFile().exists(), "path=[" + path + "] does not exist"); - - return (FileUtils.isGzipped(path) ? InputFileType.FASTQ_SANGER_GZ : InputFileType.FASTQ_SANGER); + + if (path.toString().startsWith(assemblyFileBaseDirectory.toString())) { + return InputFileType.FASTA; + } else { + return (FileUtils.isGzipped(path) ? InputFileType.FASTQ_SANGER_GZ : InputFileType.FASTQ_SANGER); + } } /** * Deletes the Galaxy library with the given id. * - * @param libraryId - * The id of the library to delete. - * @throws DeleteGalaxyObjectFailedException - * If there was a failure to delete the library. 
+ * @param libraryId The id of the library to delete. + * @throws DeleteGalaxyObjectFailedException If there was a failure to delete the library. */ public void deleteLibrary(String libraryId) throws DeleteGalaxyObjectFailedException { try { ClientResponse response = librariesClient.deleteLibraryRequest(libraryId); if (ClientResponse.Status.OK.getStatusCode() != response.getStatusInfo().getStatusCode()) { throw new DeleteGalaxyObjectFailedException("Could not delete library with id " + libraryId - + ", status=" + response.getStatusInfo() + ", content=" - + response.getEntity(String.class)); + + ", status=" + response.getStatusInfo() + ", content=" + response.getEntity(String.class)); } } catch (RuntimeException e) { throw new DeleteGalaxyObjectFailedException("Error while deleting library with id " + libraryId, e); diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/repositories/analysis/submission/AnalysisSubmissionRepositoryImpl.java b/src/main/java/ca/corefacility/bioinformatics/irida/repositories/analysis/submission/AnalysisSubmissionRepositoryImpl.java index 161d61cfbb9..a5d9119fa5e 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/repositories/analysis/submission/AnalysisSubmissionRepositoryImpl.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/repositories/analysis/submission/AnalysisSubmissionRepositoryImpl.java @@ -23,11 +23,10 @@ import ca.corefacility.bioinformatics.irida.ria.utilities.FileUtilities; /** - * Implementation of {@link AnalysisSubmissionRepositoryCustom} with methods - * using native SQL queries to get - * {@link ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisOutputFile} - * info for {@link ca.corefacility.bioinformatics.irida.model.project.Project} - * and {@link ca.corefacility.bioinformatics.irida.model.user.User} + * Implementation of {@link AnalysisSubmissionRepositoryCustom} with methods using native SQL queries to get + * {@link 
ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisOutputFile} info for + * {@link ca.corefacility.bioinformatics.irida.model.project.Project} and + * {@link ca.corefacility.bioinformatics.irida.model.user.User} */ @Repository public class AnalysisSubmissionRepositoryImpl implements AnalysisSubmissionRepositoryCustom { @@ -79,7 +78,36 @@ public List getAllAnalysisOutputInfoSharedWithP + " INNER JOIN project_analysis_submission pasub ON asub.id = pasub.analysis_submission_id\n" + "WHERE\n" + " psample.project_id = :projectId\n" - + " AND asub.workflow_id IN (:workflowIds)\n"; + + " AND asub.workflow_id IN (:workflowIds)\n" + + "UNION\n" + + "SELECT\n" + + " s.id AS sampleId,\n" + + " s.sampleName AS sampleName,\n" + + " a.id AS analysisId,\n" + + " aofmap.analysis_output_file_key AS analysisOutputFileKey,\n" + + " aof.file_path AS filePath,\n" + + " aof.id AS analysisOutputFileId,\n" + + " a.analysis_type AS analysisType,\n" + + " asub.workflow_id AS workflowId,\n" + + " aof.created_date AS createdDate,\n" + + " asub.name AS analysisSubmissionName,\n" + + " asub.id AS analysisSubmissionId,\n" + + " u.id AS userId,\n" + + " u.firstName AS userFirstName,\n" + + " u.lastName AS userLastName\n" + + "FROM analysis_output_file aof\n" + + " INNER JOIN analysis_output_file_map aofmap ON aof.id = aofmap.analysisOutputFilesMap_id\n" + + " INNER JOIN analysis a ON aofmap.analysis_id = a.id\n" + + " INNER JOIN analysis_submission asub ON a.id = asub.analysis_id\n" + + " INNER JOIN analysis_submission_genome_assembly o ON asub.id = o.analysis_submission_id\n" + + " INNER JOIN sample_genome_assembly sga ON sga.genome_assembly_id = o.genome_assembly_id\n" + + " INNER JOIN sample s ON sga.sample_id = s.id\n" + + " INNER JOIN project_sample psample ON s.id = psample.sample_id\n" + + " INNER JOIN user u ON asub.submitter = u.id\n" + + " INNER JOIN project_analysis_submission pasub ON asub.id = pasub.analysis_submission_id\n" + + "WHERE\n" + + " psample.project_id = 
:projectId\n" + + " AND asub.workflow_id IN (:workflowIds)"; // @formatter:on MapSqlParameterSource parameters = new MapSqlParameterSource(); // need to explicitly convert UUIDs to String @@ -128,7 +156,37 @@ public List getAllAutomatedAnalysisOutputInfoFo + "WHERE\n" + " psample.project_id = :projectId\n" + " AND asub.workflow_id IN (:workflowIds)\n" - + " AND asub.automated=1"; + + " AND asub.automated=1\n" + + "UNION\n" + + "SELECT\n" + + " s.id AS sampleId,\n" + + " s.sampleName AS sampleName,\n" + + " a.id AS analysisId,\n" + + " aofmap.analysis_output_file_key AS analysisOutputFileKey,\n" + + " aof.file_path AS filePath,\n" + + " aof.id AS analysisOutputFileId,\n" + + " a.analysis_type AS analysisType,\n" + + " asub.workflow_id AS workflowId,\n" + + " aof.created_date AS createdDate,\n" + + " asub.name AS analysisSubmissionName,\n" + + " asub.id AS analysisSubmissionId,\n" + + " u.id AS userId,\n" + + " u.firstName AS userFirstName,\n" + + " u.lastName AS userLastName\n" + + "FROM analysis_output_file aof\n" + + " INNER JOIN analysis_output_file_map aofmap ON aof.id = aofmap.analysisOutputFilesMap_id\n" + + " INNER JOIN analysis a ON aofmap.analysis_id = a.id\n" + + " INNER JOIN analysis_submission asub ON a.id = asub.analysis_id\n" + + " INNER JOIN analysis_submission_genome_assembly o ON asub.id = o.analysis_submission_id\n" + + " INNER JOIN sample_genome_assembly sga ON sga.genome_assembly_id = o.genome_assembly_id\n" + + " INNER JOIN sample s ON sga.sample_id = s.id\n" + + " INNER JOIN project_sample psample ON s.id = psample.sample_id\n" + + " INNER JOIN user u ON asub.submitter = u.id\n" + + " INNER JOIN project_analysis_submission pasub ON asub.id = pasub.analysis_submission_id\n" + + "WHERE\n" + + " psample.project_id = :projectId\n" + + " AND asub.workflow_id IN (:workflowIds)\n" + + " AND asub.automated=1\n"; // @formatter:on MapSqlParameterSource parameters = new MapSqlParameterSource(); parameters.addValue("projectId", projectId); @@ -168,7 
+226,29 @@ public List getAllUserAnalysisOutputInfo(Long u + " INNER JOIN sample_sequencingobject sso ON sso.sequencingobject_id = o.sequencing_object_id\n" + " INNER JOIN sample s ON sso.sample_id = s.id\n" + "WHERE\n" - + " asub.submitter = :userId"; + + " asub.submitter = :userId\n" + + "UNION\n" + + "SELECT\n" + + " s.id AS sampleId,\n" + + " s.sampleName AS sampleName,\n" + + " a.id AS analysisId,\n" + + " aofmap.analysis_output_file_key AS analysisOutputFileKey,\n" + + " aof.file_path AS filePath,\n" + + " aof.id AS analysisOutputFileId,\n" + + " a.analysis_type AS analysisType,\n" + + " asub.workflow_id AS workflowId,\n" + + " aof.created_date AS createdDate,\n" + + " asub.name AS analysisSubmissionName,\n" + + " asub.id AS analysisSubmissionId\n" + + "FROM analysis_output_file aof\n" + + " INNER JOIN analysis_output_file_map aofmap ON aof.id = aofmap.analysisOutputFilesMap_id\n" + + " INNER JOIN analysis a ON aofmap.analysis_id = a.id\n" + + " INNER JOIN analysis_submission asub ON a.id = asub.analysis_id\n" + + " INNER JOIN analysis_submission_genome_assembly o ON asub.id = o.analysis_submission_id\n" + + " INNER JOIN sample_genome_assembly sga ON sga.genome_assembly_id = o.genome_assembly_id\n" + + " INNER JOIN sample s ON sga.sample_id = s.id\n" + + "WHERE\n" + + " asub.submitter = :userId\n"; // @formatter:on MapSqlParameterSource parameters = new MapSqlParameterSource(); parameters.addValue("userId", userId); diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/repositories/assembly/GenomeAssemblyRepository.java b/src/main/java/ca/corefacility/bioinformatics/irida/repositories/assembly/GenomeAssemblyRepository.java index da976d84544..decd32b5054 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/repositories/assembly/GenomeAssemblyRepository.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/repositories/assembly/GenomeAssemblyRepository.java @@ -1,7 +1,12 @@ package 
ca.corefacility.bioinformatics.irida.repositories.assembly; +import java.util.Set; + +import org.springframework.data.jpa.repository.Query; + import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.assembly.UploadedAssembly; +import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; import ca.corefacility.bioinformatics.irida.repositories.IridaJpaRepository; import ca.corefacility.bioinformatics.irida.repositories.filesystem.FilesystemSupplementedRepository; @@ -17,4 +22,13 @@ public interface GenomeAssemblyRepository * Save is overridden here instead of in FilesystemSupplementedRepository as it would throw a compilation error */ S save(S entity); + + /** + * Get the {@link GenomeAssembly}s associated with a given {@link AnalysisSubmission} + * + * @param submission the {@link AnalysisSubmission} + * @return the set of associated {@link GenomeAssembly}s + */ + @Query("select f from GenomeAssembly f where ?1 in elements(f.analysisSubmissions)") + Set findGenomeAssembliesForAnalysisSubmission(AnalysisSubmission submission); } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/repositories/joins/sample/SampleGenomeAssemblyJoinRepository.java b/src/main/java/ca/corefacility/bioinformatics/irida/repositories/joins/sample/SampleGenomeAssemblyJoinRepository.java index 0f1450bcdae..0f8d9251e22 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/repositories/joins/sample/SampleGenomeAssemblyJoinRepository.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/repositories/joins/sample/SampleGenomeAssemblyJoinRepository.java @@ -17,8 +17,7 @@ public interface SampleGenomeAssemblyJoinRepository extends CrudRepository>>>>>> + * development * @param locale User's locale * @param response HTTP response object * @return dto with message @@ -133,8 +140,8 @@ public ResponseDetails ajaxUpdateEmailPipelineResult(@RequestBody AnalysisEmailP 
AnalysisSubmission submission = analysisSubmissionService.read(parameters.getAnalysisSubmissionId()); String message = ""; - if ((submission.getAnalysisState() != AnalysisState.COMPLETED) && (submission.getAnalysisState() - != AnalysisState.ERROR)) { + if ((submission.getAnalysisState() != AnalysisState.COMPLETED) + && (submission.getAnalysisState() != AnalysisState.ERROR)) { if (parameters.getEmailPipelineResultCompleted() && parameters.getEmailPipelineResultError()) { message = messageSource.getMessage("AnalysisDetails.receiveCompletionEmail", new Object[] {}, locale); @@ -174,15 +181,12 @@ public AnalysisDetails ajaxGetDataForDetailsTab(@PathVariable Long submissionId, IridaWorkflow iridaWorkflow = workflowsService.getIridaWorkflowOrUnknown(submission); // Get the name of the workflow - AnalysisType analysisType = iridaWorkflow.getWorkflowDescription() - .getAnalysisType(); + AnalysisType analysisType = iridaWorkflow.getWorkflowDescription().getAnalysisType(); String workflowName = messageSource.getMessage("workflow." 
+ analysisType.getType() + ".title", null, analysisType.getType(), locale); - String version = iridaWorkflow.getWorkflowDescription() - .getVersion(); - String priority = submission.getPriority() - .toString(); + String version = iridaWorkflow.getWorkflowDescription().getVersion(); + String priority = submission.getPriority().toString(); // Get the run time of the analysis runtime using the analysis Long duration; @@ -200,14 +204,12 @@ public AnalysisDetails ajaxGetDataForDetailsTab(@PathVariable Long submissionId, boolean canShareToSamples = false; if (submission.getAnalysis() != null) { - canShareToSamples = analysisSubmissionSampleProcessor.hasRegisteredAnalysisSampleUpdater( - submission.getAnalysis() - .getAnalysisType()); + canShareToSamples = analysisSubmissionSampleProcessor + .hasRegisteredAnalysisSampleUpdater(submission.getAnalysis().getAnalysisType()); } String analysisDescription = submission.getAnalysisDescription(); // Check if user can update analysis - Authentication authentication = SecurityContextHolder.getContext() - .getAuthentication(); + Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); response.setStatus(HttpServletResponse.SC_OK); @@ -231,8 +233,8 @@ public AnalysisInputFiles ajaxGetAnalysisInputFiles(@PathVariable Long submissio AnalysisSubmission submission = analysisSubmissionService.read(submissionId); ReferenceFile referenceFile = null; - Set inputFilePairs = sequencingObjectService.getSequencingObjectsOfTypeForAnalysisSubmission( - submission, SequenceFilePair.class); + Set inputFilePairs = sequencingObjectService + .getSequencingObjectsOfTypeForAnalysisSubmission(submission, SequenceFilePair.class); List sampleFiles = inputFilePairs.stream() .map(SampleSequencingObject::new) @@ -240,21 +242,26 @@ public AnalysisInputFiles ajaxGetAnalysisInputFiles(@PathVariable Long submissio .collect(Collectors.toList()); // - Single - Set inputFilesSingle = 
sequencingObjectService.getSequencingObjectsOfTypeForAnalysisSubmission( - submission, SingleEndSequenceFile.class); + Set inputFilesSingle = sequencingObjectService + .getSequencingObjectsOfTypeForAnalysisSubmission(submission, SingleEndSequenceFile.class); List singleFiles = inputFilesSingle.stream() .map(SampleSequencingObject::new) .sorted() .collect(Collectors.toList()); + Set inputGenomeAssemblies = genomeAssemblyService + .getGenomeAssembliesForAnalysisSubmission(submission); + List genomeAssemblies = inputGenomeAssemblies.stream() + .map(SampleGenomeAssembly::new) + .sorted() + .collect(Collectors.toList()); + IridaWorkflow iridaWorkflow = workflowsService.getIridaWorkflowOrUnknown(submission); - if (iridaWorkflow != null && iridaWorkflow.getWorkflowDescription() - .requiresReference() && submission.getReferenceFile() - .isPresent()) { + if (iridaWorkflow != null && iridaWorkflow.getWorkflowDescription().requiresReference() + && submission.getReferenceFile().isPresent()) { - referenceFile = submission.getReferenceFile() - .get(); + referenceFile = submission.getReferenceFile().get(); } else { logger.debug("No reference file required for workflow."); } @@ -262,18 +269,16 @@ public AnalysisInputFiles ajaxGetAnalysisInputFiles(@PathVariable Long submissio // List of AnalysisSamples which store the sample info List pairedEnd = new ArrayList<>(); List singleEnd = new ArrayList<>(); + List assemblies = new ArrayList<>(); for (SampleSequencingObject sso : sampleFiles) { SequenceFilePair fp = (SequenceFilePair) sso.getSequencingObject(); - if (fp.getFiles() - .size() == 2) { + if (fp.getFiles().size() == 2) { String sampleName = messageSource.getMessage("AnalysisSamples.sampleDeleted", new Object[] {}, locale); Long sampleId = 0L; if (sso.getSample() != null) { - sampleName = sso.getSample() - .getSampleName(); - sampleId = sso.getSample() - .getId(); + sampleName = sso.getSample().getSampleName(); + sampleId = sso.getSample().getId(); } pairedEnd.add(new 
AnalysisSamples(sampleName, sampleId, fp.getId(), fp.getForwardSequenceFile(), fp.getReverseSequenceFile())); @@ -282,28 +287,36 @@ public AnalysisInputFiles ajaxGetAnalysisInputFiles(@PathVariable Long submissio for (SampleSequencingObject sso : singleFiles) { SingleEndSequenceFile sesf = (SingleEndSequenceFile) sso.getSequencingObject(); - if (sesf.getFiles() - .size() == 1) { + if (sesf.getFiles().size() == 1) { String sampleName = messageSource.getMessage("AnalysisSamples.sampleDeleted", new Object[] {}, locale); Long sampleId = 0L; if (sso.getSample() != null) { - sampleName = sso.getSample() - .getSampleName(); - sampleId = sso.getSample() - .getId(); + sampleName = sso.getSample().getSampleName(); + sampleId = sso.getSample().getId(); } singleEnd.add(new AnalysisSingleEndSamples(sampleName, sampleId, sesf.getId(), sesf.getSequenceFile())); } } - return new AnalysisInputFiles(pairedEnd, singleEnd, referenceFile); + for (SampleGenomeAssembly sga : genomeAssemblies) { + String sampleName = messageSource.getMessage("AnalysisSamples.sampleDeleted", new Object[] {}, locale); + Long sampleId = 0L; + if (sga.getSample() != null) { + sampleName = sga.getSample().getSampleName(); + sampleId = sga.getSample().getId(); + } + assemblies + .add(new AnalysisGenomeAssemblySamples(sampleName, sampleId, sga.getId(), sga.getGenomeAssembly())); + } + + return new AnalysisInputFiles(pairedEnd, singleEnd, assemblies, referenceFile); } /** - * Update an analysis name and/or priority - * - * @param parameters parameters which include the submission id and the new name - * and/or priority + * Update an analysis name and/or priority + * + * @param parameters parameters which include the submission id and the new name and/or priority * @param locale User's locale * @param response HTTP response object * @return dto with message @@ -335,8 +348,7 @@ 
public ResponseDetails ajaxUpdateSubmission(@RequestBody AnalysisSubmissionInfo } /** - * For an {@link AnalysisSubmission}, get info about each - * {@link AnalysisOutputFile} + * For an {@link AnalysisSubmission}, get info about each {@link AnalysisOutputFile} * * @param id {@link AnalysisSubmission} id * @return map of info about each {@link AnalysisOutputFile} @@ -382,11 +394,8 @@ private AnalysisOutputFileInfo getAnalysisOutputFileInfo(AnalysisSubmission subm ToolExecution tool = aof.getCreatedByTool(); AnalysisOutputFileInfo info = new AnalysisOutputFileInfo(aof.getId(), submission.getId(), analysis.getId(), - aof.getFile() - .getFileName() - .toString(), fileExt, aof.getFile() - .toFile() - .length(), tool.getToolName(), tool.getToolVersion(), outputName); + aof.getFile().getFileName().toString(), fileExt, aof.getFile().toFile().length(), + tool.getToolName(), tool.getToolVersion(), outputName); if (FILE_EXT_READ_FIRST_LINE.contains(fileExt)) { addFirstLine(info, aof); @@ -397,12 +406,11 @@ private AnalysisOutputFileInfo getAnalysisOutputFileInfo(AnalysisSubmission subm } /** - * Add the {@code firstLine} and {@code filePointer} file byte position - * after reading the first line of an {@link AnalysisOutputFile} to a - * {@link AnalysisOutputFileInfo} object. - * - * @param info Object to add {@code firstLine} and {@code filePointer} info - * to + * Add the {@code firstLine} and {@code filePointer} file byte position after reading the first line of an + * {@link AnalysisOutputFile} to a {@link AnalysisOutputFileInfo} object. 
+ * + * @param info Object to add {@code firstLine} and {@code filePointer} info to * @param aof {@link AnalysisOutputFile} to read from */ private void addFirstLine(AnalysisOutputFileInfo info, AnalysisOutputFile aof) { @@ -437,9 +445,9 @@ private void addFirstLine(AnalysisOutputFileInfo info, AnalysisOutputFile aof) { * @param end Optional line to stop reading at * @param seek Optional file byte position to seek to and begin reading * @param chunk Optional number of bytes to read from file - * @param response HTTP response object - * @return JSON with file text or lines as well as information about the - * file. + * @param response HTTP response object + * @return JSON with file text or lines as well as information about the file. */ @RequestMapping(value = "/{id}/outputs/{fileId}", method = RequestMethod.GET) @ResponseBody @@ -462,12 +470,9 @@ public AnalysisOutputFileInfo getOutputFile(@PathVariable Long id, @PathVariable contents.setId(aof.getId()); contents.setAnalysisSubmissionId(submission.getId()); contents.setAnalysisId(analysis.getId()); - contents.setFilename(aofFile.getFileName() - .toString()); + contents.setFilename(aofFile.getFileName().toString()); contents.setFileExt(FileUtilities.getFileExt(aofFile)); - contents.setFileSizeBytes(aof.getFile() - .toFile() - .length()); + contents.setFileSizeBytes(aof.getFile().toFile().length()); contents.setToolName(tool.getToolName()); contents.setToolVersion(tool.getToolVersion()); try { @@ -515,8 +520,8 @@ public AnalysisOutputFileInfo getOutputFile(@PathVariable Long id, @PathVariable } /** - * Get a dto with list of {@link JobError} for an {@link AnalysisSubmission} - * under key `galaxyJobErrors` and the `galaxyUrl` for the galaxy instance + * Get a dto with list of 
{@link JobError} for an {@link AnalysisSubmission} under key `galaxyJobErrors` and the + * `galaxyUrl` for the galaxy instance * * @param submissionId {@link AnalysisSubmission} id * @return dto with galaxyJobErrors and galaxyUrl @@ -530,8 +535,7 @@ public AnalysisJobError ajaxGetJobErrors(@PathVariable Long submissionId) { List galaxyJobErrors = analysisSubmissionService.getJobErrors(submissionId); String galaxyUrl = ""; try { - galaxyUrl = configFile.galaxyInstance() - .getGalaxyUrl(); + galaxyUrl = configFile.galaxyInstance().getGalaxyUrl(); } catch (ExecutionManagerConfigurationException e) { logger.error("Error " + e); } @@ -563,12 +567,12 @@ public List getSharedProjectsForAnalysis(@PathVariable Lo // Input files // - Paired - Set inputFilePairs = sequencingObjectService.getSequencingObjectsOfTypeForAnalysisSubmission( - submission, SequenceFilePair.class); + Set inputFilePairs = sequencingObjectService + .getSequencingObjectsOfTypeForAnalysisSubmission(submission, SequenceFilePair.class); // Single End - Set inputFileSingleEnd = sequencingObjectService.getSequencingObjectsOfTypeForAnalysisSubmission( - submission, SingleEndSequenceFile.class); + Set inputFileSingleEnd = sequencingObjectService + .getSequencingObjectsOfTypeForAnalysisSubmission(submission, SingleEndSequenceFile.class); // get projects already shared with submission Set projectsShared = projectService.getProjectsForAnalysisSubmission(submission) @@ -599,10 +603,7 @@ public List getSharedProjectsForAnalysis(@PathVariable Lo @Override public int compare(SharedProjectResponse p1, SharedProjectResponse p2) { - return p1.getProject() - .getName() - .compareTo(p2.getProject() - .getName()); + return p1.getProject().getName().compareTo(p2.getProject().getName()); } }); @@ -610,12 +611,12 @@ public int compare(SharedProjectResponse p1, SharedProjectResponse p2) { } /** - * Update the share status of a given {@link AnalysisSubmission} for a given - * {@link Project} + * Update the share status of a 
given {@link AnalysisSubmission} for a given {@link Project} * - * @param submissionId the {@link AnalysisSubmission} id to share/unshare - * @param projectShare {@link AnalysisProjectShare} describes of the project and the - * share status. + * @param submissionId the {@link AnalysisSubmission} id to share/unshare + * @param projectShare {@link AnalysisProjectShare} describes the project and the share status. * @param locale Locale of the logged in user * @return Success message if successful */ @@ -667,8 +668,7 @@ public ResponseDetails saveResultsToSamples(@PathVariable Long submissionId, Loc submission.setUpdateSamples(true); analysisSubmissionService.update(submission); } catch (PostProcessingException e) { - if (e.toString() - .contains("Expected one sample; got '0' for analysis [id=" + submissionId + "]")) { + if (e.toString().contains("Expected one sample; got '0' for analysis [id=" + submissionId + "]")) { message = messageSource.getMessage("AnalysisShare.noSamplesToSaveResults", null, locale); } else { message = messageSource.getMessage("analysis.details.save.processingerror", null, locale); @@ -703,17 +703,13 @@ public AnalysisSistrResults getSistrAnalysis(@PathVariable Long id) { logger.error("Error finding workflow, ", e); throw new EntityNotFoundException("Couldn't find workflow for submission " + submission.getId(), e); } - AnalysisType analysisType = iridaWorkflow.getWorkflowDescription() - .getAnalysisType(); - if (analysisTypesService.getViewerForAnalysisType(analysisType) - .get() - .equals("sistr")) { + AnalysisType analysisType = iridaWorkflow.getWorkflowDescription().getAnalysisType(); + if (analysisTypesService.getViewerForAnalysisType(analysisType).get().equals("sistr")) { Analysis analysis = submission.getAnalysis(); Path path = null; if (analysis.getAnalysisOutputFile(sistrFileKey) != null) { - 
path = analysis.getAnalysisOutputFile(sistrFileKey) - .getFile(); + path = analysis.getAnalysisOutputFile(sistrFileKey).getFile(); try { String json = new Scanner(new BufferedReader(new FileReader(path.toFile()))).useDelimiter("\\Z") @@ -728,8 +724,7 @@ public AnalysisSistrResults getSistrAnalysis(@PathVariable Long id) { if (sistrResults.size() > 0) { // should only ever be one sample for these results if (samples != null && samples.size() == 1) { - Sample sample = samples.iterator() - .next(); + Sample sample = samples.iterator().next(); return new AnalysisSistrResults(sample.getSampleName(), false, sistrResults.get(0)); } else { logger.error("Invalid number of associated samples for submission " + submission); @@ -767,9 +762,8 @@ public Map deleteAjaxAnalysisSubmission(@PathVariable Long analy final Locale locale) { final AnalysisSubmission deletedSubmission = analysisSubmissionService.read(analysisSubmissionId); analysisSubmissionService.delete(analysisSubmissionId); - return ImmutableMap.of("result", - messageSource.getMessage("analysis.delete.message", new Object[] { deletedSubmission.getLabel() }, - locale)); + return ImmutableMap.of("result", messageSource.getMessage("analysis.delete.message", + new Object[] { deletedSubmission.getLabel() }, locale)); } /** @@ -813,33 +807,27 @@ public Map getNewickForAnalysis(@PathVariable Long submissionId) } /** - * Get an image file associated with a specific {@link AnalysisSubmission} - * by file name. + * Get an image file associated with a specific {@link AnalysisSubmission} by file name. * * @param submissionId {@link Long} id for an {@link AnalysisSubmission} - * @param filename {@link String} filename for an {@link AnalysisOutputFile} - * @return {@link String} containing the image file contents as a base64 - * encoded string. + * @param filename {@link String} filename for an {@link AnalysisOutputFile} + * @return {@link String} containing the image file contents as a base64 encoded string. 
*/ @RequestMapping("{submissionId}/image") @ResponseBody public ResponseEntity getImageFile(@PathVariable Long submissionId, String filename) { AnalysisSubmission submission = analysisSubmissionService.read(submissionId); - Set files = submission.getAnalysis() - .getAnalysisOutputFiles(); + Set files = submission.getAnalysis().getAnalysisOutputFiles(); AnalysisOutputFile outputFile = null; for (AnalysisOutputFile file : files) { - if (file.getFile() - .toFile() - .getName() - .contains(filename)) { + if (file.getFile().toFile().getName().contains(filename)) { outputFile = file; break; } } - return ResponseEntity.ok(Base64.getEncoder() - .encodeToString(outputFile.getBytesForFile())); + return ResponseEntity.ok(Base64.getEncoder().encodeToString(outputFile.getBytesForFile())); } /** @@ -856,21 +844,17 @@ public Map getMetadataForAnalysisSamples(@PathVariable Long subm // grab the metadata once and put it in a map Map> sampleMetadata = new HashMap<>(); - samples.stream() - .forEach(s -> { - Set metadataForSample = sampleService.getMetadataForSample(s); - sampleMetadata.put(s, metadataForSample); - }); + samples.stream().forEach(s -> { + Set metadataForSample = sampleService.getMetadataForSample(s); + sampleMetadata.put(s, metadataForSample); + }); // Let's get a list of all the metadata available that is unique. 
Set terms = new HashSet<>(); for (Sample sample : samples) { Set metadataEntries = sampleMetadata.get(sample); if (!metadataEntries.isEmpty()) { - terms.addAll(metadataEntries.stream() - .map(e -> e.getField() - .getLabel()) - .collect(Collectors.toSet())); + terms.addAll(metadataEntries.stream().map(e -> e.getField().getLabel()).collect(Collectors.toSet())); } } @@ -880,8 +864,7 @@ public Map getMetadataForAnalysisSamples(@PathVariable Long subm Set metadataEntries = sampleMetadata.get(sample); Map stringMetadata = new HashMap<>(); metadataEntries.forEach(e -> { - stringMetadata.put(e.getField() - .getLabel(), e); + stringMetadata.put(e.getField().getLabel(), e); }); Map valuesMap = new HashMap<>(); @@ -905,8 +888,7 @@ public Map getMetadataForAnalysisSamples(@PathVariable Long subm } /** - * Get a list of all {@link MetadataTemplate}s for the - * {@link AnalysisSubmission} + * Get a list of all {@link MetadataTemplate}s for the {@link AnalysisSubmission} * * @param submissionId id of the {@link AnalysisSubmission} * @return a map of {@link MetadataTemplate}s @@ -938,10 +920,11 @@ public Map getMetadataTemplatesForAnalysis(@PathVariable Long su } /** - * Generates a list of metadata fields for a five template. - * - * @param templateId {@link Long} id for the {@link MetadataTemplate} that the - * fields are required. + * Generates a list of metadata fields for a given template. + * + * @param templateId {@link Long} id for the {@link MetadataTemplate} that the fields are required. 
* @return {@link Map} */ @RequestMapping("/{submissionId}/metadata-template-fields") @@ -958,8 +941,7 @@ public Map getMetadataTemplateFields(@RequestParam Long template } /** - * Construct the model parameters for results with a newick output - * {@link Analysis} + * Construct the model parameters for results with a newick output {@link Analysis} * * @param submissionId The analysis submission id * @param locale The users current {@link Locale} @@ -1011,8 +993,7 @@ public AnalysisTreeResponse getNewickTree(@PathVariable Long submissionId, Local } /** - * Parse excel file and return an ExcelData dto which contains the row data - * as well as the headers. + * Parse excel file and return an ExcelData dto which contains the row data as well as the headers. * * @param submissionId The analysis submission id * @param filename The name of the excel file to parse @@ -1023,15 +1004,11 @@ public AnalysisTreeResponse getNewickTree(@PathVariable Long submissionId, Local @ResponseBody public ExcelData parseExcelFile(@PathVariable Long submissionId, String filename, Integer sheetIndex) { AnalysisSubmission submission = analysisSubmissionService.read(submissionId); - Set files = submission.getAnalysis() - .getAnalysisOutputFiles(); + Set files = submission.getAnalysis().getAnalysisOutputFiles(); AnalysisOutputFile outputFile = null; for (AnalysisOutputFile file : files) { - if (file.getFile() - .toFile() - .getName() - .contains(filename)) { + if (file.getFile().toFile().getName().contains(filename)) { outputFile = file; break; } @@ -1065,8 +1042,7 @@ public AnalysisProvenanceResponse getProvenanceByFile(@PathVariable Long submiss Set files = analysis.getAnalysisOutputFiles(); for (AnalysisOutputFile file : files) { - if (file.getLabel() - .contains(filename)) { + if (file.getLabel().contains(filename)) { outputFile = file; break; } @@ -1113,16 +1089,15 @@ public ResponseEntity getUpdatedProgress(@PathVariable boolean treeDefault = 
getTreeViewDefault(submission, locale); - return ResponseEntity.ok( - new UpdatedAnalysisProgress(submission.getAnalysisState(), prevStateBeforeError, duration, - treeDefault)); + return ResponseEntity.ok(new UpdatedAnalysisProgress(submission.getAnalysisState(), prevStateBeforeError, + duration, treeDefault)); } /** - * Private method which gets whether the tree view should be the default - * view or not - * + * Private method which gets whether the tree view should be the default view or not + * * @param submission The analysis submission * @param locale The user's locale * @return if tree view should be displayed by default or not @@ -1148,8 +1123,8 @@ private boolean getTreeViewDefault(AnalysisSubmission submission, Locale locale) } /** - * Private method which gets the analysis viewer type - * + * Private method which gets the analysis viewer type + * * @param submission The analysis submission * @return the viewer (tree, sistr, biohansel, etc) */ @@ -1157,8 +1132,7 @@ private String getAnalysisViewer(AnalysisSubmission submission) { IridaWorkflow iridaWorkflow = workflowsService.getIridaWorkflowOrUnknown(submission); // Get the name of the workflow - AnalysisType analysisType = iridaWorkflow.getWorkflowDescription() - .getAnalysisType(); + AnalysisType analysisType = iridaWorkflow.getWorkflowDescription().getAnalysisType(); Optional viewerForAnalysisType = analysisTypesService.getViewerForAnalysisType(analysisType); String viewer = ""; @@ -1190,8 +1164,7 @@ public ResponseEntity getAnalysisInfo(@PathVariable Long submissio IridaWorkflow iridaWorkflow = workflowsService.getIridaWorkflowOrUnknown(submission); // Get the name of the workflow - AnalysisType analysisType = 
iridaWorkflow.getWorkflowDescription().getAnalysisType(); String viewer = getAnalysisViewer(submission); @@ -1213,8 +1186,8 @@ public ResponseEntity getAnalysisInfo(@PathVariable Long submissio boolean treeDefault = getTreeViewDefault(submission, locale); return ResponseEntity.ok(new AnalysisInfo(submission, submission.getName(), submission.getAnalysisState(), - analysisType.getType(), viewer, currentUser.getSystemRole() - .equals(Role.ROLE_ADMIN), emailController.isMailConfigured(), prevState, duration, + analysisType.getType(), viewer, currentUser.getSystemRole().equals(Role.ROLE_ADMIN), + emailController.isMailConfigured(), prevState, duration, submission.getAnalysisState() == AnalysisState.COMPLETED, submission.getAnalysisState() == AnalysisState.ERROR, treeDefault)); } @@ -1253,12 +1226,10 @@ private Set getPrevTools(ToolExecution tool) { } /** - * Find a file with a `.newick` extension in the analysis output files if it - * exists. + * Find a file with a `.newick` extension in the analysis output files if it exists. * * @param submission the {@link AnalysisSubmission} to check - * @return an optional of an {@link AnalysisOutputFile} if the file was - * found + * @return an optional of an {@link AnalysisOutputFile} if the file was found */ private Optional getTreeFileForSubmission(AnalysisSubmission submission) { // some submissions may not name their tree with a ".newick" extension. @@ -1272,8 +1243,7 @@ private Optional getTreeFileForSubmission(AnalysisSubmission Optional treeOptional = Optional.empty(); // first check for a file with a key of "tree" - if (analysis.getAnalysisOutputFileNames() - .contains(treeFileKey)) { + if (analysis.getAnalysisOutputFileNames().contains(treeFileKey)) { treeOptional = Optional.of(analysis.getAnalysisOutputFile(treeFileKey)); } @@ -1282,8 +1252,7 @@ private Optional getTreeFileForSubmission(AnalysisSubmission // loop through the files looking for with a newick file. 
Get the // first one treeOptional = analysisOutputFiles.stream() - .filter(f -> FileUtilities.getFileExt(f.getFile()) - .equals(TREE_EXT)) + .filter(f -> FileUtilities.getFileExt(f.getFile()).equals(TREE_EXT)) .findFirst(); } @@ -1301,16 +1270,14 @@ private Optional getTreeFileForSubmission(AnalysisSubmission private ArrayList getExecutionParameters(ToolExecution tool) { ArrayList executionParameters = new ArrayList<>(); - for (Map.Entry entry : tool.getExecutionTimeParameters() - .entrySet()) { + for (Map.Entry entry : tool.getExecutionTimeParameters().entrySet()) { executionParameters.add(new AnalysisToolExecutionParameters(entry.getKey(), entry.getValue())); } return executionParameters; } /** - * Response object storing a project and whether or not it's shared with a - * given {@link AnalysisSubmission} + * Response object storing a project and whether or not it's shared with a given {@link AnalysisSubmission} */ @SuppressWarnings("unused") private class SharedProjectResponse { @@ -1341,8 +1308,8 @@ class SampleSequencingObject implements Comparable { SampleSequencingObject(SequencingObject sequencingObject) { this.sequencingObject = sequencingObject; try { - SampleSequencingObjectJoin sampleSequencingObjectJoin = sampleService.getSampleForSequencingObject( - sequencingObject); + SampleSequencingObjectJoin sampleSequencingObjectJoin = sampleService + .getSampleForSequencingObject(sequencingObject); if (sampleSequencingObjectJoin != null) { this.sample = sampleSequencingObjectJoin.getSubject(); } @@ -1374,8 +1341,54 @@ public int compareTo(SampleSequencingObject b) { } else if (b.sample == null) { return 1; } - return this.sample.getLabel() - .compareTo(b.sample.getLabel()); + return this.sample.getLabel().compareTo(b.sample.getLabel()); + } + } + + /** + * UI Model to return Genome Assembly with its accompanying sample. 
+ */ + class SampleGenomeAssembly implements Comparable { + private Sample sample; + private GenomeAssembly genomeAssembly; + + SampleGenomeAssembly(GenomeAssembly genomeAssembly) { + this.genomeAssembly = genomeAssembly; + try { + SampleGenomeAssemblyJoin sampleGenomeAssemblyJoin = sampleService + .getSampleForGenomeAssembly(genomeAssembly); + if (sampleGenomeAssemblyJoin != null) { + this.sample = sampleGenomeAssemblyJoin.getSubject(); + } + } catch (Exception e) { + logger.debug("Genome Assembly [" + genomeAssembly.getIdentifier() + "] does not have a parent sample", + e); + sample = null; + } + } + + public Long getId() { + return genomeAssembly.getId(); + } + + public Sample getSample() { + return sample; + } + + public GenomeAssembly getGenomeAssembly() { + return genomeAssembly; + } + + @Override + public int compareTo(SampleGenomeAssembly b) { + if (this.sample == null && b.sample == null) { + return 0; + } else if (this.sample == null) { + return -1; + } else if (b.sample == null) { + return 1; + } + return this.sample.getLabel().compareTo(b.sample.getLabel()); } } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/analysis/dto/AnalysisGenomeAssemblySamples.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/analysis/dto/AnalysisGenomeAssemblySamples.java new file mode 100644 index 00000000000..79b2ba35cc5 --- /dev/null +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/analysis/dto/AnalysisGenomeAssemblySamples.java @@ -0,0 +1,58 @@ + +package ca.corefacility.bioinformatics.irida.ria.web.analysis.dto; + +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; + +/** + * Used as a response for encapsulating analysis genome assembly sample data + */ + +public class AnalysisGenomeAssemblySamples { + private String sampleName; + private Long sampleId; + private Long genomeAssemblyId; + private GenomeAssembly genomeAssembly; + + public AnalysisGenomeAssemblySamples() { + } + + public 
AnalysisGenomeAssemblySamples(String sampleName, Long sampleId, Long genomeAssemblyId, + GenomeAssembly genomeAssembly) { + this.sampleName = sampleName; + this.sampleId = sampleId; + this.genomeAssemblyId = genomeAssemblyId; + this.genomeAssembly = genomeAssembly; + } + + public String getSampleName() { + return sampleName; + } + + public void setSampleName(String sampleName) { + this.sampleName = sampleName; + } + + public Long getSampleId() { + return sampleId; + } + + public void setSampleId(Long sampleId) { + this.sampleId = sampleId; + } + + public Long getAssemblyId() { + return genomeAssemblyId; + } + + public void setAssemblyId(Long genomeAssemblyId) { + this.genomeAssemblyId = genomeAssemblyId; + } + + public GenomeAssembly getGenomeAssembly() { + return genomeAssembly; + } + + public void setGenomeAssembly(GenomeAssembly genomeAssembly) { + this.genomeAssembly = genomeAssembly; + } +} \ No newline at end of file diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/analysis/dto/AnalysisInputFiles.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/analysis/dto/AnalysisInputFiles.java index aa8268f2e7f..5f6bd09b8f1 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/analysis/dto/AnalysisInputFiles.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/analysis/dto/AnalysisInputFiles.java @@ -4,25 +4,27 @@ import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile; - /** - * Used as a response for encapsulating analysis input files data which includes - * the samples, reads, and reference file if it was required by workflow + * Used as a response for encapsulating analysis input files data which includes the samples, reads, and reference file + * if it was required by workflow */ public class AnalysisInputFiles { - private List pairedEndSamples; - private List singleEndSamples; - private ReferenceFile referenceFile; + private List pairedEndSamples; + private List 
singleEndSamples; + private List genomeAssemblySamples; + private ReferenceFile referenceFile; public AnalysisInputFiles() { } - public AnalysisInputFiles(List pairedEndSamples, List singleEndSamples, ReferenceFile referenceFile) { - this.pairedEndSamples = pairedEndSamples; - this.singleEndSamples = singleEndSamples; - this.referenceFile = referenceFile; - } + public AnalysisInputFiles(List pairedEndSamples, List singleEndSamples, + List genomeAssemblySamples, ReferenceFile referenceFile) { + this.pairedEndSamples = pairedEndSamples; + this.singleEndSamples = singleEndSamples; + this.genomeAssemblySamples = genomeAssemblySamples; + this.referenceFile = referenceFile; + } public List getPairedEndSamples() { return pairedEndSamples; @@ -40,6 +42,14 @@ public void setSingleEndSamples(List singleEndSamples) this.singleEndSamples = singleEndSamples; } + public List getGenomeAssemblySamples() { + return genomeAssemblySamples; + } + + public void setGenomeAssemblySamples(List genomeAssemblySamples) { + this.genomeAssemblySamples = genomeAssemblySamples; + } + public ReferenceFile getReferenceFile() { return referenceFile; } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/LaunchAjaxController.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/LaunchAjaxController.java index e17f5d4c4de..b6ced89806e 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/LaunchAjaxController.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/LaunchAjaxController.java @@ -54,41 +54,42 @@ public ResponseEntity getPipelineDetails(@PathVariable UUID id, Lo try { return ResponseEntity.ok(pipelineService.getPipelineDetails(id, locale)); } catch (IridaWorkflowException e) { - return ResponseEntity.status(HttpStatus.NOT_FOUND) - .body(new AjaxErrorResponse("Cannot find this pipeline")); + return ResponseEntity.status(HttpStatus.NOT_FOUND).body(new 
AjaxErrorResponse("Cannot find this pipeline")); } } /** - * Get a list of the samples that are in the cart and get their associated sequence files that - * can be used on the current pipeline + * Get a list of the samples that are in the cart and get their associated sequence files that can be used on the + * current pipeline * - * @param paired Whether paired end files can be run on the current pipeline - * @param singles Whether single end files can be run on the current pipeline + * @param paired Whether paired end files can be run on the current pipeline + * @param singles Whether single end files can be run on the current pipeline + * @param assemblies Whether assembly files can be run on the current pipeline * @return list of samples containing their associated sequencing data */ @GetMapping("/samples") public ResponseEntity> getPipelineSamples( @RequestParam(required = false, defaultValue = "false") boolean paired, - @RequestParam(required = false, defaultValue = "false") boolean singles) { - return ResponseEntity.ok(sampleService.getPipelineSamples(paired, singles)); + @RequestParam(required = false, defaultValue = "false") boolean singles, + @RequestParam(required = false, defaultValue = "false") boolean assemblies) { + return ResponseEntity.ok(sampleService.getPipelineSamples(paired, singles, assemblies)); } /** * Launch a new IRIDA Workflow Pipeline * - * @param id The UUID for a workflow + * @param id The UUID for a workflow * @param request required parameters to launch the pipeline - * @param locale the Locale of the currently logged in user. + * @param locale the Locale of the currently logged in user. 
* @return A response to let the UI know the pipeline was launched successfully */ @PostMapping("/{id}") - public ResponseEntity launchPipeline(@PathVariable UUID id, @RequestBody LaunchRequest request, Locale locale) { + public ResponseEntity launchPipeline(@PathVariable UUID id, @RequestBody LaunchRequest request, + Locale locale) { try { return ResponseEntity.ok(new AjaxCreateItemSuccessResponse(startService.start(id, request, locale))); } catch (IridaWorkflowNotFoundException | ReferenceFileRequiredException e) { - return ResponseEntity.status(HttpStatus.NOT_FOUND) - .body(new AjaxErrorResponse(e.getMessage())); + return ResponseEntity.status(HttpStatus.NOT_FOUND).body(new AjaxErrorResponse(e.getMessage())); } } @@ -108,8 +109,7 @@ public ResponseEntity saveNewPipelineParameters(@PathVariable UUID return ResponseEntity.ok(new CreateNamedParameterSetAjaxResponse( pipelineService.saveNewPipelineParameters(id, parameters, locale))); } catch (IridaWorkflowNotFoundException e) { - return ResponseEntity.status(HttpStatus.NOT_FOUND) - .body(new AjaxErrorResponse("Pipeline cannot be found")); + return ResponseEntity.status(HttpStatus.NOT_FOUND).body(new AjaxErrorResponse("Pipeline cannot be found")); } } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchRequest.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchRequest.java index eae4c542199..2ae214c4a5b 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchRequest.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchRequest.java @@ -13,7 +13,7 @@ public class LaunchRequest { private String name; /** - * General text to provide more context for the pipeline. No restrictions. + * General text to provide more context for the pipeline. No restrictions. 
*/ private String description; @@ -22,6 +22,11 @@ public class LaunchRequest { */ private List fileIds; + /** + * List of genome assembly identifiers to run on the pipeline + */ + private List assemblyIds; + /** * When to send an email on pipeline error or completion */ @@ -33,21 +38,18 @@ public class LaunchRequest { private List projects; /** - * Update the samples run on the pipeline with the pipeline results - * Ignore if not required. + * Update the samples run on the pipeline with the pipeline results Ignore if not required. */ private boolean updateSamples; /** - * Identifier for a reference file to use on the pipeline. - * Ignore if not required + * Identifier for a reference file to use on the pipeline. Ignore if not required */ private Long reference; /** - * List of parameters for the pipeline. - * Should be: key: name, value: value - * The value needs to be an object because it can be boolean, string, number, etc... + * List of parameters for the pipeline. Should be: key: name, value: value The value needs to be an object because + * it can be boolean, string, number, etc... */ private Map parameters; @@ -88,6 +90,14 @@ public void setFileIds(List fileIds) { this.fileIds = fileIds; } + public List getAssemblyIds() { + return assemblyIds; + } + + public void setAssemblyIds(List assemblyIds) { + this.assemblyIds = assemblyIds; + } + public List getProjects() { return projects; } @@ -134,6 +144,7 @@ public void setEmailPipelineResult(String emailPipelineResult) { /** * Check to see if an email should be sent on pipeline errors. 
+ * * @return true if either email on error or completion selected */ public boolean sendEmailOnError() { @@ -142,6 +153,7 @@ public boolean sendEmailOnError() { /** * Check to see if an email should be sent on pipeline completion + * * @return true if an email should be sent on pipeline completion */ public boolean sendEmailOnCompletion() { diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchSample.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchSample.java index 62adb49a15d..3463db04c95 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchSample.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/LaunchSample.java @@ -2,6 +2,7 @@ import java.util.List; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; @@ -18,6 +19,8 @@ public class LaunchSample { */ private List files; + private List assemblyFiles; + public LaunchSample(Sample sample, ca.corefacility.bioinformatics.irida.model.project.Project project) { this.id = sample.getId(); this.label = sample.getLabel(); @@ -42,6 +45,15 @@ public List getFiles() { public void setFiles(List files) { this.files = files; + + } + + public List getAssemblyFiles() { + return assemblyFiles; + } + + public void setAssemblyFiles(List assemblyFiles) { + this.assemblyFiles = assemblyFiles; } /** diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/UIPipelineDetailsResponse.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/UIPipelineDetailsResponse.java index 485a87e1db7..de482be5c5d 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/UIPipelineDetailsResponse.java +++ 
b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/launchPipeline/dtos/UIPipelineDetailsResponse.java @@ -9,8 +9,7 @@ import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.ui.SelectOption; /** - * Model class to send details about a workflow pipeline to the UI - * Used on the launch pipeline page. + * Model class to send details about a workflow pipeline to the UI Used on the launch pipeline page. */ public class UIPipelineDetailsResponse extends AjaxResponse { private String name; @@ -23,6 +22,7 @@ public class UIPipelineDetailsResponse extends AjaxResponse { private List referenceFiles; private boolean acceptsSingleSequenceFiles; private boolean acceptsPairedSequenceFiles; + private boolean acceptsGenomeAssemblies; private List dynamicSources; private List projects; @@ -106,6 +106,14 @@ public void setAcceptsPairedSequenceFiles(boolean acceptsPairedSequenceFiles) { this.acceptsPairedSequenceFiles = acceptsPairedSequenceFiles; } + public boolean isAcceptsGenomeAssemblies() { + return acceptsGenomeAssemblies; + } + + public void setAcceptsGenomeAssemblies(boolean acceptsGenomeAssemblies) { + this.acceptsGenomeAssemblies = acceptsGenomeAssemblies; + } + public List getDynamicSources() { return dynamicSources; } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineSampleService.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineSampleService.java index 961f6a23546..8ebaefef43a 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineSampleService.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineSampleService.java @@ -7,6 +7,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.project.Project; import 
ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; @@ -27,27 +28,33 @@ public UIPipelineSampleService(UISampleService sampleService, UICartService cart } /** - * Get a list of the samples that are in the cart and get their associated sequence files that - * can be used on the current pipeline + * Get a list of the samples that are in the cart and get their associated sequence files that can be used on the + * current pipeline * - * @param paired Whether paired end files can be run on the current pipeline - * @param singles Whether single end files can be run on the current pipeline + * @param paired Whether paired end files can be run on the current pipeline + * @param singles Whether single end files can be run on the current pipeline + * @param assemblies Whether assemblies can be run on the current pipeline * @return list of samples containing their associated sequencing data */ - public List getPipelineSamples(boolean paired, boolean singles) { + public List getPipelineSamples(boolean paired, boolean singles, boolean assemblies) { Map> cart = cartService.getFullCart(); List samples = new ArrayList<>(); cart.forEach((project, projectSamples) -> { for (Sample sample : projectSamples) { LaunchSample launchSample = new LaunchSample(sample, project); List files = new ArrayList<>(); + List assemblyFiles = new ArrayList<>(); if (paired) { files.addAll(sampleService.getPairedSequenceFilesForSample(sample, project)); } if (singles) { files.addAll(sampleService.getSingleEndSequenceFilesForSample(sample, project)); } + if (assemblies) { + assemblyFiles.addAll(sampleService.getGenomeAssembliesForSample(sample)); + } launchSample.setFiles(files); + launchSample.setAssemblyFiles(assemblyFiles); samples.add(launchSample); } }); diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineService.java 
b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineService.java index 7c0922e0523..05c01669470 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineService.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineService.java @@ -98,15 +98,14 @@ public UIPipelineDetailsResponse getPipelineDetails(UUID id, Locale locale) thro IridaWorkflow workflow = workflowsService.getIridaWorkflow(id); IridaWorkflowDescription description = workflow.getWorkflowDescription(); UIPipelineDetailsResponse detailsResponse = new UIPipelineDetailsResponse(); - /* - Prefix for getting messages from IRIDA message properties file - */ - String prefix = "workflow." + description.getAnalysisType() - .getType() + "."; - - /* - Set up basic information for the pipeline being launched. - */ + /* + Prefix for getting messages from IRIDA message properties file + */ + String prefix = "workflow." + description.getAnalysisType().getType() + "."; + + /* + Set up basic information for the pipeline being launched. 
+ */ detailsResponse.setName(messageSource.getMessage(prefix + "title", new Object[] {}, locale)); detailsResponse.setDescription(messageSource.getMessage(prefix + "description", new Object[] {}, locale)); detailsResponse.setType(description.getName()); @@ -120,50 +119,50 @@ public UIPipelineDetailsResponse getPipelineDetails(UUID id, Locale locale) thro .collect(Collectors.toList()); detailsResponse.setProjects(projectsToShareWith); - /* - Add all pipeline parameters - */ + /* + Add all pipeline parameters + */ detailsResponse.setParameterWithOptions(getPipelineSpecificParametersWithOptions(description, locale)); - /* - Add saved parameter sets - */ + /* + Add saved parameter sets + */ detailsResponse.setSavedPipelineParameters(getSavedPipelineParameters(workflow, locale)); /* - Check / add reference files - */ - if (description.requiresReference()) { - detailsResponse.setRequiresReference(true); - detailsResponse.setReferenceFiles(getReferenceFilesForPipeline(projects)); - } - - /* - Can the pipeline write back - */ - Map> cart = cartService.getFullCart(); - boolean canUpdateSamples = analysisSubmissionSampleProcessor.hasRegisteredAnalysisSampleUpdater(description.getAnalysisType()); - if (canUpdateSamples) { - Authentication authentication = SecurityContextHolder.getContext() - .getAuthentication(); + Check / add reference files + */ + if (description.requiresReference()) { + detailsResponse.setRequiresReference(true); + detailsResponse.setReferenceFiles(getReferenceFilesForPipeline(projects)); + } + + /* + Can the pipeline write back + */ + Map> cart = cartService.getFullCart(); + boolean canUpdateSamples = analysisSubmissionSampleProcessor + .hasRegisteredAnalysisSampleUpdater(description.getAnalysisType()); + if (canUpdateSamples) { + Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); // Need to make sure that all samples are allowed to be updated. 
List samples = cart.values().stream().flatMap(Collection::stream).collect(Collectors.toList()); - canUpdateSamples = samples - .stream() + canUpdateSamples = samples.stream() .map(sample -> updateSamplePermission.isAllowed(authentication, sample)) .reduce(true, (a, b) -> a && b); if (canUpdateSamples) { detailsResponse.setUpdateSamples(messageSource.getMessage( - "workflow.label.share-analysis-samples." + description.getAnalysisType() - .getType(), new Object[] {}, locale)); + "workflow.label.share-analysis-samples." + description.getAnalysisType().getType(), + new Object[] {}, locale)); } } - /* - Set the acceptable file types - */ + /* + Set the acceptable file types + */ detailsResponse.setAcceptsSingleSequenceFiles(description.acceptsSingleSequenceFiles()); detailsResponse.setAcceptsPairedSequenceFiles(description.acceptsPairedSequenceFiles()); + detailsResponse.setAcceptsGenomeAssemblies(description.acceptsGenomeAssemblies()); /* Dynamic Sources - these are pulled from Galaxy @@ -188,21 +187,24 @@ public UIPipelineDetailsResponse getPipelineDetails(UUID id, Locale locale) thro */ try { String dynamicSourceName = dynamicSource.getName(); - String label = messageSource.getMessage("dynamicsource.label." + dynamicSourceName, new Object[] {}, locale); + String label = messageSource.getMessage("dynamicsource.label." 
+ dynamicSourceName, + new Object[] {}, locale); List options = new ArrayList<>(); - TabularToolDataTable galaxyToolDataTable = galaxyToolDataService.getToolDataTable(dynamicSourceName); + TabularToolDataTable galaxyToolDataTable = galaxyToolDataService + .getToolDataTable(dynamicSourceName); List labels = galaxyToolDataTable.getFieldsForColumn(dynamicSource.getDisplayColumn()); Iterator labelsIterator = labels.iterator(); - List values = galaxyToolDataTable.getFieldsForColumn( - dynamicSource.getParameterColumn()); + List values = galaxyToolDataTable + .getFieldsForColumn(dynamicSource.getParameterColumn()); Iterator valuesIterator = values.iterator(); while (labelsIterator.hasNext() && valuesIterator.hasNext()) { options.add(new SelectOption(valuesIterator.next(), labelsIterator.next())); } - dynamicSources.add(new InputWithOptions(parameter.getName(), label, options.get(0).getValue(), options)); + dynamicSources.add( + new InputWithOptions(parameter.getName(), label, options.get(0).getValue(), options)); } catch (Exception e) { logger.debug("Tool Data Table not found: ", e); } @@ -226,9 +228,7 @@ public UIPipelineDetailsResponse getPipelineDetails(UUID id, Locale locale) thro public SavedPipelineParameters saveNewPipelineParameters(UUID id, SavePipelineParametersRequest request, Locale locale) throws IridaWorkflowNotFoundException { IridaWorkflow workflow = workflowsService.getIridaWorkflow(id); - final String pipelineName = workflow.getWorkflowDescription() - .getName() - .toLowerCase(); + final String pipelineName = workflow.getWorkflowDescription().getName().toLowerCase(); IridaWorkflowNamedParameters namedParameters = new IridaWorkflowNamedParameters(request.getLabel(), id, request.getParameters()); namedParameters = namedParametersService.create(namedParameters); @@ -237,7 +237,8 @@ public SavedPipelineParameters saveNewPipelineParameters(UUID id, SavePipelinePa .stream() .map(entry -> new Input(entry.getKey(), 
messageSource.getMessage("pipeline.parameters." + pipelineName + "." + entry.getKey(), - new Object[] {}, locale), entry.getValue())) + new Object[] {}, locale), + entry.getValue())) .collect(Collectors.toList()); return new SavedPipelineParameters(namedParameters.getId(), namedParameters.getLabel(), params); @@ -269,9 +270,7 @@ public List getWorkflowTypes(Boolean automated, Locale locale) { logger.error("Cannot find IridaWorkFlow for '" + type.getType() + "'", e); } } - return pipelines.stream() - .sorted(Comparator.comparing(Pipeline::getName)) - .collect(Collectors.toList()); + return pipelines.stream().sorted(Comparator.comparing(Pipeline::getName)).collect(Collectors.toList()); } /** @@ -284,23 +283,20 @@ public List getWorkflowTypes(Boolean automated, Locale locale) { public List getProjectAnalysisTemplates(Long projectId, Locale locale) { Project project = projectService.read(projectId); List templates = analysisSubmissionService.getAnalysisTemplatesForProject(project); - return templates.stream() - .map(template -> { - UUID id = template.getWorkflowId(); - String type; - try { - IridaWorkflow flow = workflowsService.getIridaWorkflow(id); - AnalysisType analysisType = flow.getWorkflowDescription() - .getAnalysisType(); - type = messageSource.getMessage("workflow." + analysisType.getType() + ".title", - new Object[] {}, locale); - } catch (IridaWorkflowNotFoundException e) { - type = messageSource.getMessage("workflow.UNKNOWN.title", new Object[] {}, locale); - } - return new AnalysisTemplate(template.getId(), template.getName(), type, template.isEnabled(), - template.getStatusMessage()); - }) - .collect(Collectors.toList()); + return templates.stream().map(template -> { + UUID id = template.getWorkflowId(); + String type; + try { + IridaWorkflow flow = workflowsService.getIridaWorkflow(id); + AnalysisType analysisType = flow.getWorkflowDescription().getAnalysisType(); + type = messageSource.getMessage("workflow." 
+ analysisType.getType() + ".title", new Object[] {}, + locale); + } catch (IridaWorkflowNotFoundException e) { + type = messageSource.getMessage("workflow.UNKNOWN.title", new Object[] {}, locale); + } + return new AnalysisTemplate(template.getId(), template.getName(), type, template.isEnabled(), + template.getStatusMessage()); + }).collect(Collectors.toList()); } /** @@ -313,8 +309,8 @@ public List getProjectAnalysisTemplates(Long projectId, Locale */ public String removeProjectAutomatedPipeline(Long templateId, Long projectId, Locale locale) { Project project = projectService.read(projectId); - AnalysisSubmissionTemplate template = analysisSubmissionService.readAnalysisSubmissionTemplateForProject( - templateId, project); + AnalysisSubmissionTemplate template = analysisSubmissionService + .readAnalysisSubmissionTemplateForProject(templateId, project); analysisSubmissionService.deleteAnalysisSubmissionTemplateForProject(templateId, project); return messageSource.getMessage("server.AnalysisTemplates.remove", new Object[] { template.getName() }, locale); } @@ -328,26 +324,21 @@ public String removeProjectAutomatedPipeline(Long templateId, Long projectId, Lo */ private List getPipelineSpecificParametersWithOptions(IridaWorkflowDescription description, Locale locale) { - return description.getParameters() - .stream() - .filter(IridaWorkflowParameter::hasChoices) - .map(parameter -> { - String name = description.getName() - .toLowerCase(); - String label = localizedParamLabel(locale, name, parameter.getName()); - String defaultValue = parameter.getDefaultValue(); - List options = parameter.getChoices() - .stream() - .map(option -> new SelectOption(option.getValue(), - localizedParamOptionLabel(locale, name, parameter.getName(), option.getName()))) - .collect(Collectors.toUnmodifiableList()); - return new InputWithOptions(parameter.getName(), label, defaultValue, options); - }) - .collect(Collectors.toUnmodifiableList()); + return 
description.getParameters().stream().filter(IridaWorkflowParameter::hasChoices).map(parameter -> { + String name = description.getName().toLowerCase(); + String label = localizedParamLabel(locale, name, parameter.getName()); + String defaultValue = parameter.getDefaultValue(); + List options = parameter.getChoices() + .stream() + .map(option -> new SelectOption(option.getValue(), + localizedParamOptionLabel(locale, name, parameter.getName(), option.getName()))) + .collect(Collectors.toUnmodifiableList()); + return new InputWithOptions(parameter.getName(), label, defaultValue, options); + }).collect(Collectors.toUnmodifiableList()); } /** - * Internationalize a parameter label. If there is not translation for it, just return the default text. + * Internationalize a parameter label. If there is not translation for it, just return the default text. * * @param locale current users {@link Locale} * @param workflowName name of the current {@link IridaWorkflow} @@ -391,52 +382,48 @@ private String localizedParamOptionLabel(Locale locale, String workflowName, Str private List getSavedPipelineParameters(IridaWorkflow workflow, Locale locale) { IridaWorkflowDescription description = workflow.getWorkflowDescription(); List workflowParameters = description.getParameters(); - String pipelineName = description.getName() - .toLowerCase(); + String pipelineName = description.getName().toLowerCase(); List savedParameters = new ArrayList<>(); - /* - If there are no parameters just return an empty list. - */ + /* + If there are no parameters just return an empty list. + */ if (workflowParameters == null) { return savedParameters; } - /* - Get the default parameter set - */ + /* + Get the default parameter set + */ List defaultParameters = workflowParameters.stream() .filter(p -> !p.isRequired()) .map(p -> new Input(p.getName(), messageSource.getMessage("pipeline.parameters." + pipelineName + "." 
+ p.getName(), - new Object[] {}, locale), p.getDefaultValue())) + new Object[] {}, locale), + p.getDefaultValue())) .collect(Collectors.toList()); savedParameters.add(new SavedPipelineParameters(0L, messageSource.getMessage("workflow.parameters.named.default", new Object[] {}, locale), defaultParameters)); - /* - Add any saved parameter sets - */ - List namedParameters = namedParametersService.findNamedParametersForWorkflow( - workflow.getWorkflowIdentifier()); - savedParameters.addAll(namedParameters.stream() - .map(wp -> { - Map inputParameter = wp.getInputParameters(); - - // Go through the parameters and see which ones are getting overwritten. - List parameters = defaultParameters.stream() - .map(parameter -> { - if (inputParameter.containsKey(parameter.getName())) { - return new Input(parameter.getName(), parameter.getLabel(), - inputParameter.get(parameter.getName())); - } - return new Input(parameter.getName(), parameter.getLabel(), parameter.getValue()); - }) - .collect(Collectors.toList()); - return new SavedPipelineParameters(wp.getId(), wp.getLabel(), parameters); - }) - .collect(Collectors.toList())); + /* + Add any saved parameter sets + */ + List namedParameters = namedParametersService + .findNamedParametersForWorkflow(workflow.getWorkflowIdentifier()); + savedParameters.addAll(namedParameters.stream().map(wp -> { + Map inputParameter = wp.getInputParameters(); + + // Go through the parameters and see which ones are getting overwritten. 
+ List parameters = defaultParameters.stream().map(parameter -> { + if (inputParameter.containsKey(parameter.getName())) { + return new Input(parameter.getName(), parameter.getLabel(), + inputParameter.get(parameter.getName())); + } + return new Input(parameter.getName(), parameter.getLabel(), parameter.getValue()); + }).collect(Collectors.toList()); + return new SavedPipelineParameters(wp.getId(), wp.getLabel(), parameters); + }).collect(Collectors.toList())); return savedParameters; } @@ -448,43 +435,40 @@ private List getSavedPipelineParameters(IridaWorkflow w * @return List of reference files for consumption by the UI. */ private List getReferenceFilesForPipeline(List projects) { - return projects.stream() - .map(project -> { - List list = new ArrayList<>(); - for (Join projectReferenceFileJoin : referenceFileService.getReferenceFilesForProject( - project)) { - try { - ReferenceFile file = projectReferenceFileJoin.getObject(); - Path path = file.getFile(); - String filesize = FileUtilities.humanReadableByteCount(Files.size(path), true); - UIReferenceFile uiReferenceFile = new UIReferenceFile(projectReferenceFileJoin, filesize); - list.add(uiReferenceFile); - } catch (IOException e) { - logger.error(e.getMessage()); - } - } - return list; - }) - .flatMap(List::stream) - .collect(Collectors.toList()); + return projects.stream().map(project -> { + List list = new ArrayList<>(); + for (Join projectReferenceFileJoin : referenceFileService + .getReferenceFilesForProject(project)) { + try { + ReferenceFile file = projectReferenceFileJoin.getObject(); + Path path = file.getFile(); + String filesize = FileUtilities.humanReadableByteCount(Files.size(path), true); + UIReferenceFile uiReferenceFile = new UIReferenceFile(projectReferenceFileJoin, filesize); + list.add(uiReferenceFile); + } catch (IOException e) { + logger.error(e.getMessage()); + } + } + return list; + }).flatMap(List::stream).collect(Collectors.toList()); } - /** - * Create a Pipeline for consumption by 
the UI - * - * @param analysisType {@link AnalysisType} type of analysis pipeline - * @param locale {@link Locale} - * @return {@link Pipeline} - * @throws IridaWorkflowNotFoundException thrown if {@link IridaWorkflowDescription} is not found - */ - private Pipeline createPipeline(AnalysisType analysisType, Locale locale) throws IridaWorkflowNotFoundException { - IridaWorkflowDescription workflowDescription = workflowsService.getDefaultWorkflowByType(analysisType) - .getWorkflowDescription(); - String prefix = "workflow." + analysisType.getType(); - String name = messageSource.getMessage(prefix + ".title", new Object[]{}, locale); - String description = messageSource.getMessage(prefix + ".description", new Object[]{}, locale); - UUID id = workflowDescription.getId(); - String styleName = analysisType.getType(); - return new Pipeline(name, description, id, styleName); - } + /** + * Create a Pipeline for consumption by the UI + * + * @param analysisType {@link AnalysisType} type of analysis pipeline + * @param locale {@link Locale} + * @return {@link Pipeline} + * @throws IridaWorkflowNotFoundException thrown if {@link IridaWorkflowDescription} is not found + */ + private Pipeline createPipeline(AnalysisType analysisType, Locale locale) throws IridaWorkflowNotFoundException { + IridaWorkflowDescription workflowDescription = workflowsService.getDefaultWorkflowByType(analysisType) + .getWorkflowDescription(); + String prefix = "workflow." 
+ analysisType.getType(); + String name = messageSource.getMessage(prefix + ".title", new Object[] {}, locale); + String description = messageSource.getMessage(prefix + ".description", new Object[] {}, locale); + UUID id = workflowDescription.getId(); + String styleName = analysisType.getType(); + return new Pipeline(name, description, id, styleName); + } } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineStartService.java b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineStartService.java index 23cdeb0ec27..017b3e0c305 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineStartService.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIPipelineStartService.java @@ -11,6 +11,7 @@ import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException; import ca.corefacility.bioinformatics.irida.exceptions.pipelines.ReferenceFileRequiredException; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.project.Project; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; @@ -23,6 +24,7 @@ import ca.corefacility.bioinformatics.irida.model.workflow.submission.IridaWorkflowNamedParameters; import ca.corefacility.bioinformatics.irida.ria.web.launchPipeline.dtos.LaunchRequest; import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; +import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; import ca.corefacility.bioinformatics.irida.service.ProjectService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; @@ -35,6 +37,7 @@ public class UIPipelineStartService { private final IridaWorkflowsService 
workflowsService; private final SequencingObjectService sequencingObjectService; + private final GenomeAssemblyService genomeAssemblyService; private final AnalysisSubmissionService submissionService; private final ProjectService projectService; private final WorkflowNamedParametersService namedParametersService; @@ -42,11 +45,12 @@ public class UIPipelineStartService { @Autowired public UIPipelineStartService(IridaWorkflowsService workflowsService, - SequencingObjectService sequencingObjectService, AnalysisSubmissionService submissionService, - ProjectService projectService, WorkflowNamedParametersService namedParametersService, - MessageSource messageSource) { + SequencingObjectService sequencingObjectService, GenomeAssemblyService genomeAssemblyService, + AnalysisSubmissionService submissionService, ProjectService projectService, + WorkflowNamedParametersService namedParametersService, MessageSource messageSource) { this.workflowsService = workflowsService; this.sequencingObjectService = sequencingObjectService; + this.genomeAssemblyService = genomeAssemblyService; this.submissionService = submissionService; this.projectService = projectService; this.namedParametersService = namedParametersService; @@ -56,19 +60,13 @@ public UIPipelineStartService(IridaWorkflowsService workflowsService, /** * Start a new pipeline * - * @param id - * - pipeline identifier - * @param request - * - details about the request to start the pipeline - * @param locale - * - currently logged in users locale - * @return The id of the new {@link AnalysisSubmission}, if more than one - * are kicked off, then the first id is returned. - * @throws IridaWorkflowNotFoundException - * thrown if the workflow cannot be found - * @throws ReferenceFileRequiredException - * thrown if a reference file is required and not sent (should - * not happen). 
+ * @param id - pipeline identifier + * @param request - details about the request to start the pipeline + * @param locale - currently logged in users locale + * @return The id of the new {@link AnalysisSubmission}, if more than one are kicked off, then the first id is + * returned. + * @throws IridaWorkflowNotFoundException thrown if the workflow cannot be found + * @throws ReferenceFileRequiredException thrown if a reference file is required and not sent (should not happen). */ public Long start(UUID id, LaunchRequest request, Locale locale) throws IridaWorkflowNotFoundException, ReferenceFileRequiredException { @@ -112,8 +110,12 @@ public Long start(UUID id, LaunchRequest request, Locale locale) */ List singles = new ArrayList<>(); List pairs = new ArrayList<>(); - // Check for single ended sequence files + List assemblies = new ArrayList<>(); + Iterable sequencingObjects = sequencingObjectService.readMultiple(request.getFileIds()); + Iterable genomeAssemblies = genomeAssemblyService.readMultiple(request.getAssemblyIds()); + + // Check for single ended sequence files if (description.acceptsSingleSequenceFiles()) { sequencingObjects.forEach(sequencingObject -> { if (sequencingObject instanceof SingleEndSequenceFile) { @@ -126,21 +128,26 @@ public Long start(UUID id, LaunchRequest request, Locale locale) pairs.add((SequenceFilePair) sequencingObject); } }); + } else if (description.acceptsGenomeAssemblies()) { + genomeAssemblies.forEach(genomeAssembly -> { + assemblies.add(genomeAssembly); + }); } IridaWorkflowInput inputs = description.getInputs(); if (inputs.requiresSingleSample()) { submissionService.createSingleSampleSubmission(workflow, request.getReference(), singles, pairs, - request.getParameters(), namedParameters, request.getName(), request.getDescription(), projects, - request.isUpdateSamples(), request.sendEmailOnCompletion(), request.sendEmailOnError()); + assemblies, request.getParameters(), namedParameters, request.getName(), + 
request.getDescription(), projects, request.isUpdateSamples(), request.sendEmailOnCompletion(), + request.sendEmailOnError()); // Returning -1L as a flag to the UI that multiple pipelines // have been launched, thereby there is not // On specific pipeline to go to. return -1L; } else { AnalysisSubmission submission = submissionService.createMultipleSampleSubmission(workflow, - request.getReference(), singles, pairs, request.getParameters(), namedParameters, + request.getReference(), singles, pairs, assemblies, request.getParameters(), namedParameters, request.getName(), request.getDescription(), projects, request.isUpdateSamples(), request.sendEmailOnCompletion(), request.sendEmailOnError()); return submission.getId(); diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/security/permissions/files/ReadGenomeAssemblyPermission.java b/src/main/java/ca/corefacility/bioinformatics/irida/security/permissions/files/ReadGenomeAssemblyPermission.java new file mode 100644 index 00000000000..0de808ec42e --- /dev/null +++ b/src/main/java/ca/corefacility/bioinformatics/irida/security/permissions/files/ReadGenomeAssemblyPermission.java @@ -0,0 +1,69 @@ +package ca.corefacility.bioinformatics.irida.security.permissions.files; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.core.Authentication; +import org.springframework.stereotype.Component; + +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; +import ca.corefacility.bioinformatics.irida.model.joins.impl.SampleGenomeAssemblyJoin; +import ca.corefacility.bioinformatics.irida.model.sample.Sample; +import ca.corefacility.bioinformatics.irida.repositories.assembly.GenomeAssemblyRepository; +import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleGenomeAssemblyJoinRepository; +import 
ca.corefacility.bioinformatics.irida.security.permissions.RepositoryBackedPermission; +import ca.corefacility.bioinformatics.irida.security.permissions.sample.ReadSamplePermission; + +/** + * Evaluate whether or not a user can read a {@link GenomeAssembly} + */ +@Component +public class ReadGenomeAssemblyPermission extends RepositoryBackedPermission { + private static final String PERMISSION_PROVIDED = "canReadGenomeAssembly"; + + private static final Logger logger = LoggerFactory.getLogger(ReadGenomeAssemblyPermission.class); + + private final ReadSamplePermission samplePermission; + private final SampleGenomeAssemblyJoinRepository sgaRepository; + + /** + * Construct an instance of {@link ReadGenomeAssemblyPermission} + * + * @param genomeAssemblyRepository Repository for {@link GenomeAssembly}s + * @param samplePermission Permission reading {@link Sample}s + * @param sgaRepository {@link SampleGenomeAssemblyJoinRepository} + */ + @Autowired + public ReadGenomeAssemblyPermission(final GenomeAssemblyRepository genomeAssemblyRepository, + ReadSamplePermission samplePermission, SampleGenomeAssemblyJoinRepository sgaRepository) { + super(GenomeAssembly.class, Long.class, genomeAssemblyRepository); + + this.samplePermission = samplePermission; + this.sgaRepository = sgaRepository; + } + + /** + * {@inheritDoc} + */ + @Override + protected boolean customPermissionAllowed(final Authentication authentication, final GenomeAssembly assembly) { + SampleGenomeAssemblyJoin genomeAssemblyJoin = sgaRepository.getSampleForGenomeAssembly(assembly); + + if (genomeAssemblyJoin != null) { + return samplePermission.isAllowed(authentication, genomeAssemblyJoin.getSubject()); + } else { + logger.trace("Permission denied for reading genome assembly id=" + assembly.getId() + " by user=" + + authentication.getName() + ", no joined sample found."); + + return false; + } + } + + /** + * {@inheritDoc} + */ + @Override + public String getPermissionProvided() { + return PERMISSION_PROVIDED; + 
} +} diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/AnalysisSubmissionService.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/AnalysisSubmissionService.java index a61fd90f798..fa38d33036b 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/AnalysisSubmissionService.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/service/AnalysisSubmissionService.java @@ -8,6 +8,7 @@ import ca.corefacility.bioinformatics.irida.exceptions.EntityNotFoundException; import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException; import ca.corefacility.bioinformatics.irida.exceptions.NoPercentageCompleteException; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState; import ca.corefacility.bioinformatics.irida.model.enums.StatisticTimePeriod; import ca.corefacility.bioinformatics.irida.model.project.Project; @@ -27,48 +28,38 @@ /** * A service for AnalysisSubmissions. - * - * */ public interface AnalysisSubmissionService extends CRUDService { /** * Given an analysis submission id, gets the state of this analysis. * - * @param analysisSubmissionId - * The id of this analysis. + * @param analysisSubmissionId The id of this analysis. * @return The state of this analysis. - * @throws EntityNotFoundException - * If the corresponding analysis cannot be found. + * @throws EntityNotFoundException If the corresponding analysis cannot be found. */ public AnalysisState getStateForAnalysisSubmission(Long analysisSubmissionId) throws EntityNotFoundException; /** * Gets a {@link Set} of {@link AnalysisSubmission}s for a {@link User}. * - * @param user - * The {@link User} to find all submissions for. + * @param user The {@link User} to find all submissions for. * @return A {@link Set} of {@link AnalysisSubmission}s for a user. 
*/ public Set getAnalysisSubmissionsForUser(User user); /** - * Gets a {@link Set} of {@link AnalysisSubmission}s for the current - * {@link User}. + * Gets a {@link Set} of {@link AnalysisSubmission}s for the current {@link User}. * - * @return A {@link Set} of {@link AnalysisSubmission}s for the current - * user. + * @return A {@link Set} of {@link AnalysisSubmission}s for the current user. */ public Set getAnalysisSubmissionsForCurrentUser(); - + /** - * Gets all {@link AnalysisSubmissionService}s accessible by the current - * user matching one of the workflow ids. + * Gets all {@link AnalysisSubmissionService}s accessible by the current user matching one of the workflow ids. * - * @param workflowIds - * The workflow ids to match. - * @return A list of {@link AnalysisSubmission}s matching one of the - * workflow ids. + * @param workflowIds The workflow ids to match. + * @return A list of {@link AnalysisSubmission}s matching one of the workflow ids. */ public List getAnalysisSubmissionsAccessibleByCurrentUserByWorkflowIds( Collection workflowIds); @@ -81,75 +72,74 @@ public List getAnalysisSubmissionsAccessibleByCurrentUserByW public void deleteMultiple(Collection ids); /** - * Submit {@link AnalysisSubmission} for workflows allowing multiple one - * {@link SequenceFile} or {@link SequenceFilePair} + * Submit {@link AnalysisSubmission} for workflows allowing multiple one {@link SequenceFile} or + * {@link SequenceFilePair} * * @param workflow {@link IridaWorkflow} that the files will be run on * @param ref {@link Long} id for a {@link ReferenceFile} * @param sequenceFiles {@link List} of {@link SequenceFile} to run on the workflow - * @param sequenceFilePairs {@link List} of {@link SequenceFilePair} to run on the - * workflow + * @param sequenceFilePairs {@link List} of {@link SequenceFilePair} to run on the workflow + * @param assemblies {@link List} of {@link GenomeAssembly} to run on the workflow * @param unnamedParameters {@link Map} of parameters specific 
for the pipeline * @param namedParameters the named parameters to use for the workflow. * @param name {@link String} the name for the analysis * @param analysisDescription {@link String} the description of the analysis being submitted * @param projectsToShare A list of {@link Project}s to share analysis results with - * @param writeResultsToSamples If true, results of this pipeline will be saved back to the - * samples on successful completion. - * @param emailPipelineResultCompleted If true, user will be emailed if a pipeline successfully - * completes + * @param writeResultsToSamples If true, results of this pipeline will be saved back to the samples on + * successful completion. + * @param emailPipelineResultCompleted If true, user will be emailed if a pipeline successfully completes * @param emailPipelineResultError If true, user will be emailed if a pipeline errors * @return the {@link AnalysisSubmission} created for the files. */ public AnalysisSubmission createMultipleSampleSubmission(IridaWorkflow workflow, Long ref, List sequenceFiles, List sequenceFilePairs, - Map unnamedParameters, IridaWorkflowNamedParameters namedParameters, String name, - String analysisDescription, List projectsToShare, boolean writeResultsToSamples, - boolean emailPipelineResultCompleted, boolean emailPipelineResultError); + List assemblies, Map unnamedParameters, + IridaWorkflowNamedParameters namedParameters, String name, String analysisDescription, + List projectsToShare, boolean writeResultsToSamples, boolean emailPipelineResultCompleted, + boolean emailPipelineResultError); /** - * Submit {@link AnalysisSubmission} for workflows requiring only one - * {@link SequenceFile} or {@link SequenceFilePair} + * Submit {@link AnalysisSubmission} for workflows requiring only one {@link SequenceFile} or + * {@link SequenceFilePair} * * @param workflow {@link IridaWorkflow} that the files will be run on * @param ref {@link Long} id for a {@link ReferenceFile} * @param sequenceFiles {@link 
List} of {@link SequenceFile} to run on the workflow - * @param sequenceFilePairs {@link List} of {@link SequenceFilePair} to run on the - * workflow + * @param sequenceFilePairs {@link List} of {@link SequenceFilePair} to run on the workflow + * @param assemblies {@link List} or {@link GenomeAssembly} to run on the workflow * @param unnamedParameters {@link Map} of parameters specific for the pipeline * @param namedParameters the named parameters to use for the workflow. * @param name {@link String} the name for the analysis * @param analysisDescription {@link String} the description of the analysis being submitted * @param projectsToShare A list of {@link Project}s to share analysis results with - * @param writeResultsToSamples If true, results of this pipeline will be saved back to the - * samples on successful completion. - * @param emailPipelineResultCompleted If true, user will be emailed if a pipelines successfully - * completes + * @param writeResultsToSamples If true, results of this pipeline will be saved back to the samples on + * successful completion. + * @param emailPipelineResultCompleted If true, user will be emailed if a pipelines successfully completes * @param emailPipelineResultError If true, user will be emailed if a pipeline errors - * @return the {@link Collection} of {@link AnalysisSubmission} created for - * the supplied files. + * @return the {@link Collection} of {@link AnalysisSubmission} created for the supplied files. 
*/ public Collection createSingleSampleSubmission(IridaWorkflow workflow, Long ref, List sequenceFiles, List sequenceFilePairs, - Map unnamedParameters, IridaWorkflowNamedParameters namedParameters, String name, - String analysisDescription, List projectsToShare, boolean writeResultsToSamples, - boolean emailPipelineResultCompleted, boolean emailPipelineResultError); + List assemblies, Map unnamedParameters, + IridaWorkflowNamedParameters namedParameters, String name, String analysisDescription, + List projectsToShare, boolean writeResultsToSamples, boolean emailPipelineResultCompleted, + boolean emailPipelineResultError); /** * Create a new {@link AnalysisSubmissionTemplate} for a project with the given settings * - * @param workflow {@link IridaWorkflow} that the files will be run on - * @param referenceFileId {@link Long} id for a {@link ReferenceFile} - * @param params {@link Map} of parameters specific for the pipeline - * @param namedParameters the named parameters to use for the workflow. - * @param submissionName {@link String} the name for the analysis - * @param statusMessage A status message for the submission template - * @param analysisDescription {@link String} the description of the analysis being submitted - * @param projectsToShare The {@link Project} to save the analysis to - * @param writeResultsToSamples If true, results of this pipeline will be saved back to the samples on successful - * completion. + * @param workflow {@link IridaWorkflow} that the files will be run on + * @param referenceFileId {@link Long} id for a {@link ReferenceFile} + * @param params {@link Map} of parameters specific for the pipeline + * @param namedParameters the named parameters to use for the workflow. 
+ * @param submissionName {@link String} the name for the analysis + * @param statusMessage A status message for the submission template + * @param analysisDescription {@link String} the description of the analysis being submitted + * @param projectsToShare The {@link Project} to save the analysis to + * @param writeResultsToSamples If true, results of this pipeline will be saved back to the samples on + * successful completion. * @param emailPipelineResultCompleted Whether or not to email the pipeline results that completed to the user - * @param emailPipelineResultError Whether or not to email the pipeline results that errored to the user + * @param emailPipelineResultError Whether or not to email the pipeline results that errored to the user * @return the newly created {@link AnalysisSubmissionTemplate} */ public AnalysisSubmissionTemplate createSingleSampleSubmissionTemplate(IridaWorkflow workflow, Long referenceFileId, @@ -183,82 +173,68 @@ public AnalysisSubmissionTemplate createSingleSampleSubmissionTemplate(IridaWork public void deleteAnalysisSubmissionTemplateForProject(Long id, Project project); /** - * Given the id of an {@link AnalysisSubmission} gets the percentage - * complete. + * Given the id of an {@link AnalysisSubmission} gets the percentage complete. * - * @param id - * The id of an {@link AnalysisSubmission}. + * @param id The id of an {@link AnalysisSubmission}. * @return The percentage complete for this {@link AnalysisSubmission}. - * @throws NoPercentageCompleteException - * An exception that indicates there is no percentage complete - * for the submission. - * @throws ExecutionManagerException - * If there was an issue when contacting the execution manager. - * @throws EntityNotFoundException - * If no such corresponding submission exists. + * @throws NoPercentageCompleteException An exception that indicates there is no percentage complete for the + * submission. 
+ * @throws ExecutionManagerException If there was an issue when contacting the execution manager. + * @throws EntityNotFoundException If no such corresponding submission exists. */ - public float getPercentCompleteForAnalysisSubmission(Long id) throws EntityNotFoundException, - NoPercentageCompleteException, ExecutionManagerException; + public float getPercentCompleteForAnalysisSubmission(Long id) + throws EntityNotFoundException, NoPercentageCompleteException, ExecutionManagerException; /** * Get the {@link JobError} objects for a {@link AnalysisSubmission} id + * * @param id {@link AnalysisSubmission} id * @return {@link JobError} objects for a {@link AnalysisSubmission} - * @throws EntityNotFoundException If no such {@link AnalysisSubmission} exists. + * @throws EntityNotFoundException If no such {@link AnalysisSubmission} exists. * @throws ExecutionManagerException If there was an issue contacting the execution manager. */ List getJobErrors(Long id) throws EntityNotFoundException, ExecutionManagerException; /** * Get first {@link JobError} for a {@link AnalysisSubmission} id + * * @param id {@link AnalysisSubmission} id * @return {@link JobError} object - * @throws EntityNotFoundException If no such {@link AnalysisSubmission} exists. + * @throws EntityNotFoundException If no such {@link AnalysisSubmission} exists. * @throws ExecutionManagerException If there was an issue contacting the execution manager. 
*/ - JobError getFirstJobError(Long id) throws EntityNotFoundException, ExecutionManagerException; + JobError getFirstJobError(Long id) throws EntityNotFoundException, ExecutionManagerException; /** * Share an {@link AnalysisSubmission} with a given {@link Project} * - * @param submission - * {@link AnalysisSubmission} to share - * @param project - * {@link Project} to share with - * @return a {@link ProjectAnalysisSubmissionJoin} describing the - * relationship + * @param submission {@link AnalysisSubmission} to share + * @param project {@link Project} to share with + * @return a {@link ProjectAnalysisSubmissionJoin} describing the relationship */ public ProjectAnalysisSubmissionJoin shareAnalysisSubmissionWithProject(AnalysisSubmission submission, Project project); /** - * Cancel the share of an {@link AnalysisSubmission} with a given - * {@link Project} + * Cancel the share of an {@link AnalysisSubmission} with a given {@link Project} * - * @param submission - * the {@link AnalysisSubmission} to stop sharing - * @param project - * the {@link Project} to stop sharing with + * @param submission the {@link AnalysisSubmission} to stop sharing + * @param project the {@link Project} to stop sharing with */ public void removeAnalysisProjectShare(AnalysisSubmission submission, Project project); - + /** - * Get a list of all {@link AnalysisSubmission}s with a given - * {@link AnalysisState} + * Get a list of all {@link AnalysisSubmission}s with a given {@link AnalysisState} * - * @param states - * A list of {@link AnalysisState} to find - * {@link AnalysisSubmission}s for + * @param states A list of {@link AnalysisState} to find {@link AnalysisSubmission}s for * @return a Collection of {@link AnalysisSubmission} */ public Collection findAnalysesByState(Collection states); /** - * Get a collection of all {@link AnalysisSubmission}s shared with a - * {@link Project}. + * Get a collection of all {@link AnalysisSubmission}s shared with a {@link Project}. 
* - * @param project - * The {@link Project} to search. + * @param project The {@link Project} to search. * @return A collection of {@link AnalysisSubmission}s. */ public Collection getAnalysisSubmissionsSharedToProject(Project project); @@ -268,7 +244,7 @@ public ProjectAnalysisSubmissionJoin shareAnalysisSubmissionWithProject(Analysis * * @param search basic search string * @param name analysis submission name - * @param states Set of {@link AnalysisState} of the submission to search + * @param states Set of {@link AnalysisState} of the submission to search * @param workflowIds set of workflow UUIDs to search * @param project {@link Project} to search in * @param pageRequest a {@link PageRequest} for the results to show @@ -282,7 +258,7 @@ public Page listSubmissionsForProject(String search, String * * @param search basic search string * @param name analysis submission name - * @param states Set of {@link AnalysisState} of the submission to search + * @param states Set of {@link AnalysisState} of the submission to search * @param workflowIds set of workflow UUIDs to search * @param pageRequest a {@link PageRequest} for the results to show * @return a page of {@link AnalysisSubmission} @@ -301,8 +277,8 @@ public Page listAllSubmissions(String search, String name, S * @param pageRequest a {@link PageRequest} for the restults to show * @return a page of {@link AnalysisSubmission}s for the given user */ - public Page listSubmissionsForUser(String search, String name, Set states, User user, - Set workflowIds, PageRequest pageRequest); + public Page listSubmissionsForUser(String search, String name, Set states, + User user, Set workflowIds, PageRequest pageRequest); /** * Update the priority of an {@link AnalysisSubmission} @@ -314,7 +290,9 @@ public Page listSubmissionsForUser(String search, String nam public AnalysisSubmission updatePriority(AnalysisSubmission submission, AnalysisSubmission.Priority priority); /** - * Get all {@link User} generated {@link 
ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisOutputFile} info + * Get all {@link User} generated + * {@link ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisOutputFile} info + * * @param user {@link User} * @return List of {@link ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisOutputFile} info */ @@ -337,7 +315,8 @@ public Page listSubmissionsForUser(String search, String nam List getAllAutomatedAnalysisOutputInfoForAProject(Long projectId); /** - * Get the status of the analysis service. This will be the number of running and queued analyses + * Get the status of the analysis service. This will be the number of running and queued analyses + * * @return An {@link AnalysisServiceStatus} object showing the number of running and queued analyses */ public AnalysisServiceStatus getAnalysisServiceStatus(); @@ -351,18 +330,16 @@ public Page listSubmissionsForUser(String search, String nam public Long getAnalysesRanInTimePeriod(Date createdDate); /** - * Get list of {@link GenericStatModel} of analyses run in the past n time period - * grouped by the format provided. + * Get list of {@link GenericStatModel} of analyses run in the past n time period grouped by the format provided. * - * @param createdDate the minimum date for submissions ran + * @param createdDate the minimum date for submissions ran * @param statisticTimePeriod the enum containing format for which to group the results by * @return An {@link GenericStatModel} list */ public List getAnalysesRanGrouped(Date createdDate, StatisticTimePeriod statisticTimePeriod); /** - * Get count of {@link AnalysisSubmission} for the user - * grouped by the format provided. + * Get count of {@link AnalysisSubmission} for the user grouped by the format provided. 
* * @param user The user identifier * @return Count of analyses ran by user diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/GenomeAssemblyService.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/GenomeAssemblyService.java index 597a9fe0cfd..8ff17f1abb6 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/GenomeAssemblyService.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/service/GenomeAssemblyService.java @@ -1,10 +1,15 @@ package ca.corefacility.bioinformatics.irida.service; import java.util.Collection; +import java.util.Map; +import java.util.Set; +import ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException; import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.joins.impl.SampleGenomeAssemblyJoin; import ca.corefacility.bioinformatics.irida.model.sample.Sample; +import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; +import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; /** * A service for storing and retrieving {@link GenomeAssembly} entities. @@ -44,4 +49,22 @@ public interface GenomeAssemblyService extends CRUDService * @param genomeAssemblyId The genome assembly. */ public void removeGenomeAssemblyFromSample(Sample sample, Long genomeAssemblyId); + + /** + * Get a map of {@link GenomeAssembly}s and corresponding {@link Sample}s. + * + * @param genomeAssemblies A {@link Set} of {@link GenomeAssembly}s. + * @return A {@link Map} of between {@link Sample} and {@link SequencingObject}. + * @throws DuplicateSampleException If there is a duplicate sample. 
+ */ + public Map getUniqueSamplesForGenomeAssemblies(Set genomeAssemblies) + throws DuplicateSampleException; + + /** + * Get a set of {@link GenomeAssembly}s used as inputs in an {@link AnalysisSubmission} + * + * @param submission The {@link AnalysisSubmission} to get genome assemblies from + * @return A {@link Set} of {@link GenomeAssembly}s + */ + public Set getGenomeAssembliesForAnalysisSubmission(AnalysisSubmission submission); } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisCollectionServiceGalaxy.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisCollectionServiceGalaxy.java index 5c18cafb88d..6425d42a033 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisCollectionServiceGalaxy.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisCollectionServiceGalaxy.java @@ -1,7 +1,15 @@ package ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy; +import java.io.IOException; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException; import ca.corefacility.bioinformatics.irida.exceptions.UploadException; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; @@ -9,6 +17,7 @@ import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.DatasetCollectionType; import ca.corefacility.bioinformatics.irida.pipeline.upload.DataStorage; import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyHistoriesService; + import 
com.github.jmchilton.blend4j.galaxy.beans.History; import com.github.jmchilton.blend4j.galaxy.beans.Library; import com.github.jmchilton.blend4j.galaxy.beans.collection.request.CollectionDescription; @@ -16,22 +25,14 @@ import com.github.jmchilton.blend4j.galaxy.beans.collection.request.HistoryDatasetElement; import com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse; -import java.io.IOException; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - /** - * A service for constructing dataset collections of input files for workflows - * in galaxy. - * + * A service for constructing dataset collections of input files for workflows in galaxy. */ public class AnalysisCollectionServiceGalaxy { private static final String COLLECTION_NAME_SINGLE = "irida_sequence_files_single"; private static final String COLLECTION_NAME_PAIRED = "irida_sequence_files_paired"; + private static final String COLLECTION_NAME_ASSEMBLIES = "irida_assemblies"; private static final String FORWARD_NAME = "forward"; private static final String REVERSE_NAME = "reverse"; @@ -39,32 +40,26 @@ public class AnalysisCollectionServiceGalaxy { private GalaxyHistoriesService galaxyHistoriesService; /** - * Builds a new {@link AnalysisCollectionServiceGalaxy} with the given - * information. + * Builds a new {@link AnalysisCollectionServiceGalaxy} with the given information. * - * @param galaxyHistoriesService A GalaxyHistoriesService for interacting with - * Galaxy Histories. + * @param galaxyHistoriesService A GalaxyHistoriesService for interacting with Galaxy Histories. */ public AnalysisCollectionServiceGalaxy(GalaxyHistoriesService galaxyHistoriesService) { this.galaxyHistoriesService = galaxyHistoriesService; } /** - * Uploads a list of single sequence files belonging to the given samples to - * Galaxy. + * Uploads a list of single sequence files belonging to the given samples to Galaxy. 
* - * @param sampleSequenceFiles A map between {@link Sample} and - * {@link SingleEndSequenceFile}. + * @param sampleSequenceFiles A map between {@link Sample} and {@link SingleEndSequenceFile}. * @param workflowHistory The history to upload the sequence files into. * @param workflowLibrary A temporary library to upload files into. - * @return A CollectionResponse for the dataset collection constructed from the - * given files. + * @return A CollectionResponse for the dataset collection constructed from the given files. * @throws ExecutionManagerException If there was an error uploading the files. - * @throws IOException If there was an error reading the sequence file. + * @throws IOException If there was an error reading the sequence file. */ - public CollectionResponse uploadSequenceFilesSingleEnd( - Map sampleSequenceFiles, History workflowHistory, - Library workflowLibrary) throws ExecutionManagerException, IOException { + public CollectionResponse uploadSequenceFilesSingleEnd(Map sampleSequenceFiles, + History workflowHistory, Library workflowLibrary) throws ExecutionManagerException, IOException { CollectionDescription description = new CollectionDescription(); description.setCollectionType(DatasetCollectionType.LIST.toString()); @@ -99,22 +94,17 @@ public CollectionResponse uploadSequenceFilesSingleEnd( } /** - * Uploads a list of paired sequence files belonging to the given samples to - * Galaxy. + * Uploads a list of paired sequence files belonging to the given samples to Galaxy. * - * @param sampleSequenceFilesPaired A map between {@link Sample} and - * {@link SequenceFilePair}. - * @param workflowHistory The history to upload the sequence files - * into. + * @param sampleSequenceFilesPaired A map between {@link Sample} and {@link SequenceFilePair}. + * @param workflowHistory The history to upload the sequence files into. * @param workflowLibrary A temporary library to upload files into. 
- * @return A CollectionResponse for the dataset collection constructed from the - * given files. + * @return A CollectionResponse for the dataset collection constructed from the given files. * @throws ExecutionManagerException If there was an error uploading the files. - * @throws IOException If there was an error reading the sequence file. + * @throws IOException If there was an error reading the sequence file. */ - public CollectionResponse uploadSequenceFilesPaired( - Map sampleSequenceFilesPaired, History workflowHistory, - Library workflowLibrary) throws ExecutionManagerException, IOException { + public CollectionResponse uploadSequenceFilesPaired(Map sampleSequenceFilesPaired, + History workflowHistory, Library workflowLibrary) throws ExecutionManagerException, IOException { CollectionDescription description = new CollectionDescription(); description.setCollectionType(DatasetCollectionType.LIST_PAIRED.toString()); @@ -170,4 +160,49 @@ public CollectionResponse uploadSequenceFilesPaired( return galaxyHistoriesService.constructCollection(description, workflowHistory); } + + /** + * Uploads a list of genome assembly files belonging to the given samples to Galaxy. + * + * @param sampleAssemblies A map between {@link Sample} and {@link GenomeAssembly}. + * @param workflowHistory The history to upload the sequence files into. + * @param workflowLibrary A temporary library to upload files into. + * @return A CollectionResponse for the dataset collection constructed from the given files. + * @throws ExecutionManagerException If there was an error uploading the files. + * @throws IOException If there was an error reading the assembly file. 
+ */ + public CollectionResponse uploadGenomeAssemblies(Map sampleAssemblies, + History workflowHistory, Library workflowLibrary) throws ExecutionManagerException, IOException { + + CollectionDescription description = new CollectionDescription(); + description.setCollectionType(DatasetCollectionType.LIST.toString()); + description.setName(COLLECTION_NAME_ASSEMBLIES); + + Map samplesMap = new HashMap<>(); + for (Sample sample : sampleAssemblies.keySet()) { + GenomeAssembly assembly = sampleAssemblies.get(sample); + samplesMap.put(assembly.getFile(), sample); + } + + // upload files to library and then to a history + Map pathHistoryDatasetId = galaxyHistoriesService.filesToLibraryToHistory(samplesMap.keySet(), + workflowHistory, workflowLibrary, DataStorage.LOCAL); + + for (Path assemblyFilePath : samplesMap.keySet()) { + if (!pathHistoryDatasetId.containsKey(assemblyFilePath)) { + throw new UploadException("Error, no corresponding history item found for " + assemblyFilePath); + } + + Sample sample = samplesMap.get(assemblyFilePath); + String datasetHistoryId = pathHistoryDatasetId.get(assemblyFilePath); + + HistoryDatasetElement datasetElement = new HistoryDatasetElement(); + datasetElement.setId(datasetHistoryId); + datasetElement.setName(sample.getSampleName()); + + description.addDatasetElement(datasetElement); + } + + return galaxyHistoriesService.constructCollection(description, workflowHistory); + } } \ No newline at end of file diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisWorkspaceServiceGalaxy.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisWorkspaceServiceGalaxy.java index 2a24b5281ee..eec4f9f2229 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisWorkspaceServiceGalaxy.java +++ 
b/src/main/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/AnalysisWorkspaceServiceGalaxy.java @@ -1,7 +1,20 @@ package ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import ca.corefacility.bioinformatics.irida.exceptions.*; import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetException; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile; import ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; @@ -23,6 +36,7 @@ import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyHistoriesService; import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyLibrariesService; import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyWorkflowService; +import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.analysis.workspace.AnalysisWorkspaceService; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; @@ -31,24 +45,12 @@ import com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import 
java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; /** * A service for performing tasks for analysis in Galaxy. - * */ public class AnalysisWorkspaceServiceGalaxy implements AnalysisWorkspaceService { @@ -70,12 +72,12 @@ public class AnalysisWorkspaceServiceGalaxy implements AnalysisWorkspaceService private SequencingObjectService sequencingObjectService; + private GenomeAssemblyService genomeAssemblyService; + /** - * Builds a new {@link AnalysisWorkspaceServiceGalaxy} with the given - * information. + * Builds a new {@link AnalysisWorkspaceServiceGalaxy} with the given information. * - * @param galaxyHistoriesService A GalaxyHistoriesService for interacting with Galaxy - * Histories. + * @param galaxyHistoriesService A GalaxyHistoriesService for interacting with Galaxy Histories. * @param galaxyWorkflowService A GalaxyWorkflowService for interacting with Galaxy workflows. * @param galaxyLibrariesService An object for building libraries in Galaxy. * @param iridaWorkflowsService A service used for loading workflows from IRIDA. @@ -83,6 +85,7 @@ public class AnalysisWorkspaceServiceGalaxy implements AnalysisWorkspaceService * @param analysisProvenanceServiceGalaxy The service for provenance information. * @param analysisParameterServiceGalaxy A service for setting up parameters in Galaxy. 
* @param sequencingObjectService A service for reading {@link SequencingObject}s + * @param genomeAssemblyService A service for read {@link GenomeAssembly}s */ public AnalysisWorkspaceServiceGalaxy(GalaxyHistoriesService galaxyHistoriesService, GalaxyWorkflowService galaxyWorkflowService, GalaxyLibrariesService galaxyLibrariesService, @@ -90,7 +93,7 @@ public AnalysisWorkspaceServiceGalaxy(GalaxyHistoriesService galaxyHistoriesServ AnalysisCollectionServiceGalaxy analysisCollectionServiceGalaxy, AnalysisProvenanceServiceGalaxy analysisProvenanceServiceGalaxy, AnalysisParameterServiceGalaxy analysisParameterServiceGalaxy, - SequencingObjectService sequencingObjectService) { + SequencingObjectService sequencingObjectService, GenomeAssemblyService genomeAssemblyService) { this.galaxyHistoriesService = galaxyHistoriesService; this.galaxyWorkflowService = galaxyWorkflowService; this.galaxyLibrariesService = galaxyLibrariesService; @@ -99,6 +102,7 @@ public AnalysisWorkspaceServiceGalaxy(GalaxyHistoriesService galaxyHistoriesServ this.analysisProvenanceServiceGalaxy = analysisProvenanceServiceGalaxy; this.analysisParameterServiceGalaxy = analysisParameterServiceGalaxy; this.sequencingObjectService = sequencingObjectService; + this.genomeAssemblyService = genomeAssemblyService; } /** @@ -117,22 +121,14 @@ public String prepareAnalysisWorkspace(AnalysisSubmission analysisSubmission) th /** * Builds a new AnalysisOutputFile from the given file in Galaxy. * - * @param analysisId - * The id of the analysis performed in Galaxy. - * @param labelPrefix - * The prefix to add to the label of this file. - * @param dataset - * The dataset containing the data for the AnalysisOutputFile. - * @param outputDirectory - * A directory to download the resulting output files. + * @param analysisId The id of the analysis performed in Galaxy. + * @param labelPrefix The prefix to add to the label of this file. + * @param dataset The dataset containing the data for the AnalysisOutputFile. 
+ * @param outputDirectory A directory to download the resulting output files. * @return An AnalysisOutputFile storing a local copy of the Galaxy file. - * @throws IOException - * If there was an issue creating a local file. - * @throws ExecutionManagerDownloadException - * If there was an issue downloading the data from Galaxy. - * @throws ExecutionManagerException - * If there was an issue extracting tool execution provenance - * from Galaxy. + * @throws IOException If there was an issue creating a local file. + * @throws ExecutionManagerDownloadException If there was an issue downloading the data from Galaxy. + * @throws ExecutionManagerException If there was an issue extracting tool execution provenance from Galaxy. */ private AnalysisOutputFile buildOutputFile(String analysisId, String labelPrefix, Dataset dataset, Path outputDirectory) throws IOException, ExecutionManagerDownloadException, ExecutionManagerException { @@ -168,6 +164,8 @@ public PreparedWorkflowGalaxy prepareAnalysisFiles(AnalysisSubmission analysisSu .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class); Set pairedEndFiles = sequencingObjectService .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class); + Set genomeAssemblies = genomeAssemblyService + .getGenomeAssembliesForAnalysisSubmission(analysisSubmission); if (iridaWorkflow.getWorkflowDescription().requiresReference()) { checkArgument(analysisSubmission.getReferenceFile().isPresent(), @@ -189,6 +187,12 @@ public PreparedWorkflowGalaxy prepareAnalysisFiles(AnalysisSubmission analysisSu + analysisSubmission); } + if (!iridaWorkflow.getWorkflowDescription().acceptsGenomeAssemblies()) { + checkArgument(genomeAssemblies.isEmpty(), + "workflow does not accept genome assemblies, but genome assemblies are passed as input to " + + analysisSubmission); + } + String temporaryLibraryName = AnalysisSubmission.class.getSimpleName() + "-" + UUID.randomUUID().toString(); 
History workflowHistory = galaxyHistoriesService.findById(analysisSubmission.getRemoteAnalysisId()); @@ -201,6 +205,9 @@ public PreparedWorkflowGalaxy prepareAnalysisFiles(AnalysisSubmission analysisSu Map pairedFiles = sequencingObjectService .getUniqueSamplesForSequencingObjects(pairedEndFiles); + Map assemblies = genomeAssemblyService + .getUniqueSamplesForGenomeAssemblies(genomeAssemblies); + // check that there aren't common sample names between single and paired if (samplesInCommon(singleFiles, pairedFiles)) { throw new SampleAnalysisDuplicateException( @@ -236,6 +243,16 @@ public PreparedWorkflowGalaxy prepareAnalysisFiles(AnalysisSubmission analysisSu collectionResponsePaired.getId(), WorkflowInvocationInputs.InputSourceType.HDCA)); } + if (!assemblies.isEmpty()) { + String genomeAssembliesLabel = workflowInput.getGenomeAssemblies().get(); + String workflowGenomeAssembliesInputId = galaxyWorkflowService.getWorkflowInputId(workflowDetails, + genomeAssembliesLabel); + CollectionResponse collectionResponseAssemblies = analysisCollectionServiceGalaxy + .uploadGenomeAssemblies(assemblies, workflowHistory, workflowLibrary); + inputs.setInput(workflowGenomeAssembliesInputId, new WorkflowInvocationInputs.WorkflowInvocationInput( + collectionResponseAssemblies.getId(), WorkflowInvocationInputs.InputSourceType.HDCA)); + } + String analysisId = workflowHistory.getId(); if (iridaWorkflow.getWorkflowDescription().requiresReference()) { @@ -248,15 +265,11 @@ public PreparedWorkflowGalaxy prepareAnalysisFiles(AnalysisSubmission analysisSu } /** - * Determines if the two data structures of samples/sequence files share a - * common sample. + * Determines if the two data structures of samples/sequence files share a common sample. * - * @param sampleSequenceFilesSingle - * A map of single sequence files and samples. - * @param sampleSequenceFilesPaired - * A map of sequence file pairs and samples. 
- * @return True if the two data structures share a common sample, false - * otherwise. + * @param sampleSequenceFilesSingle A map of single sequence files and samples. + * @param sampleSequenceFilesPaired A map of sequence file pairs and samples. + * @return True if the two data structures share a common sample, false otherwise. */ private boolean samplesInCommon(Map sampleSequenceFilesSingle, Map sampleSequenceFilesPaired) { @@ -273,24 +286,14 @@ private boolean samplesInCommon(Map sampleSequenceFilesSingle, /** * Prepares a reference file for input to the workflow. * - * @param referenceFile - * The {@link ReferenceFile} for the workflow. - * @param workflowHistory - * The {@link History} for the workflow. - * @param referenceFileLabel - * The label for the reference file in the workflow. - * @param workflowDetails - * The {@link WorkflowDetails} for the workflow. - * @param inputs - * The {@link WorkflowInputs} object used to setup inputs for the - * workflow. - * @throws UploadException - * If there's an exception when uploading files to the workflow - * engine. - * @throws GalaxyDatasetException - * If there's an exception with Galaxy datasets. - * @throws WorkflowException - * If there's an exception with workflow methods. + * @param referenceFile The {@link ReferenceFile} for the workflow. + * @param workflowHistory The {@link History} for the workflow. + * @param referenceFileLabel The label for the reference file in the workflow. + * @param workflowDetails The {@link WorkflowDetails} for the workflow. + * @param inputs The {@link WorkflowInputs} object used to setup inputs for the workflow. + * @throws UploadException If there's an exception when uploading files to the workflow engine. + * @throws GalaxyDatasetException If there's an exception with Galaxy datasets. + * @throws WorkflowException If there's an exception with workflow methods. 
*/ private void prepareReferenceFile(ReferenceFile referenceFile, History workflowHistory, String referenceFileLabel, WorkflowDetails workflowDetails, WorkflowInvocationInputs inputs) @@ -302,20 +305,17 @@ private void prepareReferenceFile(ReferenceFile referenceFile, History workflowH String workflowReferenceFileInputId = galaxyWorkflowService.getWorkflowInputId(workflowDetails, referenceFileLabel); - inputs.setInput(workflowReferenceFileInputId, - new WorkflowInvocationInputs.WorkflowInvocationInput(referenceDataset.getId(), WorkflowInvocationInputs.InputSourceType.HDA)); + inputs.setInput(workflowReferenceFileInputId, new WorkflowInvocationInputs.WorkflowInvocationInput( + referenceDataset.getId(), WorkflowInvocationInputs.InputSourceType.HDA)); } /** - * Gets the prefix for a label for an output file based on the input - * {@link Sample} name. + * Gets the prefix for a label for an output file based on the input {@link Sample} name. * - * @param analysisSubmission - * The submission containing input {@link Sample}s. - * @param iridaWorkflow - * The {@link IridaWorkflow}. - * @return The label prefix (sample name) if this workflow operates only on - * a single sample, otherwise an empty String. + * @param analysisSubmission The submission containing input {@link Sample}s. + * @param iridaWorkflow The {@link IridaWorkflow}. + * @return The label prefix (sample name) if this workflow operates only on a single sample, otherwise an empty + * String. 
*/ private String getLabelPrefix(AnalysisSubmission analysisSubmission, IridaWorkflow iridaWorkflow) { String labelPrefix = null; @@ -323,11 +323,19 @@ private String getLabelPrefix(AnalysisSubmission analysisSubmission, IridaWorkfl if (iridaWorkflow.getWorkflowDescription().getInputs().requiresSingleSample()) { try { + Set samples = Sets.newHashSet(); + Set sequencingObjectsForAnalysisSubmission = sequencingObjectService .getSequencingObjectsForAnalysisSubmission(analysisSubmission); - Set samples = Sets.newHashSet(); samples.addAll(sequencingObjectService - .getUniqueSamplesForSequencingObjects(sequencingObjectsForAnalysisSubmission).keySet()); + .getUniqueSamplesForSequencingObjects(sequencingObjectsForAnalysisSubmission) + .keySet()); + + Set genomeAssembliesForAnalysisSubmission = genomeAssemblyService + .getGenomeAssembliesForAnalysisSubmission(analysisSubmission); + samples.addAll( + genomeAssemblyService.getUniqueSamplesForGenomeAssemblies(genomeAssembliesForAnalysisSubmission) + .keySet()); Set sampleNames = samples.stream().map(Sample::getSampleName).collect(Collectors.toSet()); @@ -387,7 +395,7 @@ public Analysis getAnalysisResults(AnalysisSubmission analysisSubmission) throws } AnalysisType analysisType = iridaWorkflow.getWorkflowDescription().getAnalysisType(); - + return new Analysis(analysisId, analysisOutputFiles, analysisType); } @@ -401,13 +409,13 @@ public boolean outputFilesExist(AnalysisSubmission analysisSubmission) IridaWorkflowDescription workflowDescription = flow.getWorkflowDescription(); List workflowDescriptionOutputs = workflowDescription.getOutputs(); - List historyContents = galaxyHistoriesService.showHistoryContents(analysisSubmission.getRemoteAnalysisId()); + List historyContents = galaxyHistoriesService + .showHistoryContents(analysisSubmission.getRemoteAnalysisId()); - result = workflowDescriptionOutputs.stream().allMatch( workflowOutput-> - historyContents.stream().map(HistoryContents::getName).anyMatch( historyContentName-> - 
historyContentName.equals(workflowOutput.getFileName()) - ) - ); + result = workflowDescriptionOutputs.stream() + .allMatch(workflowOutput -> historyContents.stream() + .map(HistoryContents::getName) + .anyMatch(historyContentName -> historyContentName.equals(workflowOutput.getFileName()))); return result; } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/GenomeAssemblyServiceImpl.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/GenomeAssemblyServiceImpl.java index f6a72de41f9..a6eb0599bf5 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/GenomeAssemblyServiceImpl.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/GenomeAssemblyServiceImpl.java @@ -1,6 +1,9 @@ package ca.corefacility.bioinformatics.irida.service.impl; import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; import javax.validation.Validator; @@ -11,10 +14,12 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException; import ca.corefacility.bioinformatics.irida.exceptions.EntityNotFoundException; import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.joins.impl.SampleGenomeAssemblyJoin; import ca.corefacility.bioinformatics.irida.model.sample.Sample; +import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; import ca.corefacility.bioinformatics.irida.repositories.assembly.GenomeAssemblyRepository; import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleGenomeAssemblyJoinRepository; import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; @@ -27,10 +32,13 @@ public class GenomeAssemblyServiceImpl extends CRUDServiceImpl getUniqueSamplesForGenomeAssemblies(Set genomeAssemblies) + 
throws DuplicateSampleException { + Map genomeAssembliesSampleMap = new HashMap<>(); + + for (GenomeAssembly genomeAssembly : genomeAssemblies) { + SampleGenomeAssemblyJoin join = sampleGenomeAssemblyJoinRepository + .getSampleForGenomeAssembly(genomeAssembly); + + if (join == null) { + throw new EntityNotFoundException("No sample associated with genome assembly " + + genomeAssembly.getClass() + "[id=" + genomeAssembly.getId() + "]"); + } else { + Sample sample = join.getSubject(); + if (genomeAssembliesSampleMap.containsKey(sample)) { + GenomeAssembly prevGenomeAssembly = genomeAssembliesSampleMap.get(sample); + throw new DuplicateSampleException("Genome Assemblies " + genomeAssembly + ", " + prevGenomeAssembly + + " have the sample sample " + sample); + } else { + genomeAssembliesSampleMap.put(sample, genomeAssembly); + } + } + } + + return genomeAssembliesSampleMap; + } + + /** + * {@inheritDoc} + */ + @Override + @PreAuthorize("hasPermission(#submission, 'canReadAnalysisSubmission')") + public Set getGenomeAssembliesForAnalysisSubmission(AnalysisSubmission submission) { + return repository.findGenomeAssembliesForAnalysisSubmission(submission); + } + + /** + * {@inheritDoc} + */ + @Override + @PreAuthorize("hasRole('ROLE_ADMIN') or hasPermission(#idents, 'canReadGenomeAssembly')") + public Iterable readMultiple(Iterable idents) { + return super.readMultiple(idents); + } } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/analysis/submission/AnalysisSubmissionServiceImpl.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/analysis/submission/AnalysisSubmissionServiceImpl.java index 5dcd30fda77..ce82c0e9490 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/analysis/submission/AnalysisSubmissionServiceImpl.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/analysis/submission/AnalysisSubmissionServiceImpl.java @@ -28,6 +28,7 @@ import 
ca.corefacility.bioinformatics.irida.exceptions.EntityNotFoundException; import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException; import ca.corefacility.bioinformatics.irida.exceptions.NoPercentageCompleteException; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisCleanedState; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState; import ca.corefacility.bioinformatics.irida.model.enums.StatisticTimePeriod; @@ -35,7 +36,6 @@ import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile; import ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; -import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile; import ca.corefacility.bioinformatics.irida.model.user.User; import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow; @@ -57,6 +57,7 @@ import ca.corefacility.bioinformatics.irida.repositories.user.UserRepository; import ca.corefacility.bioinformatics.irida.ria.web.admin.dto.statistics.GenericStatModel; import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; +import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.analysis.execution.galaxy.AnalysisExecutionServiceGalaxyCleanupAsync; import ca.corefacility.bioinformatics.irida.service.impl.CRUDServiceImpl; @@ -105,6 +106,7 @@ public class AnalysisSubmissionServiceImpl extends CRUDServiceImpl createSingleSampleSubmission(IridaWorkflow workflow, Long ref, List sequenceFiles, List sequenceFilePairs, - Map params, IridaWorkflowNamedParameters namedParameters, String name, - String analysisDescription, List 
projectsToShare, boolean writeResultsToSamples, + List assemblies, Map params, IridaWorkflowNamedParameters namedParameters, + String name, String analysisDescription, List projectsToShare, boolean writeResultsToSamples, boolean emailPipelineResultCompleted, boolean emailPipelineResultError) { final Collection createdSubmissions = new HashSet(); @@ -619,6 +623,48 @@ public Collection createSingleSampleSubmission(IridaWorkflow } } + // genome assemblies + if (description.acceptsGenomeAssemblies()) { + final Map samplesMap = genomeAssemblyService + .getUniqueSamplesForGenomeAssemblies(Sets.newHashSet(assemblies)); + + for (final Map.Entry entry : samplesMap.entrySet()) { + Sample s = entry.getKey(); + GenomeAssembly assembly = entry.getValue(); + // Build the analysis submission + AnalysisSubmission.Builder builder = AnalysisSubmission.builder(workflow.getWorkflowIdentifier()); + builder.name(name + "_" + s.getSampleName()); + builder.inputAssemblies(ImmutableSet.of(assembly)); + builder.updateSamples(writeResultsToSamples); + builder.priority(AnalysisSubmission.Priority.MEDIUM); + // Add if user should be emailed on pipeline completion/error + builder.emailPipelineResultCompleted(emailPipelineResultCompleted); + builder.emailPipelineResultError(emailPipelineResultError); + // Add reference file + if (ref != null && description.requiresReference()) { + // Note: This cannot be empty if through the UI if the + // pipeline required a reference file. + ReferenceFile referenceFile = referenceFileRepository.findById(ref).orElse(null); + builder.referenceFile(referenceFile); + } + + if (description.acceptsParameters()) { + if (namedParameters != null) { + builder.withNamedParameters(namedParameters); + } else { + if (!params.isEmpty()) { + // Note: This cannot be empty if through the UI if + // the pipeline required params. 
+ builder.inputParameters(params); + } + } + } + + // Create the submission + createdSubmissions.add(create(builder.build())); + } + } + // Share with the required projects for (AnalysisSubmission submission : createdSubmissions) { for (Project project : projectsToShare) { @@ -636,8 +682,8 @@ public Collection createSingleSampleSubmission(IridaWorkflow @PreAuthorize("hasRole('ROLE_USER')") public AnalysisSubmission createMultipleSampleSubmission(IridaWorkflow workflow, Long ref, List sequenceFiles, List sequenceFilePairs, - Map params, IridaWorkflowNamedParameters namedParameters, String name, - String newAnalysisDescription, List projectsToShare, boolean writeResultsToSamples, + List assemblies, Map params, IridaWorkflowNamedParameters namedParameters, + String name, String newAnalysisDescription, List projectsToShare, boolean writeResultsToSamples, boolean emailPipelineResultCompleted, boolean emailPipelineResultError) { AnalysisSubmission.Builder builder = AnalysisSubmission.builder(workflow.getWorkflowIdentifier()); builder.name(name); @@ -665,6 +711,13 @@ public AnalysisSubmission createMultipleSampleSubmission(IridaWorkflow workflow, } } + // Add any genome assemblies. 
+ if (description.acceptsGenomeAssemblies()) { + if (!assemblies.isEmpty()) { + builder.inputAssemblies(Sets.newHashSet(assemblies)); + } + } + if (description.acceptsParameters()) { if (namedParameters != null) { builder.withNamedParameters(namedParameters); diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/sample/SampleServiceImpl.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/sample/SampleServiceImpl.java index 7669f922f88..15e0857f921 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/sample/SampleServiceImpl.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/service/impl/sample/SampleServiceImpl.java @@ -50,6 +50,7 @@ import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisFastQC; import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; import ca.corefacility.bioinformatics.irida.repositories.analysis.AnalysisRepository; +import ca.corefacility.bioinformatics.irida.repositories.assembly.GenomeAssemblyRepository; import ca.corefacility.bioinformatics.irida.repositories.joins.project.ProjectSampleJoinRepository; import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleGenomeAssemblyJoinRepository; import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleSequencingObjectJoinRepository; @@ -100,6 +101,8 @@ public class SampleServiceImpl extends CRUDServiceImpl implements private final SampleGenomeAssemblyJoinRepository sampleGenomeAssemblyJoinRepository; + private final GenomeAssemblyRepository genomeAssemblyRepository; + private final UserRepository userRepository; private final MetadataEntryRepository metadataEntryRepository; @@ -114,6 +117,7 @@ public class SampleServiceImpl extends CRUDServiceImpl implements * @param sequencingObjectRepository the {@link SequencingObject} repository * @param qcEntryRepository a repository for storing and reading {@link QCEntry} * @param 
sampleGenomeAssemblyJoinRepository A {@link SampleGenomeAssemblyJoinRepository} + * @param genomeAssemblyRepository The {@link GenomeAssembly} repository * @param userRepository A {@link UserRepository} * @param metadataEntryRepository A {@link MetadataEntryRepository} * @param validator validator. @@ -122,7 +126,8 @@ public class SampleServiceImpl extends CRUDServiceImpl implements public SampleServiceImpl(SampleRepository sampleRepository, ProjectSampleJoinRepository psjRepository, final AnalysisRepository analysisRepository, SampleSequencingObjectJoinRepository ssoRepository, QCEntryRepository qcEntryRepository, SequencingObjectRepository sequencingObjectRepository, - SampleGenomeAssemblyJoinRepository sampleGenomeAssemblyJoinRepository, UserRepository userRepository, + SampleGenomeAssemblyJoinRepository sampleGenomeAssemblyJoinRepository, + GenomeAssemblyRepository genomeAssemblyRepository, UserRepository userRepository, MetadataEntryRepository metadataEntryRepository, Validator validator) { super(sampleRepository, validator, Sample.class); this.sampleRepository = sampleRepository; @@ -133,6 +138,7 @@ public SampleServiceImpl(SampleRepository sampleRepository, ProjectSampleJoinRep this.sequencingObjectRepository = sequencingObjectRepository; this.userRepository = userRepository; this.sampleGenomeAssemblyJoinRepository = sampleGenomeAssemblyJoinRepository; + this.genomeAssemblyRepository = genomeAssemblyRepository; this.metadataEntryRepository = metadataEntryRepository; } @@ -321,6 +327,15 @@ public SampleSequencingObjectJoin getSampleForSequencingObject(SequencingObject return ssoRepository.getSampleForSequencingObject(seqObject); } + /** + * {@inheritDoc} + */ + @Override + @PreAuthorize("hasPermission(#genomeAssembly, 'canReadGenomeAssembly')") + public SampleGenomeAssemblyJoin getSampleForGenomeAssembly(GenomeAssembly genomeAssembly) { + return sampleGenomeAssemblyJoinRepository.getSampleForGenomeAssembly(genomeAssembly); + } + /** * {@inheritDoc} */ @@ 
-629,7 +644,10 @@ public Page getFilteredSamplesForProjects(List proje public Collection getSamplesForAnalysisSubmission(AnalysisSubmission submission) { Set objectsForAnalysisSubmission = sequencingObjectRepository .findSequencingObjectsForAnalysisSubmission(submission); + Set assembliesForAnalysisSubmission = genomeAssemblyRepository + .findGenomeAssembliesForAnalysisSubmission(submission); Set samples = null; + try { samples = objectsForAnalysisSubmission.stream() .map(s -> ssoRepository.getSampleForSequencingObject(s).getSubject()) @@ -637,6 +655,15 @@ public Collection getSamplesForAnalysisSubmission(AnalysisSubmission sub } catch (NullPointerException e) { logger.warn("No samples were found for submission " + submission.getId()); } + + try { + samples.addAll(assembliesForAnalysisSubmission.stream() + .map(a -> sampleGenomeAssemblyJoinRepository.getSampleForGenomeAssembly(a).getSubject()) + .collect(Collectors.toSet())); + } catch (NullPointerException e) { + logger.warn("No samples were found for submission " + submission.getId()); + } + return samples; } diff --git a/src/main/java/ca/corefacility/bioinformatics/irida/service/sample/SampleService.java b/src/main/java/ca/corefacility/bioinformatics/irida/service/sample/SampleService.java index 0e740f123a7..67337705e8b 100644 --- a/src/main/java/ca/corefacility/bioinformatics/irida/service/sample/SampleService.java +++ b/src/main/java/ca/corefacility/bioinformatics/irida/service/sample/SampleService.java @@ -10,9 +10,11 @@ import ca.corefacility.bioinformatics.irida.exceptions.EntityNotFoundException; import ca.corefacility.bioinformatics.irida.exceptions.SequenceFileAnalysisException; +import ca.corefacility.bioinformatics.irida.model.assembly.GenomeAssembly; import ca.corefacility.bioinformatics.irida.model.enums.StatisticTimePeriod; import ca.corefacility.bioinformatics.irida.model.joins.Join; import ca.corefacility.bioinformatics.irida.model.joins.impl.ProjectSampleJoin; +import 
ca.corefacility.bioinformatics.irida.model.joins.impl.SampleGenomeAssemblyJoin; import ca.corefacility.bioinformatics.irida.model.project.Project; import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile; import ca.corefacility.bioinformatics.irida.model.sample.QCEntry; @@ -99,6 +101,14 @@ public ProjectMetadataResponse getMetadataForProjectSamples(Project project, Lis */ public SampleSequencingObjectJoin getSampleForSequencingObject(SequencingObject seqObject); + /** + * Find a {@link Sample} assocaited with a {@link GenomeAssembly} + * + * @param genomeAssembly the {@link GenomeAssembly} to get the {@link Sample} for + * @return the {@link SampleGenomeAssemblyJoin} describing the relationship + */ + public SampleGenomeAssemblyJoin getSampleForGenomeAssembly(GenomeAssembly genomeAssembly); + /** * Get the list of {@link Sample} that belongs to a specific project. * diff --git a/src/main/resources/ca/corefacility/bioinformatics/irida/database/all-changes.xml b/src/main/resources/ca/corefacility/bioinformatics/irida/database/all-changes.xml index 8f645be54c6..cc308a20f62 100644 --- a/src/main/resources/ca/corefacility/bioinformatics/irida/database/all-changes.xml +++ b/src/main/resources/ca/corefacility/bioinformatics/irida/database/all-changes.xml @@ -73,4 +73,3 @@ - diff --git a/src/main/resources/ca/corefacility/bioinformatics/irida/database/changesets/SNAPSHOT/all-changes.xml b/src/main/resources/ca/corefacility/bioinformatics/irida/database/changesets/SNAPSHOT/all-changes.xml new file mode 100644 index 00000000000..4b85779c737 --- /dev/null +++ b/src/main/resources/ca/corefacility/bioinformatics/irida/database/changesets/SNAPSHOT/all-changes.xml @@ -0,0 +1,8 @@ + + + + + \ No newline at end of file diff --git a/src/main/resources/ca/corefacility/bioinformatics/irida/database/changesets/SNAPSHOT/analysis-assembly-input.xml b/src/main/resources/ca/corefacility/bioinformatics/irida/database/changesets/SNAPSHOT/analysis-assembly-input.xml new file 
mode 100644 index 00000000000..c6449acd383 --- /dev/null +++ b/src/main/resources/ca/corefacility/bioinformatics/irida/database/changesets/SNAPSHOT/analysis-assembly-input.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/main/webapp/resources/js/apis/pipelines/pipelines.js b/src/main/webapp/resources/js/apis/pipelines/pipelines.js index d07a1b5eb4b..762254ecdb9 100644 --- a/src/main/webapp/resources/js/apis/pipelines/pipelines.js +++ b/src/main/webapp/resources/js/apis/pipelines/pipelines.js @@ -54,7 +54,7 @@ export const getPipelineDetails = ({ id }) => axios .get(`${AJAX_URL}/${id}`) .then(({ data }) => data) - .catch((error) => { + .catch((e) => { return Promise.reject(e.response.error.message); }); @@ -94,10 +94,10 @@ export function saveNewPipelineParameters({ label, parameters, id }) { * @param singles * @returns {Promise} */ -export async function fetchPipelineSamples({ paired, singles }) { +export async function fetchPipelineSamples({ paired, singles, assemblies }) { try { const response = await axios.get( - `${AJAX_URL}/samples?singles=${singles}&paired=${paired}` + `${AJAX_URL}/samples?singles=${singles}&paired=${paired}&assemblies=${assemblies}` ); return response.data; } catch (e) { diff --git a/src/main/webapp/resources/js/contexts/AnalysisSamplesContext.js b/src/main/webapp/resources/js/contexts/AnalysisSamplesContext.js index b3886f717cd..6f63f6f8424 100644 --- a/src/main/webapp/resources/js/contexts/AnalysisSamplesContext.js +++ b/src/main/webapp/resources/js/contexts/AnalysisSamplesContext.js @@ -12,30 +12,36 @@ import { getAnalysisInputFiles } from "../apis/analysis/analysis"; const initialContext = { samples: null, singleEndSamples: null, + genomeAssemblySamples: null, referenceFile: [], - loading: true + loading: true, }; const AnalysisSamplesContext = React.createContext(initialContext); function AnalysisSamplesProvider(props) { - const [analysisSamplesContext, setAnalysisSamplesContext] = 
useState( - initialContext - ); + const [analysisSamplesContext, setAnalysisSamplesContext] = + useState(initialContext); const { analysisIdentifier } = useContext(AnalysisContext); const [sampleDisplayHeight, setSampleDisplayHeight] = useState(null); function getAnalysisInputSamples() { updateHeight(); getAnalysisInputFiles(analysisIdentifier).then( - ({ pairedEndSamples, singleEndSamples, referenceFile }) => { - setAnalysisSamplesContext(analysisSamplesContext => { + ({ + pairedEndSamples, + singleEndSamples, + genomeAssemblySamples, + referenceFile, + }) => { + setAnalysisSamplesContext((analysisSamplesContext) => { return { ...analysisSamplesContext, samples: pairedEndSamples, singleEndSamples: singleEndSamples, + genomeAssemblySamples: genomeAssemblySamples, referenceFile: referenceFile, - loading: false + loading: false, }; }); } @@ -65,7 +71,7 @@ function AnalysisSamplesProvider(props) { value={{ analysisSamplesContext, sampleDisplayHeight, - getAnalysisInputSamples + getAnalysisInputSamples, }} > {props.children} diff --git a/src/main/webapp/resources/js/pages/analysis/components/settings/AnalysisSampleRenderer.jsx b/src/main/webapp/resources/js/pages/analysis/components/settings/AnalysisSampleRenderer.jsx index e92554bcb5e..23e0c4d4179 100644 --- a/src/main/webapp/resources/js/pages/analysis/components/settings/AnalysisSampleRenderer.jsx +++ b/src/main/webapp/resources/js/pages/analysis/components/settings/AnalysisSampleRenderer.jsx @@ -35,9 +35,10 @@ export function AnalysisSampleRenderer() { }, []); const [filteredSamples, setFilteredSamples] = useState(null); - const [filteredSingleEndSamples, setSingleEndFilteredSamples] = useState( - null - ); + const [filteredSingleEndSamples, setSingleEndFilteredSamples] = + useState(null); + const [filteredGenomeAssemblySamples, setGenomeAssemblyFilteredSamples] = + useState(null); const SEQ_FILES_BASE_URL = setBaseUrl("sequenceFiles"); const SAMPLES_BASE_URL = setBaseUrl("samples"); @@ -109,7 +110,7 @@ export 
function AnalysisSampleRenderer() { { + return ( + { + return ( + + + + + } + title={ + item.sampleId == 0 ? ( + item.sampleName + ) : ( + + {item.sampleName} + + ) + } + description={ + + } + /> + + ); + }} + /> + ); + }; + /* * if search value is empty display all the samples otherwise * find samples with sample name or files that contain the search string @@ -171,24 +224,38 @@ export function AnalysisSampleRenderer() { ) { setFilteredSamples(analysisSamplesContext.samples); setSingleEndFilteredSamples(analysisSamplesContext.singleEndSamples); + setGenomeAssemblyFilteredSamples( + analysisSamplesContext.genomeAssemblySamples + ); } else { searchStr = String(searchStr).toLowerCase(); - const samplesContainingSearchValue = analysisSamplesContext.samples.filter( - (sample) => - sample.sampleName.toLowerCase().includes(searchStr) || - sample.forward.fileName.toLowerCase().includes(searchStr) || - sample.reverse.fileName.toLowerCase().includes(searchStr) - ); + const samplesContainingSearchValue = + analysisSamplesContext.samples.filter( + (sample) => + sample.sampleName.toLowerCase().includes(searchStr) || + sample.forward.fileName.toLowerCase().includes(searchStr) || + sample.reverse.fileName.toLowerCase().includes(searchStr) + ); - const singleEndSamplesContainingSearchValue = analysisSamplesContext.singleEndSamples.filter( - (sample) => - sample.sampleName.toLowerCase().includes(searchStr) || - sample.forward.fileName.toLowerCase().includes(searchStr) || - sample.reverse.fileName.toLowerCase().includes(searchStr) - ); + const singleEndSamplesContainingSearchValue = + analysisSamplesContext.singleEndSamples.filter( + (sample) => + sample.sampleName.toLowerCase().includes(searchStr) || + sample.sequenceFile.fileName.toLowerCase().includes(searchStr) + ); + + const genomeAssemblySamplesContainingSearchValue = + analysisSamplesContext.genomeAssemblySamples.filter( + (sample) => + sample.sampleName.toLowerCase().includes(searchStr) || + 
sample.genomeAssembly.fileName.toLowerCase().includes(searchStr) + ); setFilteredSamples(samplesContainingSearchValue); setSingleEndFilteredSamples(singleEndSamplesContainingSearchValue); + setGenomeAssemblyFilteredSamples( + genomeAssemblySamplesContainingSearchValue + ); } }; @@ -201,7 +268,8 @@ export function AnalysisSampleRenderer() { /> ) : analysisSamplesContext.samples.length > 0 || - analysisSamplesContext.singleEndSamples.length > 0 ? ( + analysisSamplesContext.singleEndSamples.length > 0 || + analysisSamplesContext.genomeAssemblySamples.length > 0 ? (
0 ? renderSingleEndSamples() : null} + {analysisSamplesContext.genomeAssemblySamples.length > 0 + ? renderGenomeAssemblySamples() + : null}
) : ( diff --git a/src/main/webapp/resources/js/pages/launch/LaunchFiles.jsx b/src/main/webapp/resources/js/pages/launch/LaunchFiles.jsx index 24129c08834..4576a6fc5cd 100644 --- a/src/main/webapp/resources/js/pages/launch/LaunchFiles.jsx +++ b/src/main/webapp/resources/js/pages/launch/LaunchFiles.jsx @@ -7,7 +7,11 @@ import { useLaunch } from "./launch-context"; import { removeSample } from "../../apis/cart/cart"; import { SectionHeading } from "../../components/ant.design/SectionHeading"; import { SampleFilesListItem } from "./files/SampleFilesListItem"; -import { setSelectedSampleFiles } from "./launch-dispatch"; +import { SampleAssembliesListItem } from "./assemblies/SampleAssembliesListItem"; +import { + setSelectedSampleAssemblies, + setSelectedSampleFiles, +} from "./launch-dispatch"; import { grey3 } from "../../styles/colors"; import { BORDERED_LIGHT } from "../../styles/borders"; @@ -25,7 +29,11 @@ export function LaunchFiles() { const [selected, setSelected] = React.useState(); const [height, setHeight] = React.useState(DEFAULT_HEIGHT); const [ - { acceptsPairedSequenceFiles: paired, acceptsSingleSequenceFiles: singles }, + { + acceptsPairedSequenceFiles: paired, + acceptsSingleSequenceFiles: singles, + acceptsGenomeAssemblies: assemblies, + }, dispatch, ] = useLaunch(); @@ -40,8 +48,10 @@ export function LaunchFiles() { * @returns {number} */ const getRowHeight = (index) => { - if (samples[index].files.length) { + if (!assemblies && samples[index].files.length) { return samples[index].files.length * 40 + 50; + } else if (assemblies && samples[index].assemblyFiles.length) { + return samples[index].assemblyFiles.length * 40 + 50; } return 100; }; @@ -54,14 +64,18 @@ export function LaunchFiles() { fetchPipelineSamples({ paired, singles, + assemblies, }) .then((data) => { const firstSelected = []; const firstSamples = []; data.forEach((sample) => { - if (sample.files.length) { + if (!assemblies && sample.files.length) { sample.selected = 
sample.files[0].identifier; firstSelected.push(sample.selected); + } else if (assemblies && sample.assemblyFiles.length) { + sample.selected = sample.assemblyFiles[0].identifier; + firstSelected.push(sample.selected); } firstSamples.push(sample); }); @@ -70,7 +84,7 @@ export function LaunchFiles() { setSelected(firstSelected); }) .catch((message) => notification.error({ message })); - }, [paired, singles]); + }, [paired, singles, assemblies]); /* Called when a user selects a different set of files to run on the sample. @@ -78,8 +92,12 @@ export function LaunchFiles() { set when the pipeline is launched. */ React.useEffect(() => { - setSelectedSampleFiles(dispatch, selected); - }, [dispatch, selected]); + if (assemblies) { + setSelectedSampleAssemblies(dispatch, selected); + } else { + setSelectedSampleFiles(dispatch, selected); + } + }, [dispatch, assemblies, selected]); /* Calculate the div size if the number of samples changes @@ -88,7 +106,7 @@ export function LaunchFiles() { if (samples) { let newHeight = 0; for (let i = 0; i < samples.length; i++) { - newHeight += getRowHeight(i); + newHeight += getRowHeight(i, assemblies); if (newHeight > DEFAULT_HEIGHT) { newHeight = DEFAULT_HEIGHT; break; @@ -96,7 +114,7 @@ export function LaunchFiles() { } setHeight(newHeight + 2); } - }, [samples]); + }, [samples, assemblies]); /* Called independently for each sample when the selected file set is changed. 
@@ -138,14 +156,25 @@ export function LaunchFiles() { */ const generateSample = ({ index, style }) => { const sample = samples[index]; - return ( - - ); + if (!assemblies) { + return ( + + ); + } else { + return ( + + ); + } }; return ( diff --git a/src/main/webapp/resources/js/pages/launch/assemblies/SampleAssembliesListItem.jsx b/src/main/webapp/resources/js/pages/launch/assemblies/SampleAssembliesListItem.jsx new file mode 100644 index 00000000000..9a25fe1bf18 --- /dev/null +++ b/src/main/webapp/resources/js/pages/launch/assemblies/SampleAssembliesListItem.jsx @@ -0,0 +1,81 @@ +import React from "react"; +import { Button, List, Radio } from "antd"; +import { grey1, grey3 } from "../../../styles/colors"; +import { SampleDetailViewer } from "../../../components/samples/SampleDetailViewer"; +import { BlockRadioInput } from "../../../components/ant.design/forms/BlockRadioInput"; +import { SPACE_XS } from "../../../styles/spacing"; +import { BORDERED_LIGHT } from "../../../styles/borders"; + +/** + * React component to display the list of assemblies that can be run on the current + * pipeline for a given sample. + * + * @param {object} sample - the sample to display files for. + * @param {function} removeSample - function to remove the sample from the cart + * @param {function} updateSelectedFiles - function to call if user want to run a different set of files + * @returns {JSX.Element} + * @constructor + */ +export function SampleAssembliesListItem({ + sample, + style, + removeSample, + updateSelectedFiles, +}) { + const updateSelected = (e, id) => { + e.preventDefault(); + updateSelectedFiles(sample, id); + }; + + return ( + removeSample(sample)}> + {i18n("SampleFilesListItem.remove")} + , + ]} + style={{ + backgroundColor: sample.selected ? grey1 : grey3, + boxSizing: `border-box`, + borderBottom: BORDERED_LIGHT, + ...style, + }} + > + removeSample(sample)} + > + + + } + description={ + sample.selected !== undefined ? 
( + + {sample.assemblyFiles.map((assemblyFile) => ( + updateSelected(e, assemblyFile.identifier)} + > + + {assemblyFile.label} + + + ))} + + ) : ( +
+ {i18n("SampleAssembliesListItem.no-assemblies")} +
+ ) + } + /> +
+ ); +} diff --git a/src/main/webapp/resources/js/pages/launch/launch-context.js b/src/main/webapp/resources/js/pages/launch/launch-context.js index 540a31a4117..4efdb5629f6 100644 --- a/src/main/webapp/resources/js/pages/launch/launch-context.js +++ b/src/main/webapp/resources/js/pages/launch/launch-context.js @@ -22,6 +22,7 @@ const TYPES = { SAVE_MODIFIED_PARAMETERS: "launch:save_modified_params", ADD_REFERENCE: "launch:add_reference", UPDATE_FILES: "launch:update_files", + UPDATE_ASSEMBLIES: "launch:update_assemblies", }; const reducer = (state, action) => { @@ -55,6 +56,11 @@ const reducer = (state, action) => { ...state, files: action.payload.files, }; + case TYPES.UPDATE_ASSEMBLIES: + return { + ...state, + assemblies: action.payload.assemblies, + }; } }; @@ -133,6 +139,7 @@ function LaunchProvider({ children }) { parameterSets: formattedParameterSets, dynamicSources, files: [], + assemblies: [], automatedId: AUTOMATED_ID, }, }); diff --git a/src/main/webapp/resources/js/pages/launch/launch-dispatch.js b/src/main/webapp/resources/js/pages/launch/launch-dispatch.js index ddc9497c927..8651edafdf2 100644 --- a/src/main/webapp/resources/js/pages/launch/launch-dispatch.js +++ b/src/main/webapp/resources/js/pages/launch/launch-dispatch.js @@ -56,12 +56,17 @@ export async function launchNewPipeline(dispatch, formValues, state) { reference, ...parameters } = formValues; - const { files: fileIds, automatedId: automatedProjectId } = state; + const { + files: fileIds, + assemblies: assemblyIds, + automatedId: automatedProjectId, + } = state; const params = { name, description, fileIds, + assemblyIds, emailPipelineResult, projects, updateSamples, @@ -97,3 +102,18 @@ export function setSelectedSampleFiles(dispatch, files) { }, }); } + +/** + * Updated which samples assemblies are selected + * + * @param {function} dispatch - specify the launch context + * @param {array} assemblies - list of sample assemblies to run the pipeline + */ +export function 
setSelectedSampleAssemblies(dispatch, assemblies) { + dispatch({ + type: TYPES.UPDATE_ASSEMBLIES, + payload: { + assemblies, + }, + }); +} diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/annotation/GalaxyIntegrationTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/annotation/GalaxyIntegrationTest.java index df2660edd12..03e42a045ae 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/annotation/GalaxyIntegrationTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/annotation/GalaxyIntegrationTest.java @@ -1,11 +1,6 @@ package ca.corefacility.bioinformatics.irida.annotation; -import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; -import java.lang.annotation.Inherited; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; +import java.lang.annotation.*; import org.junit.jupiter.api.Tag; import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; @@ -18,9 +13,8 @@ import ca.corefacility.bioinformatics.irida.config.services.IridaApiServicesConfig; /** - * Annotation that is to be specified on Galaxy integration tests. Simplifies - * the configuration of tests by automatically adding a number of necessary - * annotations. + * Annotation that is to be specified on Galaxy integration tests. Simplifies the configuration of tests by + * automatically adding a number of necessary annotations. 
*/ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @@ -30,8 +24,12 @@ @Tag("IntegrationTest") @Tag("Galaxy") @ActiveProfiles("test") -@SpringBootTest(classes = { DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class, - IridaApiServicesConfig.class, IridaApiTestFilesystemConfig.class, IridaApiGalaxyTestConfig.class }) +@SpringBootTest(classes = { + DataSourceAutoConfiguration.class, + HibernateJpaAutoConfiguration.class, + IridaApiServicesConfig.class, + IridaApiTestFilesystemConfig.class, + IridaApiGalaxyTestConfig.class }) public @interface GalaxyIntegrationTest { } diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceTestConfig.java b/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceTestConfig.java index 1dc086210d8..d1b48fed99e 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceTestConfig.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/AnalysisExecutionServiceTestConfig.java @@ -17,10 +17,6 @@ import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.web.authentication.preauth.PreAuthenticatedAuthenticationToken; -import com.github.jmchilton.blend4j.galaxy.JobsClient; -import com.github.jmchilton.blend4j.galaxy.ToolsClient; -import com.google.common.collect.Lists; - import ca.corefacility.bioinformatics.irida.config.conditions.NonWindowsPlatformCondition; import ca.corefacility.bioinformatics.irida.model.user.Role; import ca.corefacility.bioinformatics.irida.model.user.User; @@ -34,10 +30,7 @@ import ca.corefacility.bioinformatics.irida.repositories.analysis.submission.AnalysisSubmissionRepository; import ca.corefacility.bioinformatics.irida.repositories.referencefile.ReferenceFileRepository; import ca.corefacility.bioinformatics.irida.repositories.sample.SampleRepository; -import 
ca.corefacility.bioinformatics.irida.service.AnalysisService; -import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; -import ca.corefacility.bioinformatics.irida.service.DatabaseSetupGalaxyITService; -import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; +import ca.corefacility.bioinformatics.irida.service.*; import ca.corefacility.bioinformatics.irida.service.analysis.execution.AnalysisExecutionService; import ca.corefacility.bioinformatics.irida.service.analysis.execution.galaxy.AnalysisExecutionServiceGalaxy; import ca.corefacility.bioinformatics.irida.service.analysis.execution.galaxy.AnalysisExecutionServiceGalaxyAsync; @@ -49,10 +42,12 @@ import ca.corefacility.bioinformatics.irida.service.sample.SampleService; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; +import com.github.jmchilton.blend4j.galaxy.JobsClient; +import com.github.jmchilton.blend4j.galaxy.ToolsClient; +import com.google.common.collect.Lists; + /** * Test configuration for {@link AnalysisExecutionService} classes. 
- * - * */ @TestConfiguration @EnableAsync(order = AnalysisExecutionServiceConfig.ASYNC_ORDER) @@ -95,6 +90,9 @@ public class AnalysisExecutionServiceTestConfig { @Autowired private SequencingObjectService sequencingObjectService; + @Autowired + private GenomeAssemblyService genomeAssemblyService; + @Autowired private AnalysisSubmissionSampleProcessor analysisSubmissionSampleProcessor; @@ -135,7 +133,7 @@ public AnalysisExecutionServiceGalaxyCleanupAsync analysisExecutionServiceGalaxy public AnalysisWorkspaceServiceGalaxy analysisWorkspaceService() { return new AnalysisWorkspaceServiceGalaxy(galaxyHistoriesService, galaxyWorkflowService, galaxyLibrariesService, iridaWorkflowsService, analysisCollectionServiceGalaxy(), analysisProvenanceServiceGalaxy(), - analysisParameterServiceGalaxy, sequencingObjectService); + analysisParameterServiceGalaxy, sequencingObjectService, genomeAssemblyService); } @Lazy diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/GalaxyExecutionTestConfig.java b/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/GalaxyExecutionTestConfig.java index a7e809f82c2..b2f49622f7b 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/GalaxyExecutionTestConfig.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/config/analysis/GalaxyExecutionTestConfig.java @@ -60,6 +60,13 @@ public GalaxyLibrariesService galaxyLibrariesService() { return new GalaxyLibrariesService(librariesClient, LIBRARY_POLLING_TIME, LIBRARY_TIMEOUT, 1); } + @Lazy + @Bean(name = "galaxyLibrariesServiceTimeout") + public GalaxyLibrariesService galaxyLibrariesServiceTimeout() { + LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient(); + return new GalaxyLibrariesService(librariesClient, 1, 2, 1); + } + @Lazy @Bean public GalaxyWorkflowService galaxyWorkflowService() { diff --git 
a/src/test/java/ca/corefacility/bioinformatics/irida/model/workflow/IridaWorkflowTestBuilder.java b/src/test/java/ca/corefacility/bioinformatics/irida/model/workflow/IridaWorkflowTestBuilder.java index 7e37cf91fb0..d66fd054d69 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/model/workflow/IridaWorkflowTestBuilder.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/model/workflow/IridaWorkflowTestBuilder.java @@ -7,30 +7,22 @@ import java.util.List; import java.util.UUID; -import com.google.common.collect.Lists; - import ca.corefacility.bioinformatics.irida.model.workflow.analysis.type.AnalysisType; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.type.BuiltInAnalysisTypes; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaToolParameter; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowInput; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowOutput; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowToolRepository; +import ca.corefacility.bioinformatics.irida.model.workflow.description.*; import ca.corefacility.bioinformatics.irida.model.workflow.structure.IridaWorkflowStructure; +import com.google.common.collect.Lists; + /** * Builds test {@link IridaWorkflow}s. - * - * */ public class IridaWorkflowTestBuilder { public final static UUID DEFAULT_ID = UUID.fromString("739f29ea-ae82-48b9-8914-3d2931405db6"); public static final UUID MULTI_SAMPLES_ID = UUID.fromString("a8a573ef-b51e-409a-9a26-3fb79a6b894e"); /** - * Builds a default test {@link IridaWorkflow} which accepts single input - * files. + * Builds a default test {@link IridaWorkflow} which accepts single input files. * * @return A test workflow. 
* @throws MalformedURLException @@ -40,8 +32,7 @@ public static IridaWorkflow buildTestWorkflowSingle() { } /** - * Builds a default test {@link IridaWorkflow} which accepts single input - * files and has no reference. + * Builds a default test {@link IridaWorkflow} which accepts single input files and has no reference. * * @return A test workflow. * @throws MalformedURLException @@ -61,8 +52,7 @@ public static IridaWorkflow buildTestWorkflowPaired() { } /** - * Builds an {@link IridaWorkflow} which accepts both single and paired - * input files. + * Builds an {@link IridaWorkflow} which accepts both single and paired input files. * * @return A test workflow. * @throws MalformedURLException @@ -74,14 +64,10 @@ public static IridaWorkflow buildTestWorkflowSinglePaired() { /** * Builds a test {@link IridaWorkflow} with the given id. * - * @param workflowId - * The workflow id. - * @param input - * The input type. - * @param reference - * The reference label. - * @param requiresSingleSample - * Whether or not this workflow requires a single sample. + * @param workflowId The workflow id. + * @param input The input type. + * @param reference The reference label. + * @param requiresSingleSample Whether or not this workflow requires a single sample. * @return A test workflow. 
* @throws MalformedURLException */ @@ -89,28 +75,27 @@ public static IridaWorkflow buildTestWorkflow(UUID workflowId, Input input, Stri boolean requiresSingleSample) { IridaWorkflow workflow = null; try { - workflow = new IridaWorkflow(buildTestDescription(workflowId, input, reference, requiresSingleSample), buildTestStructure()); + workflow = new IridaWorkflow(buildTestDescription(workflowId, input, reference, requiresSingleSample), + buildTestStructure()); } catch (MalformedURLException e) { e.printStackTrace(); } - + return workflow; } private static IridaWorkflowDescription buildTestDescription(UUID workflowId, Input input, String reference, boolean requiresSingleSample) throws MalformedURLException { - return buildTestDescription(workflowId, "TestWorkflow", "1.0", BuiltInAnalysisTypes.DEFAULT, input, reference, requiresSingleSample); + return buildTestDescription(workflowId, "TestWorkflow", "1.0", BuiltInAnalysisTypes.DEFAULT, input, reference, + requiresSingleSample); } /** * Builds a test {@link IridaWorkflow} with the given id. * - * @param workflowId - * The workflow id. - * @param input - * The input type. - * @param reference - * The reference label. + * @param workflowId The workflow id. + * @param input The input type. + * @param reference The reference label. * @return A test workflow. * @throws MalformedURLException */ @@ -144,24 +129,19 @@ private static IridaWorkflowStructure buildTestStructure() { private static IridaWorkflowDescription buildTestDescription(UUID workflowId, Input input, String reference) throws MalformedURLException { - return buildTestDescription(workflowId, "TestWorkflow", "1.0", BuiltInAnalysisTypes.DEFAULT, input, reference, true); + return buildTestDescription(workflowId, "TestWorkflow", "1.0", BuiltInAnalysisTypes.DEFAULT, input, reference, + true); } /** * Builds a {@link IridaWorkflowDescription} with the following information. * - * @param id - * The id of the workflow. - * @param name - * The name of the workflow. 
- * @param version - * The version of the workflow. - * @param analysisType - * The {@link AnalysisTypeOld} of the workflow. - * @param reference - * The reference label for the workflow. - * @param requiresSingleSample - * Whether or not this workflow requires a single sample. + * @param id The id of the workflow. + * @param name The name of the workflow. + * @param version The version of the workflow. + * @param analysisType The {@link AnalysisTypeOld} of the workflow. + * @param reference The reference label for the workflow. + * @param requiresSingleSample Whether or not this workflow requires a single sample. * @return An {@link IridaWorkflowDescription} with the given information. * @throws MalformedURLException */ @@ -180,14 +160,18 @@ public static IridaWorkflowDescription buildTestDescription(UUID id, String name IridaWorkflowInput workflowInput = null; switch (input) { case SINGLE: - workflowInput = new IridaWorkflowInput("sequence_reads", null, reference, requiresSingleSample); + workflowInput = new IridaWorkflowInput("sequence_reads", null, null, reference, requiresSingleSample); break; case PAIRED: - workflowInput = new IridaWorkflowInput(null, "sequence_reads_paired", reference, requiresSingleSample); + workflowInput = new IridaWorkflowInput(null, "sequence_reads_paired", null, reference, + requiresSingleSample); break; case SINGLE_PAIRED: - workflowInput = new IridaWorkflowInput("sequence_reads", "sequence_reads_paired", reference, requiresSingleSample); + workflowInput = new IridaWorkflowInput("sequence_reads", "sequence_reads_paired", null, reference, + requiresSingleSample); break; + case GENOME_ASSEMBLIES: + workflowInput = new IridaWorkflowInput(null, null, "genome_assemblies", reference, requiresSingleSample); } List parameters = new LinkedList<>(); @@ -206,7 +190,10 @@ public static IridaWorkflowDescription buildTestDescription(UUID id, String name } public static enum Input { - SINGLE, PAIRED, SINGLE_PAIRED + SINGLE, + PAIRED, + 
SINGLE_PAIRED, + GENOME_ASSEMBLIES } public static IridaWorkflow buildTestWorkflowSinglePairedMultipleSamples() { diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyHistoriesServiceIT.java b/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyHistoriesServiceIT.java index d946e27fe62..e5586d64507 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyHistoriesServiceIT.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyHistoriesServiceIT.java @@ -1,7 +1,5 @@ package ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.integration; -import static org.junit.jupiter.api.Assertions.*; - import java.io.File; import java.io.IOException; import java.net.URISyntaxException; @@ -25,11 +23,7 @@ import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException; import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerObjectNotFoundException; import ca.corefacility.bioinformatics.irida.exceptions.UploadException; -import ca.corefacility.bioinformatics.irida.exceptions.galaxy.CreateLibraryException; -import ca.corefacility.bioinformatics.irida.exceptions.galaxy.DeleteGalaxyObjectFailedException; -import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetException; -import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetNotFoundException; -import ca.corefacility.bioinformatics.irida.exceptions.galaxy.NoGalaxyHistoryException; +import ca.corefacility.bioinformatics.irida.exceptions.galaxy.*; import ca.corefacility.bioinformatics.irida.model.upload.galaxy.GalaxyProjectName; import ca.corefacility.bioinformatics.irida.model.workflow.execution.InputFileType; import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.DatasetCollectionType; @@ -39,18 +33,8 @@ import 
ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyHistoriesService; import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyLibrariesService; -import com.github.jmchilton.blend4j.galaxy.GalaxyInstance; -import com.github.jmchilton.blend4j.galaxy.GalaxyResponseException; -import com.github.jmchilton.blend4j.galaxy.HistoriesClient; -import com.github.jmchilton.blend4j.galaxy.LibrariesClient; -import com.github.jmchilton.blend4j.galaxy.ToolsClient; -import com.github.jmchilton.blend4j.galaxy.beans.Dataset; -import com.github.jmchilton.blend4j.galaxy.beans.FilesystemPathsLibraryUpload; -import com.github.jmchilton.blend4j.galaxy.beans.History; -import com.github.jmchilton.blend4j.galaxy.beans.HistoryDeleteResponse; -import com.github.jmchilton.blend4j.galaxy.beans.HistoryDetails; -import com.github.jmchilton.blend4j.galaxy.beans.Library; -import com.github.jmchilton.blend4j.galaxy.beans.LibraryContent; +import com.github.jmchilton.blend4j.galaxy.*; +import com.github.jmchilton.blend4j.galaxy.beans.*; import com.github.jmchilton.blend4j.galaxy.beans.collection.request.CollectionDescription; import com.github.jmchilton.blend4j.galaxy.beans.collection.request.HistoryDatasetElement; import com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse; @@ -58,9 +42,10 @@ import com.google.common.collect.Sets; import com.sun.jersey.api.client.ClientResponse; +import static org.junit.jupiter.api.Assertions.*; + /** * Tests for building Galaxy histories. 
- * */ @GalaxyIntegrationTest @TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DbUnitTestExecutionListener.class }) @@ -73,6 +58,7 @@ public class GalaxyHistoriesServiceIT { private GalaxyHistoriesService galaxyHistory; private GalaxyInstance galaxyInstanceAdmin; + @Autowired private GalaxyLibrariesService galaxyLibrariesService; private HistoriesClient historiesClient; @@ -84,17 +70,6 @@ public class GalaxyHistoriesServiceIT { private static final InputFileType FILE_TYPE = InputFileType.FASTQ_SANGER; private static final InputFileType INVALID_FILE_TYPE = null; - /** - * Timeout in seconds to stop polling a Galaxy library. - */ - private static final int LIBRARY_TIMEOUT = 5 * 60; - - /** - * Polling time in seconds to poll a Galaxy library to check if datasets - * have been properly uploaded. - */ - private static final int LIBRARY_POLLING_TIME = 5; - /** * Sets up files for history tests. * @@ -111,8 +86,6 @@ public void setup() galaxyInstanceAdmin = localGalaxy.getGalaxyInstanceAdmin(); historiesClient = galaxyInstanceAdmin.getHistoriesClient(); ToolsClient toolsClient = galaxyInstanceAdmin.getToolsClient(); - LibrariesClient librariesClient = galaxyInstanceAdmin.getLibrariesClient(); - galaxyLibrariesService = new GalaxyLibrariesService(librariesClient, LIBRARY_POLLING_TIME, LIBRARY_TIMEOUT, 1); galaxyHistory = new GalaxyHistoriesService(historiesClient, toolsClient, galaxyLibrariesService); } @@ -120,8 +93,7 @@ public void setup() /** * Builds a library with the given name. * - * @param name - * The name of the new library. + * @param name The name of the new library. * @return A library with the given name. * @throws CreateLibraryException */ @@ -132,10 +104,8 @@ private Library buildEmptyLibrary(String name) throws CreateLibraryException { /** * Sets up library for test. * - * @param testLibrary - * The library to upload a file to. - * @param galaxyInstanceAdmin - * The Galaxy Instance to connect to Galaxy. 
+ * @param testLibrary The library to upload a file to. + * @param galaxyInstanceAdmin The Galaxy Instance to connect to Galaxy. * @return Returns the id of the file in a library. * @throws CreateLibraryException * @throws ExecutionManagerObjectNotFoundException @@ -342,19 +312,22 @@ public void testFilesToLibraryToHistorySuccess() throws UploadException, GalaxyD String datasetId2 = datasetsMap.get(dataFile2); String datasetIdCompressed = datasetsMap.get(dataFileCompressed); - Dataset actualDataset1 = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient().showDataset(history.getId(), - datasetId1); + Dataset actualDataset1 = localGalaxy.getGalaxyInstanceAdmin() + .getHistoriesClient() + .showDataset(history.getId(), datasetId1); assertNotNull(actualDataset1); assertEquals(actualDataset1.getDataTypeExt(), InputFileType.FASTQ_SANGER.toString(), "Invalid data type extension"); - Dataset actualDataset2 = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient().showDataset(history.getId(), - datasetId2); + Dataset actualDataset2 = localGalaxy.getGalaxyInstanceAdmin() + .getHistoriesClient() + .showDataset(history.getId(), datasetId2); assertNotNull(actualDataset2); assertEquals(actualDataset2.getDataTypeExt(), InputFileType.FASTQ_SANGER.toString(), "Invalid data type extension"); - Dataset actualDatasetCompressed = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient() + Dataset actualDatasetCompressed = localGalaxy.getGalaxyInstanceAdmin() + .getHistoriesClient() .showDataset(history.getId(), datasetIdCompressed); assertNotNull(actualDatasetCompressed); assertEquals(actualDatasetCompressed.getDataTypeExt(), InputFileType.FASTQ_SANGER_GZ.toString(), @@ -362,8 +335,7 @@ public void testFilesToLibraryToHistorySuccess() throws UploadException, GalaxyD } /** - * Tests successful upload of a file to a Galaxy history through a Library - * (where files are remote files). 
+ * Tests successful upload of a file to a Galaxy history through a Library (where files are remote files). * * @throws UploadException * @throws GalaxyDatasetException @@ -379,18 +351,19 @@ public void testFilesToLibraryToHistoryRemoteSuccess() throws UploadException, G String datasetId1 = datasetsMap.get(dataFile); String datasetId2 = datasetsMap.get(dataFile2); - Dataset actualDataset1 = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient().showDataset(history.getId(), - datasetId1); + Dataset actualDataset1 = localGalaxy.getGalaxyInstanceAdmin() + .getHistoriesClient() + .showDataset(history.getId(), datasetId1); assertNotNull(actualDataset1); - Dataset actualDataset2 = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient().showDataset(history.getId(), - datasetId2); + Dataset actualDataset2 = localGalaxy.getGalaxyInstanceAdmin() + .getHistoriesClient() + .showDataset(history.getId(), datasetId2); assertNotNull(actualDataset2); } /** - * Tests failure to upload a list of files to a Galaxy history through a - * Library (no library). + * Tests failure to upload a list of files to a Galaxy history through a Library (no library). * * @throws UploadException * @throws GalaxyDatasetException @@ -406,8 +379,7 @@ public void testFilesToLibraryToHistoryFailNoLibrary() throws UploadException, G } /** - * Tests failure to upload a list of files to a Galaxy history through a - * Library (no history). + * Tests failure to upload a list of files to a Galaxy history through a Library (no history). * * @throws UploadException * @throws GalaxyDatasetException @@ -559,8 +531,7 @@ public void testGetDatasetForFileInHistorySuccess() throws UploadException, Gala } /** - * Tests getting a dataset for a file in the history when there is a dataset - * collection with the same name. + * Tests getting a dataset for a file in the history when there is a dataset collection with the same name. 
* * @throws UploadException * @throws GalaxyDatasetException @@ -606,8 +577,7 @@ public void testGetDatasetForFileInHistoryFail() throws UploadException, GalaxyD } /** - * Tests getting a dataset for a file in the history and failing due to - * multiple datasets. + * Tests getting a dataset for a file in the history and failing due to multiple datasets. * * @throws UploadException * @throws GalaxyDatasetException diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyLibrariesServiceIT.java b/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyLibrariesServiceIT.java index 781bce79652..5574de4a1bb 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyLibrariesServiceIT.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/pipeline/upload/galaxy/integration/GalaxyLibrariesServiceIT.java @@ -1,7 +1,5 @@ package ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.integration; -import static org.junit.jupiter.api.Assertions.*; - import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Path; @@ -11,6 +9,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.test.context.TestExecutionListeners; import org.springframework.test.context.support.DependencyInjectionTestExecutionListener; @@ -35,9 +34,10 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; +import static org.junit.jupiter.api.Assertions.*; + /** * Tests for dealing with Galaxy Libraries. 
- * */ @GalaxyIntegrationTest @TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DbUnitTestExecutionListener.class }) @@ -46,29 +46,24 @@ public class GalaxyLibrariesServiceIT { @Autowired private LocalGalaxy localGalaxy; + @Autowired private GalaxyLibrariesService galaxyLibrariesService; + @Autowired + @Qualifier("galaxyLibrariesServiceTimeout") + private GalaxyLibrariesService galaxyLibrariesServiceTimeout; + private Path dataFile; private Path dataFile2; private Path dataFileCompressed; private Path dataFileFail; + @Autowired private GalaxyInstance galaxyInstanceAdmin; private LibrariesClient librariesClient; private static final InputFileType FILE_TYPE = InputFileType.FASTQ_SANGER; - /** - * Timeout in seconds to stop polling a Galaxy library. - */ - private static final int LIBRARY_TIMEOUT = 5 * 60; - - /** - * Polling time in seconds to poll a Galaxy library to check if datasets - * have been properly uploaded. - */ - private static final int LIBRARY_POLLING_TIME = 5; - /** * Sets up variables for tests * @@ -80,8 +75,6 @@ public void setup() throws URISyntaxException, IOException { galaxyInstanceAdmin = localGalaxy.getGalaxyInstanceAdmin(); librariesClient = galaxyInstanceAdmin.getLibrariesClient(); - galaxyLibrariesService = new GalaxyLibrariesService(librariesClient, LIBRARY_POLLING_TIME, LIBRARY_TIMEOUT, 1); - dataFile = Paths.get(GalaxyLibrariesServiceIT.class.getResource("testData1.fastq").toURI()); dataFile2 = Paths.get(GalaxyLibrariesServiceIT.class.getResource("testData2.fastq").toURI()); @@ -94,8 +87,7 @@ public void setup() throws URISyntaxException, IOException { /** * Builds a library with the given name. * - * @param name - * The name of the new library. + * @param name The name of the new library. * @return A library with the given name. 
* @throws CreateLibraryException */ @@ -115,7 +107,8 @@ public void testFileToLibrarySuccess() throws UploadException, GalaxyDatasetExce Library library = buildEmptyLibrary("testFileToLibrarySuccess"); String datasetId = galaxyLibrariesService.fileToLibrary(dataFile, FILE_TYPE, library, DataStorage.LOCAL); assertNotNull(datasetId); - LibraryDataset actualDataset = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient() + LibraryDataset actualDataset = localGalaxy.getGalaxyInstanceAdmin() + .getLibrariesClient() .showDataset(library.getId(), datasetId); assertNotNull(actualDataset); assertEquals(filename, actualDataset.getName()); @@ -154,19 +147,22 @@ public void testFilesToLibraryWaitSuccess() throws UploadException, GalaxyDatase String datasetId2 = datasetsMap.get(dataFile2); String datasetIdCompressed = datasetsMap.get(dataFileCompressed); - LibraryDataset actualDataset1 = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient() + LibraryDataset actualDataset1 = localGalaxy.getGalaxyInstanceAdmin() + .getLibrariesClient() .showDataset(library.getId(), datasetId1); assertNotNull(actualDataset1); assertEquals(actualDataset1.getDataTypeExt(), InputFileType.FASTQ_SANGER.toString(), "Invalid data type extension"); - LibraryDataset actualDataset2 = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient() + LibraryDataset actualDataset2 = localGalaxy.getGalaxyInstanceAdmin() + .getLibrariesClient() .showDataset(library.getId(), datasetId2); assertNotNull(actualDataset2); assertEquals(actualDataset2.getDataTypeExt(), InputFileType.FASTQ_SANGER.toString(), "Invalid data type extension"); - LibraryDataset actualDatasetCompressed = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient() + LibraryDataset actualDatasetCompressed = localGalaxy.getGalaxyInstanceAdmin() + .getLibrariesClient() .showDataset(library.getId(), datasetIdCompressed); assertNotNull(actualDatasetCompressed); assertEquals(actualDatasetCompressed.getDataTypeExt(), 
InputFileType.FASTQ_SANGER_GZ.toString(), @@ -174,8 +170,7 @@ public void testFilesToLibraryWaitSuccess() throws UploadException, GalaxyDatase } /** - * Tests failure to upload a list of files to a Galaxy history through a - * Library. + * Tests failure to upload a list of files to a Galaxy history through a Library. * * @throws UploadException * @throws GalaxyDatasetException @@ -197,17 +192,15 @@ public void testFilesToLibraryWaitFail() throws UploadException, GalaxyDatasetEx */ @Test public void testFilesToLibraryWaitFailTimeout() throws UploadException, GalaxyDatasetException { - galaxyLibrariesService = new GalaxyLibrariesService(librariesClient, 1, 2, 1); - Library library = buildEmptyLibrary("testFilesToLibraryWaitFailTimeout"); assertThrows(UploadTimeoutException.class, () -> { - galaxyLibrariesService.filesToLibraryWait(Sets.newHashSet(dataFile, dataFile2), library, DataStorage.LOCAL); + galaxyLibrariesServiceTimeout.filesToLibraryWait(Sets.newHashSet(dataFile, dataFile2), library, + DataStorage.LOCAL); }); } /** - * Tests failure to upload to a library due to an error with the dataset - * upload. + * Tests failure to upload to a library due to an error with the dataset upload. 
* * @throws UploadException * @throws GalaxyDatasetException diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisAjaxControllerTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisAjaxControllerTest.java index 2f7dca5739b..549aa1085c4 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisAjaxControllerTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisAjaxControllerTest.java @@ -13,22 +13,19 @@ import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException; import ca.corefacility.bioinformatics.irida.exceptions.PostProcessingException; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState; - import ca.corefacility.bioinformatics.irida.model.project.Project; - import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.type.BuiltInAnalysisTypes; import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription; import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowInput; - import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; import ca.corefacility.bioinformatics.irida.model.workflow.submission.ProjectAnalysisSubmissionJoin; import ca.corefacility.bioinformatics.irida.pipeline.results.AnalysisSubmissionSampleProcessor; import ca.corefacility.bioinformatics.irida.ria.unit.TestDataFactory; import ca.corefacility.bioinformatics.irida.ria.web.analysis.AnalysisAjaxController; -import ca.corefacility.bioinformatics.irida.ria.web.analysis.dto.*; import ca.corefacility.bioinformatics.irida.ria.web.analysis.auditing.AnalysisAudit; +import ca.corefacility.bioinformatics.irida.ria.web.analysis.dto.*; import 
ca.corefacility.bioinformatics.irida.security.permissions.analysis.UpdateAnalysisSubmissionPermission; import ca.corefacility.bioinformatics.irida.service.*; import ca.corefacility.bioinformatics.irida.service.sample.MetadataTemplateService; @@ -39,7 +36,6 @@ import com.google.common.collect.Lists; import static org.junit.jupiter.api.Assertions.*; -import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.*; public class AnalysisAjaxControllerTest { @@ -59,6 +55,7 @@ public class AnalysisAjaxControllerTest { private UpdateAnalysisSubmissionPermission updatePermission; private MetadataTemplateService metadataTemplateService; private SequencingObjectService sequencingObjectService; + private GenomeAssemblyService genomeAssemblyService; private AnalysisSubmissionSampleProcessor analysisSubmissionSampleProcessor; private ExecutionManagerConfig configFileMock; private AnalysisAudit analysisAuditMock; @@ -67,8 +64,7 @@ public class AnalysisAjaxControllerTest { private EmailController emailControllerMock; /** - * Analysis Output File key names from - * {@link TestDataFactory#constructAnalysis()} + * Analysis Output File key names from {@link TestDataFactory#constructAnalysis()} */ private final List outputNames = Lists.newArrayList("tree", "matrix", "table", "contigs-with-repeats", "refseq-masher-matches"); @@ -81,6 +77,7 @@ public void init() { updatePermission = mock(UpdateAnalysisSubmissionPermission.class); sampleService = mock(SampleService.class); sequencingObjectService = mock(SequencingObjectService.class); + genomeAssemblyService = mock(GenomeAssemblyService.class); analysisSubmissionSampleProcessor = mock(AnalysisSubmissionSampleProcessor.class); userServiceMock = mock(UserService.class); configFileMock = mock(ExecutionManagerConfig.class); @@ -92,8 +89,8 @@ public void init() { analysisAjaxController = new AnalysisAjaxController(analysisSubmissionServiceMock, iridaWorkflowsServiceMock, userServiceMock, sampleService, 
projectServiceMock, updatePermission, metadataTemplateService, - sequencingObjectService, analysisSubmissionSampleProcessor, messageSourceMock, configFileMock, - analysisAuditMock, analysisTypesServiceMock, emailControllerMock); + sequencingObjectService, genomeAssemblyService, analysisSubmissionSampleProcessor, messageSourceMock, + configFileMock, analysisAuditMock, analysisTypesServiceMock, emailControllerMock); } @@ -166,7 +163,8 @@ public void testGetOutputFileLines() throws IridaWorkflowNotFoundException { final List infos = analysisAjaxController.getOutputFilesInfo(submissionId); assertEquals(5, infos.size(), "Expecting 5 analysis output file info items"); final Optional optInfo = infos.stream() - .filter(x -> Objects.equals(x.getOutputName(), "refseq-masher-matches")).findFirst(); + .filter(x -> Objects.equals(x.getOutputName(), "refseq-masher-matches")) + .findFirst(); assertTrue(optInfo.isPresent(), "Should be a refseq-masher-matches.tsv output file"); final AnalysisOutputFileInfo info = optInfo.get(); final String firstLine = "sample\ttop_taxonomy_name\tdistance\tpvalue\tmatching\tfull_taxonomy\ttaxonomic_subspecies\ttaxonomic_species\ttaxonomic_genus\ttaxonomic_family\ttaxonomic_order\ttaxonomic_class\ttaxonomic_phylum\ttaxonomic_superkingdom\tsubspecies\tserovar\tplasmid\tbioproject\tbiosample\ttaxid\tassembly_accession\tmatch_id"; @@ -202,7 +200,8 @@ public void testGetOutputFileByteSizedChunks() throws IridaWorkflowNotFoundExcep final List infos = analysisAjaxController.getOutputFilesInfo(submissionId); assertEquals(5, infos.size(), "Expecting 5 analysis output file info items"); final Optional optInfo = infos.stream() - .filter(x -> Objects.equals(x.getOutputName(), "refseq-masher-matches")).findFirst(); + .filter(x -> Objects.equals(x.getOutputName(), "refseq-masher-matches")) + .findFirst(); assertTrue(optInfo.isPresent(), "Should be a refseq-masher-matches.tsv output file"); final AnalysisOutputFileInfo info = optInfo.get(); final String firstLine 
= "sample\ttop_taxonomy_name\tdistance\tpvalue\tmatching\tfull_taxonomy\ttaxonomic_subspecies\ttaxonomic_species\ttaxonomic_genus\ttaxonomic_family\ttaxonomic_order\ttaxonomic_class\ttaxonomic_phylum\ttaxonomic_superkingdom\tsubspecies\tserovar\tplasmid\tbioproject\tbiosample\ttaxid\tassembly_accession\tmatch_id"; @@ -280,7 +279,7 @@ public void testUpdateAnalysisName() throws IridaWorkflowNotFoundException { @Test public void testGetAnalysisDetails() { - final IridaWorkflowInput input = new IridaWorkflowInput("single", "paired", "reference", true); + final IridaWorkflowInput input = new IridaWorkflowInput("single", "paired", "assemblies", "reference", true); AnalysisSubmission submission = TestDataFactory.constructAnalysisSubmission(); IridaWorkflowDescription description = new IridaWorkflowDescription(submission.getWorkflowId(), "My Workflow", "V1", BuiltInAnalysisTypes.PHYLOGENOMICS, input, Lists.newArrayList(), Lists.newArrayList(), diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisControllerTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisControllerTest.java index 44093288608..c0f104af0ac 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisControllerTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/analysis/AnalysisControllerTest.java @@ -67,7 +67,7 @@ public void testGetAnalysisDetailsTree() throws IOException, IridaWorkflowNotFou Long submissionId = 1L; ExtendedModelMap model = new ExtendedModelMap(); - final IridaWorkflowInput input = new IridaWorkflowInput("single", "paired", "reference", true); + final IridaWorkflowInput input = new IridaWorkflowInput("single", "paired", "assemblies", "reference", true); AnalysisSubmission submission = TestDataFactory.constructAnalysisSubmission(); IridaWorkflowDescription description = new IridaWorkflowDescription(submission.getWorkflowId(), "My Workflow", "V1", 
BuiltInAnalysisTypes.PHYLOGENOMICS, input, Lists.newArrayList(), Lists.newArrayList(), @@ -77,18 +77,18 @@ public void testGetAnalysisDetailsTree() throws IOException, IridaWorkflowNotFou when(analysisSubmissionServiceMock.read(submissionId)).thenReturn(submission); when(iridaWorkflowsServiceMock.getIridaWorkflowOrUnknown(submission)).thenReturn(iridaWorkflow); - when(analysisTypesService.getViewerForAnalysisType(BuiltInAnalysisTypes.PHYLOGENOMICS)).thenReturn( - Optional.of("tree")); + when(analysisTypesService.getViewerForAnalysisType(BuiltInAnalysisTypes.PHYLOGENOMICS)) + .thenReturn(Optional.of("tree")); String analysisPage = analysisController.getDetailsPage(submissionId, model); assertEquals(AnalysisController.ANALYSIS_PAGE, analysisPage, "should be analysis page"); - assertEquals(BuiltInAnalysisTypes.PHYLOGENOMICS, model.get("analysisType"), + assertEquals(BuiltInAnalysisTypes.PHYLOGENOMICS, model.get("analysisType"), "Phylogenetic Tree tab should be available"); assertEquals(submission.getName(), model.get("analysisName"), "submission name should be in model"); - assertEquals(BuiltInAnalysisTypes.PHYLOGENOMICS, model.get("analysisType"), + assertEquals(BuiltInAnalysisTypes.PHYLOGENOMICS, model.get("analysisType"), "analysisType should be PHYLOGENOMICS"); } @@ -97,7 +97,7 @@ public void testGetAnalysisDetailsNotCompleted() throws IOException, IridaWorkfl Long submissionId = 1L; ExtendedModelMap model = new ExtendedModelMap(); - final IridaWorkflowInput input = new IridaWorkflowInput("single", "paired", "reference", true); + final IridaWorkflowInput input = new IridaWorkflowInput("single", "paired", "assemblies", "reference", true); AnalysisSubmission submission = TestDataFactory.constructAnalysisSubmission(); IridaWorkflowDescription description = new IridaWorkflowDescription(submission.getWorkflowId(), "My Workflow", "V1", BuiltInAnalysisTypes.PHYLOGENOMICS, input, Lists.newArrayList(), Lists.newArrayList(), @@ -107,14 +107,13 @@ public void 
testGetAnalysisDetailsNotCompleted() throws IOException, IridaWorkfl when(analysisSubmissionServiceMock.read(submissionId)).thenReturn(submission); when(iridaWorkflowsServiceMock.getIridaWorkflowOrUnknown(submission)).thenReturn(iridaWorkflow); - when(analysisTypesService.getViewerForAnalysisType(BuiltInAnalysisTypes.PHYLOGENOMICS)).thenReturn( - Optional.of("tree")); + when(analysisTypesService.getViewerForAnalysisType(BuiltInAnalysisTypes.PHYLOGENOMICS)) + .thenReturn(Optional.of("tree")); String analysisPage = analysisController.getDetailsPage(submissionId, model); assertEquals(AnalysisController.ANALYSIS_PAGE, analysisPage, "should be analysis page"); - assertFalse(submission.getAnalysisState() == AnalysisState.COMPLETED, - "Analysis should not be completed"); + assertFalse(submission.getAnalysisState() == AnalysisState.COMPLETED, "Analysis should not be completed"); assertEquals(submission.getName(), model.get("analysisName"), "submission name should be in model"); } @@ -129,8 +128,8 @@ public void testGetAnalysisDetailsMissingPipeline() throws IOException, IridaWor submission.setAnalysisState(AnalysisState.COMPLETED); when(analysisSubmissionServiceMock.read(submissionId)).thenReturn(submission); - when(iridaWorkflowsServiceMock.getIridaWorkflowOrUnknown(submission)).thenReturn( - createUnknownWorkflow(workflowId)); + when(iridaWorkflowsServiceMock.getIridaWorkflowOrUnknown(submission)) + .thenReturn(createUnknownWorkflow(workflowId)); when(analysisTypesService.getViewerForAnalysisType(BuiltInAnalysisTypes.UNKNOWN)).thenReturn(Optional.empty()); String analysisPage = analysisController.getDetailsPage(submissionId, model); diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineServiceTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineServiceTest.java index 99931cf9744..8865a01a578 100644 --- 
a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineServiceTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineServiceTest.java @@ -37,6 +37,7 @@ import com.google.common.collect.ImmutableSet; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; public class UIPipelineServiceTest { @@ -80,18 +81,16 @@ public void setUp() throws IridaWorkflowNotFoundException { when(messageSource.getMessage(any(), any(), any())).thenReturn("I want cookies"); when(cartService.getProjectIdsInCart()).thenReturn(new HashSet<>(PROJECT_IDS)); - List projects = PROJECT_IDS.stream() - .map(id -> { - Project project = new Project("project-" + id); - project.setId(id); - return project; - }) - .collect(Collectors.toList()); + List projects = PROJECT_IDS.stream().map(id -> { + Project project = new Project("project-" + id); + project.setId(id); + return project; + }).collect(Collectors.toList()); when(projectService.readMultiple(any())).thenReturn(projects); when(projectService.read(1L)).thenReturn(projects.get(0)); - List templates = ImmutableList.of( - new AnalysisSubmissionTemplate("Superman", WORKFLOW_ID, ImmutableMap.of(), null, true, + List templates = ImmutableList + .of(new AnalysisSubmissionTemplate("Superman", WORKFLOW_ID, ImmutableMap.of(), null, true, "Interesting superhero with cape", true, true, projects.get(0))); when(analysisSubmissionService.getAnalysisTemplatesForProject(projects.get(0))).thenReturn(templates); @@ -110,8 +109,8 @@ public void setUp() throws IridaWorkflowNotFoundException { }); when(cartService.getFullCart()).thenReturn(cart); - when(analysisSubmissionService.readAnalysisSubmissionTemplateForProject(TEMPLATE_ID, - projects.get(0))).thenReturn(templates.get(0)); + when(analysisSubmissionService.readAnalysisSubmissionTemplateForProject(TEMPLATE_ID, projects.get(0))) + 
.thenReturn(templates.get(0)); } @Test @@ -149,7 +148,8 @@ public void getWorkflowTypesTest() { try { IridaWorkflowDescription description = mock(IridaWorkflowDescription.class); when(description.getName()).thenReturn(type.getType()); - when(description.getInputs()).thenReturn(new IridaWorkflowInput(null, null, null, requiresIter.next())); + when(description.getInputs()) + .thenReturn(new IridaWorkflowInput(null, null, null, null, requiresIter.next())); IridaWorkflowStructure structure = mock(IridaWorkflowStructure.class); IridaWorkflow workflow = new IridaWorkflow(description, structure); when(workflowsService.getDefaultWorkflowByType(type)).thenReturn(workflow); diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineStartServiceTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineStartServiceTest.java index a4300a55846..047a66e2b55 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineStartServiceTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/services/UIPipelineStartServiceTest.java @@ -21,6 +21,7 @@ import ca.corefacility.bioinformatics.irida.ria.web.launchPipeline.dtos.LaunchRequest; import ca.corefacility.bioinformatics.irida.ria.web.services.UIPipelineStartService; import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; +import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; import ca.corefacility.bioinformatics.irida.service.ProjectService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; @@ -30,6 +31,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; public class UIPipelineStartServiceTest { @@ -43,6 +45,7 @@ public 
class UIPipelineStartServiceTest { private UIPipelineStartService service; private IridaWorkflowsService workflowsService; private SequencingObjectService sequencingObjectService; + private GenomeAssemblyService genomeAssemblyService; private AnalysisSubmissionService submissionService; private ProjectService projectService; private WorkflowNamedParametersService namedParametersService; @@ -52,18 +55,19 @@ public class UIPipelineStartServiceTest { public void startTest() throws IridaWorkflowNotFoundException, ReferenceFileRequiredException { workflowsService = Mockito.mock(IridaWorkflowsService.class); sequencingObjectService = Mockito.mock(SequencingObjectService.class); + genomeAssemblyService = Mockito.mock(GenomeAssemblyService.class); submissionService = Mockito.mock(AnalysisSubmissionService.class); projectService = Mockito.mock(ProjectService.class); namedParametersService = Mockito.mock(WorkflowNamedParametersService.class); messageSource = Mockito.mock(MessageSource.class); - service = new UIPipelineStartService(workflowsService, sequencingObjectService, submissionService, - projectService, namedParametersService, messageSource); + service = new UIPipelineStartService(workflowsService, sequencingObjectService, genomeAssemblyService, + submissionService, projectService, namedParametersService, messageSource); IridaWorkflowDescription description = mock(IridaWorkflowDescription.class); when(description.getAnalysisType()).thenReturn(ANALYSIS_TYPE); when(description.getName()).thenReturn(PIPELINE_NAME); - when(description.getInputs()).thenReturn(new IridaWorkflowInput("Wolverine", null, null, true)); + when(description.getInputs()).thenReturn(new IridaWorkflowInput("Wolverine", null, null, null, true)); IridaWorkflowStructure structure = mock(IridaWorkflowStructure.class); @@ -102,11 +106,13 @@ public String getLabel() { }; Collection submissions = ImmutableList.of(AnalysisSubmission.builder(WORKFLOW_ID) - .name("Wonder 
Woman").inputFiles(ImmutableSet.of(sequencingObject)).build()); + .name("Wonder Woman") + .inputFiles(ImmutableSet.of(sequencingObject)) + .build()); when(submissionService.createSingleSampleSubmission(workflow, request.getReference(), ImmutableList.of(), - ImmutableList.of(), request.getParameters(), null, request.getName(), request.getDescription(), - projects, request.isUpdateSamples(), request.sendEmailOnCompletion(), request.sendEmailOnError())) - .thenReturn(submissions); + ImmutableList.of(), ImmutableList.of(), request.getParameters(), null, request.getName(), + request.getDescription(), projects, request.isUpdateSamples(), request.sendEmailOnCompletion(), + request.sendEmailOnError())).thenReturn(submissions); when(messageSource.getMessage(any(), any(), any())).thenReturn("FOOBAR"); when(submissionService.createSingleSampleSubmissionTemplate(workflow, null, request.getParameters(), null, @@ -120,8 +126,8 @@ public String getLabel() { verify(workflowsService, timeout(1)).getIridaWorkflow(WORKFLOW_ID); verify(projectService, times(1)).readMultiple(request.getProjects()); verify(submissionService, times(1)).createSingleSampleSubmission(workflow, request.getReference(), - ImmutableList.of(), ImmutableList.of(), request.getParameters(), null, request.getName(), - request.getDescription(), ImmutableList.of(project), request.isUpdateSamples(), + ImmutableList.of(), ImmutableList.of(), ImmutableList.of(), request.getParameters(), null, + request.getName(), request.getDescription(), ImmutableList.of(project), request.isUpdateSamples(), request.sendEmailOnCompletion(), request.sendEmailOnError()); /* diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/impl/unit/AnalysisWorkspaceServiceGalaxyTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/impl/unit/AnalysisWorkspaceServiceGalaxyTest.java index f453e64320f..29924fdc8cb 100644 --- 
a/src/test/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/impl/unit/AnalysisWorkspaceServiceGalaxyTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/impl/unit/AnalysisWorkspaceServiceGalaxyTest.java @@ -1,5 +1,16 @@ package ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.impl.unit; +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + import ca.corefacility.bioinformatics.irida.exceptions.*; import ca.corefacility.bioinformatics.irida.exceptions.galaxy.CreateLibraryException; import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetException; @@ -21,28 +32,20 @@ import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyLibrariesService; import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyWorkflowService; import ca.corefacility.bioinformatics.irida.repositories.sequencefile.SequenceFileRepository; +import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.AnalysisCollectionServiceGalaxy; import ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.AnalysisParameterServiceGalaxy; import ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.AnalysisProvenanceServiceGalaxy; import ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.AnalysisWorkspaceServiceGalaxy; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; + import com.github.jmchilton.blend4j.galaxy.beans.*; import 
com.github.jmchilton.blend4j.galaxy.beans.WorkflowInvocationInputs.WorkflowInvocationInput; import com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.*; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; @@ -51,8 +54,6 @@ /** * Tests out preparing a Galaxy Phylogenomics Pipeline workflow for execution. - * - * */ public class AnalysisWorkspaceServiceGalaxyTest { @@ -85,6 +86,9 @@ public class AnalysisWorkspaceServiceGalaxyTest { @Mock private SequencingObjectService sequencingObjectService; + @Mock + private GenomeAssemblyService genomeAssemblyService; + private AnalysisWorkspaceServiceGalaxy workflowPreparation; private Set inputFiles; @@ -184,8 +188,11 @@ public void setup() throws IOException, UploadException, GalaxyDatasetException inputFiles = new HashSet<>(); inputFiles.addAll(Arrays.asList(sObjA, sObjB, sObjC)); - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(inputFiles) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(inputFiles) + .referenceFile(referenceFile) + .build(); workflowHistory = new History(); workflowHistory.setId(HISTORY_ID); @@ -198,8 +205,8 @@ public void setup() throws IOException, UploadException, GalaxyDatasetException workflowPreparation = new AnalysisWorkspaceServiceGalaxy(galaxyHistoriesService, galaxyWorkflowService, galaxyLibrariesService, iridaWorkflowsService, analysisCollectionServiceGalaxy, - analysisProvenanceServiceGalaxy, 
analysisParameterServiceGalaxy, - sequencingObjectService); + analysisProvenanceServiceGalaxy, analysisParameterServiceGalaxy, sequencingObjectService, + genomeAssemblyService); output1Dataset = new Dataset(); output1Dataset.setId("1"); @@ -250,8 +257,7 @@ public void testPrepareAnalysisWorkspaceFail() throws ExecutionManagerException } /** - * Tests out successfully to preparing an analysis with both single and - * paired files + * Tests out successfully to preparing an analysis with both single and paired files * * @throws ExecutionManagerException * @throws IridaWorkflowException @@ -259,15 +265,19 @@ public void testPrepareAnalysisWorkspaceFail() throws ExecutionManagerException */ @SuppressWarnings("unchecked") @Test - public void testPrepareAnalysisFilesSinglePairedSuccess() throws ExecutionManagerException, IridaWorkflowException, IOException { + public void testPrepareAnalysisFilesSinglePairedSuccess() + throws ExecutionManagerException, IridaWorkflowException, IOException { Set singleFiles = Sets.newHashSet(sampleSingleSequenceFileMap.values()); Set pairedFiles = Sets.newHashSet(sampleSequenceFilePairMap.values()); Set joinedInput = Sets.newHashSet(singleFiles); joinedInput.addAll(pairedFiles); - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(joinedInput) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(joinedInput) + .referenceFile(referenceFile) + .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -311,7 +321,8 @@ public void testPrepareAnalysisFilesSinglePairedSuccess() throws ExecutionManage assertEquals(LIBRARY_ID, preparedWorkflow.getRemoteDataId(), "preparedWorkflow library is invalid"); assertNotNull(preparedWorkflow.getWorkflowInputs(), "workflowInvocationInputs in preparedWorkflow is null"); - Map workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject() + Map 
workflowInputsMap = preparedWorkflow.getWorkflowInputs() + .getInputsObject() .getInputs(); assertEquals(3, workflowInputsMap.size(), "invalid number of workflow inputs"); assertTrue(workflowInputsMap.containsKey(REFERENCE_FILE_ID), "workflow inputs should contain reference entry"); @@ -334,11 +345,14 @@ public void testPrepareAnalysisFilesSinglePairedSuccess() throws ExecutionManage */ @SuppressWarnings("unchecked") @Test - public void testPrepareAnalysisFilesSingleSuccess() throws ExecutionManagerException, IridaWorkflowException, IOException { + public void testPrepareAnalysisFilesSingleSuccess() + throws ExecutionManagerException, IridaWorkflowException, IOException { Set singleFiles = Sets.newHashSet(sampleSingleSequenceFileMap.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis") - .inputFiles(Sets.newHashSet(singleFiles)).referenceFile(referenceFile) + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(Sets.newHashSet(singleFiles)) + .referenceFile(referenceFile) .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -374,7 +388,8 @@ public void testPrepareAnalysisFilesSingleSuccess() throws ExecutionManagerExcep assertEquals(LIBRARY_ID, preparedWorkflow.getRemoteDataId(), "preparedWorkflow library is invalid"); assertNotNull(preparedWorkflow.getWorkflowInputs(), "workflowInvocationInputs in preparedWorkflow is null"); - Map workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject() + Map workflowInputsMap = preparedWorkflow.getWorkflowInputs() + .getInputsObject() .getInputs(); assertEquals(2, workflowInputsMap.size(), "workflow inputs has invalid size"); assertTrue(workflowInputsMap.containsKey(REFERENCE_FILE_ID), @@ -396,11 +411,14 @@ public void testPrepareAnalysisFilesSingleSuccess() throws ExecutionManagerExcep */ @SuppressWarnings("unchecked") @Test - public void testPrepareAnalysisFilesPairedSuccess() throws 
ExecutionManagerException, IridaWorkflowException, IOException { + public void testPrepareAnalysisFilesPairedSuccess() + throws ExecutionManagerException, IridaWorkflowException, IOException { Set pairedFiles = Sets.newHashSet(sampleSequenceFilePairMap.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis") - .inputFiles(Sets.newHashSet(pairedFiles)).referenceFile(referenceFile) + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(Sets.newHashSet(pairedFiles)) + .referenceFile(referenceFile) .build(); submission.setRemoteAnalysisId(HISTORY_ID); @@ -437,7 +455,8 @@ public void testPrepareAnalysisFilesPairedSuccess() throws ExecutionManagerExcep assertEquals(LIBRARY_ID, preparedWorkflow.getRemoteDataId(), "preparedWorkflow library is invalid"); assertNotNull(preparedWorkflow.getWorkflowInputs(), "workflowInvocationInputs in preparedWorkflow is null"); - Map workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject() + Map workflowInputsMap = preparedWorkflow.getWorkflowInputs() + .getInputsObject() .getInputs(); assertEquals(2, workflowInputsMap.size(), "workflow inputs has invalid size"); assertTrue(workflowInputsMap.containsKey(REFERENCE_FILE_ID), @@ -458,9 +477,12 @@ public void testPrepareAnalysisFilesPairedSuccess() throws ExecutionManagerExcep * @throws IOException */ @Test - public void testPrepareAnalysisFilesNoCreateLibraryFail() throws ExecutionManagerException, IridaWorkflowException, IOException { - submission = AnalysisSubmission.builder(workflowId).name("my analysis") - .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())).referenceFile(referenceFile) + public void testPrepareAnalysisFilesNoCreateLibraryFail() + throws ExecutionManagerException, IridaWorkflowException, IOException { + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())) + .referenceFile(referenceFile) 
.build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -470,15 +492,14 @@ public void testPrepareAnalysisFilesNoCreateLibraryFail() throws ExecutionManage when(galaxyHistoriesService.findById(HISTORY_ID)).thenReturn(workflowHistory); when(galaxyLibrariesService.buildEmptyLibrary(any(GalaxyProjectName.class))) .thenThrow(new CreateLibraryException("")); - + assertThrows(CreateLibraryException.class, () -> { workflowPreparation.prepareAnalysisFiles(submission); }); } /** - * Tests out failing to preparing an analysis due to duplicate samples - * between single and paired input files. + * Tests out failing to preparing an analysis due to duplicate samples between single and paired input files. * * @throws ExecutionManagerException * @throws IridaWorkflowException @@ -493,8 +514,11 @@ public void testPrepareAnalysisFilesSinglePairedDuplicateFail() Set joinedInputs = Sets.newHashSet(singleFiles); joinedInputs.addAll(pairedFiles); - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(joinedInputs) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(joinedInputs) + .referenceFile(referenceFile) + .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -519,20 +543,22 @@ public void testPrepareAnalysisFilesSinglePairedDuplicateFail() } /** - * Tests out failing to preparing an analysis with paired files when it - * cannot accept paired files. + * Tests out failing to preparing an analysis with paired files when it cannot accept paired files. 
* * @throws ExecutionManagerException * @throws IridaWorkflowException * @throws IOException */ @Test - public void testPrepareAnalysisFilesPairedNoAcceptFail() throws ExecutionManagerException, IridaWorkflowException, IOException { + public void testPrepareAnalysisFilesPairedNoAcceptFail() + throws ExecutionManagerException, IridaWorkflowException, IOException { Set pairedFiles = Sets.newHashSet(sampleSequenceFilePairMapSampleA.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis") + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") .inputFiles(Sets.newHashSet(pairedFiles)) - .referenceFile(referenceFile).build(); + .referenceFile(referenceFile) + .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -553,19 +579,21 @@ public void testPrepareAnalysisFilesPairedNoAcceptFail() throws ExecutionManager } /** - * Tests out failing to preparing an analysis with single files when it - * cannot accept single files. + * Tests out failing to preparing an analysis with single files when it cannot accept single files. 
* * @throws ExecutionManagerException * @throws IridaWorkflowException * @throws IOException */ @Test - public void testPrepareAnalysisFilesSingleNoAcceptFail() throws ExecutionManagerException, IridaWorkflowException, IOException { + public void testPrepareAnalysisFilesSingleNoAcceptFail() + throws ExecutionManagerException, IridaWorkflowException, IOException { Set singleFiles = Sets.newHashSet(sampleSingleSequenceFileMap.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis") - .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())).referenceFile(referenceFile) + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())) + .referenceFile(referenceFile) .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -584,8 +612,7 @@ public void testPrepareAnalysisFilesSingleNoAcceptFail() throws ExecutionManager } /** - * Tests out failing to preparing an analysis which requires a reference but - * no reference found in submission. + * Tests out failing to preparing an analysis which requires a reference but no reference found in submission. 
* * @throws ExecutionManagerException * @throws IridaWorkflowException @@ -594,8 +621,10 @@ public void testPrepareAnalysisFilesSingleNoAcceptFail() throws ExecutionManager @Test public void testPrepareAnalysisFilesRequiresReferenceFail() throws ExecutionManagerException, IridaWorkflowException, IOException { - submission = AnalysisSubmission.builder(workflowId).name("my analysis") - .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())) + .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -607,8 +636,8 @@ public void testPrepareAnalysisFilesRequiresReferenceFail() } /** - * Tests out failing to preparing an analysis which does not require a - * reference but a reference is found in submission. + * Tests out failing to preparing an analysis which does not require a reference but a reference is found in + * submission. 
* * @throws ExecutionManagerException * @throws IridaWorkflowException @@ -617,8 +646,10 @@ public void testPrepareAnalysisFilesRequiresReferenceFail() @Test public void testPrepareAnalysisFilesNoRequiresReferenceFail() throws ExecutionManagerException, IridaWorkflowException, IOException { - submission = AnalysisSubmission.builder(workflowId).name("my analysis") - .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())).referenceFile(referenceFile) + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())) + .referenceFile(referenceFile) .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -631,8 +662,8 @@ public void testPrepareAnalysisFilesNoRequiresReferenceFail() } /** - * Tests out failing to preparing an analysis which is passed both single - * and paired files but only accepts paired files. + * Tests out failing to preparing an analysis which is passed both single and paired files but only accepts paired + * files. * * @throws ExecutionManagerException * @throws IridaWorkflowException @@ -644,8 +675,11 @@ public void testPrepareAnalysisFilesSinglePairedNoAcceptFail() Set joindInputs = Sets.newHashSet(sampleSingleSequenceFileMap.values()); joindInputs.addAll(sampleSequenceFilePairMap.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(joindInputs) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(joindInputs) + .referenceFile(referenceFile) + .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -667,8 +701,7 @@ public void testPrepareAnalysisFilesSinglePairedNoAcceptFail() } /** - * Tests out failing to prepare workflow files due to a failure to prepare - * parameters. 
+ * Tests out failing to prepare workflow files due to a failure to prepare parameters. * * @throws ExecutionManagerException * @throws IridaWorkflowException @@ -676,9 +709,12 @@ public void testPrepareAnalysisFilesSinglePairedNoAcceptFail() */ @SuppressWarnings("unchecked") @Test - public void testPrepareAnalysisFilesFailParameters() throws ExecutionManagerException, IridaWorkflowException, IOException { - submission = AnalysisSubmission.builder(workflowId).name("my analysis") - .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())).referenceFile(referenceFile) + public void testPrepareAnalysisFilesFailParameters() + throws ExecutionManagerException, IridaWorkflowException, IOException { + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(Sets.newHashSet(sampleSingleSequenceFileMap.values())) + .referenceFile(referenceFile) .build(); submission.setRemoteAnalysisId(HISTORY_ID); submission.setRemoteWorkflowId(WORKFLOW_ID); @@ -700,8 +736,7 @@ public void testPrepareAnalysisFilesFailParameters() throws ExecutionManagerExce } /** - * Tests successfully getting analysis results from Galaxy with single end - * input files. + * Tests successfully getting analysis results from Galaxy with single end input files. 
* * @throws IridaWorkflowNotFoundException * @throws IOException @@ -713,8 +748,11 @@ public void testGetAnalysisResultsSuccessSingleEnd() throws IridaWorkflowNotFoun IridaWorkflowAnalysisTypeException, ExecutionManagerException, IOException { Set singleFiles = Sets.newHashSet(sampleSingleSequenceFileMap.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(singleInputFiles) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(singleInputFiles) + .referenceFile(referenceFile) + .build(); submission.setRemoteWorkflowId(WORKFLOW_ID); submission.setRemoteAnalysisId(HISTORY_ID); @@ -732,14 +770,11 @@ public void testGetAnalysisResultsSuccessSingleEnd() throws IridaWorkflowNotFoun assertNotNull(analysis, "analysis is not valid"); assertEquals(2, analysis.getAnalysisOutputFiles().size(), "invalid number of output files"); - assertEquals(Paths.get("output1.txt"), - analysis.getAnalysisOutputFile("output1").getFile().getFileName(), + assertEquals(Paths.get("output1.txt"), analysis.getAnalysisOutputFile("output1").getFile().getFileName(), "missing output file for analysis"); - assertEquals("SampleA-output1.txt", - analysis.getAnalysisOutputFile("output1").getLabel(), + assertEquals("SampleA-output1.txt", analysis.getAnalysisOutputFile("output1").getLabel(), "missing label for analysis output file"); - assertEquals("SampleA-output2.txt", - analysis.getAnalysisOutputFile("output2").getLabel(), + assertEquals("SampleA-output2.txt", analysis.getAnalysisOutputFile("output2").getLabel(), "missing output file for analysis"); verify(galaxyHistoriesService).getDatasetForFileInHistory("output1.txt", HISTORY_ID); @@ -747,8 +782,7 @@ public void testGetAnalysisResultsSuccessSingleEnd() throws IridaWorkflowNotFoun } /** - * Tests successfully getting analysis results from Galaxy with paired end - * input files. 
+ * Tests successfully getting analysis results from Galaxy with paired end input files. * * @throws IridaWorkflowNotFoundException * @throws IOException @@ -760,8 +794,11 @@ public void testGetAnalysisResultsSuccessPairedEnd() throws IridaWorkflowNotFoun IridaWorkflowAnalysisTypeException, ExecutionManagerException, IOException { Set pairedFiles = Sets.newHashSet(sampleSequenceFilePairMap.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(pairedInputFiles) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(pairedInputFiles) + .referenceFile(referenceFile) + .build(); submission.setRemoteWorkflowId(WORKFLOW_ID); submission.setRemoteAnalysisId(HISTORY_ID); @@ -779,8 +816,7 @@ public void testGetAnalysisResultsSuccessPairedEnd() throws IridaWorkflowNotFoun assertNotNull(analysis, "analysis is not valid"); assertEquals(2, analysis.getAnalysisOutputFiles().size(), "invalid number of output files"); - assertEquals(Paths.get("output1.txt"), - analysis.getAnalysisOutputFile("output1").getFile().getFileName(), + assertEquals(Paths.get("output1.txt"), analysis.getAnalysisOutputFile("output1").getFile().getFileName(), "missing output file for analysis"); assertEquals("SampleB-output1.txt", analysis.getAnalysisOutputFile("output1").getLabel(), "missing label for analysis output file"); @@ -792,8 +828,7 @@ public void testGetAnalysisResultsSuccessPairedEnd() throws IridaWorkflowNotFoun } /** - * Tests successfully getting analysis results from Galaxy with - * single/paired end input files. + * Tests successfully getting analysis results from Galaxy with single/paired end input files. 
* * @throws IridaWorkflowNotFoundException * @throws IOException @@ -801,9 +836,8 @@ public void testGetAnalysisResultsSuccessPairedEnd() throws IridaWorkflowNotFoun * @throws IridaWorkflowAnalysisTypeException */ @Test - public void testGetAnalysisResultsSuccessSinglePairedEnd() - throws IridaWorkflowNotFoundException, IridaWorkflowAnalysisTypeException, ExecutionManagerException, - IOException { + public void testGetAnalysisResultsSuccessSinglePairedEnd() throws IridaWorkflowNotFoundException, + IridaWorkflowAnalysisTypeException, ExecutionManagerException, IOException { Set singleFiles = Sets.newHashSet(sampleSingleSequenceFileMap.values()); Set pairedFiles = Sets.newHashSet(sampleSequenceFilePairMap.values()); @@ -814,8 +848,11 @@ public void testGetAnalysisResultsSuccessSinglePairedEnd() Map joinedMap = Maps.newHashMap(sampleSingleSequenceFileMap); joinedMap.putAll(sampleSequenceFilePairMap); - submission = AnalysisSubmission.builder(workflowIdMultiSamples).name("my analysis") - .inputFiles(singleInputFiles).inputFiles(pairedInputFiles).referenceFile(referenceFile) + submission = AnalysisSubmission.builder(workflowIdMultiSamples) + .name("my analysis") + .inputFiles(singleInputFiles) + .inputFiles(pairedInputFiles) + .referenceFile(referenceFile) .build(); submission.setRemoteWorkflowId(WORKFLOW_ID); submission.setRemoteAnalysisId(HISTORY_ID); @@ -827,8 +864,7 @@ public void testGetAnalysisResultsSuccessSinglePairedEnd() when(galaxyHistoriesService.getDatasetForFileInHistory(output1Filename, HISTORY_ID)).thenReturn(output1Dataset); when(galaxyHistoriesService.getDatasetForFileInHistory(output2Filename, HISTORY_ID)).thenReturn(output2Dataset); - when(sequencingObjectService.getUniqueSamplesForSequencingObjects(joinedFiles)) - .thenReturn(joinedMap); + when(sequencingObjectService.getUniqueSamplesForSequencingObjects(joinedFiles)).thenReturn(joinedMap); Analysis analysis = workflowPreparation.getAnalysisResults(submission); @@ -848,9 +884,8 @@ public void 
testGetAnalysisResultsSuccessSinglePairedEnd() } /** - * Tests successfully getting analysis results from Galaxy where there's - * multiple samples but workflow should have only accepted single sample (no - * label on name). + * Tests successfully getting analysis results from Galaxy where there's multiple samples but workflow should have + * only accepted single sample (no label on name). * * @throws IridaWorkflowNotFoundException * @throws IOException @@ -858,14 +893,16 @@ public void testGetAnalysisResultsSuccessSinglePairedEnd() * @throws IridaWorkflowAnalysisTypeException */ @Test - public void testGetAnalysisResultsSuccessMultiSample() - throws IridaWorkflowNotFoundException, IridaWorkflowAnalysisTypeException, ExecutionManagerException, - IOException { + public void testGetAnalysisResultsSuccessMultiSample() throws IridaWorkflowNotFoundException, + IridaWorkflowAnalysisTypeException, ExecutionManagerException, IOException { Set singleFiles = Sets.newHashSet(sampleSingleSequenceFileMap.values()); Set pairedFiles = Sets.newHashSet(sampleSequenceFilePairMap.values()); - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(pairedInputFiles) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(pairedInputFiles) + .referenceFile(referenceFile) + .build(); submission.setRemoteWorkflowId(WORKFLOW_ID); submission.setRemoteAnalysisId(HISTORY_ID); @@ -901,9 +938,8 @@ public void testGetAnalysisResultsSuccessMultiSample() } /** - * Tests successfully getting analysis results from Galaxy where there's no - * sample associated with the sequence files (no label is prefixed to output - * file name). + * Tests successfully getting analysis results from Galaxy where there's no sample associated with the sequence + * files (no label is prefixed to output file name). 
* * @throws IridaWorkflowNotFoundException * @throws IOException @@ -911,11 +947,13 @@ public void testGetAnalysisResultsSuccessMultiSample() * @throws IridaWorkflowAnalysisTypeException */ @Test - public void testGetAnalysisResultsSuccessNoSample() - throws IridaWorkflowNotFoundException, IridaWorkflowAnalysisTypeException, ExecutionManagerException, - IOException { - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(pairedInputFiles) - .referenceFile(referenceFile).build(); + public void testGetAnalysisResultsSuccessNoSample() throws IridaWorkflowNotFoundException, + IridaWorkflowAnalysisTypeException, ExecutionManagerException, IOException { + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(pairedInputFiles) + .referenceFile(referenceFile) + .build(); submission.setRemoteWorkflowId(WORKFLOW_ID); submission.setRemoteAnalysisId(HISTORY_ID); @@ -941,8 +979,7 @@ public void testGetAnalysisResultsSuccessNoSample() } /** - * Tests failure to get analysis results from Galaxy due to failure to get a - * dataset + * Tests failure to get analysis results from Galaxy due to failure to get a dataset * * @throws IridaWorkflowNotFoundException * @throws IOException @@ -952,8 +989,11 @@ public void testGetAnalysisResultsSuccessNoSample() @Test public void testGetAnalysisResultsFail() throws IridaWorkflowNotFoundException, IridaWorkflowAnalysisTypeException, ExecutionManagerException, IOException { - submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(singleInputFiles) - .referenceFile(referenceFile).build(); + submission = AnalysisSubmission.builder(workflowId) + .name("my analysis") + .inputFiles(singleInputFiles) + .referenceFile(referenceFile) + .build(); submission.setRemoteWorkflowId(WORKFLOW_ID); submission.setRemoteAnalysisId(HISTORY_ID); diff --git 
a/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/analysis/submission/AnalysisSubmissionServiceImplTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/analysis/submission/AnalysisSubmissionServiceImplTest.java index 1d024e8966f..604c9a609ff 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/analysis/submission/AnalysisSubmissionServiceImplTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/analysis/submission/AnalysisSubmissionServiceImplTest.java @@ -16,6 +16,7 @@ import ca.corefacility.bioinformatics.irida.repositories.analysis.submission.ProjectAnalysisSubmissionJoinRepository; import ca.corefacility.bioinformatics.irida.repositories.referencefile.ReferenceFileRepository; import ca.corefacility.bioinformatics.irida.repositories.user.UserRepository; +import ca.corefacility.bioinformatics.irida.service.GenomeAssemblyService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.analysis.execution.galaxy.AnalysisExecutionServiceGalaxyCleanupAsync; import ca.corefacility.bioinformatics.irida.service.impl.analysis.submission.AnalysisSubmissionServiceImpl; @@ -52,10 +53,13 @@ public class AnalysisSubmissionServiceImplTest { private ReferenceFileRepository referenceFileRepository; @Mock private SequencingObjectService sequencingObjectService; - + + @Mock + private GenomeAssemblyService genomeAssemblyService; + @Mock private ProjectAnalysisSubmissionJoinRepository pasRepository; - + @Mock private Validator validator; @Mock @@ -86,7 +90,8 @@ public void setup() { analysisSubmissionServiceImpl = new AnalysisSubmissionServiceImpl(analysisSubmissionRepository, analysisTemplateRepository, userRepository, referenceFileRepository, sequencingObjectService, - galaxyHistoriesService, pasRepository, jobErrorRepository, iridaWorkflowsService, validator); + genomeAssemblyService, 
galaxyHistoriesService, pasRepository, jobErrorRepository, iridaWorkflowsService, + validator); analysisSubmissionServiceImpl.setAnalysisExecutionService(analysisExecutionService); when(analysisSubmissionRepository.findById(ID)).thenReturn(Optional.of(analysisSubmission)); @@ -153,15 +158,14 @@ public void testGetPercentageCompleteStateSubmitting() throws EntityNotFoundExce } /** - * Tests getting the percent complete in the running state when the workflow - * has just started in Galaxy. + * Tests getting the percent complete in the running state when the workflow has just started in Galaxy. * * @throws EntityNotFoundException * @throws ExecutionManagerException */ @Test - public void testGetPercentageCompleteStateRunningJustStarted() throws EntityNotFoundException, - ExecutionManagerException { + public void testGetPercentageCompleteStateRunningJustStarted() + throws EntityNotFoundException, ExecutionManagerException { when(analysisSubmission.getAnalysisState()).thenReturn(AnalysisState.RUNNING); when(galaxyHistoriesService.getStatusForHistory(HISTORY_ID)).thenReturn(galaxyWorkflowStatus); when(galaxyWorkflowStatus.getProportionComplete()).thenReturn(0.0f); @@ -172,15 +176,14 @@ public void testGetPercentageCompleteStateRunningJustStarted() throws EntityNotF } /** - * Tests getting the percent complete in the running state when the workflow - * is halfway complete in Galaxy. + * Tests getting the percent complete in the running state when the workflow is halfway complete in Galaxy. 
* * @throws EntityNotFoundException * @throws ExecutionManagerException */ @Test - public void testGetPercentageCompleteStateRunningHalfway() throws EntityNotFoundException, - ExecutionManagerException { + public void testGetPercentageCompleteStateRunningHalfway() + throws EntityNotFoundException, ExecutionManagerException { when(analysisSubmission.getAnalysisState()).thenReturn(AnalysisState.RUNNING); when(galaxyHistoriesService.getStatusForHistory(HISTORY_ID)).thenReturn(galaxyWorkflowStatus); when(galaxyWorkflowStatus.getProportionComplete()).thenReturn(0.5f); @@ -194,21 +197,19 @@ public void testGetPercentageCompleteStateRunningHalfway() throws EntityNotFound } /** - * Tests getting the percent complete in the running state when the workflow - * is 100% complete in Galaxy. + * Tests getting the percent complete in the running state when the workflow is 100% complete in Galaxy. * * @throws EntityNotFoundException * @throws ExecutionManagerException */ @Test - public void testGetPercentageCompleteStateRunningFullyComplete() throws EntityNotFoundException, - ExecutionManagerException { + public void testGetPercentageCompleteStateRunningFullyComplete() + throws EntityNotFoundException, ExecutionManagerException { when(analysisSubmission.getAnalysisState()).thenReturn(AnalysisState.RUNNING); when(galaxyHistoriesService.getStatusForHistory(HISTORY_ID)).thenReturn(galaxyWorkflowStatus); when(galaxyWorkflowStatus.getProportionComplete()).thenReturn(1.0f); - assertEquals(90.0f, - analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, + assertEquals(90.0f, analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, "invalid percent complete"); } @@ -219,12 +220,11 @@ public void testGetPercentageCompleteStateRunningFullyComplete() throws EntityNo * @throws ExecutionManagerException */ @Test - public void testGetPercentageCompleteStateFinishedRunning() throws EntityNotFoundException, - ExecutionManagerException { + public 
void testGetPercentageCompleteStateFinishedRunning() + throws EntityNotFoundException, ExecutionManagerException { when(analysisSubmission.getAnalysisState()).thenReturn(AnalysisState.FINISHED_RUNNING); - assertEquals(90.0f, - analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, + assertEquals(90.0f, analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, "invalid percent complete"); } @@ -238,8 +238,7 @@ public void testGetPercentageCompleteStateFinishedRunning() throws EntityNotFoun public void testGetPercentageCompleteStateCompleting() throws EntityNotFoundException, ExecutionManagerException { when(analysisSubmission.getAnalysisState()).thenReturn(AnalysisState.COMPLETING); - assertEquals(92.0f, - analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, + assertEquals(92.0f, analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, "invalid percent complete"); } @@ -253,8 +252,7 @@ public void testGetPercentageCompleteStateCompleting() throws EntityNotFoundExce public void testGetPercentageCompleteStateCompleted() throws EntityNotFoundException, ExecutionManagerException { when(analysisSubmission.getAnalysisState()).thenReturn(AnalysisState.COMPLETED); - assertEquals(100.0f, - analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, + assertEquals(100.0f, analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID), DELTA, "invalid percent complete"); } @@ -271,10 +269,9 @@ public void testGetPercentageCompleteError() throws EntityNotFoundException, Exe analysisSubmissionServiceImpl.getPercentCompleteForAnalysisSubmission(ID); }); } - + /** - * Tests that deleting a submission actually also tries to clean up the - * submission in galaxy. + * Tests that deleting a submission actually also tries to clean up the submission in galaxy. 
* * @throws ExecutionManagerException */ @@ -288,8 +285,7 @@ public void testDeleteSubmission() throws ExecutionManagerException { } /** - * Tests that deleting a submission deletes the submission even if an - * execution manager exception is thrown. + * Tests that deleting a submission deletes the submission even if an execution manager exception is thrown. * * @throws ExecutionManagerException */ diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/sample/SampleServiceImplTest.java b/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/sample/SampleServiceImplTest.java index dcd2408f158..22a9f31c9a7 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/sample/SampleServiceImplTest.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/service/impl/unit/sample/SampleServiceImplTest.java @@ -25,6 +25,7 @@ import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisFastQC; import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; import ca.corefacility.bioinformatics.irida.repositories.analysis.AnalysisRepository; +import ca.corefacility.bioinformatics.irida.repositories.assembly.GenomeAssemblyRepository; import ca.corefacility.bioinformatics.irida.repositories.joins.project.ProjectSampleJoinRepository; import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleGenomeAssemblyJoinRepository; import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleSequencingObjectJoinRepository; @@ -55,6 +56,7 @@ public class SampleServiceImplTest { private QCEntryRepository qcEntryRepository; private SequencingObjectRepository sequencingObjectRepository; private SampleGenomeAssemblyJoinRepository sampleGenomeAssemblyJoinRepository; + private GenomeAssemblyRepository genomeAssemblyRepository; private UserRepository userRepository; private MetadataEntryRepository metadataEntryRepository; @@ -72,11 +74,12 @@ public void 
setUp() { qcEntryRepository = mock(QCEntryRepository.class); sequencingObjectRepository = mock(SequencingObjectRepository.class); sampleGenomeAssemblyJoinRepository = mock(SampleGenomeAssemblyJoinRepository.class); + genomeAssemblyRepository = mock(GenomeAssemblyRepository.class); metadataEntryRepository = mock(MetadataEntryRepository.class); sampleService = new SampleServiceImpl(sampleRepository, psjRepository, analysisRepository, ssoRepository, - qcEntryRepository, sequencingObjectRepository, sampleGenomeAssemblyJoinRepository, userRepository, - metadataEntryRepository, null); + qcEntryRepository, sequencingObjectRepository, sampleGenomeAssemblyJoinRepository, + genomeAssemblyRepository, userRepository, metadataEntryRepository, null); } @Test diff --git a/src/test/java/ca/corefacility/bioinformatics/irida/service/workflow/integration/IridaWorkflowLoaderServiceIT.java b/src/test/java/ca/corefacility/bioinformatics/irida/service/workflow/integration/IridaWorkflowLoaderServiceIT.java index 0eb8aa067b3..79d256b8421 100644 --- a/src/test/java/ca/corefacility/bioinformatics/irida/service/workflow/integration/IridaWorkflowLoaderServiceIT.java +++ b/src/test/java/ca/corefacility/bioinformatics/irida/service/workflow/integration/IridaWorkflowLoaderServiceIT.java @@ -1,7 +1,5 @@ package ca.corefacility.bioinformatics.irida.service.workflow.integration; -import static org.junit.jupiter.api.Assertions.*; - import java.io.FileNotFoundException; import java.io.IOException; import java.net.MalformedURLException; @@ -9,11 +7,7 @@ import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import java.util.UUID; +import java.util.*; import javax.xml.bind.JAXBException; @@ -26,18 +20,15 @@ import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.TestAnalysis; import 
ca.corefacility.bioinformatics.irida.model.workflow.analysis.type.BuiltInAnalysisTypes; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaToolParameter; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowInput; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowOutput; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter; -import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowToolRepository; +import ca.corefacility.bioinformatics.irida.model.workflow.description.*; import ca.corefacility.bioinformatics.irida.model.workflow.structure.IridaWorkflowStructure; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowLoaderService; import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import static org.junit.jupiter.api.Assertions.*; + /** * Tests loading up workflows. 
*/ @@ -86,14 +77,18 @@ public void setup() throws JAXBException, URISyntaxException, FileNotFoundExcept .get(TestAnalysis.class.getResource("workflows/TestAnalysis/1.0-paired/irida_workflow.xml").toURI()); workflowSinglePairedXmlPath = Paths.get( TestAnalysis.class.getResource("workflows/TestAnalysis/1.0-single-paired/irida_workflow.xml").toURI()); - workflowRequiresSingleSampleXmlPath = Paths.get(TestAnalysis.class - .getResource("workflows/TestAnalysis/1.0-requires-single-sample/irida_workflow.xml").toURI()); + workflowRequiresSingleSampleXmlPath = Paths.get( + TestAnalysis.class.getResource("workflows/TestAnalysis/1.0-requires-single-sample/irida_workflow.xml") + .toURI()); workflowRequiresSingleSampleUnsetXmlPath = Paths.get(TestAnalysis.class - .getResource("workflows/TestAnalysis/1.0-requires-single-sample-unset/irida_workflow.xml").toURI()); + .getResource("workflows/TestAnalysis/1.0-requires-single-sample-unset/irida_workflow.xml") + .toURI()); workflowNotRequiresSingleSampleXmlPath = Paths.get(TestAnalysis.class - .getResource("workflows/TestAnalysis/1.0-not-requires-single-sample/irida_workflow.xml").toURI()); + .getResource("workflows/TestAnalysis/1.0-not-requires-single-sample/irida_workflow.xml") + .toURI()); workflowInvalidRequiresSingleSampleXmlPath = Paths.get(TestAnalysis.class - .getResource("workflows/TestAnalysis/1.0-invalid-requires-single-sample/irida_workflow.xml").toURI()); + .getResource("workflows/TestAnalysis/1.0-invalid-requires-single-sample/irida_workflow.xml") + .toURI()); workflowStructurePath = Paths .get(TestAnalysis.class.getResource("workflows/TestAnalysis/1.0/irida_workflow_structure.ga").toURI()); workflowDirectoryPath = Paths.get(TestAnalysis.class.getResource("workflows/TestAnalysis").toURI()); @@ -112,12 +107,15 @@ public void setup() throws JAXBException, URISyntaxException, FileNotFoundExcept TestAnalysis.class.getResource("workflows/TestAnalysisWithParametersNoDefaultNotRequired/1.0").toURI()); 
workflowDirectoryPathWithParametersNoDefaultIsRequired = Paths.get( TestAnalysis.class.getResource("workflows/TestAnalysisWithParametersNoDefaultIsRequired/1.0").toURI()); - workflowDirectoryPathWithParametersWithDefaultIsRequired = Paths.get(TestAnalysis.class - .getResource("workflows/TestAnalysisWithParametersWithDefaultIsRequired/1.0").toURI()); - workflowDirectoryPathWithParametersWithDynamicSourceNotRequired = Paths.get(TestAnalysis.class - .getResource("workflows/TestAnalysisWithParametersWithDynamicSourceNotRequired/1.0").toURI()); - workflowDirectoryPathWithParametersMultipleDynamicSources = Paths.get(TestAnalysis.class - .getResource("workflows/TestAnalysisWithParametersMultipleDynamicSources/1.0").toURI()); + workflowDirectoryPathWithParametersWithDefaultIsRequired = Paths + .get(TestAnalysis.class.getResource("workflows/TestAnalysisWithParametersWithDefaultIsRequired/1.0") + .toURI()); + workflowDirectoryPathWithParametersWithDynamicSourceNotRequired = Paths.get( + TestAnalysis.class.getResource("workflows/TestAnalysisWithParametersWithDynamicSourceNotRequired/1.0") + .toURI()); + workflowDirectoryPathWithParametersMultipleDynamicSources = Paths + .get(TestAnalysis.class.getResource("workflows/TestAnalysisWithParametersMultipleDynamicSources/1.0") + .toURI()); workflowDirectoryPathNoId = Paths.get(TestAnalysis.class.getResource("workflows/TestAnalysisNoId").toURI()); } @@ -130,42 +128,42 @@ private IridaWorkflowStructure buildTestStructure() { } private IridaWorkflowDescription buildTestDescriptionSingle() throws MalformedURLException { - return buildTestDescription(DEFAULT_SINGLE_ID, "TestWorkflow", "1.0", "sequence_reads", null, false); + return buildTestDescription(DEFAULT_SINGLE_ID, "TestWorkflow", "1.0", "sequence_reads", null, null, false); } private IridaWorkflowDescription buildTestDescriptionPaired() throws MalformedURLException { return buildTestDescription(DEFAULT_PAIRED_ID, "TestWorkflow", "1.0-paired", null, "sequence_reads_paired", - 
false); + null, false); } private IridaWorkflowDescription buildTestDescriptionSinglePaired() throws MalformedURLException { return buildTestDescription(DEFAULT_SINGLE_PAIRED_ID, "TestWorkflow", "1.0-single-paired", - "sequence_reads_single", "sequence_reads_paired", false); + "sequence_reads_single", "sequence_reads_paired", null, false); } private IridaWorkflowDescription buildTestDescriptionRequiresSingleSample() throws MalformedURLException { return buildTestDescription(DEFAULT_SINGLE_SAMPLE_ID, "TestWorkflow", "1.0-requires-single-sample", - "sequence_reads_single", "sequence_reads_paired", true); + "sequence_reads_single", "sequence_reads_paired", null, true); } private IridaWorkflowDescription buildTestDescriptionRequiresSingleSampleUnset() throws MalformedURLException { return buildTestDescription(DEFAULT_SINGLE_SAMPLE_UNSET_ID, "TestWorkflow", "1.0-requires-single-sample-unset", - "sequence_reads_single", "sequence_reads_paired", false); + "sequence_reads_single", "sequence_reads_paired", null, false); } private IridaWorkflowDescription buildTestDescriptionNotRequiresSingleSample() throws MalformedURLException { return buildTestDescription(DEFAULT_NOT_SINGLE_SAMPLE_ID, "TestWorkflow", "1.0-not-requires-single-sample", - "sequence_reads_single", "sequence_reads_paired", false); + "sequence_reads_single", "sequence_reads_paired", null, false); } private IridaWorkflowDescription buildTestDescriptionRequiresSingleSampleInvalid() throws MalformedURLException { return buildTestDescription(DEFAULT_INVALID_SINGLE_SAMPLE_ID, "TestWorkflow", - "1.0-invalid-requires-single-sample", "sequence_reads_single", "sequence_reads_paired", false); + "1.0-invalid-requires-single-sample", "sequence_reads_single", "sequence_reads_paired", null, false); } private IridaWorkflowDescription buildTestDescription(UUID id, String name, String version, - String sequenceReadsSingle, String sequenceReadsPaired, boolean requiresSingleSample) - throws MalformedURLException { + String 
sequenceReadsSingle, String sequenceReadsPaired, String genomeAssemblies, + boolean requiresSingleSample) throws MalformedURLException { List outputs = new LinkedList<>(); outputs.add(new IridaWorkflowOutput("output1", "output1.txt")); outputs.add(new IridaWorkflowOutput("output2", "output2.txt")); @@ -184,9 +182,9 @@ private IridaWorkflowDescription buildTestDescription(UUID id, String name, Stri Lists.newArrayList(tool1, tool2)); parameters.add(parameter1); - IridaWorkflowDescription iridaWorkflow = new IridaWorkflowDescription(id, name, version, - BuiltInAnalysisTypes.DEFAULT, - new IridaWorkflowInput(sequenceReadsSingle, sequenceReadsPaired, "reference", requiresSingleSample), + IridaWorkflowDescription iridaWorkflow = new IridaWorkflowDescription( + id, name, version, BuiltInAnalysisTypes.DEFAULT, new IridaWorkflowInput(sequenceReadsSingle, + sequenceReadsPaired, genomeAssemblies, "reference", requiresSingleSample), outputs, tools, parameters); return iridaWorkflow; @@ -223,8 +221,7 @@ public void testLoadWorkflowDescriptionPaired() throws IOException, IridaWorkflo } /** - * Tests loading up the workflow description file (single and paired end - * data). + * Tests loading up the workflow description file (single and paired end data). * * @throws IOException * @throws IridaWorkflowLoadException @@ -239,8 +236,7 @@ public void testLoadWorkflowDescriptionSinglePaired() throws IOException, IridaW } /** - * Tests loading up the workflow description file that does not require a - * single sample. + * Tests loading up the workflow description file that does not require a single sample. * * @throws IOException * @throws IridaWorkflowLoadException @@ -255,8 +251,7 @@ public void testLoadWorkflowDescriptionNotRequiresSingleSample() throws IOExcept } /** - * Tests loading up the workflow description file that requires a single - * sample. + * Tests loading up the workflow description file that requires a single sample. 
* * @throws IOException * @throws IridaWorkflowLoadException @@ -271,8 +266,7 @@ public void testLoadWorkflowDescriptionRequiresSingleSample() throws IOException } /** - * Tests loading up the workflow description file that has no requires - * single sample parameter set. + * Tests loading up the workflow description file that has no requires single sample parameter set. * * @throws IOException * @throws IridaWorkflowLoadException @@ -287,8 +281,8 @@ public void testLoadWorkflowDescriptionRequiresSingleSampleUnset() throws IOExce } /** - * Tests loading up the workflow description file with an invalid string for - * requires single sample and setting to default. + * Tests loading up the workflow description file with an invalid string for requires single sample and setting to + * default. * * @throws IOException * @throws IridaWorkflowLoadException @@ -344,8 +338,7 @@ public void testLoadWorkflowFromDirectory() throws IOException, IridaWorkflowLoa } /** - * Tests successfully loading up all implementations of a workflow from a - * directory. + * Tests successfully loading up all implementations of a workflow from a directory. */ @Test public void testLoadAllWorkflowImplementationsSuccess() throws IOException, IridaWorkflowLoadException { @@ -407,8 +400,7 @@ public void testLoadWorkflowWithParameters() throws IridaWorkflowLoadException, } /** - * Test to make sure we fail to load a workflow with no default value and - * without a required="true" attribute. + * Test to make sure we fail to load a workflow with no default value and without a required="true" attribute. * * @throws IridaWorkflowLoadException * @throws IOException @@ -423,8 +415,7 @@ public void testLoadWorkflowWithParametersNoDefaultValueNotRequiredFail() } /** - * Test to make sure we fail to load a workflow with no default value and - * without a required="true" attribute. + * Test to make sure we fail to load a workflow with no default value and without a required="true" attribute. 
* * @throws IridaWorkflowLoadException * @throws IOException @@ -440,8 +431,7 @@ public void testLoadWorkflowWithParametersNoDefaultValueIsRequiredSuccess() } /** - * Test to make sure we fail to load a workflow with a default value and a - * required="true" attribute. + * Test to make sure we fail to load a workflow with a default value and a required="true" attribute. * * @throws IridaWorkflowLoadException * @throws IOException @@ -456,8 +446,8 @@ public void testLoadWorkflowWithParametersWithDefaultValueIsRequiredFail() } /** - * Test to make sure we fail to load a workflow with a child - * element and without a required="true" attribute. + * Test to make sure we fail to load a workflow with a child element and without a required="true" + * attribute. * * @throws IridaWorkflowLoadException * @throws IOException @@ -472,8 +462,7 @@ public void testLoadWorkflowWithParametersWithDynamicSourceNotRequiredFail() } /** - * Test to make sure we fail to load a workflow with multiple - * child elements. + * Test to make sure we fail to load a workflow with multiple child elements. * * @throws IridaWorkflowLoadException * @throws IOException @@ -488,8 +477,7 @@ public void testLoadWorkflowWithParametersMultipleDynamicSourcesFail() } /** - * Tests failure to load up all implementations of a workflow from a - * directory. + * Tests failure to load up all implementations of a workflow from a directory. */ @Test public void testLoadAllWorkflowImplementationsFail() throws IOException, IridaWorkflowLoadException { @@ -499,8 +487,7 @@ public void testLoadAllWorkflowImplementationsFail() throws IOException, IridaWo } /** - * Tests failing to load up a workflow from a directory (no definition - * file). + * Tests failing to load up a workflow from a directory (no definition file). * * @throws IridaWorkflowLoadException */