Commit dd84c6e

Merge branch 'master' of https://github.com/genepi/imputationserver into release

abought committed Apr 6, 2023 · 2 parents 809af96 + 780f92e
Showing 9 changed files with 11 additions and 317 deletions.
files/imputationserver-beagle.yaml (2 changes: 1 addition & 1 deletion)

@@ -1,7 +1,7 @@
 id: imputationserver-beagle
 name: Genotype Imputation supporting Beagle (Minimac4)
 description: This is the new Michigan Imputation Server Pipeline using <a href="https://github.com/statgen/Minimac4">Minimac4</a>. Documentation can be found <a href="http://imputationserver.readthedocs.io/en/latest/">here</a>.<br><br>If your input data is <b>GRCh37/hg19</b> please ensure chromosomes are encoded without prefix (e.g. <b>20</b>).<br>If your input data is <b>GRCh38/hg38</b> please ensure chromosomes are encoded with prefix 'chr' (e.g. <b>chr20</b>).
-version: 1.7.1
+version: 1.7.2
 website: https://imputationserver.readthedocs.io
 category:
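The two build-specific naming rules in this description are easy to get wrong, so here is a minimal illustration of the convention it states. This sketch is not from the repository; the class and method names are invented for the example:

    // Sketch of the contig-naming rule from the description above:
    // GRCh37/hg19 input uses bare names ("20"), GRCh38/hg38 input uses
    // "chr"-prefixed names ("chr20"). Class and method names are illustrative.
    public class ContigNamingCheck {

        public static boolean matchesBuild(String contig, String build) {
            boolean hasPrefix = contig.startsWith("chr");
            // hg38 contigs must carry the prefix; hg19 contigs must not.
            return build.equals("GRCh38") ? hasPrefix : !hasPrefix;
        }

        public static void main(String[] args) {
            System.out.println(matchesBuild("chr20", "GRCh38")); // true
            System.out.println(matchesBuild("20", "GRCh37"));    // true
            System.out.println(matchesBuild("20", "GRCh38"));    // false
        }
    }

The same description, and the same rule, appears in the three application files below.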
files/imputationserver-hla.yaml (2 changes: 1 addition & 1 deletion)

@@ -1,7 +1,7 @@
 id: imputationserver-hla
 name: Genotype Imputation HLA (Minimac4)
 description: This is the new Michigan Imputation Server Pipeline using <a href="https://github.com/statgen/Minimac4">Minimac4</a>. Documentation can be found <a href="http://imputationserver.readthedocs.io/en/latest/">here</a>.<br><br>If your input data is <b>GRCh37/hg19</b> please ensure chromosomes are encoded without prefix (e.g. <b>20</b>).<br>If your input data is <b>GRCh38/hg38</b> please ensure chromosomes are encoded with prefix 'chr' (e.g. <b>chr20</b>).
-version: 1.7.1
+version: 1.7.2
 website: https://imputationserver.readthedocs.io
 category:
files/imputationserver-pgs.yaml (2 changes: 1 addition & 1 deletion)

@@ -1,7 +1,7 @@
 id: imputationserver-pgs
 name: Genotype Imputation (PGS Calc Integration)
 description: This is the new Michigan Imputation Server Pipeline using <a href="https://github.com/statgen/Minimac4">Minimac4</a>. Documentation can be found <a href="http://imputationserver.readthedocs.io/en/latest/">here</a>.<br><br>If your input data is <b>GRCh37/hg19</b> please ensure chromosomes are encoded without prefix (e.g. <b>20</b>).<br>If your input data is <b>GRCh38/hg38</b> please ensure chromosomes are encoded with prefix 'chr' (e.g. <b>chr20</b>).
-version: 1.7.1
+version: 1.7.2
 website: https://imputationserver.readthedocs.io
 category:
files/minimac4.yaml (2 changes: 1 addition & 1 deletion)

@@ -1,7 +1,7 @@
 id: imputationserver
 name: Genotype Imputation (Minimac4)
 description: This is the new Michigan Imputation Server Pipeline using <a href="https://github.com/statgen/Minimac4">Minimac4</a>. Documentation can be found <a href="http://imputationserver.readthedocs.io/en/latest/">here</a>.<br><br>If your input data is <b>GRCh37/hg19</b> please ensure chromosomes are encoded without prefix (e.g. <b>20</b>).<br>If your input data is <b>GRCh38/hg38</b> please ensure chromosomes are encoded with prefix 'chr' (e.g. <b>chr20</b>).
-version: 1.7.1
+version: 1.7.2
 website: https://imputationserver.readthedocs.io
 category:
pom.xml (11 changes: 2 additions & 9 deletions)

@@ -6,7 +6,7 @@
   <groupId>genepi</groupId>
   <artifactId>imputationserver</artifactId>
 
-  <version>1.7.1</version>
+  <version>1.7.2</version>
 
   <packaging>jar</packaging>
 
@@ -284,13 +284,6 @@
       </exclusions>
     </dependency>
 
-    <dependency>
-      <groupId>org.apache.sshd</groupId>
-      <artifactId>sshd-sftp</artifactId>
-      <version>0.9.0</version>
-      <scope>test</scope>
-    </dependency>
-
     <dependency>
       <groupId>org.codehaus.groovy</groupId>
      <artifactId>groovy</artifactId>
@@ -300,7 +293,7 @@
     <dependency>
       <groupId>org.codehaus.groovy</groupId>
       <artifactId>groovy-templates</artifactId>
-      <version>3.0.9</version>
+      <version>3.0.17</version>
     </dependency>
 
     <dependency>
src/main/java/genepi/imputationserver/steps/InputValidation.java (67 changes: 3 additions & 64 deletions)

@@ -6,7 +6,6 @@
 
 import cloudgene.sdk.internal.WorkflowContext;
 import cloudgene.sdk.internal.WorkflowStep;
-import genepi.hadoop.importer.IImporter;
 import genepi.hadoop.importer.ImporterFactory;
 import genepi.imputationserver.steps.imputation.ImputationPipeline;
 import genepi.imputationserver.steps.vcf.VcfFile;
@@ -349,70 +348,10 @@ private boolean importVcfFiles(WorkflowContext context) {
 
     if (ImporterFactory.needsImport(context.get(input))) {
 
-      context.beginTask("Importing files...");
-
-      String[] urlList = context.get(input).split(";")[0].split("\\s+");
-
-      String username = "";
-      if (context.get(input).split(";").length > 1) {
-        username = context.get(input).split(";")[1];
-      }
-
-      String password = "";
-      if (context.get(input).split(";").length > 2) {
-        password = context.get(input).split(";")[2];
-      }
-
-      for (String url2 : urlList) {
-
-        String url = url2 + ";" + username + ";" + password;
-        String target = FileUtil.path(context.getLocalTemp(), "importer", input);
-        FileUtil.createDirectory(target);
-        context.println("Import to local workspace " + target + "...");
-
-        try {
-
-          context.updateTask("Import " + url2 + "...", WorkflowContext.RUNNING);
-          context.log("Import " + url2 + "...");
-          IImporter importer = ImporterFactory.createImporter(url, target);
-
-          if (importer != null) {
-
-            boolean successful = importer.importFiles("vcf.gz");
-
-            if (successful) {
-
-              context.setInput(input, target);
-
-            } else {
-
-              context.updateTask("Import " + url2 + " failed: " + importer.getErrorMessage(),
-                  WorkflowContext.ERROR);
-
-              return false;
-
-            }
-
-          } else {
-
-            context.updateTask("Import " + url2 + " failed: Protocol not supported",
-                WorkflowContext.ERROR);
-
-            return false;
-
-          }
-
-        } catch (Exception e) {
-          context.updateTask("Import File(s) " + url2 + " failed: " + e.toString(),
-              WorkflowContext.ERROR);
-
-          return false;
-        }
-
-      }
-
-      context.updateTask("File Import successful. ", WorkflowContext.OK);
+      context.log("URL-based uploads are no longer supported. Please use direct file uploads instead.");
+      context.error("URL-based uploads are no longer supported. Please use direct file uploads instead.");
 
       return false;
     }
 
   }
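For reference, the importer path removed above expected its input as a semicolon-separated url;username;password string, where the URL field could itself hold several whitespace-separated URLs. The sketch below restates that retired convention; the class and record names are hypothetical, and only the splitting logic mirrors the deleted lines:

    // Illustrative only: how the retired "url;username;password" input
    // format decomposed. "ImportInput" and "Credentials" are invented names.
    public class ImportInput {

        public record Credentials(String[] urls, String username, String password) {}

        public static Credentials parse(String rawInput) {
            String[] parts = rawInput.split(";");
            // The first field may list several whitespace-separated URLs.
            String[] urls = parts[0].split("\\s+");
            String username = parts.length > 1 ? parts[1] : "";
            String password = parts.length > 2 ? parts[2] : "";
            return new Credentials(urls, username, password);
        }
    }

After this commit none of that parsing runs: any input that ImporterFactory.needsImport flags is rejected immediately with the error shown above.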
src/main/java/genepi/imputationserver/steps/imputation/ImputationPipeline.java (2 changes: 1 addition & 1 deletion)

@@ -24,7 +24,7 @@
 
 public class ImputationPipeline {
 
-  public static final String PIPELINE_VERSION = "michigan-imputationserver-1.7.1";
+  public static final String PIPELINE_VERSION = "michigan-imputationserver-1.7.2";
 
   public static final String IMPUTATION_VERSION = "minimac4-1.0.2";
src/test/java/genepi/imputationserver/steps/ImputationTest.java (175 changes: 1 addition & 174 deletions)

@@ -274,179 +274,6 @@ public void testPipelineWidthInvalidFileFormat() throws IOException, ZipException
 
   }
 
-  @Test
-  public void testPipelineWithHttpUrl() throws IOException, ZipException {
-
-    String configFolder = "test-data/configs/hapmap-chr1";
-    String inputFolder = "https://imputationserver.sph.umich.edu/static/downloads/hapmap300.chr1.recode.vcf.gz";
-
-    // create workflow context
-    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
-
-    // create step instance
-    InputValidation inputValidation = new InputValidationMock(configFolder);
-
-    // run and test
-    boolean result = run(context, inputValidation);
-
-    // check if step is failed
-    assertEquals(true, result);
-
-    // run qc to create chunkfile
-    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
-    result = run(context, qcStats);
-
-    // add panel to hdfs
-    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
-    // importMinimacMap("test-data/B38_MAP_FILE.map");
-    importBinaries("files/bin");
-
-    // run imputation
-    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
-    result = run(context, imputation);
-    assertTrue(result);
-
-    // run export
-    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
-    result = run(context, export);
-    assertTrue(result);
-
-    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_1.zip", PASSWORD.toCharArray());
-    zipFile.extractAll("test-data/tmp");
-
-    VcfFile file = VcfFileUtil.load("test-data/tmp/chr1.dose.vcf.gz", 100000000, false);
-
-    assertEquals("1", file.getChromosome());
-    assertEquals(60, file.getNoSamples());
-    assertEquals(true, file.isPhased());
-
-    FileUtil.deleteDirectory("test-data/tmp");
-
-  }
-
-  /*
-   * @Test public void testPipelineWithS3() throws IOException, ZipException {
-   *
-   * String configFolder = "test-data/configs/hapmap-chr1";
-   * String inputFolder = "s3://imputationserver-aws-testdata/test-s3/hapmap300.chr1.recode.vcf.gz";
-   *
-   * // create workflow context
-   * WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
-   *
-   * // create step instance
-   * InputValidation inputValidation = new InputValidationMock(configFolder);
-   *
-   * // run and test
-   * boolean result = run(context, inputValidation);
-   *
-   * // check if step is failed
-   * assertEquals(true, result);
-   *
-   * // run qc to create chunkfile
-   * QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
-   * result = run(context, qcStats);
-   *
-   * // add panel to hdfs
-   * importRefPanel(FileUtil.path(configFolder, "ref-panels"));
-   * // importMinimacMap("test-data/B38_MAP_FILE.map");
-   * importBinaries("files/bin");
-   *
-   * // run imputation
-   * ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
-   * result = run(context, imputation);
-   * assertTrue(result);
-   *
-   * // run export
-   * CompressionEncryptionMock export = new CompressionEncryptionMock("files");
-   * result = run(context, export);
-   * assertTrue(result);
-   *
-   * ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_1.zip");
-   * if (zipFile.isEncrypted()) { zipFile.setPassword(PASSWORD); }
-   * zipFile.extractAll("test-data/tmp");
-   *
-   * VcfFile file = VcfFileUtil.load("test-data/tmp/chr1.dose.vcf.gz", 100000000, false);
-   *
-   * assertEquals("1", file.getChromosome());
-   * assertEquals(60, file.getNoSamples());
-   * assertEquals(true, file.isPhased());
-   *
-   * FileUtil.deleteDirectory("test-data/tmp");
-   *
-   * }
-   */
-
-  /*
-   * @Test public void testPipelineWithSFTP() throws IOException, ZipException, InterruptedException {
-   *
-   * TestSFTPServer server = new TestSFTPServer("test-data/data");
-   *
-   * String configFolder = "test-data/configs/hapmap-chr20";
-   * String inputFolder = "sftp://localhost:8001/" + new File("test-data/data/chr20-phased").getAbsolutePath()
-   *     + ";" + TestSFTPServer.USERNAME + ";" + TestSFTPServer.PASSWORD;
-   *
-   * // create workflow context
-   * WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
-   *
-   * // create step instance
-   * InputValidation inputValidation = new InputValidationMock(configFolder);
-   *
-   * // run and test
-   * boolean result = run(context, inputValidation);
-   *
-   * // check if step is failed
-   * assertEquals(true, result);
-   *
-   * // run qc to create chunkfile
-   * QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
-   * result = run(context, qcStats);
-   *
-   * // add panel to hdfs
-   * importRefPanel(FileUtil.path(configFolder, "ref-panels"));
-   * // importMinimacMap("test-data/B38_MAP_FILE.map");
-   * importBinaries("files/bin");
-   *
-   * // run imputation
-   * ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
-   * result = run(context, imputation);
-   * assertTrue(result);
-   *
-   * // run export
-   * CompressionEncryptionMock export = new CompressionEncryptionMock("files");
-   * result = run(context, export);
-   * assertTrue(result);
-   *
-   * ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip");
-   * if (zipFile.isEncrypted()) { zipFile.setPassword(PASSWORD); }
-   * zipFile.extractAll("test-data/tmp");
-   *
-   * VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.dose.vcf.gz", 100000000, false);
-   *
-   * assertEquals("20", file.getChromosome());
-   * assertEquals(51, file.getNoSamples());
-   * assertEquals(true, file.isPhased());
-   * assertEquals(TOTAL_REFPANEL_CHR20_B37 + ONLY_IN_INPUT, file.getNoSnps());
-   *
-   * FileUtil.deleteDirectory("test-data/tmp");
-   *
-   * server.stop();
-   *
-   * }
-   *
-   * @Test public void testPipelineWithWrongSFTPCredentials() throws IOException, ZipException, InterruptedException {
-   *
-   * TestSFTPServer server = new TestSFTPServer("test-data/data");
-   *
-   * String configFolder = "test-data/configs/hapmap-chr20";
-   * String inputFolder = "sftp://localhost:8001/" + new File("data/chr20-phased").getAbsolutePath()
-   *     + ";" + "WRONG_USERNAME" + ";" + TestSFTPServer.PASSWORD;
-   *
-   * // create workflow context
-   * WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
-   *
-   * // create step instance
-   * InputValidation inputValidation = new InputValidationMock(configFolder);
-   *
-   * // run and test
-   * boolean result = run(context, inputValidation);
-   *
-   * // check if step is failed
-   * assertEquals(false, result);
-   *
-   * server.stop();
-   *
-   * }
-   */
-
   @Test
   public void testPipelineWithEagle() throws IOException, ZipException {
 
@@ -1571,7 +1398,7 @@ protected WorkflowTestContext buildContext(String folder, String refpanel) {
 
     context.setOutput("pgs_output", file.getAbsolutePath() + "/pgs_output");
     FileUtil.createDirectory(file.getAbsolutePath() + "/pgs_output");
 
     context.setOutput("logfile", file.getAbsolutePath() + "/logfile");
     FileUtil.createDirectory(file.getAbsolutePath() + "/logfile");
 
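The deleted tests exercised the retired HTTP, S3, and SFTP import paths end to end. A hypothetical replacement test, not part of this commit, would instead assert that a URL input now fails validation, reusing the mocks and helpers from the remaining tests:

    // Hypothetical test (invented for illustration): URL inputs should now be
    // rejected during input validation instead of being imported.
    @Test
    public void testPipelineWithHttpUrlIsRejected() throws IOException {

        String configFolder = "test-data/configs/hapmap-chr1";
        String inputFolder = "https://imputationserver.sph.umich.edu/static/downloads/hapmap300.chr1.recode.vcf.gz";

        // create workflow context
        WorkflowTestContext context = buildContext(inputFolder, "hapmap2");

        // create step instance
        InputValidation inputValidation = new InputValidationMock(configFolder);

        // run and test: validation must fail for URL-based input
        boolean result = run(context, inputValidation);
        assertEquals(false, result);
    }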
(1 of the 9 changed files did not load and is not shown above.)