1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### `Changed`

+- [#244](https://github.com/nf-core/phaseimpute/pull/244) - Start migration to strict syntax.
- [#237](https://github.com/nf-core/phaseimpute/pull/237) - Bump version to 1.2.0dev and update `CHANGELOG.md`.
- [#232](https://github.com/nf-core/phaseimpute/pull/232) - Make posfile generated in the panelprep step.
- [#231](https://github.com/nf-core/phaseimpute/pull/231) - Use "panel_id" instead of "id" in the meta map for the panel.
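As an illustration of the strict-syntax migration referenced in #244 (a minimal sketch, not part of this diff): the change lowercases the `Channel` channel factory to the `channel` namespace, which is the spelling this migration standardises on. The snippet below uses a made-up `ch_samples` channel and assumes a recent Nextflow release where the lowercase alias is available.

    // sketch.nf -- hypothetical standalone example, not a file in this PR
    workflow {
        // Before (lenient syntax): uppercase channel factory
        // ch_samples = Channel.of('NA12878', 'NA19401')

        // After (strict syntax): lowercase channel factory, as applied throughout this PR
        ch_samples = channel.of('NA12878', 'NA19401')
        ch_samples.view()
    }

The same mechanical rename is applied to every `Channel.empty()`, `Channel.of()` and `Channel.fromList()` call in the files below.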
6 changes: 3 additions & 3 deletions main.nf
@@ -52,9 +52,9 @@ workflow NFCORE_PHASEIMPUTE {
// Initialise input channels
//

-ch_input_impute = Channel.empty()
-ch_input_simulate = Channel.empty()
-ch_input_validate = Channel.empty()
+ch_input_impute = channel.empty()
+ch_input_simulate = channel.empty()
+ch_input_validate = channel.empty()

// Check input files for contigs names consistency
lst_chr = ch_regions.map { it[0].chr }
8 changes: 4 additions & 4 deletions modules/local/addcolumns/tests/main.nf.test
@@ -14,9 +14,9 @@ nextflow_process {
when {
process {
"""
file = Channel.of("# Other content\\ntest\\ntest1\\n#Genotype concordance by allele frequency bin (Variants: SNPs + indels)\\nCol1\\tCol2\\nA\\t1\\nB\\t2")
file = channel.of("# Other content\\ntest\\ntest1\\n#Genotype concordance by allele frequency bin (Variants: SNPs + indels)\\nCol1\\tCol2\\nA\\t1\\nB\\t2")
.collectFile(name: 'sample.txt', newLine: true)
input[0] = Channel.of([[id:"MyId", depth:2, gparray:"SNPArray", tools:"Glimpse", panel:"1000GP"]]).combine(file)
input[0] = channel.of([[id:"MyId", depth:2, gparray:"SNPArray", tools:"Glimpse", panel:"1000GP"]]).combine(file)
"""
}
}
@@ -36,11 +36,11 @@ nextflow_process {
when {
process {
"""
file = Channel.of("# Other content\\ntest\\ntest1\\n" +
file = channel.of("# Other content\\ntest\\ntest1\\n" +
"#Genotype concordance by allele frequency bin (Variants: SNPs + indels)\\n" +
"Col1\\tCol2\\nA\\t1\\nB\\t2")
.collectFile(name: 'sample.txt', newLine: true)
input[0] = Channel.of([[id:"MyId", tools:"Glimpse"]]).combine(file)
input[0] = channel.of([[id:"MyId", tools:"Glimpse"]]).combine(file)
"""
}
}
2 changes: 1 addition & 1 deletion modules/local/bamchrextract/tests/main.nf.test
@@ -12,7 +12,7 @@ nextflow_process {
when {
process {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[ id:'test_single_end_bam' ], // meta map
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.single_end.bam', checkIfExists: true)
2 changes: 1 addition & 1 deletion modules/local/listtofile/tests/main.nf.test
@@ -14,7 +14,7 @@ nextflow_process {
when {
process {
"""
-input[0] = Channel.of([
+input[0] = channel.of([
[id: "all"],
[file("file1.txt"), file("file2.txt")],
["A", "B"]
2 changes: 1 addition & 1 deletion modules/local/vcfchrextract/tests/main.nf.test
@@ -12,7 +12,7 @@ nextflow_process {
when {
process {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id:'test_bcf'], // meta map
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/vcf/test.bcf', checkIfExists: true)
2 changes: 1 addition & 1 deletion subworkflows/local/bam_chr_rename_samtools/main.nf
@@ -7,7 +7,7 @@ workflow BAM_CHR_RENAME_SAMTOOLS {

main:

-ch_versions = Channel.empty()
+ch_versions = channel.empty()

// Rename the chromosome with or without prefix
SAMTOOLS_REHEADER(
4 changes: 2 additions & 2 deletions subworkflows/local/bam_chr_rename_samtools/tests/main.nf.test
@@ -21,7 +21,7 @@ nextflow_workflow {
when {
workflow {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id: "test_paired"],
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExist:true),
@@ -64,7 +64,7 @@ nextflow_workflow {
script "../main.nf"
process {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id: "test_paired"],
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExist:true),
2 changes: 1 addition & 1 deletion subworkflows/local/bam_downsample_samtools/main.nf
@@ -10,7 +10,7 @@ workflow BAM_DOWNSAMPLE_SAMTOOLS {
ch_fasta // channel: [ [genome], fasta, fai ]

main:
-ch_versions = Channel.empty()
+ch_versions = channel.empty()

// Compute mean depth
SAMTOOLS_DEPTH(ch_bam, [[], []])
12 changes: 6 additions & 6 deletions subworkflows/local/bam_downsample_samtools/tests/main.nf.test
@@ -25,7 +25,7 @@ nextflow_workflow {
}
workflow {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id: "NA12878"],
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam", checkIfExist:true),
@@ -37,8 +37,8 @@
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA19401/NA19401.s.bam.bai", checkIfExist:true),
],
])
-input[1] = Channel.of([[depth: 2], 2]).collect()
-input[2] = Channel.of([
+input[1] = channel.of([[depth: 2], 2]).collect()
+input[2] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true),
@@ -74,7 +74,7 @@ nextflow_workflow {
}
workflow {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id: "NA12878"],
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam", checkIfExist:true),
@@ -86,8 +86,8 @@
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA19401/NA19401.s.bam.bai", checkIfExist:true),
],
])
-input[1] = Channel.of([[depth: 4], 4]).collect()
-input[2] = Channel.of([
+input[1] = channel.of([[depth: 4], 4]).collect()
+input[2] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true),
2 changes: 1 addition & 1 deletion subworkflows/local/bam_extract_region_samtools/main.nf
@@ -10,7 +10,7 @@ workflow BAM_EXTRACT_REGION_SAMTOOLS {
ch_fasta // channel: [ [genome], fasta, fai ]
main:

-ch_versions = Channel.empty()
+ch_versions = channel.empty()

// Add fasta and region to bam channel
ch_input_region = ch_bam
@@ -24,7 +24,7 @@ nextflow_workflow {
}
workflow {
"""
-input[0] = input[0] = Channel.fromList([
+input[0] = input[0] = channel.fromList([
[
[id: "NA12878"],
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam", checkIfExist:true),
@@ -36,10 +36,10 @@
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA19401/NA19401.s.bam.bai", checkIfExist:true),
],
])
-input[1] = Channel.of([
+input[1] = channel.of([
[chr: "chr22", region: "chr22:16600000-16610000"], "chr22:16600000-16610000"
]).collect()
-input[2] = Channel.of([
+input[2] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true),
@@ -75,7 +75,7 @@ nextflow_workflow {
}
workflow {
"""
-input[0] = input[0] = Channel.fromList([
+input[0] = input[0] = channel.fromList([
[
[id: "NA12878"],
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam", checkIfExist:true),
@@ -87,10 +87,10 @@
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA19401/NA19401.s.bam.bai", checkIfExist:true),
],
])
-input[1] = Channel.of([
+input[1] = channel.of([
[chr: "chr22", region: "chr22:16609000-16610000"], "chr22:16609000-16610000"
]).collect()
-input[2] = Channel.of([
+input[2] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true),
12 changes: 6 additions & 6 deletions subworkflows/local/bam_gl_bcftools/tests/main.nf.test
@@ -22,7 +22,7 @@ nextflow_workflow {
when {
workflow {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id: "NA12878"],
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam", checkIfExist:true),
@@ -34,11 +34,11 @@
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA19401/NA19401.s.bam.bai", checkIfExist:true),
],
])
-input[1] = Channel.of([
+input[1] = channel.of([
[panel_id: "1000GP", chr: "22"],
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.posfile", checkIfExist:true)
]).collect()
-input[2] = Channel.of([
+input[2] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true),
@@ -70,18 +70,18 @@ nextflow_workflow {
when {
workflow {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id: "NA12878"],
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam.bai", checkIfExist:true),
],
])
-input[1] = Channel.of([
+input[1] = channel.of([
[panel_id: "1000GP", chr: "22"],
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.posfile", checkIfExist:true)
]).collect()
-input[2] = Channel.of([
+input[2] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true),
4 changes: 2 additions & 2 deletions subworkflows/local/bam_impute_quilt/main.nf
@@ -11,7 +11,7 @@ workflow BAM_IMPUTE_QUILT {

main:

-ch_versions = Channel.empty()
+ch_versions = channel.empty()

genetic_map_file = []

@@ -46,7 +46,7 @@ workflow BAM_IMPUTE_QUILT {
// Annotate the variants
BCFTOOLS_ANNOTATE(QUILT_QUILT.out.vcf
.join(QUILT_QUILT.out.tbi)
-.combine(Channel.of([[], [], [], []]))
+.combine(channel.of([[], [], [], []]))
)
ch_versions = ch_versions.mix(BCFTOOLS_ANNOTATE.out.versions.first())

32 changes: 16 additions & 16 deletions subworkflows/local/bam_impute_quilt/tests/main.nf.test
@@ -21,11 +21,11 @@ nextflow_workflow {
when {
workflow {
"""
-bampath = Channel.of(
+bampath = channel.of(
"NA12878.s.bam",
"NA19401.s.bam"
).collectFile(name: 'bampath.txt', newLine: true)
-ch_samples = Channel.fromList([
+ch_samples = channel.fromList([
[
[id: "allid"],
[
@@ -38,17 +38,17 @@
]
],
])
-input[0] = ch_samples.combine(bampath).combine(Channel.of([[]]))
-input[1] = Channel.of([
+input[0] = ch_samples.combine(bampath).combine(channel.of([[]]))
+input[1] = channel.of([
[panel_id: "1000GP", chr: "chr22"],
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.hap.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.legend.gz", checkIfExist:true)
]).collect()
-input[2] = Channel.of(
+input[2] = channel.of(
[[chr: "chr22", panel_id: "1000GP"], "chr22", "16570065", "16597215"],
[[chr: "chr22", panel_id: "1000GP"], "chr22", "16587172", "16609999"]
)
-input[3] = Channel.of([
+input[3] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true)
@@ -78,15 +78,15 @@ nextflow_workflow {
when {
workflow {
"""
-bampath = Channel.of(
+bampath = channel.of(
"NA12878.s.bam",
"NA19401.s.bam"
).collectFile(name: 'bampath.txt', newLine: true)
-bamname = Channel.of(
+bamname = channel.of(
"MySample1",
"MySample2"
).collectFile(name: 'bamname.txt', newLine: true)
-ch_samples = Channel.fromList([
+ch_samples = channel.fromList([
[
[id: "allid"],
[
Expand All @@ -100,16 +100,16 @@ nextflow_workflow {
],
])
input[0] = ch_samples.combine(bampath).combine(bamname)
-input[1] = Channel.of([
+input[1] = channel.of([
[panel_id: "1000GP", chr: "chr22"],
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.hap.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.legend.gz", checkIfExist:true)
]).collect()
-input[2] = Channel.of(
+input[2] = channel.of(
[[chr: "chr22", panel_id: "1000GP"], "chr22", "16570065", "16597215"],
[[chr: "chr22", panel_id: "1000GP"], "chr22", "16587172", "16609999"]
)
-input[3] = Channel.of([
+input[3] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true)
@@ -140,7 +140,7 @@ nextflow_workflow {
when {
workflow {
"""
-input[0] = Channel.fromList([
+input[0] = channel.fromList([
[
[id: "NA12878"],
file(params.pipelines_testdata_base_path + "hum_data/individuals/NA12878/NA12878.s.bam", checkIfExist:true),
Expand All @@ -149,16 +149,16 @@ nextflow_workflow {
[]
]
])
-input[1] = Channel.of([
+input[1] = channel.of([
[panel_id: "1000GP", chr: "chr22"],
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.hap.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/panel/chr22/1000GP.chr22.legend.gz", checkIfExist:true)
]).collect()
-input[2] = Channel.of(
+input[2] = channel.of(
[[chr: "chr22", panel_id: "1000GP"], "chr22", "16570065", "16597215"],
[[chr: "chr22", panel_id: "1000GP"], "chr22", "16587172", "16609999"]
)
-input[3] = Channel.of([
+input[3] = channel.of([
[id: "GRCh38"],
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz", checkIfExist:true),
file(params.pipelines_testdata_base_path + "hum_data/reference_genome/GRCh38.s.fa.gz.fai", checkIfExist:true)
@@ -172,7 +172,7 @@ workflow PIPELINE_INITIALISATION {
error "Panel file provided is of another format than CSV (not yet supported). Please separate your panel by chromosome and use the samplesheet format."
}
} else {
-ch_input_truth = Channel.of([[], [], []])
+ch_input_truth = channel.of([[], [], []])
input_truth_ext = ""
}
