File: Snakefile (snakemake 7.32.4-8.1)

import os  # used by os.path.join in aggregate_input below


# a target rule to define the desired final output
rule all:
    input:
        "processed2.txt",

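# By default, `snakemake --cores 1` builds the first rule in the file,
# i.e. rule all above, so requesting processed2.txt drives the whole chain.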

# the checkpoint that shall trigger re-evaluation of the DAG;
# a number of files are created in a defined directory
checkpoint somestep:
    output:
        directory("my_directory/"),
    shell:
        """
        mkdir my_directory/
        cd my_directory
        for i in 1 2 3; do touch $i.txt; done
        """


# input function for rule aggregate; returns the paths to all files produced
# by the checkpoint 'somestep'
def aggregate_input(wildcards):
    checkpoint_output = checkpoints.somestep.get(**wildcards).output[0]
    return expand(
        "my_directory/{i}.txt",
        i=glob_wildcards(os.path.join(checkpoint_output, "{i}.txt")).i,
    )
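
# For illustration: once somestep has run, glob_wildcards(...) above recovers
# i == ["1", "2", "3"] (in some order), so this function returns
# ["my_directory/1.txt", "my_directory/2.txt", "my_directory/3.txt"].
# The checkpoints.somestep.get(**wildcards) call defers evaluation of this
# function until the checkpoint has actually finished.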


rule aggregate:
    input:
        aggregate_input,
    output:
        "aggregated.txt",
    shell:
        "echo AGGREGATED > {output}"


# Fail here if the job runs again: we want to ensure that snakemake does not
# falsely trigger a rerun, as reported in issue #1818.
rule process:
    input:
        "aggregated.txt",
    output:
        "processed.txt",
    shell:
        "exit 1; echo PROCESSED > {output}"


rule process2:
    input:
        "processed.txt",
    output:
        "processed2.txt",
    shell:
        "echo PROCESSED2 > {output}"