path: root/examples/jsonnet/wordcount.jsonnet
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

local workflow = import "examples/jsonnet/workflow.jsonnet";

// Workflow that performs a wordcount using shell commands.
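// The three jobs below form a linear pipeline, equivalent to the shell
// one-liner: tr ' ' '\n' < /tmp/passage_test | sort | uniq -c
// To inspect the generated workflow spec, evaluate this file to JSON,
// e.g. with: jsonnet -J . examples/jsonnet/wordcount.jsonnet
// (run from the repository root so the import above resolves).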
{
  wordcount: workflow.Workflow {
    retries: 12,
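    // How many times a failed run is retried; the precise retry semantics
    // are left to whatever system consumes the generated JSON.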
    schedule: workflow.Schedule {
      start_date: "2015-11-15",
      start_time: "17:30",
      repeat_frequency: 1,
      repeat_type: "week",
    },
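    // The schedule above runs the workflow once a week, starting
    // 2015-11-15 at 17:30.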
    jobs: {
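      // These local bindings are visible to the job definitions below but,
      // unlike fields, are not emitted in the generated JSON.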
      local input_file = "/tmp/passage_test",
      local tokens_file = "/tmp/tokens",
      local sorted_tokens_file = "/tmp/sorted_tokens",
      local counts_file = "/tmp/counts",

      // Reads the input file and produces an output file with one word per
      // line.
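      // The % operator provides printf-style formatting (shorthand for
      // std.format). Note that Jsonnet expands "\n" to a real newline, so
      // the shell passes a quoted literal newline to tr as the target
      // character.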
      tokenize: workflow.ShJob {
        command: "tr ' ' '\n' < %s > %s" % [input_file, tokens_file],
        inputs: [input_file],
        outputs: [tokens_file],
      },

      // Takes the tokens file and produces a file with the tokens in sorted
      // order.
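      // The ":tokenize" label in deps refers to the sibling job above, so
      // this job runs only after tokenize has produced its output.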
      sort: workflow.ShJob {
        deps: [":tokenize"],
        command: "sort %s > %s" % [tokens_file, sorted_tokens_file],
        inputs: [tokens_file],
        outputs: [sorted_tokens_file],
      },

      // Takes the file containing sorted tokens and produces a file containing
      // the counts for each word.
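      // uniq -c collapses only adjacent duplicate lines, which is why the
      // sort job must run first.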
      count: workflow.ShJob {
        deps: [":sort"],
        command: "uniq -c %s > %s" % [sorted_tokens_file, counts_file],
        inputs: [sorted_tokens_file],
        outputs: [counts_file],
      },
    }
  }
}