
builder: parser and beginnings of the evaluator

Docker-DCO-1.1-Signed-off-by: Erik Hollensbe <github@hollensbe.org> (github: erikh)

Erik Hollensbe authored on 2014/08/06 05:17:40
Showing 41 changed files
new file mode 100644
@@ -0,0 +1,2 @@
+main
+gopath
new file mode 100644
@@ -0,0 +1,2 @@
+builder
+Dockerfile
new file mode 100644
@@ -0,0 +1,31 @@
+package main
+
+import (
+	"os"
+
+	"github.com/erikh/buildfile/evaluator"
+)
+
+func main() {
+	if len(os.Args) < 2 {
+		os.Stderr.WriteString("Please supply filename(s) to evaluate")
+		os.Exit(1)
+	}
+
+	for _, fn := range os.Args[1:] {
+		f, err := os.Open(fn)
+		if err != nil {
+			panic(err)
+		}
+
+		opts := &evaluator.BuildOpts{}
+
+		bf, err := opts.NewBuildFile(f)
+		if err != nil {
+			panic(err)
+		}
+		if err := bf.Run(); err != nil {
+			panic(err)
+		}
+	}
+}
new file mode 100644
@@ -0,0 +1,45 @@
+package evaluator
+
+import (
+	"fmt"
+	"strings"
+)
+
+func env(b *buildFile, args ...string) error {
+	if len(args) != 2 {
+		return fmt.Errorf("ENV accepts two arguments")
+	}
+
+	// the duplication here is intended to ease the replaceEnv() call's env
+	// handling. This routine gets much shorter with the denormalization here.
+	key := args[0]
+	b.env[key] = args[1]
+	b.config.Env = append(b.config.Env, strings.Join([]string{key, b.env[key]}, "="))
+
+	return b.commit("", b.config.Cmd, fmt.Sprintf("ENV %s=%s", key, b.env[key]))
+}
+
+func maintainer(b *buildFile, args ...string) error {
+	if len(args) != 1 {
+		return fmt.Errorf("MAINTAINER requires exactly one argument")
+	}
+
+	b.maintainer = args[0]
+	return b.commit("", b.config.Cmd, fmt.Sprintf("MAINTAINER %s", b.maintainer))
+}
+
+func add(b *buildFile, args ...string) error {
+	if len(args) != 2 {
+		return fmt.Errorf("ADD requires two arguments")
+	}
+
+	return b.runContextCommand(args, true, true, "ADD")
+}
+
+func dispatchCopy(b *buildFile, args ...string) error {
+	if len(args) != 2 {
+		return fmt.Errorf("COPY requires two arguments")
+	}
+
+	return b.runContextCommand(args, false, false, "COPY")
+}
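Only ENV, MAINTAINER, ADD and COPY are wired up here; the evaluate table in the next file keeps the remaining instructions commented out for now. As a rough sketch of the shape a later dispatcher could take (hypothetical, not part of this commit; it assumes runconfig.Config exposes a WorkingDir field):

func workdir(b *buildFile, args ...string) error {
	if len(args) != 1 {
		return fmt.Errorf("WORKDIR requires exactly one argument")
	}

	// record the working directory on the config and commit a no-op layer,
	// mirroring maintainer() above
	b.config.WorkingDir = args[0]
	return b.commit("", b.config.Cmd, fmt.Sprintf("WORKDIR %s", args[0]))
}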
new file mode 100644
@@ -0,0 +1,121 @@
+package evaluator
+
+import (
+	"fmt"
+	"io"
+	"strings"
+
+	"github.com/erikh/buildfile/parser"
+
+	"github.com/docker/docker/daemon"
+	"github.com/docker/docker/engine"
+	"github.com/docker/docker/nat"
+	"github.com/docker/docker/registry"
+	"github.com/docker/docker/runconfig"
+	"github.com/docker/docker/utils"
+)
+
+var (
+	evaluateTable = map[string]func(*buildFile, ...string) error{
+		"env":        env,
+		"maintainer": maintainer,
+		"add":        add,
+		"copy":       dispatchCopy, // copy() is a go builtin
+		//"onbuild":        parseMaybeJSON,
+		//"workdir":        parseString,
+		//"docker-version": parseString,
+		//"run":            parseMaybeJSON,
+		//"cmd":            parseMaybeJSON,
+		//"entrypoint":     parseMaybeJSON,
+		//"expose":         parseMaybeJSON,
+		//"volume":         parseMaybeJSON,
+	}
+)
+
+// envMap tracks the environment assembled by ENV instructions during
+// evaluation.
+type envMap map[string]string
+
+type buildFile struct {
+	dockerfile *parser.Node
+	env        envMap
+	image      string
+	config     *runconfig.Config
+	options    *BuildOpts
+	maintainer string
+}
+
+type BuildOpts struct {
+	Daemon          *daemon.Daemon
+	Engine          *engine.Engine
+	OutStream       io.Writer
+	ErrStream       io.Writer
+	Verbose         bool
+	UtilizeCache    bool
+	Remove          bool
+	ForceRm         bool
+	OutOld          io.Writer
+	StreamFormatter *utils.StreamFormatter
+	Auth            *registry.AuthConfig
+	AuthConfigFile  *registry.ConfigFile
+}
+
+func (opts *BuildOpts) NewBuildFile(file io.ReadWriteCloser) (*buildFile, error) {
+	ast, err := parser.Parse(file)
+	if err != nil {
+		return nil, err
+	}
+
+	return &buildFile{
+		dockerfile: ast,
+		env:        envMap{},
+		config:     initRunConfig(),
+		options:    opts,
+	}, nil
+}
+
+func (b *buildFile) Run() error {
+	node := b.dockerfile
+
+	for i, n := range node.Children {
+		if err := b.dispatch(i, n); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func initRunConfig() *runconfig.Config {
+	return &runconfig.Config{
+		PortSpecs: []string{},
+		// FIXME(erikh) this should be a type that lives in runconfig
+		ExposedPorts: map[nat.Port]struct{}{},
+		Env:          []string{},
+		Cmd:          []string{},
+
+		// FIXME(erikh) this should also be a type in runconfig
+		Volumes:    map[string]struct{}{},
+		Entrypoint: []string{},
+		OnBuild:    []string{},
+	}
+}
+
+func (b *buildFile) dispatch(stepN int, ast *parser.Node) error {
+	cmd := ast.Value
+	strs := []string{}
+	for ast.Next != nil {
+		ast = ast.Next
+		strs = append(strs, replaceEnv(b, stripQuotes(ast.Value)))
+	}
+
+	fmt.Fprintf(b.options.OutStream, "Step %d : %s %s\n", stepN, cmd, strings.Join(strs, " "))
+
+	// XXX yes, we skip any cmds that are not valid; the parser should have
+	// picked these out already.
+	if f, ok := evaluateTable[cmd]; ok {
+		return f(b, strs...)
+	}
+
+	return nil
+}
new file mode 100644
@@ -0,0 +1,246 @@
+package evaluator
+
+func (b *buildFile) addContext(context io.Reader) (string, error) {
+	tmpdirPath, err := ioutil.TempDir("", "docker-build")
+	if err != nil {
+		return "", err
+	}
+
+	decompressedStream, err := archive.DecompressStream(context)
+	if err != nil {
+		return "", err
+	}
+
+	b.context = &tarsum.TarSum{Reader: decompressedStream, DisableCompression: true}
+	if err := archive.Untar(b.context, tmpdirPath, nil); err != nil {
+		return "", err
+	}
+
+	b.contextPath = tmpdirPath
+	return tmpdirPath, nil
+}
+
+func (b *buildFile) commit(id string, autoCmd []string, comment string) error {
+	if b.image == "" {
+		return fmt.Errorf("Please provide a source image with `from` prior to commit")
+	}
+	b.config.Image = b.image
+	if id == "" {
+		cmd := b.config.Cmd
+		b.config.Cmd = []string{"/bin/sh", "-c", "#(nop) " + comment}
+		defer func(cmd []string) { b.config.Cmd = cmd }(cmd)
+
+		hit, err := b.probeCache()
+		if err != nil {
+			return err
+		}
+		if hit {
+			return nil
+		}
+
+		container, warnings, err := b.daemon.Create(b.config, "")
+		if err != nil {
+			return err
+		}
+		for _, warning := range warnings {
+			fmt.Fprintf(b.outStream, " ---> [Warning] %s\n", warning)
+		}
+		b.tmpContainers[container.ID] = struct{}{}
+		fmt.Fprintf(b.outStream, " ---> Running in %s\n", utils.TruncateID(container.ID))
+		id = container.ID
+
+		if err := container.Mount(); err != nil {
+			return err
+		}
+		defer container.Unmount()
+	}
+	container := b.daemon.Get(id)
+	if container == nil {
+		return fmt.Errorf("An error occurred while creating the container")
+	}
+
+	// Note: Actually copy the struct
+	autoConfig := *b.config
+	autoConfig.Cmd = autoCmd
+	// Commit the container
+	image, err := b.daemon.Commit(container, "", "", "", b.maintainer, true, &autoConfig)
+	if err != nil {
+		return err
+	}
+	b.tmpImages[image.ID] = struct{}{}
+	b.image = image.ID
+	return nil
+}
+
+func (b *buildFile) runContextCommand(args []string, allowRemote bool, allowDecompression bool, cmdName string) error {
+	if b.context == nil {
+		return fmt.Errorf("No context given. Impossible to use %s", cmdName)
+	}
+	if len(args) != 2 {
+		return fmt.Errorf("Invalid %s format", cmdName)
+	}
+
+	orig, err := b.ReplaceEnvMatches(strings.Trim(args[0], " \t"))
+	if err != nil {
+		return err
+	}
+
+	dest, err := b.ReplaceEnvMatches(strings.Trim(args[1], " \t"))
+	if err != nil {
+		return err
+	}
+
+	cmd := b.config.Cmd
+	b.config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) %s %s in %s", cmdName, orig, dest)}
+	defer func(cmd []string) { b.config.Cmd = cmd }(cmd)
+	b.config.Image = b.image
+
+	var (
+		origPath   = orig
+		destPath   = dest
+		remoteHash string
+		isRemote   bool
+		decompress = true
+	)
+
+	isRemote = utils.IsURL(orig)
+	if isRemote && !allowRemote {
+		return fmt.Errorf("Source can't be an URL for %s", cmdName)
+	} else if utils.IsURL(orig) {
+		// Initiate the download
+		resp, err := utils.Download(orig)
+		if err != nil {
+			return err
+		}
+
+		// Create a tmp dir
+		tmpDirName, err := ioutil.TempDir(b.contextPath, "docker-remote")
+		if err != nil {
+			return err
+		}
+
+		// Create a tmp file within our tmp dir
+		tmpFileName := path.Join(tmpDirName, "tmp")
+		tmpFile, err := os.OpenFile(tmpFileName, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0600)
+		if err != nil {
+			return err
+		}
+		defer os.RemoveAll(tmpDirName)
+
+		// Download and dump result to tmp file
+		if _, err := io.Copy(tmpFile, resp.Body); err != nil {
+			tmpFile.Close()
+			return err
+		}
+		tmpFile.Close()
+
+		// Remove the mtime of the newly created tmp file
+		if err := system.UtimesNano(tmpFileName, make([]syscall.Timespec, 2)); err != nil {
+			return err
+		}
+
+		origPath = path.Join(filepath.Base(tmpDirName), filepath.Base(tmpFileName))
+
+		// Process the checksum
+		r, err := archive.Tar(tmpFileName, archive.Uncompressed)
+		if err != nil {
+			return err
+		}
+		tarSum := &tarsum.TarSum{Reader: r, DisableCompression: true}
+		if _, err := io.Copy(ioutil.Discard, tarSum); err != nil {
+			return err
+		}
+		remoteHash = tarSum.Sum(nil)
+		r.Close()
+
+		// If the destination is a directory, figure out the filename.
+		if strings.HasSuffix(dest, "/") {
+			u, err := url.Parse(orig)
+			if err != nil {
+				return err
+			}
+			path := u.Path
+			if strings.HasSuffix(path, "/") {
+				path = path[:len(path)-1]
+			}
+			parts := strings.Split(path, "/")
+			filename := parts[len(parts)-1]
+			if filename == "" {
+				return fmt.Errorf("cannot determine filename from url: %s", u)
+			}
+			destPath = dest + filename
+		}
+	}
+
+	if err := b.checkPathForAddition(origPath); err != nil {
+		return err
+	}
+
+	// Hash path and check the cache
+	if b.utilizeCache {
+		var (
+			hash string
+			sums = b.context.GetSums()
+		)
+
+		if remoteHash != "" {
+			hash = remoteHash
+		} else if fi, err := os.Stat(path.Join(b.contextPath, origPath)); err != nil {
+			return err
+		} else if fi.IsDir() {
+			var subfiles []string
+			for file, sum := range sums {
+				absFile := path.Join(b.contextPath, file)
+				absOrigPath := path.Join(b.contextPath, origPath)
+				if strings.HasPrefix(absFile, absOrigPath) {
+					subfiles = append(subfiles, sum)
+				}
+			}
+			sort.Strings(subfiles)
+			hasher := sha256.New()
+			hasher.Write([]byte(strings.Join(subfiles, ",")))
+			hash = "dir:" + hex.EncodeToString(hasher.Sum(nil))
+		} else {
+			if origPath[0] == '/' && len(origPath) > 1 {
+				origPath = origPath[1:]
+			}
+			origPath = strings.TrimPrefix(origPath, "./")
+			if h, ok := sums[origPath]; ok {
+				hash = "file:" + h
+			}
+		}
+		b.config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) %s %s in %s", cmdName, hash, dest)}
+		hit, err := b.probeCache()
+		if err != nil {
+			return err
+		}
+		// If we do not have a hash, never use the cache
+		if hit && hash != "" {
+			return nil
+		}
+	}
+
+	// Create the container
+	container, _, err := b.daemon.Create(b.config, "")
+	if err != nil {
+		return err
+	}
+	b.tmpContainers[container.ID] = struct{}{}
+
+	if err := container.Mount(); err != nil {
+		return err
+	}
+	defer container.Unmount()
+
+	if !allowDecompression || isRemote {
+		decompress = false
+	}
+	if err := b.addContext(container, origPath, destPath, decompress); err != nil {
+		return err
+	}
+
+	if err := b.commit(container.ID, cmd, fmt.Sprintf("%s %s in %s", cmdName, orig, dest)); err != nil {
+		return err
+	}
+	return nil
+}
new file mode 100644
@@ -0,0 +1,33 @@
+package evaluator
+
+import (
+	"regexp"
+	"strings"
+)
+
+var (
+	TOKEN_ESCAPED_QUOTE     = regexp.MustCompile(`\\"`)
+	TOKEN_ESCAPED_ESCAPE    = regexp.MustCompile(`\\\\`)
+	TOKEN_ENV_INTERPOLATION = regexp.MustCompile("(\\\\\\\\+|[^\\\\]|\\b|\\A)\\$({?)([[:alnum:]_]+)(}?)")
+)
+
+func stripQuotes(str string) string {
+	str = str[1 : len(str)-1]
+	str = TOKEN_ESCAPED_QUOTE.ReplaceAllString(str, `"`)
+	return TOKEN_ESCAPED_ESCAPE.ReplaceAllString(str, `\`)
+}
+
+func replaceEnv(b *buildFile, str string) string {
+	for _, match := range TOKEN_ENV_INTERPOLATION.FindAllString(str, -1) {
+		match = match[strings.Index(match, "$"):]
+		matchKey := strings.Trim(match, "${}")
+
+		for envKey, envValue := range b.env {
+			if matchKey == envKey {
+				str = strings.Replace(str, match, envValue, -1)
+			}
+		}
+	}
+
+	return str
+}
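replaceEnv only substitutes variables recorded by earlier ENV instructions, matching both the $GOPATH and ${GOPATH} forms via TOKEN_ENV_INTERPOLATION. A test-style sketch of the intended behaviour (hypothetical, not part of this commit; it would live inside package evaluator with the standard testing import, since both helpers are unexported):

func TestReplaceEnv(t *testing.T) {
	b := &buildFile{env: envMap{"GOPATH": "/go"}}

	// both interpolation forms resolve against the accumulated ENV map
	if got := replaceEnv(b, "${GOPATH}/bin:$GOPATH/src"); got != "/go/bin:/go/src" {
		t.Fatalf("unexpected interpolation result: %q", got)
	}

	// variables that were never set pass through untouched
	if got := replaceEnv(b, "$UNSET"); got != "$UNSET" {
		t.Fatalf("expected $UNSET to pass through, got %q", got)
	}
}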
new file mode 100644
@@ -0,0 +1,2 @@
+dumper
+Dockerfile
new file mode 100644
@@ -0,0 +1,32 @@
+package main
+
+import (
+	"fmt"
+	"os"
+
+	"github.com/erikh/buildfile/parser"
+)
+
+func main() {
+	var f *os.File
+	var err error
+
+	if len(os.Args) < 2 {
+		fmt.Println("please supply filename(s)")
+		os.Exit(1)
+	}
+
+	for _, fn := range os.Args[1:] {
+		f, err = os.Open(fn)
+		if err != nil {
+			panic(err)
+		}
+
+		ast, err := parser.Parse(f)
+		if err != nil {
+			panic(err)
+		} else {
+			fmt.Print(ast.Dump())
+		}
+	}
+}
new file mode 100644
@@ -0,0 +1,108 @@
+package parser
+
+import (
+	"encoding/json"
+	"strconv"
+	"strings"
+)
+
+// ignore the current argument. This will still leave a command parsed, but
+// will not incorporate the arguments into the ast.
+func parseIgnore(rest string) (*Node, error) {
+	return blankNode(), nil
+}
+
+func parseSubCommand(rest string) (*Node, error) {
+	_, child, err := parseLine(rest)
+	if err != nil {
+		return nil, err
+	}
+
+	return &Node{Children: []*Node{child}}, nil
+}
+
+// parse environment like statements. Note that this does *not* handle
+// variable interpolation, which will be handled in the evaluator.
+func parseEnv(rest string) (*Node, error) {
+	node := blankNode()
+	rootnode := node
+	strs := TOKEN_WHITESPACE.Split(rest, 2)
+	node.Value = QuoteString(strs[0])
+	node.Next = blankNode()
+	node.Next.Value = QuoteString(strs[1])
+
+	return rootnode, nil
+}
+
+// parses a whitespace-delimited set of arguments. The result is effectively a
+// linked list of string arguments.
+func parseStringsWhitespaceDelimited(rest string) (*Node, error) {
+	node := blankNode()
+	rootnode := node
+	prevnode := node
+	for _, str := range TOKEN_WHITESPACE.Split(rest, -1) { // use regexp
+		prevnode = node
+		node.Value = QuoteString(str)
+		node.Next = blankNode()
+		node = node.Next
+	}
+
+	// the loop above always leaves a trailing blank node; unlink it so the
+	// argument list ends at the last real value.
+	prevnode.Next = nil
+
+	return rootnode, nil
+}
+
+// parseString just wraps the string in quotes and returns a working node.
+func parseString(rest string) (*Node, error) {
+	return &Node{QuoteString(rest), nil, nil}, nil
+}
+
+// parseJSON converts JSON arrays to an AST.
+func parseJSON(rest string) (*Node, error) {
+	var (
+		myJson   []interface{}
+		next     = blankNode()
+		orignext = next
+		prevnode = next
+	)
+
+	if err := json.Unmarshal([]byte(rest), &myJson); err != nil {
+		return nil, err
+	}
+
+	for _, str := range myJson {
+		switch str.(type) {
+		case float64:
+			str = strconv.FormatFloat(str.(float64), 'G', -1, 64)
+		}
+		prevnode = next
+		next.Value = QuoteString(str.(string))
+		next.Next = blankNode()
+		next = next.Next
+	}
+
+	// as above, unlink the trailing blank node the loop leaves behind.
+	prevnode.Next = nil
+
+	return orignext, nil
+}
+
+// parseMaybeJSON determines if the argument appears to be a JSON array. If
+// so, passes to parseJSON; if not, quotes the result and returns a single
+// node.
+func parseMaybeJSON(rest string) (*Node, error) {
+	rest = strings.TrimSpace(rest)
+
+	if strings.HasPrefix(rest, "[") {
+		node, err := parseJSON(rest)
+		if err == nil {
+			return node, nil
+		}
+	}
+
+	node := blankNode()
+	node.Value = QuoteString(rest)
+	return node, nil
+}
new file mode 100644
@@ -0,0 +1,150 @@
+// This package implements a parser and parse tree dumper for Dockerfiles.
+package parser
+
+import (
+	"bufio"
+	"io"
+	"regexp"
+	"strings"
+)
+
+// Node is the building block of the AST this package will create.
+//
+// Nodes are structured to have a value, next, and child, the latter two of
+// which are Nodes themselves.
+//
+// This terminology is unfortunately rather confusing, so here's a diagram.
+// Anything after the ; is a comment.
+//
+//     (
+//       (run "foo") ; value run, and next is a value foo.
+//       (run "1" "2" "3") ;
+//       (something (really cool))
+//     )
+//
+// Will give you something like this:
+//
+//     &Node{
+//       Value:"",
+//       Child: &Node{Value: "run", Next: &Node{Value: "foo"}, Child: nil},
+//       Next: &Node{Value:"", Child: &Node{Value:"run", Next: &Node{Value:`"1"`....
+//
+// ... and so on.
+//
+// The short and fast rule is that anything that starts with ( is a child of
+// something. Anything which follows a previous statement is a next of
+// something.
+//
+type Node struct {
+	Value    string  // actual content
+	Next     *Node   // the next item in the current sexp
+	Children []*Node // the children of this sexp
+}
+
+var (
+	dispatch                map[string]func(string) (*Node, error)
+	TOKEN_WHITESPACE        = regexp.MustCompile(`\s+`)
+	TOKEN_LINE_CONTINUATION = regexp.MustCompile(`\\$`)
+	TOKEN_COMMENT           = regexp.MustCompile(`^#.*$`)
+)
+
+func init() {
+	// Dispatch Table. see line_parsers.go for the parse functions.
+	// The command is parsed and mapped to the line parser. The line parser
+	// receives the arguments but not the command, and returns an AST after
+	// reformulating the arguments according to the rules in the parser
+	// functions. Errors are propagated up by Parse() and the resulting AST can
+	// be incorporated directly into the existing AST as a next.
+	dispatch = map[string]func(string) (*Node, error){
+		"user":           parseString,
+		"onbuild":        parseSubCommand,
+		"workdir":        parseString,
+		"env":            parseEnv,
+		"maintainer":     parseString,
+		"docker-version": parseString,
+		"from":           parseString,
+		"add":            parseStringsWhitespaceDelimited,
+		"copy":           parseStringsWhitespaceDelimited,
+		"run":            parseMaybeJSON,
+		"cmd":            parseMaybeJSON,
+		"entrypoint":     parseMaybeJSON,
+		"expose":         parseStringsWhitespaceDelimited,
+		"volume":         parseMaybeJSON,
+	}
+}
+
+// empty node. Useful for managing structure.
+func blankNode() *Node {
+	return &Node{"", nil, []*Node{}}
+}
+
+func parseLine(line string) (string, *Node, error) {
+	if line = stripComments(line); line == "" {
+		return "", nil, nil
+	}
+
+	if TOKEN_LINE_CONTINUATION.MatchString(line) {
+		line = TOKEN_LINE_CONTINUATION.ReplaceAllString(line, "")
+		return line, nil, nil
+	}
+
+	cmd, args := splitCommand(line)
+
+	node := blankNode()
+	node.Value = cmd
+
+	sexp, err := fullDispatch(cmd, args)
+	if err != nil {
+		return "", nil, err
+	}
+
+	node.Next = sexp
+
+	return "", node, nil
+}
+
+// The main parse routine. Reads Dockerfile text from an io.Reader and returns
+// the root of the AST.
+func Parse(rwc io.Reader) (*Node, error) {
+	var child *Node
+	var line string
+	var err error
+	root := blankNode()
+	scanner := bufio.NewScanner(rwc)
+
+	for scanner.Scan() {
+		line, child, err = parseLine(strings.TrimSpace(scanner.Text()))
+		if err != nil {
+			return nil, err
+		}
+
+		if line != "" && child == nil {
+			for {
+				if !scanner.Scan() {
+					break
+				}
+
+				newline := strings.TrimSpace(scanner.Text())
+
+				if newline == "" {
+					continue
+				}
+
+				line, child, err = parseLine(line + newline)
+				if err != nil {
+					return nil, err
+				}
+
+				if child != nil {
+					break
+				}
+			}
+		}
+
+		if child != nil {
+			root.Children = append(root.Children, child)
+		}
+	}
+
+	return root, nil
+}
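Taken together, Parse and Dump (Dump is defined in the parser's utils file further down) are what the testfiles fixtures exercise: parse a Dockerfile, then dump the AST as one sexp per instruction. A minimal usage sketch, assuming only the exported parser API shown in this commit:

package main

import (
	"fmt"
	"strings"

	"github.com/erikh/buildfile/parser"
)

func main() {
	dockerfile := "FROM ubuntu:14.04\n" +
		"ENV GOPATH /go\n" +
		"RUN echo $GOPATH\n"

	// Parse accepts any io.Reader and returns the root of the AST.
	ast, err := parser.Parse(strings.NewReader(dockerfile))
	if err != nil {
		panic(err)
	}

	// Prints (matching the format of the result fixtures below):
	// (from "ubuntu:14.04")
	// (env "GOPATH" "/go")
	// (run "echo $GOPATH")
	fmt.Print(ast.Dump())
}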
new file mode 100644
@@ -0,0 +1,56 @@
+package parser
+
+import (
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"testing"
+)
+
+const testDir = "testfiles"
+
+func TestTestData(t *testing.T) {
+	f, err := os.Open(testDir)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	defer f.Close()
+
+	dirs, err := f.Readdir(0)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	for _, dir := range dirs {
+		dockerfile := filepath.Join(testDir, dir.Name(), "Dockerfile")
+		resultfile := filepath.Join(testDir, dir.Name(), "result")
+
+		df, err := os.Open(dockerfile)
+		if err != nil {
+			t.Fatalf("Dockerfile missing for %s: %s", dir.Name(), err.Error())
+		}
+
+		rf, err := os.Open(resultfile)
+		if err != nil {
+			t.Fatalf("Result file missing for %s: %s", dir.Name(), err.Error())
+		}
+
+		ast, err := Parse(df)
+		if err != nil {
+			t.Fatalf("Error parsing %s's dockerfile: %s", dir.Name(), err.Error())
+		}
+
+		content, err := ioutil.ReadAll(rf)
+		if err != nil {
+			t.Fatalf("Error reading %s's result file: %s", dir.Name(), err.Error())
+		}
+
+		if ast.Dump() != string(content) {
+			t.Fatalf("%s: AST dump of dockerfile does not match result", dir.Name())
+		}
+
+		df.Close()
+		rf.Close()
+	}
+}
0 56
new file mode 100644
... ...
@@ -0,0 +1,25 @@
0
+FROM brimstone/ubuntu:14.04
1
+
2
+MAINTAINER brimstone@the.narro.ws
3
+
4
+# TORUN -v /var/run/docker.sock:/var/run/docker.sock
5
+
6
+ENV GOPATH /go
7
+
8
+# Set our command
9
+ENTRYPOINT ["/usr/local/bin/consuldock"]
10
+
11
+# Install the packages we need, clean up after them and us
12
+RUN apt-get update \
13
+	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
14
+    && apt-get install -y --no-install-recommends git golang ca-certificates \
15
+    && apt-get clean \
16
+    && rm -rf /var/lib/apt/lists \
17
+
18
+	&& go get -v github.com/brimstone/consuldock \
19
+    && mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \
20
+
21
+	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
22
+	&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
23
+	&& rm /tmp/dpkg.* \
24
+	&& rm -rf $GOPATH
0 25
new file mode 100644
... ...
@@ -0,0 +1,5 @@
0
+(from "brimstone/ubuntu:14.04")
1
+(maintainer "brimstone@the.narro.ws")
2
+(env "GOPATH" "/go")
3
+(entrypoint "/usr/local/bin/consuldock")
4
+(run "apt-get update && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends git golang ca-certificates && apt-get clean && rm -rf /var/lib/apt/lists && go get -v github.com/brimstone/consuldock && mv $GOPATH/bin/consuldock /usr/local/bin/consuldock && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty && apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') && rm /tmp/dpkg.* && rm -rf $GOPATH")
0 5
\ No newline at end of file
1 6
new file mode 100644
... ...
@@ -0,0 +1,52 @@
0
+FROM brimstone/ubuntu:14.04
1
+
2
+CMD []
3
+
4
+ENTRYPOINT ["/usr/bin/consul", "agent", "-server", "-data-dir=/consul", "-client=0.0.0.0", "-ui-dir=/webui"]
5
+
6
+EXPOSE 8500 8600 8400 8301 8302
7
+
8
+RUN apt-get update \
9
+    && apt-get install -y unzip wget \
10
+	&& apt-get clean \
11
+	&& rm -rf /var/lib/apt/lists
12
+
13
+RUN cd /tmp \
14
+    && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
15
+       -O web_ui.zip \
16
+    && unzip web_ui.zip \
17
+    && mv dist /webui \
18
+    && rm web_ui.zip
19
+
20
+RUN apt-get update \
21
+	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
22
+    && apt-get install -y --no-install-recommends unzip wget \
23
+    && apt-get clean \
24
+    && rm -rf /var/lib/apt/lists \
25
+
26
+    && cd /tmp \
27
+    && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
28
+       -O web_ui.zip \
29
+    && unzip web_ui.zip \
30
+    && mv dist /webui \
31
+    && rm web_ui.zip \
32
+
33
+	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
34
+	&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
35
+	&& rm /tmp/dpkg.*
36
+
37
+ENV GOPATH /go
38
+
39
+RUN apt-get update \
40
+	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
41
+    && apt-get install -y --no-install-recommends git golang ca-certificates build-essential \
42
+    && apt-get clean \
43
+    && rm -rf /var/lib/apt/lists \
44
+
45
+	&& go get -v github.com/hashicorp/consul \
46
+	&& mv $GOPATH/bin/consul /usr/bin/consul \
47
+
48
+	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
49
+	&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
50
+	&& rm /tmp/dpkg.* \
51
+	&& rm -rf $GOPATH
0 52
new file mode 100644
... ...
@@ -0,0 +1,9 @@
0
+(from "brimstone/ubuntu:14.04")
1
+(cmd)
2
+(entrypoint "/usr/bin/consul" "agent" "-server" "-data-dir=/consul" "-client=0.0.0.0" "-ui-dir=/webui")
3
+(expose "8500" "8600" "8400" "8301" "8302")
4
+(run "apt-get update && apt-get install -y unzip wget && apt-get clean && rm -rf /var/lib/apt/lists")
5
+(run "cd /tmp && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip -O web_ui.zip && unzip web_ui.zip && mv dist /webui && rm web_ui.zip")
6
+(run "apt-get update && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends unzip wget && apt-get clean && rm -rf /var/lib/apt/lists && cd /tmp && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip -O web_ui.zip && unzip web_ui.zip && mv dist /webui && rm web_ui.zip && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty && apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') && rm /tmp/dpkg.*")
7
+(env "GOPATH" "/go")
8
+(run "apt-get update && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends git golang ca-certificates build-essential && apt-get clean && rm -rf /var/lib/apt/lists && go get -v github.com/hashicorp/consul && mv $GOPATH/bin/consul /usr/bin/consul && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty && apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') && rm /tmp/dpkg.* && rm -rf $GOPATH")
0 9
\ No newline at end of file
1 10
new file mode 100644
... ...
@@ -0,0 +1,54 @@
0
+FROM cpuguy83/ubuntu
1
+ENV NAGIOS_HOME /opt/nagios
2
+ENV NAGIOS_USER nagios
3
+ENV NAGIOS_GROUP nagios
4
+ENV NAGIOS_CMDUSER nagios
5
+ENV NAGIOS_CMDGROUP nagios
6
+ENV NAGIOSADMIN_USER nagiosadmin
7
+ENV NAGIOSADMIN_PASS nagios
8
+ENV APACHE_RUN_USER nagios
9
+ENV APACHE_RUN_GROUP nagios
10
+ENV NAGIOS_TIMEZONE UTC
11
+
12
+RUN sed -i 's/universe/universe multiverse/' /etc/apt/sources.list
13
+RUN apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx
14
+RUN ( egrep -i  "^${NAGIOS_GROUP}" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i "^${NAGIOS_CMDGROUP}" /etc/group || groupadd $NAGIOS_CMDGROUP )
15
+RUN ( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )
16
+
17
+ADD http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3 /tmp/nagios.tar.gz
18
+RUN cd /tmp && tar -zxvf nagios.tar.gz && cd nagios  && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf
19
+ADD http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz /tmp/
20
+RUN cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install
21
+
22
+RUN sed -i.bak 's/.*\=www\-data//g' /etc/apache2/envvars
23
+RUN export DOC_ROOT="DocumentRoot $(echo $NAGIOS_HOME/share)"; sed -i "s,DocumentRoot.*,$DOC_ROOT," /etc/apache2/sites-enabled/000-default
24
+
25
+RUN ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo
26
+
27
+RUN echo "use_timezone=$NAGIOS_TIMEZONE" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo "SetEnv TZ \"${NAGIOS_TIMEZONE}\"" >> /etc/apache2/conf.d/nagios.conf
28
+
29
+RUN mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs
30
+RUN echo "cfg_dir=${NAGIOS_HOME}/etc/conf.d" >> ${NAGIOS_HOME}/etc/nagios.cfg
31
+RUN echo "cfg_dir=${NAGIOS_HOME}/etc/monitor" >> ${NAGIOS_HOME}/etc/nagios.cfg
32
+RUN download-mibs && echo "mibs +ALL" > /etc/snmp/snmp.conf
33
+
34
+RUN sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg && \
35
+  sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg
36
+RUN cp /etc/services /var/spool/postfix/etc/
37
+
38
+RUN mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix
39
+ADD nagios.init /etc/sv/nagios/run
40
+ADD apache.init /etc/sv/apache/run
41
+ADD postfix.init /etc/sv/postfix/run
42
+ADD postfix.stop /etc/sv/postfix/finish
43
+
44
+ADD start.sh /usr/local/bin/start_nagios
45
+
46
+ENV APACHE_LOCK_DIR /var/run
47
+ENV APACHE_LOG_DIR /var/log/apache2
48
+
49
+EXPOSE 80
50
+
51
+VOLUME ["/opt/nagios/var", "/opt/nagios/etc", "/opt/nagios/libexec", "/var/log/apache2", "/usr/share/snmp/mibs"]
52
+
53
+CMD ["/usr/local/bin/start_nagios"]
0 54
new file mode 100644
... ...
@@ -0,0 +1,40 @@
0
+(from "cpuguy83/ubuntu")
1
+(env "NAGIOS_HOME" "/opt/nagios")
2
+(env "NAGIOS_USER" "nagios")
3
+(env "NAGIOS_GROUP" "nagios")
4
+(env "NAGIOS_CMDUSER" "nagios")
5
+(env "NAGIOS_CMDGROUP" "nagios")
6
+(env "NAGIOSADMIN_USER" "nagiosadmin")
7
+(env "NAGIOSADMIN_PASS" "nagios")
8
+(env "APACHE_RUN_USER" "nagios")
9
+(env "APACHE_RUN_GROUP" "nagios")
10
+(env "NAGIOS_TIMEZONE" "UTC")
11
+(run "sed -i 's/universe/universe multiverse/' /etc/apt/sources.list")
12
+(run "apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx")
13
+(run "( egrep -i  \"^${NAGIOS_GROUP}\" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i \"^${NAGIOS_CMDGROUP}\" /etc/group || groupadd $NAGIOS_CMDGROUP )")
14
+(run "( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )")
15
+(add "http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3" "/tmp/nagios.tar.gz")
16
+(run "cd /tmp && tar -zxvf nagios.tar.gz && cd nagios  && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf")
17
+(add "http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz" "/tmp/")
18
+(run "cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install")
19
+(run "sed -i.bak 's/.*\\=www\\-data//g' /etc/apache2/envvars")
20
+(run "export DOC_ROOT=\"DocumentRoot $(echo $NAGIOS_HOME/share)\"; sed -i \"s,DocumentRoot.*,$DOC_ROOT,\" /etc/apache2/sites-enabled/000-default")
21
+(run "ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo")
22
+(run "echo \"use_timezone=$NAGIOS_TIMEZONE\" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo \"SetEnv TZ \\\"${NAGIOS_TIMEZONE}\\\"\" >> /etc/apache2/conf.d/nagios.conf")
23
+(run "mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs")
24
+(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/conf.d\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
25
+(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/monitor\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
26
+(run "download-mibs && echo \"mibs +ALL\" > /etc/snmp/snmp.conf")
27
+(run "sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg && sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg")
28
+(run "cp /etc/services /var/spool/postfix/etc/")
29
+(run "mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix")
30
+(add "nagios.init" "/etc/sv/nagios/run")
31
+(add "apache.init" "/etc/sv/apache/run")
32
+(add "postfix.init" "/etc/sv/postfix/run")
33
+(add "postfix.stop" "/etc/sv/postfix/finish")
34
+(add "start.sh" "/usr/local/bin/start_nagios")
35
+(env "APACHE_LOCK_DIR" "/var/run")
36
+(env "APACHE_LOG_DIR" "/var/log/apache2")
37
+(expose "80")
38
+(volume "/opt/nagios/var" "/opt/nagios/etc" "/opt/nagios/libexec" "/var/log/apache2" "/usr/share/snmp/mibs")
39
+(cmd "/usr/local/bin/start_nagios")
0 40
\ No newline at end of file
1 41
new file mode 100644
... ...
@@ -0,0 +1,105 @@
0
+# This file describes the standard way to build Docker, using docker
1
+#
2
+# Usage:
3
+#
4
+# # Assemble the full dev environment. This is slow the first time.
5
+# docker build -t docker .
6
+#
7
+# # Mount your source in an interactive container for quick testing:
8
+# docker run -v `pwd`:/go/src/github.com/docker/docker --privileged -i -t docker bash
9
+#
10
+# # Run the test suite:
11
+# docker run --privileged docker hack/make.sh test
12
+#
13
+# # Publish a release:
14
+# docker run --privileged \
15
+#  -e AWS_S3_BUCKET=baz \
16
+#  -e AWS_ACCESS_KEY=foo \
17
+#  -e AWS_SECRET_KEY=bar \
18
+#  -e GPG_PASSPHRASE=gloubiboulga \
19
+#  docker hack/release.sh
20
+#
21
+# Note: Apparmor used to mess with privileged mode, but this is no longer
22
+# the case. Therefore, you don't have to disable it anymore.
23
+#
24
+
25
+docker-version	0.6.1
26
+FROM	ubuntu:14.04
27
+MAINTAINER	Tianon Gravi <admwiggin@gmail.com> (@tianon)
28
+
29
+# Packaged dependencies
30
+RUN	apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq \
31
+	apt-utils \
32
+	aufs-tools \
33
+	automake \
34
+	btrfs-tools \
35
+	build-essential \
36
+	curl \
37
+	dpkg-sig \
38
+	git \
39
+	iptables \
40
+	libapparmor-dev \
41
+	libcap-dev \
42
+	libsqlite3-dev \
43
+	lxc=1.0* \
44
+	mercurial \
45
+	pandoc \
46
+	parallel \
47
+	reprepro \
48
+	ruby1.9.1 \
49
+	ruby1.9.1-dev \
50
+	s3cmd=1.1.0* \
51
+	--no-install-recommends
52
+
53
+# Get lvm2 source for compiling statically
54
+RUN	git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103
55
+# see https://git.fedorahosted.org/cgit/lvm2.git/refs/tags for release tags
56
+# note: we don't use "git clone -b" above because it then spews big nasty warnings about 'detached HEAD' state that we can't silence as easily as we can silence them using "git checkout" directly
57
+
58
+# Compile and install lvm2
59
+RUN	cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper
60
+# see https://git.fedorahosted.org/cgit/lvm2.git/tree/INSTALL
61
+
62
+# Install Go
63
+RUN	curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz
64
+ENV	PATH	/usr/local/go/bin:$PATH
65
+ENV	GOPATH	/go:/go/src/github.com/docker/docker/vendor
66
+RUN	cd /usr/local/go/src && ./make.bash --no-clean 2>&1
67
+
68
+# Compile Go for cross compilation
69
+ENV	DOCKER_CROSSPLATFORMS	\
70
+	linux/386 linux/arm \
71
+	darwin/amd64 darwin/386 \
72
+	freebsd/amd64 freebsd/386 freebsd/arm
73
+# (set an explicit GOARM of 5 for maximum compatibility)
74
+ENV	GOARM	5
75
+RUN	cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'
76
+
77
+# Grab Go's cover tool for dead-simple code coverage testing
78
+RUN	go get code.google.com/p/go.tools/cmd/cover
79
+
80
+# TODO replace FPM with some very minimal debhelper stuff
81
+RUN	gem install --no-rdoc --no-ri fpm --version 1.0.2
82
+
83
+# Get the "busybox" image source so we can build locally instead of pulling
84
+RUN	git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox
85
+
86
+# Setup s3cmd config
87
+RUN	/bin/echo -e '[default]\naccess_key=$AWS_ACCESS_KEY\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg
88
+
89
+# Set user.email so crosbymichael's in-container merge commits go smoothly
90
+RUN	git config --global user.email 'docker-dummy@example.com'
91
+
92
+# Add an unprivileged user to be used for tests which need it
93
+RUN groupadd -r docker
94
+RUN useradd --create-home --gid docker unprivilegeduser
95
+
96
+VOLUME	/var/lib/docker
97
+WORKDIR	/go/src/github.com/docker/docker
98
+ENV	DOCKER_BUILDTAGS	apparmor selinux
99
+
100
+# Wrap all commands in the "docker-in-docker" script to allow nested containers
101
+ENTRYPOINT	["hack/dind"]
102
+
103
+# Upload docker source
104
+COPY	.	/go/src/github.com/docker/docker
0 105
new file mode 100644
... ...
@@ -0,0 +1,25 @@
0
+(docker-version "0.6.1")
1
+(from "ubuntu:14.04")
2
+(maintainer "Tianon Gravi <admwiggin@gmail.com> (@tianon)")
3
+(run "apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq apt-utils aufs-tools automake btrfs-tools build-essential curl dpkg-sig git iptables libapparmor-dev libcap-dev libsqlite3-dev lxc=1.0* mercurial pandoc parallel reprepro ruby1.9.1 ruby1.9.1-dev s3cmd=1.1.0* --no-install-recommends")
4
+(run "git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103")
5
+(run "cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper")
6
+(run "curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz")
7
+(env "PATH" "/usr/local/go/bin:$PATH")
8
+(env "GOPATH" "/go:/go/src/github.com/docker/docker/vendor")
9
+(run "cd /usr/local/go/src && ./make.bash --no-clean 2>&1")
10
+(env "DOCKER_CROSSPLATFORMS" "linux/386 linux/arm darwin/amd64 darwin/386 freebsd/amd64 freebsd/386 freebsd/arm")
11
+(env "GOARM" "5")
12
+(run "cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'")
13
+(run "go get code.google.com/p/go.tools/cmd/cover")
14
+(run "gem install --no-rdoc --no-ri fpm --version 1.0.2")
15
+(run "git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox")
16
+(run "/bin/echo -e '[default]\\naccess_key=$AWS_ACCESS_KEY\\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg")
17
+(run "git config --global user.email 'docker-dummy@example.com'")
18
+(run "groupadd -r docker")
19
+(run "useradd --create-home --gid docker unprivilegeduser")
20
+(volume "/var/lib/docker")
21
+(workdir "/go/src/github.com/docker/docker")
22
+(env "DOCKER_BUILDTAGS" "apparmor selinux")
23
+(entrypoint "hack/dind")
24
+(copy "." "/go/src/github.com/docker/docker")
0 25
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,8 @@
+FROM ubuntu:14.04
+MAINTAINER Erik \\Hollensbe <erik@hollensbe.org>\"
+
+RUN apt-get \update && \
+  apt-get \"install znc -y
+ADD \conf\\" /.znc
+
+CMD [ "\/usr\\\"/bin/znc", "-f", "-r" ]
new file mode 100644
@@ -0,0 +1,5 @@
+(from "ubuntu:14.04")
+(maintainer "Erik \\\\Hollensbe <erik@hollensbe.org>\\\"")
+(run "apt-get \\update && apt-get \\\"install znc -y")
+(add "\\conf\\\\\"" "/.znc")
+(cmd "/usr\\\"/bin/znc" "-f" "-r")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,15 @@
+FROM ubuntu:14.04
+
+RUN apt-get update && apt-get install wget -y
+RUN wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb
+RUN dpkg -i influxdb_latest_amd64.deb
+RUN rm -r /opt/influxdb/shared
+
+VOLUME /opt/influxdb/shared
+
+CMD /usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml
+
+EXPOSE 8083
+EXPOSE 8086
+EXPOSE 8090
+EXPOSE 8099
new file mode 100644
@@ -0,0 +1,11 @@
+(from "ubuntu:14.04")
+(run "apt-get update && apt-get install wget -y")
+(run "wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb")
+(run "dpkg -i influxdb_latest_amd64.deb")
+(run "rm -r /opt/influxdb/shared")
+(volume "/opt/influxdb/shared")
+(cmd "/usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml")
+(expose "8083")
+(expose "8086")
+(expose "8090")
+(expose "8099")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,7 @@
+FROM ubuntu:14.04
+MAINTAINER James Turnbull "james@example.com"
+ENV REFRESHED_AT 2014-06-01
+RUN apt-get update
+RUN apt-get -y install redis-server redis-tools
+EXPOSE 6379
+ENTRYPOINT [ "/usr/bin/redis-server" ]
new file mode 100644
@@ -0,0 +1,7 @@
+(from "ubuntu:14.04")
+(maintainer "James Turnbull \"james@example.com\"")
+(env "REFRESHED_AT" "2014-06-01")
+(run "apt-get update")
+(run "apt-get -y install redis-server redis-tools")
+(expose "6379")
+(entrypoint "/usr/bin/redis-server")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,48 @@
+FROM busybox:buildroot-2014.02
+
+MAINTAINER docker <docker@docker.io>
+
+ONBUILD RUN ["echo", "test"]
+ONBUILD RUN echo test
+ONBUILD COPY . /
+
+
+# RUN Commands \
+# linebreak in comment \
+RUN ["ls", "-la"]
+RUN ["echo", "'1234'"]
+RUN echo "1234"
+RUN echo 1234
+RUN echo '1234' && \
+    echo "456" && \
+    echo 789
+RUN    sh -c 'echo root:testpass \
+        > /tmp/passwd'
+RUN mkdir -p /test /test2 /test3/test
+
+# ENV \
+ENV SCUBA 1 DUBA 3
+ENV SCUBA "1 DUBA 3"
+
+# CMD \
+CMD ["echo", "test"]
+CMD echo test
+CMD echo "test"
+CMD echo 'test'
+CMD echo 'test' | wc -
+
+#EXPOSE\
+EXPOSE 3000
+EXPOSE 9000 5000 6000
+
+USER docker
+USER docker:root
+
+VOLUME ["/test"]
+VOLUME ["/test", "/test2"]
+VOLUME /test3
+
+WORKDIR /test
+
+ADD . /
+COPY . copy
new file mode 100644
@@ -0,0 +1,29 @@
+(from "busybox:buildroot-2014.02")
+(maintainer "docker <docker@docker.io>")
+(onbuild (run "echo" "test"))
+(onbuild (run "echo test"))
+(onbuild (copy "." "/"))
+(run "ls" "-la")
+(run "echo" "'1234'")
+(run "echo \"1234\"")
+(run "echo 1234")
+(run "echo '1234' && echo \"456\" && echo 789")
+(run "sh -c 'echo root:testpass > /tmp/passwd'")
+(run "mkdir -p /test /test2 /test3/test")
+(env "SCUBA" "1 DUBA 3")
+(env "SCUBA" "\"1 DUBA 3\"")
+(cmd "echo" "test")
+(cmd "echo test")
+(cmd "echo \"test\"")
+(cmd "echo 'test'")
+(cmd "echo 'test' | wc -")
+(expose "3000")
+(expose "9000" "5000" "6000")
+(user "docker")
+(user "docker:root")
+(volume "/test")
+(volume "/test" "/test2")
+(volume "/test3")
+(workdir "/test")
+(add "." "/")
+(copy "." "copy")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,16 @@
+FROM ubuntu:14.04
+
+RUN apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y
+ADD .muttrc /
+ADD .offlineimaprc /
+ADD .tmux.conf /
+ADD mutt /.mutt
+ADD vim /.vim
+ADD vimrc /.vimrc
+ADD crontab /etc/crontab
+RUN chmod 644 /etc/crontab
+RUN mkdir /Mail
+RUN mkdir /.offlineimap
+RUN echo "export TERM=screen-256color" >/.zshenv
+
+CMD setsid cron; tmux -2
new file mode 100644
@@ -0,0 +1,14 @@
+(from "ubuntu:14.04")
+(run "apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y")
+(add ".muttrc" "/")
+(add ".offlineimaprc" "/")
+(add ".tmux.conf" "/")
+(add "mutt" "/.mutt")
+(add "vim" "/.vim")
+(add "vimrc" "/.vimrc")
+(add "crontab" "/etc/crontab")
+(run "chmod 644 /etc/crontab")
+(run "mkdir /Mail")
+(run "mkdir /.offlineimap")
+(run "echo \"export TERM=screen-256color\" >/.zshenv")
+(cmd "setsid cron; tmux -2")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,7 @@
+FROM ubuntu:14.04
+
+RUN apt-get update && apt-get install libcap2-bin mumble-server -y
+
+ADD ./mumble-server.ini /etc/mumble-server.ini
+
+CMD /usr/sbin/murmurd
new file mode 100644
@@ -0,0 +1,4 @@
+(from "ubuntu:14.04")
+(run "apt-get update && apt-get install libcap2-bin mumble-server -y")
+(add "./mumble-server.ini" "/etc/mumble-server.ini")
+(cmd "/usr/sbin/murmurd")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,14 @@
+FROM ubuntu:14.04
+MAINTAINER Erik Hollensbe <erik@hollensbe.org>
+
+RUN apt-get update && apt-get install nginx-full -y
+RUN rm -rf /etc/nginx
+ADD etc /etc/nginx
+RUN chown -R root:root /etc/nginx
+RUN /usr/sbin/nginx -qt
+RUN mkdir /www
+
+CMD ["/usr/sbin/nginx"]
+
+VOLUME /www
+EXPOSE 80
new file mode 100644
@@ -0,0 +1,11 @@
+(from "ubuntu:14.04")
+(maintainer "Erik Hollensbe <erik@hollensbe.org>")
+(run "apt-get update && apt-get install nginx-full -y")
+(run "rm -rf /etc/nginx")
+(add "etc" "/etc/nginx")
+(run "chown -R root:root /etc/nginx")
+(run "/usr/sbin/nginx -qt")
+(run "mkdir /www")
+(cmd "/usr/sbin/nginx")
+(volume "/www")
+(expose "80")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,23 @@
+FROM ubuntu:12.04
+
+EXPOSE 27015
+EXPOSE 27005
+EXPOSE 26901
+EXPOSE 27020
+
+RUN apt-get update && apt-get install libc6-dev-i386 curl unzip -y
+RUN mkdir -p /steam
+RUN curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam
+ADD ./script /steam/script
+RUN /steam/steamcmd.sh +runscript /steam/script
+RUN curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf
+RUN curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf
+ADD ./server.cfg /steam/tf2/tf/cfg/server.cfg
+ADD ./ctf_2fort.cfg /steam/tf2/tf/cfg/ctf_2fort.cfg
+ADD ./sourcemod.cfg /steam/tf2/tf/cfg/sourcemod/sourcemod.cfg
+RUN rm -r /steam/tf2/tf/addons/sourcemod/configs
+ADD ./configs /steam/tf2/tf/addons/sourcemod/configs
+RUN mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en
+RUN cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en
+
+CMD cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill
new file mode 100644
@@ -0,0 +1,20 @@
+(from "ubuntu:12.04")
+(expose "27015")
+(expose "27005")
+(expose "26901")
+(expose "27020")
+(run "apt-get update && apt-get install libc6-dev-i386 curl unzip -y")
+(run "mkdir -p /steam")
+(run "curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam")
+(add "./script" "/steam/script")
+(run "/steam/steamcmd.sh +runscript /steam/script")
+(run "curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf")
+(run "curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf")
+(add "./server.cfg" "/steam/tf2/tf/cfg/server.cfg")
+(add "./ctf_2fort.cfg" "/steam/tf2/tf/cfg/ctf_2fort.cfg")
+(add "./sourcemod.cfg" "/steam/tf2/tf/cfg/sourcemod/sourcemod.cfg")
+(run "rm -r /steam/tf2/tf/addons/sourcemod/configs")
+(add "./configs" "/steam/tf2/tf/addons/sourcemod/configs")
+(run "mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en")
+(run "cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en")
+(cmd "cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,9 @@
+FROM ubuntu:14.04
+
+RUN apt-get update -qy && apt-get install tmux zsh weechat-curses -y
+
+ADD .weechat /.weechat
+ADD .tmux.conf /
+RUN echo "export TERM=screen-256color" >/.zshenv
+
+CMD zsh -c weechat
new file mode 100644
@@ -0,0 +1,6 @@
+(from "ubuntu:14.04")
+(run "apt-get update -qy && apt-get install tmux zsh weechat-curses -y")
+(add ".weechat" "/.weechat")
+(add ".tmux.conf" "/")
+(run "echo \"export TERM=screen-256color\" >/.zshenv")
+(cmd "zsh -c weechat")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,7 @@
+FROM ubuntu:14.04
+MAINTAINER Erik Hollensbe <erik@hollensbe.org>
+
+RUN apt-get update && apt-get install znc -y
+ADD conf /.znc
+
+CMD [ "/usr/bin/znc", "-f", "-r" ]
new file mode 100644
@@ -0,0 +1,5 @@
+(from "ubuntu:14.04")
+(maintainer "Erik Hollensbe <erik@hollensbe.org>")
+(run "apt-get update && apt-get install znc -y")
+(add "conf" "/.znc")
+(cmd "/usr/bin/znc" "-f" "-r")
\ No newline at end of file
new file mode 100644
@@ -0,0 +1,90 @@
+package parser
+
+import (
+	"fmt"
+	"strings"
+)
+
+// QuoteString walks characters (after trimming), escapes any quotes and
+// escapes, then wraps the whole thing in quotes. Very useful for generating
+// argument output in nodes.
+func QuoteString(str string) string {
+	result := ""
+	chars := strings.Split(strings.TrimSpace(str), "")
+
+	for _, char := range chars {
+		switch char {
+		case `"`:
+			result += `\"`
+		case `\`:
+			result += `\\`
+		default:
+			result += char
+		}
+	}
+
+	return `"` + result + `"`
+}
+
+// dumps the AST defined by `node` as a list of sexps. Returns a string
+// suitable for printing.
+func (node *Node) Dump() string {
+	str := ""
+	str += node.Value
+
+	for _, n := range node.Children {
+		str += "(" + n.Dump() + ")\n"
+	}
+
+	if node.Next != nil {
+		for n := node.Next; n != nil; n = n.Next {
+			if len(n.Children) > 0 {
+				str += " " + n.Dump()
+			} else {
+				str += " " + n.Value
+			}
+		}
+	}
+
+	return strings.TrimSpace(str)
+}
+
+// performs the dispatch based on the two primal strings, cmd and args. Please
+// look at the dispatch table in parser.go to see how these dispatchers work.
+func fullDispatch(cmd, args string) (*Node, error) {
+	if _, ok := dispatch[cmd]; !ok {
+		return nil, fmt.Errorf("'%s' is not a valid dockerfile command", cmd)
+	}
+
+	sexp, err := dispatch[cmd](args)
+	if err != nil {
+		return nil, err
+	}
+
+	return sexp, nil
+}
+
+// splitCommand takes a single line of text and parses out the cmd and args,
+// which are used for dispatching to more exact parsing functions.
+func splitCommand(line string) (string, string) {
+	cmdline := TOKEN_WHITESPACE.Split(line, 2)
+	cmd := strings.ToLower(cmdline[0])
+	if len(cmdline) != 2 {
+		// no arguments were supplied (e.g. a bare instruction); avoid indexing
+		// past the end of the slice.
+		return cmd, ""
+	}
+	// the cmd should never have whitespace, but it's possible for the args to
+	// have trailing whitespace.
+	return cmd, strings.TrimSpace(cmdline[1])
+}
+
+// covers comments and empty lines. Lines should be trimmed before passing to
+// this function.
+func stripComments(line string) string {
+	// string is already trimmed at this point
+	if TOKEN_COMMENT.MatchString(line) {
+		return TOKEN_COMMENT.ReplaceAllString(line, "")
+	}
+
+	return line
+}