10 "github.com/hashicorp/hcl/hcl/token"
// f100 is a 100-byte run of 'f' used to build long-token fixtures
// (long comments, strings, and hex numbers) in the tables below.
var f100 = strings.Repeat("f", 100)
// tokenPair couples an expected token type with the literal source text
// that should scan to it. (Field declarations are elided in this view;
// usage below shows fields named tok and text — confirm against full file.)
type tokenPair struct {
// tokenLists maps a category name to the table of (token type, literal)
// fixtures exercised by the per-category tests below. Several section
// headers, entries, and closing braces are elided in this view; the
// visible entries are reproduced in order.
var tokenLists = map[string][]tokenPair{
	"comment": []tokenPair{
		// Line comments in both "//" and "#" forms; nested comment
		// markers inside a line comment are plain text.
		{token.COMMENT, "//"},
		{token.COMMENT, "////"},
		{token.COMMENT, "// comment"},
		{token.COMMENT, "// /* comment */"},
		{token.COMMENT, "// // comment //"},
		{token.COMMENT, "//" + f100},
		{token.COMMENT, "##"},
		{token.COMMENT, "# comment"},
		{token.COMMENT, "# /* comment */"},
		{token.COMMENT, "# # comment #"},
		{token.COMMENT, "#" + f100},
		// Block comments; "/* /* comment */" checks that block
		// comments do not nest.
		{token.COMMENT, "/**/"},
		{token.COMMENT, "/***/"},
		{token.COMMENT, "/* comment */"},
		{token.COMMENT, "/* // comment */"},
		{token.COMMENT, "/* /* comment */"},
		{token.COMMENT, "/*\n comment\n*/"},
		{token.COMMENT, "/*" + f100 + "*/"},
	"operator": []tokenPair{
		// (operator entries elided in this view; the entries below
		// belong to elided "bool"/"ident" sections — confirm against
		// the full file.)
		{token.BOOL, "false"},
		// Identifiers, including hyphens, trailing/leading
		// underscores, and non-ASCII letters and digits.
		{token.IDENT, "foobar"},
		{token.IDENT, "foo-bar"},
		{token.IDENT, "abc123"},
		{token.IDENT, "LGTM"},
		{token.IDENT, "_abc123"},
		{token.IDENT, "abc123_"},
		{token.IDENT, "_abc_123_"},
		{token.IDENT, "_äöü"},
		{token.IDENT, "a۰۱۸"},
		{token.IDENT, "foo६४"},
		{token.IDENT, "bar9876"},
	"heredoc": []tokenPair{
		// Heredocs terminated by a bare delimiter line.
		{token.HEREDOC, "<<EOF\nhello\nworld\nEOF"},
		{token.HEREDOC, "<<EOF123\nhello\nworld\nEOF123"},
	"string": []tokenPair{
		// Quoted strings: interpolation syntax, every escape form,
		// octal/hex/unicode escapes, and a long body.
		{token.STRING, `" "`},
		{token.STRING, `"a"`},
		{token.STRING, `"本"`},
		{token.STRING, `"${file("foo")}"`},
		{token.STRING, `"${file(\"foo\")}"`},
		{token.STRING, `"\a"`},
		{token.STRING, `"\b"`},
		{token.STRING, `"\f"`},
		{token.STRING, `"\n"`},
		{token.STRING, `"\r"`},
		{token.STRING, `"\t"`},
		{token.STRING, `"\v"`},
		{token.STRING, `"\""`},
		{token.STRING, `"\000"`},
		{token.STRING, `"\777"`},
		{token.STRING, `"\x00"`},
		{token.STRING, `"\xff"`},
		{token.STRING, `"\u0000"`},
		{token.STRING, `"\ufA16"`},
		{token.STRING, `"\U00000000"`},
		{token.STRING, `"\U0000ffAB"`},
		{token.STRING, `"` + f100 + `"`},
	"number": []tokenPair{
		// Integers: decimal, octal-looking (leading zero), hex in
		// both 0x/0X forms, and negated variants of each.
		{token.NUMBER, "42"},
		{token.NUMBER, "1234567890"},
		{token.NUMBER, "00"},
		{token.NUMBER, "01"},
		{token.NUMBER, "07"},
		{token.NUMBER, "042"},
		{token.NUMBER, "01234567"},
		{token.NUMBER, "0x0"},
		{token.NUMBER, "0x1"},
		{token.NUMBER, "0xf"},
		{token.NUMBER, "0x42"},
		{token.NUMBER, "0x123456789abcDEF"},
		{token.NUMBER, "0x" + f100},
		{token.NUMBER, "0X0"},
		{token.NUMBER, "0X1"},
		{token.NUMBER, "0XF"},
		{token.NUMBER, "0X42"},
		{token.NUMBER, "0X123456789abcDEF"},
		{token.NUMBER, "0X" + f100},
		{token.NUMBER, "-0"},
		{token.NUMBER, "-1"},
		{token.NUMBER, "-9"},
		{token.NUMBER, "-42"},
		{token.NUMBER, "-1234567890"},
		{token.NUMBER, "-00"},
		{token.NUMBER, "-01"},
		{token.NUMBER, "-07"},
		{token.NUMBER, "-29"},
		{token.NUMBER, "-042"},
		{token.NUMBER, "-01234567"},
		{token.NUMBER, "-0x0"},
		{token.NUMBER, "-0x1"},
		{token.NUMBER, "-0xf"},
		{token.NUMBER, "-0x42"},
		{token.NUMBER, "-0x123456789abcDEF"},
		{token.NUMBER, "-0x" + f100},
		{token.NUMBER, "-0X0"},
		{token.NUMBER, "-0X1"},
		{token.NUMBER, "-0XF"},
		{token.NUMBER, "-0X42"},
		{token.NUMBER, "-0X123456789abcDEF"},
		{token.NUMBER, "-0X" + f100},
	"float": []tokenPair{
		// Floats: trailing/leading dot, exponents in e/E form with
		// optional signs, combined mantissa+exponent, and negatives.
		{token.FLOAT, "42."},
		{token.FLOAT, "01234567890."},
		{token.FLOAT, ".42"},
		{token.FLOAT, ".0123456789"},
		{token.FLOAT, "0.0"},
		{token.FLOAT, "1.0"},
		{token.FLOAT, "42.0"},
		{token.FLOAT, "01234567890.0"},
		{token.FLOAT, "0e0"},
		{token.FLOAT, "1e0"},
		{token.FLOAT, "42e0"},
		{token.FLOAT, "01234567890e0"},
		{token.FLOAT, "0E0"},
		{token.FLOAT, "1E0"},
		{token.FLOAT, "42E0"},
		{token.FLOAT, "01234567890E0"},
		{token.FLOAT, "0e+10"},
		{token.FLOAT, "1e-10"},
		{token.FLOAT, "42e+10"},
		{token.FLOAT, "01234567890e-10"},
		{token.FLOAT, "0E+10"},
		{token.FLOAT, "1E-10"},
		{token.FLOAT, "42E+10"},
		{token.FLOAT, "01234567890E-10"},
		{token.FLOAT, "01.8e0"},
		{token.FLOAT, "1.4e0"},
		{token.FLOAT, "42.2e0"},
		{token.FLOAT, "01234567890.12e0"},
		{token.FLOAT, "0.E0"},
		{token.FLOAT, "1.12E0"},
		{token.FLOAT, "42.123E0"},
		{token.FLOAT, "01234567890.213E0"},
		{token.FLOAT, "0.2e+10"},
		{token.FLOAT, "1.2e-10"},
		{token.FLOAT, "42.54e+10"},
		{token.FLOAT, "01234567890.98e-10"},
		{token.FLOAT, "0.1E+10"},
		{token.FLOAT, "1.1E-10"},
		{token.FLOAT, "42.1E+10"},
		{token.FLOAT, "01234567890.1E-10"},
		{token.FLOAT, "-0.0"},
		{token.FLOAT, "-1.0"},
		{token.FLOAT, "-42.0"},
		{token.FLOAT, "-01234567890.0"},
		{token.FLOAT, "-0e0"},
		{token.FLOAT, "-1e0"},
		{token.FLOAT, "-42e0"},
		{token.FLOAT, "-01234567890e0"},
		{token.FLOAT, "-0E0"},
		{token.FLOAT, "-1E0"},
		{token.FLOAT, "-42E0"},
		{token.FLOAT, "-01234567890E0"},
		{token.FLOAT, "-0e+10"},
		{token.FLOAT, "-1e-10"},
		{token.FLOAT, "-42e+10"},
		{token.FLOAT, "-01234567890e-10"},
		{token.FLOAT, "-0E+10"},
		{token.FLOAT, "-1E-10"},
		{token.FLOAT, "-42E+10"},
		{token.FLOAT, "-01234567890E-10"},
		{token.FLOAT, "-01.8e0"},
		{token.FLOAT, "-1.4e0"},
		{token.FLOAT, "-42.2e0"},
		{token.FLOAT, "-01234567890.12e0"},
		{token.FLOAT, "-0.E0"},
		{token.FLOAT, "-1.12E0"},
		{token.FLOAT, "-42.123E0"},
		{token.FLOAT, "-01234567890.213E0"},
		{token.FLOAT, "-0.2e+10"},
		{token.FLOAT, "-1.2e-10"},
		{token.FLOAT, "-42.54e+10"},
		{token.FLOAT, "-01234567890.98e-10"},
		{token.FLOAT, "-0.1E+10"},
		{token.FLOAT, "-1.1E-10"},
		{token.FLOAT, "-42.1E+10"},
		{token.FLOAT, "-01234567890.1E-10"},
// orderedTokenLists fixes the iteration order over tokenLists so that
// TestPosition's expected offsets are deterministic (map iteration order
// is random). Entries are elided in this view.
var orderedTokenLists = []string{
// TestPosition builds a synthetic source containing every fixture — one
// token per line, each prefixed with four tabs — scans it, and checks that
// the reported offset, line, and column advance exactly as computed from
// the fixture text. Intervening lines (scan calls, closing braces) are
// elided in this view.
func TestPosition(t *testing.T) {
	// create artificial source code
	buf := new(bytes.Buffer)

	for _, listName := range orderedTokenLists {
		for _, ident := range tokenLists[listName] {
			fmt.Fprintf(buf, "\t\t\t\t%s\n", ident.text)

	s := New(buf.Bytes())

	// First token starts after the four leading tabs: offset 4, line 1,
	// column 5 (fields presumably Filename, Offset, Line, Column — confirm
	// against token.Pos).
	pos := token.Pos{"", 4, 1, 5}

	for _, listName := range orderedTokenLists {

		for _, k := range tokenLists[listName] {

			// fmt.Printf("[%q] s = %+v:%+v\n", k.text, curPos.Offset, curPos.Column)

			if curPos.Offset != pos.Offset {
				t.Fatalf("offset = %d, want %d for %q", curPos.Offset, pos.Offset, k.text)
			if curPos.Line != pos.Line {
				t.Fatalf("line = %d, want %d for %q", curPos.Line, pos.Line, k.text)
			if curPos.Column != pos.Column {
				t.Fatalf("column = %d, want %d for %q", curPos.Column, pos.Column, k.text)
			pos.Offset += 4 + len(k.text) + 1 // 4 tabs + token bytes + newline
			pos.Line += countNewlines(k.text) + 1 // each token is on a new line

	// make sure there were no token-internal errors reported by scanner
	if s.ErrorCount != 0 {
		t.Errorf("%d errors", s.ErrorCount)
// TestNullChar is a regression test: scanning an unterminated string that
// ends in the escape sequence "\0" must not panic.
func TestNullChar(t *testing.T) {
	s := New([]byte("\"\\0"))
	s.Scan() // Used to panic
// Each test below runs testTokenList over one category of fixtures from
// tokenLists; the token text must round-trip through the scanner
// unchanged. (Closing braces elided in this view.)
func TestComment(t *testing.T) {
	testTokenList(t, tokenLists["comment"])

func TestOperator(t *testing.T) {
	testTokenList(t, tokenLists["operator"])

func TestBool(t *testing.T) {
	testTokenList(t, tokenLists["bool"])

func TestIdent(t *testing.T) {
	testTokenList(t, tokenLists["ident"])

func TestString(t *testing.T) {
	testTokenList(t, tokenLists["string"])

func TestNumber(t *testing.T) {
	testTokenList(t, tokenLists["number"])

func TestFloat(t *testing.T) {
	testTokenList(t, tokenLists["float"])
// TestWindowsLineEndings rewrites a LF-terminated source to CRLF and
// verifies it scans to the expected tokens; note the expected literals
// retain the "\r" (e.g. in the comment and heredoc fixtures). Parts of
// the source literal and fixture list are elided in this view.
func TestWindowsLineEndings(t *testing.T) {
	hcl := `// This should have Windows line endings
resource "aws_instance" "foo" {

	// Convert every LF to CRLF to simulate a Windows-authored file.
	hclWindowsEndings := strings.Replace(hcl, "\n", "\r\n", -1)

	literals := []struct {
		{token.COMMENT, "// This should have Windows line endings\r"},
		{token.IDENT, `resource`},
		{token.STRING, `"aws_instance"`},
		{token.STRING, `"foo"`},
		{token.IDENT, `user_data`},
		{token.HEREDOC, "<<HEREDOC\r\n test script\r\nHEREDOC\r\n"},

	s := New([]byte(hclWindowsEndings))
	for _, l := range literals {
		if l.tokenType != tok.Type {
			t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String())
		if l.literal != tok.Text {
			t.Errorf("got:\n%v\nwant:\n%v\n", []byte(tok.Text), []byte(l.literal))
// TestRealExample scans a realistic Terraform-style configuration and
// compares every scanned token's type and text against the expected list,
// in order. Much of the source literal and several fixture lines are
// elided in this view.
func TestRealExample(t *testing.T) {
	complexHCL := `// This comes from Terraform, as a test
    secret_key = "${replace(var.foo, ".", "\\.")}"
resource "aws_security_group" "firewall" {
resource aws_instance "web" {
        "${aws_security_group.firewall.foo}"

	literals := []struct {
		{token.COMMENT, `// This comes from Terraform, as a test`},
		{token.IDENT, `variable`},
		{token.STRING, `"foo"`},
		{token.IDENT, `default`},
		{token.STRING, `"bar"`},
		{token.IDENT, `description`},
		{token.STRING, `"bar"`},
		{token.IDENT, `provider`},
		{token.STRING, `"aws"`},
		{token.IDENT, `access_key`},
		{token.STRING, `"foo"`},
		{token.IDENT, `secret_key`},
		// Interpolation with escaped quotes/backslashes must survive intact.
		{token.STRING, `"${replace(var.foo, ".", "\\.")}"`},
		{token.IDENT, `resource`},
		{token.STRING, `"aws_security_group"`},
		{token.STRING, `"firewall"`},
		{token.IDENT, `count`},
		{token.IDENT, `resource`},
		// Unquoted block label scans as an IDENT rather than a STRING.
		{token.IDENT, `aws_instance`},
		{token.STRING, `"web"`},
		{token.IDENT, `ami`},
		{token.STRING, `"${var.foo}"`},
		{token.IDENT, `security_groups`},
		{token.STRING, `"foo"`},
		{token.STRING, `"${aws_security_group.firewall.foo}"`},
		{token.IDENT, `network_interface`},
		{token.IDENT, `device_index`},
		{token.IDENT, `description`},
		{token.HEREDOC, "<<EOF\nMain interface\nEOF\n"},
		{token.IDENT, `network_interface`},
		{token.IDENT, `device_index`},
		{token.IDENT, `description`},
		// Indented heredoc ("<<-") keeps its leading tabs in the token text.
		{token.HEREDOC, "<<-EOF\n\t\t\tOuter text\n\t\t\t\tIndented text\n\t\t\tEOF\n"},

	s := New([]byte(complexHCL))
	for _, l := range literals {
		if l.tokenType != tok.Type {
			t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String())
		if l.literal != tok.Text {
			t.Errorf("got:\n%+v\n%s\n want:\n%+v\n%s\n", []byte(tok.String()), tok, []byte(l.literal), l.literal)
// TestScan_crlf checks that a small CRLF-terminated source scans to the
// expected tokens. (Fixture entries for the braces/assign tokens and the
// closing braces are elided in this view.)
func TestScan_crlf(t *testing.T) {
	complexHCL := "foo {\r\n bar = \"baz\"\r\n}\r\n"

	literals := []struct {
		{token.IDENT, `foo`},
		{token.IDENT, `bar`},
		{token.STRING, `"baz"`},

	s := New([]byte(complexHCL))
	for _, l := range literals {
		if l.tokenType != tok.Type {
			t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String())
		if l.literal != tok.Text {
			t.Errorf("got:\n%+v\n%s\n want:\n%+v\n%s\n", []byte(tok.String()), tok, []byte(l.literal), l.literal)
// TestError drives testError with malformed inputs and asserts the
// position, message, and token type the scanner reports for each.
func TestError(t *testing.T) {
	// Invalid UTF-8 at the start of a token, inside an identifier,
	// and inside a string literal.
	testError(t, "\x80", "1:1", "illegal UTF-8 encoding", token.ILLEGAL)
	testError(t, "\xff", "1:1", "illegal UTF-8 encoding", token.ILLEGAL)

	testError(t, "ab\x80", "1:3", "illegal UTF-8 encoding", token.IDENT)
	testError(t, "abc\xff", "1:4", "illegal UTF-8 encoding", token.IDENT)

	testError(t, `"ab`+"\x80", "1:4", "illegal UTF-8 encoding", token.STRING)
	testError(t, `"abc`+"\xff", "1:5", "illegal UTF-8 encoding", token.STRING)

	// Malformed numbers and char literals.
	testError(t, `01238`, "1:6", "illegal octal number", token.NUMBER)
	testError(t, `01238123`, "1:9", "illegal octal number", token.NUMBER)
	testError(t, `0x`, "1:3", "illegal hexadecimal number", token.NUMBER)
	testError(t, `0xg`, "1:3", "illegal hexadecimal number", token.NUMBER)
	testError(t, `'aa'`, "1:1", "illegal char", token.ILLEGAL)

	// Unterminated strings (including one cut off by a newline and one
	// inside an interpolation) and unterminated/malformed comments.
	testError(t, `"`, "1:2", "literal not terminated", token.STRING)
	testError(t, `"abc`, "1:5", "literal not terminated", token.STRING)
	testError(t, `"abc`+"\n", "1:5", "literal not terminated", token.STRING)
	testError(t, `"${abc`+"\n", "2:1", "literal not terminated", token.STRING)
	testError(t, `/*/`, "1:4", "comment not terminated", token.COMMENT)
	testError(t, `/foo`, "1:1", "expected '/' for comment", token.COMMENT)
// testError scans src and asserts that the scanner's Error callback fires
// with the expected position and message, that the scanned token has the
// expected type, and that ErrorCount is non-zero. Several lines of the
// surrounding control flow are elided in this view.
func testError(t *testing.T, src, pos, msg string, tok token.Type) {
	s := New([]byte(src))

	// Install an error handler that validates position and message.
	s.Error = func(p token.Pos, m string) {
		if pos != p.String() {
			t.Errorf("pos = %q, want %q for %q", p, pos, src)
			t.Errorf("msg = %q, want %q for %q", m, msg, src)
		t.Errorf("tok = %s, want %s for %q", tk, tok, src)
		t.Errorf("error handler not called for %q", src)
	if s.ErrorCount == 0 {
		t.Errorf("count = %d, want > 0 for %q", s.ErrorCount, src)
// testTokenList writes each fixture's text on its own line, scans the
// combined source, and checks that every token comes back with the
// expected type and with its text unchanged. (Scan calls and closing
// braces are elided in this view.)
func testTokenList(t *testing.T, tokenList []tokenPair) {
	// create artificial source code
	buf := new(bytes.Buffer)
	for _, ident := range tokenList {
		fmt.Fprintf(buf, "%s\n", ident.text)

	s := New(buf.Bytes())
	for _, ident := range tokenList {
		if tok.Type != ident.tok {
			t.Errorf("tok = %q want %q for %q\n", tok, ident.tok, ident.text)
		if tok.Text != ident.text {
			t.Errorf("text = %q want %q", tok.String(), ident.text)
583 func countNewlines(s string) int {
585 for _, ch := range s {