OSDN Git Service

Hulk did something
[bytom/vapor.git] / vendor / github.com / hashicorp / hcl / hcl / scanner / scanner_test.go
1 package scanner
2
3 import (
4         "bytes"
5         "fmt"
6         "testing"
7
8         "strings"
9
10         "github.com/hashicorp/hcl/hcl/token"
11 )
12
// f100 is a 100-character run of 'f', used to exercise long literals.
var f100 = strings.Repeat("f", 100)
14
// tokenPair couples an expected token type with the literal text that
// should scan to exactly that token.
type tokenPair struct {
	tok  token.Type
	text string
}
19
// tokenLists maps a category name to the token/literal pairs the scanner
// is expected to produce when each literal is scanned on its own.
var tokenLists = map[string][]tokenPair{
	"comment": []tokenPair{
		// line comments: //-style and #-style
		{token.COMMENT, "//"},
		{token.COMMENT, "////"},
		{token.COMMENT, "// comment"},
		{token.COMMENT, "// /* comment */"},
		{token.COMMENT, "// // comment //"},
		{token.COMMENT, "//" + f100},
		{token.COMMENT, "#"},
		{token.COMMENT, "##"},
		{token.COMMENT, "# comment"},
		{token.COMMENT, "# /* comment */"},
		{token.COMMENT, "# # comment #"},
		{token.COMMENT, "#" + f100},
		// block comments (note: /* does not nest)
		{token.COMMENT, "/**/"},
		{token.COMMENT, "/***/"},
		{token.COMMENT, "/* comment */"},
		{token.COMMENT, "/* // comment */"},
		{token.COMMENT, "/* /* comment */"},
		{token.COMMENT, "/*\n comment\n*/"},
		{token.COMMENT, "/*" + f100 + "*/"},
	},
	"operator": []tokenPair{
		{token.LBRACK, "["},
		{token.LBRACE, "{"},
		{token.COMMA, ","},
		{token.PERIOD, "."},
		{token.RBRACK, "]"},
		{token.RBRACE, "}"},
		{token.ASSIGN, "="},
		{token.ADD, "+"},
		{token.SUB, "-"},
	},
	"bool": []tokenPair{
		{token.BOOL, "true"},
		{token.BOOL, "false"},
	},
	"ident": []tokenPair{
		// identifiers may contain digits, dashes, underscores, and
		// non-ASCII letters
		{token.IDENT, "a"},
		{token.IDENT, "a0"},
		{token.IDENT, "foobar"},
		{token.IDENT, "foo-bar"},
		{token.IDENT, "abc123"},
		{token.IDENT, "LGTM"},
		{token.IDENT, "_"},
		{token.IDENT, "_abc123"},
		{token.IDENT, "abc123_"},
		{token.IDENT, "_abc_123_"},
		{token.IDENT, "_äöü"},
		{token.IDENT, "_本"},
		{token.IDENT, "äöü"},
		{token.IDENT, "本"},
		{token.IDENT, "a۰۱۸"},
		{token.IDENT, "foo६४"},
		{token.IDENT, "bar9876"},
	},
	"heredoc": []tokenPair{
		{token.HEREDOC, "<<EOF\nhello\nworld\nEOF"},
		{token.HEREDOC, "<<EOF123\nhello\nworld\nEOF123"},
	},
	"string": []tokenPair{
		{token.STRING, `" "`},
		{token.STRING, `"a"`},
		{token.STRING, `"本"`},
		// interpolation sequences are kept verbatim inside the string
		{token.STRING, `"${file("foo")}"`},
		{token.STRING, `"${file(\"foo\")}"`},
		// single-character escapes
		{token.STRING, `"\a"`},
		{token.STRING, `"\b"`},
		{token.STRING, `"\f"`},
		{token.STRING, `"\n"`},
		{token.STRING, `"\r"`},
		{token.STRING, `"\t"`},
		{token.STRING, `"\v"`},
		{token.STRING, `"\""`},
		// octal, hex, and unicode escapes
		{token.STRING, `"\000"`},
		{token.STRING, `"\777"`},
		{token.STRING, `"\x00"`},
		{token.STRING, `"\xff"`},
		{token.STRING, `"\u0000"`},
		{token.STRING, `"\ufA16"`},
		{token.STRING, `"\U00000000"`},
		{token.STRING, `"\U0000ffAB"`},
		{token.STRING, `"` + f100 + `"`},
	},
	"number": []tokenPair{
		// decimal
		{token.NUMBER, "0"},
		{token.NUMBER, "1"},
		{token.NUMBER, "9"},
		{token.NUMBER, "42"},
		{token.NUMBER, "1234567890"},
		// octal (leading zero)
		{token.NUMBER, "00"},
		{token.NUMBER, "01"},
		{token.NUMBER, "07"},
		{token.NUMBER, "042"},
		{token.NUMBER, "01234567"},
		// hexadecimal, lower- and upper-case prefix
		{token.NUMBER, "0x0"},
		{token.NUMBER, "0x1"},
		{token.NUMBER, "0xf"},
		{token.NUMBER, "0x42"},
		{token.NUMBER, "0x123456789abcDEF"},
		{token.NUMBER, "0x" + f100},
		{token.NUMBER, "0X0"},
		{token.NUMBER, "0X1"},
		{token.NUMBER, "0XF"},
		{token.NUMBER, "0X42"},
		{token.NUMBER, "0X123456789abcDEF"},
		{token.NUMBER, "0X" + f100},
		// negative variants of all of the above
		{token.NUMBER, "-0"},
		{token.NUMBER, "-1"},
		{token.NUMBER, "-9"},
		{token.NUMBER, "-42"},
		{token.NUMBER, "-1234567890"},
		{token.NUMBER, "-00"},
		{token.NUMBER, "-01"},
		{token.NUMBER, "-07"},
		{token.NUMBER, "-29"},
		{token.NUMBER, "-042"},
		{token.NUMBER, "-01234567"},
		{token.NUMBER, "-0x0"},
		{token.NUMBER, "-0x1"},
		{token.NUMBER, "-0xf"},
		{token.NUMBER, "-0x42"},
		{token.NUMBER, "-0x123456789abcDEF"},
		{token.NUMBER, "-0x" + f100},
		{token.NUMBER, "-0X0"},
		{token.NUMBER, "-0X1"},
		{token.NUMBER, "-0XF"},
		{token.NUMBER, "-0X42"},
		{token.NUMBER, "-0X123456789abcDEF"},
		{token.NUMBER, "-0X" + f100},
	},
	"float": []tokenPair{
		// trailing/leading decimal point
		{token.FLOAT, "0."},
		{token.FLOAT, "1."},
		{token.FLOAT, "42."},
		{token.FLOAT, "01234567890."},
		{token.FLOAT, ".0"},
		{token.FLOAT, ".1"},
		{token.FLOAT, ".42"},
		{token.FLOAT, ".0123456789"},
		{token.FLOAT, "0.0"},
		{token.FLOAT, "1.0"},
		{token.FLOAT, "42.0"},
		{token.FLOAT, "01234567890.0"},
		// exponent forms, with and without sign
		{token.FLOAT, "0e0"},
		{token.FLOAT, "1e0"},
		{token.FLOAT, "42e0"},
		{token.FLOAT, "01234567890e0"},
		{token.FLOAT, "0E0"},
		{token.FLOAT, "1E0"},
		{token.FLOAT, "42E0"},
		{token.FLOAT, "01234567890E0"},
		{token.FLOAT, "0e+10"},
		{token.FLOAT, "1e-10"},
		{token.FLOAT, "42e+10"},
		{token.FLOAT, "01234567890e-10"},
		{token.FLOAT, "0E+10"},
		{token.FLOAT, "1E-10"},
		{token.FLOAT, "42E+10"},
		{token.FLOAT, "01234567890E-10"},
		// fraction plus exponent
		{token.FLOAT, "01.8e0"},
		{token.FLOAT, "1.4e0"},
		{token.FLOAT, "42.2e0"},
		{token.FLOAT, "01234567890.12e0"},
		{token.FLOAT, "0.E0"},
		{token.FLOAT, "1.12E0"},
		{token.FLOAT, "42.123E0"},
		{token.FLOAT, "01234567890.213E0"},
		{token.FLOAT, "0.2e+10"},
		{token.FLOAT, "1.2e-10"},
		{token.FLOAT, "42.54e+10"},
		{token.FLOAT, "01234567890.98e-10"},
		{token.FLOAT, "0.1E+10"},
		{token.FLOAT, "1.1E-10"},
		{token.FLOAT, "42.1E+10"},
		{token.FLOAT, "01234567890.1E-10"},
		// negative variants
		{token.FLOAT, "-0.0"},
		{token.FLOAT, "-1.0"},
		{token.FLOAT, "-42.0"},
		{token.FLOAT, "-01234567890.0"},
		{token.FLOAT, "-0e0"},
		{token.FLOAT, "-1e0"},
		{token.FLOAT, "-42e0"},
		{token.FLOAT, "-01234567890e0"},
		{token.FLOAT, "-0E0"},
		{token.FLOAT, "-1E0"},
		{token.FLOAT, "-42E0"},
		{token.FLOAT, "-01234567890E0"},
		{token.FLOAT, "-0e+10"},
		{token.FLOAT, "-1e-10"},
		{token.FLOAT, "-42e+10"},
		{token.FLOAT, "-01234567890e-10"},
		{token.FLOAT, "-0E+10"},
		{token.FLOAT, "-1E-10"},
		{token.FLOAT, "-42E+10"},
		{token.FLOAT, "-01234567890E-10"},
		{token.FLOAT, "-01.8e0"},
		{token.FLOAT, "-1.4e0"},
		{token.FLOAT, "-42.2e0"},
		{token.FLOAT, "-01234567890.12e0"},
		{token.FLOAT, "-0.E0"},
		{token.FLOAT, "-1.12E0"},
		{token.FLOAT, "-42.123E0"},
		{token.FLOAT, "-01234567890.213E0"},
		{token.FLOAT, "-0.2e+10"},
		{token.FLOAT, "-1.2e-10"},
		{token.FLOAT, "-42.54e+10"},
		{token.FLOAT, "-01234567890.98e-10"},
		{token.FLOAT, "-0.1E+10"},
		{token.FLOAT, "-1.1E-10"},
		{token.FLOAT, "-42.1E+10"},
		{token.FLOAT, "-01234567890.1E-10"},
	},
}
234
// orderedTokenLists fixes the order in which the categories from
// tokenLists are concatenated into the synthetic source used by
// TestPosition (map iteration order is random, so the order must be pinned).
var orderedTokenLists = []string{
	"comment",
	"operator",
	"bool",
	"ident",
	"heredoc",
	"string",
	"number",
	"float",
}
245
// TestPosition writes every token literal on its own line (prefixed by
// four tabs) and verifies that the scanner reports the correct offset,
// line, and column for each token.
func TestPosition(t *testing.T) {
	// create artificial source code
	buf := new(bytes.Buffer)

	for _, listName := range orderedTokenLists {
		for _, ident := range tokenLists[listName] {
			fmt.Fprintf(buf, "\t\t\t\t%s\n", ident.text)
		}
	}

	s := New(buf.Bytes())

	// First token starts after the four leading tabs: offset 4, line 1,
	// column 5. (Pos fields: Filename, Offset, Line, Column.)
	pos := token.Pos{"", 4, 1, 5}
	s.Scan()
	for _, listName := range orderedTokenLists {

		for _, k := range tokenLists[listName] {
			// NOTE: reads the scanner's unexported tokPos directly.
			curPos := s.tokPos
			// fmt.Printf("[%q] s = %+v:%+v\n", k.text, curPos.Offset, curPos.Column)

			if curPos.Offset != pos.Offset {
				t.Fatalf("offset = %d, want %d for %q", curPos.Offset, pos.Offset, k.text)
			}
			if curPos.Line != pos.Line {
				t.Fatalf("line = %d, want %d for %q", curPos.Line, pos.Line, k.text)
			}
			if curPos.Column != pos.Column {
				t.Fatalf("column = %d, want %d for %q", curPos.Column, pos.Column, k.text)
			}
			pos.Offset += 4 + len(k.text) + 1     // 4 tabs + token bytes + newline
			pos.Line += countNewlines(k.text) + 1 // each token is on a new line
			s.Scan()
		}
	}
	// make sure there were no token-internal errors reported by scanner
	if s.ErrorCount != 0 {
		t.Errorf("%d errors", s.ErrorCount)
	}
}
285
// TestNullChar is a regression test: scanning an unterminated string
// containing the escape "\0" used to panic the scanner.
func TestNullChar(t *testing.T) {
	s := New([]byte("\"\\0"))
	s.Scan() // Used to panic
}
290
// TestComment checks scanning of the "comment" token list.
func TestComment(t *testing.T) {
	testTokenList(t, tokenLists["comment"])
}
294
// TestOperator checks scanning of the "operator" token list.
func TestOperator(t *testing.T) {
	testTokenList(t, tokenLists["operator"])
}
298
// TestBool checks scanning of the "bool" token list.
func TestBool(t *testing.T) {
	testTokenList(t, tokenLists["bool"])
}
302
// TestIdent checks scanning of the "ident" token list.
func TestIdent(t *testing.T) {
	testTokenList(t, tokenLists["ident"])
}
306
// TestString checks scanning of the "string" token list.
func TestString(t *testing.T) {
	testTokenList(t, tokenLists["string"])
}
310
// TestNumber checks scanning of the "number" token list.
func TestNumber(t *testing.T) {
	testTokenList(t, tokenLists["number"])
}
314
// TestFloat checks scanning of the "float" token list.
func TestFloat(t *testing.T) {
	testTokenList(t, tokenLists["float"])
}
318
319 func TestWindowsLineEndings(t *testing.T) {
320         hcl := `// This should have Windows line endings
321 resource "aws_instance" "foo" {
322     user_data=<<HEREDOC
323     test script
324 HEREDOC
325 }`
326         hclWindowsEndings := strings.Replace(hcl, "\n", "\r\n", -1)
327
328         literals := []struct {
329                 tokenType token.Type
330                 literal   string
331         }{
332                 {token.COMMENT, "// This should have Windows line endings\r"},
333                 {token.IDENT, `resource`},
334                 {token.STRING, `"aws_instance"`},
335                 {token.STRING, `"foo"`},
336                 {token.LBRACE, `{`},
337                 {token.IDENT, `user_data`},
338                 {token.ASSIGN, `=`},
339                 {token.HEREDOC, "<<HEREDOC\r\n    test script\r\nHEREDOC\r\n"},
340                 {token.RBRACE, `}`},
341         }
342
343         s := New([]byte(hclWindowsEndings))
344         for _, l := range literals {
345                 tok := s.Scan()
346
347                 if l.tokenType != tok.Type {
348                         t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String())
349                 }
350
351                 if l.literal != tok.Text {
352                         t.Errorf("got:\n%v\nwant:\n%v\n", []byte(tok.Text), []byte(l.literal))
353                 }
354         }
355 }
356
// TestRealExample scans a realistic Terraform-style configuration and
// checks the full expected token stream, including plain and indented
// (<<-) heredocs. The tabs inside the raw string are significant: the
// indented-heredoc expectation below depends on them exactly.
func TestRealExample(t *testing.T) {
	complexHCL := `// This comes from Terraform, as a test
	variable "foo" {
	    default = "bar"
	    description = "bar"
	}

	provider "aws" {
	  access_key = "foo"
	  secret_key = "${replace(var.foo, ".", "\\.")}"
	}

	resource "aws_security_group" "firewall" {
	    count = 5
	}

	resource aws_instance "web" {
	    ami = "${var.foo}"
	    security_groups = [
	        "foo",
	        "${aws_security_group.firewall.foo}"
	    ]

	    network_interface {
	        device_index = 0
	        description = <<EOF
Main interface
EOF
	    }

		network_interface {
		device_index = 1
		description = <<-EOF
			Outer text
				Indented text
			EOF
		}
	}`

	literals := []struct {
		tokenType token.Type
		literal   string
	}{
		{token.COMMENT, `// This comes from Terraform, as a test`},
		{token.IDENT, `variable`},
		{token.STRING, `"foo"`},
		{token.LBRACE, `{`},
		{token.IDENT, `default`},
		{token.ASSIGN, `=`},
		{token.STRING, `"bar"`},
		{token.IDENT, `description`},
		{token.ASSIGN, `=`},
		{token.STRING, `"bar"`},
		{token.RBRACE, `}`},
		{token.IDENT, `provider`},
		{token.STRING, `"aws"`},
		{token.LBRACE, `{`},
		{token.IDENT, `access_key`},
		{token.ASSIGN, `=`},
		{token.STRING, `"foo"`},
		{token.IDENT, `secret_key`},
		{token.ASSIGN, `=`},
		{token.STRING, `"${replace(var.foo, ".", "\\.")}"`},
		{token.RBRACE, `}`},
		{token.IDENT, `resource`},
		{token.STRING, `"aws_security_group"`},
		{token.STRING, `"firewall"`},
		{token.LBRACE, `{`},
		{token.IDENT, `count`},
		{token.ASSIGN, `=`},
		{token.NUMBER, `5`},
		{token.RBRACE, `}`},
		{token.IDENT, `resource`},
		{token.IDENT, `aws_instance`},
		{token.STRING, `"web"`},
		{token.LBRACE, `{`},
		{token.IDENT, `ami`},
		{token.ASSIGN, `=`},
		{token.STRING, `"${var.foo}"`},
		{token.IDENT, `security_groups`},
		{token.ASSIGN, `=`},
		{token.LBRACK, `[`},
		{token.STRING, `"foo"`},
		{token.COMMA, `,`},
		{token.STRING, `"${aws_security_group.firewall.foo}"`},
		{token.RBRACK, `]`},
		{token.IDENT, `network_interface`},
		{token.LBRACE, `{`},
		{token.IDENT, `device_index`},
		{token.ASSIGN, `=`},
		{token.NUMBER, `0`},
		{token.IDENT, `description`},
		{token.ASSIGN, `=`},
		{token.HEREDOC, "<<EOF\nMain interface\nEOF\n"},
		{token.RBRACE, `}`},
		{token.IDENT, `network_interface`},
		{token.LBRACE, `{`},
		{token.IDENT, `device_index`},
		{token.ASSIGN, `=`},
		{token.NUMBER, `1`},
		{token.IDENT, `description`},
		{token.ASSIGN, `=`},
		// the <<- heredoc keeps the tab indentation of its body lines
		{token.HEREDOC, "<<-EOF\n\t\t\tOuter text\n\t\t\t\tIndented text\n\t\t\tEOF\n"},
		{token.RBRACE, `}`},
		{token.RBRACE, `}`},
		{token.EOF, ``},
	}

	s := New([]byte(complexHCL))
	for _, l := range literals {
		tok := s.Scan()
		if l.tokenType != tok.Type {
			t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String())
		}

		if l.literal != tok.Text {
			t.Errorf("got:\n%+v\n%s\n want:\n%+v\n%s\n", []byte(tok.String()), tok, []byte(l.literal), l.literal)
		}
	}

}
478
479 func TestScan_crlf(t *testing.T) {
480         complexHCL := "foo {\r\n  bar = \"baz\"\r\n}\r\n"
481
482         literals := []struct {
483                 tokenType token.Type
484                 literal   string
485         }{
486                 {token.IDENT, `foo`},
487                 {token.LBRACE, `{`},
488                 {token.IDENT, `bar`},
489                 {token.ASSIGN, `=`},
490                 {token.STRING, `"baz"`},
491                 {token.RBRACE, `}`},
492                 {token.EOF, ``},
493         }
494
495         s := New([]byte(complexHCL))
496         for _, l := range literals {
497                 tok := s.Scan()
498                 if l.tokenType != tok.Type {
499                         t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String())
500                 }
501
502                 if l.literal != tok.Text {
503                         t.Errorf("got:\n%+v\n%s\n want:\n%+v\n%s\n", []byte(tok.String()), tok, []byte(l.literal), l.literal)
504                 }
505         }
506
507 }
508
// TestError drives the scanner over malformed inputs and checks the
// reported error position, message, and resulting token type via testError.
func TestError(t *testing.T) {
	// invalid UTF-8 at the start of a token
	testError(t, "\x80", "1:1", "illegal UTF-8 encoding", token.ILLEGAL)
	testError(t, "\xff", "1:1", "illegal UTF-8 encoding", token.ILLEGAL)

	// invalid UTF-8 inside an identifier
	testError(t, "ab\x80", "1:3", "illegal UTF-8 encoding", token.IDENT)
	testError(t, "abc\xff", "1:4", "illegal UTF-8 encoding", token.IDENT)

	// invalid UTF-8 inside a string literal
	testError(t, `"ab`+"\x80", "1:4", "illegal UTF-8 encoding", token.STRING)
	testError(t, `"abc`+"\xff", "1:5", "illegal UTF-8 encoding", token.STRING)

	// malformed numbers and an unsupported character literal
	testError(t, `01238`, "1:6", "illegal octal number", token.NUMBER)
	testError(t, `01238123`, "1:9", "illegal octal number", token.NUMBER)
	testError(t, `0x`, "1:3", "illegal hexadecimal number", token.NUMBER)
	testError(t, `0xg`, "1:3", "illegal hexadecimal number", token.NUMBER)
	testError(t, `'aa'`, "1:1", "illegal char", token.ILLEGAL)

	// unterminated strings and comments
	testError(t, `"`, "1:2", "literal not terminated", token.STRING)
	testError(t, `"abc`, "1:5", "literal not terminated", token.STRING)
	testError(t, `"abc`+"\n", "1:5", "literal not terminated", token.STRING)
	testError(t, `"${abc`+"\n", "2:1", "literal not terminated", token.STRING)
	testError(t, `/*/`, "1:4", "comment not terminated", token.COMMENT)
	testError(t, `/foo`, "1:1", "expected '/' for comment", token.COMMENT)
}
532
533 func testError(t *testing.T, src, pos, msg string, tok token.Type) {
534         s := New([]byte(src))
535
536         errorCalled := false
537         s.Error = func(p token.Pos, m string) {
538                 if !errorCalled {
539                         if pos != p.String() {
540                                 t.Errorf("pos = %q, want %q for %q", p, pos, src)
541                         }
542
543                         if m != msg {
544                                 t.Errorf("msg = %q, want %q for %q", m, msg, src)
545                         }
546                         errorCalled = true
547                 }
548         }
549
550         tk := s.Scan()
551         if tk.Type != tok {
552                 t.Errorf("tok = %s, want %s for %q", tk, tok, src)
553         }
554         if !errorCalled {
555                 t.Errorf("error handler not called for %q", src)
556         }
557         if s.ErrorCount == 0 {
558                 t.Errorf("count = %d, want > 0 for %q", s.ErrorCount, src)
559         }
560 }
561
562 func testTokenList(t *testing.T, tokenList []tokenPair) {
563         // create artifical source code
564         buf := new(bytes.Buffer)
565         for _, ident := range tokenList {
566                 fmt.Fprintf(buf, "%s\n", ident.text)
567         }
568
569         s := New(buf.Bytes())
570         for _, ident := range tokenList {
571                 tok := s.Scan()
572                 if tok.Type != ident.tok {
573                         t.Errorf("tok = %q want %q for %q\n", tok, ident.tok, ident.text)
574                 }
575
576                 if tok.Text != ident.text {
577                         t.Errorf("text = %q want %q", tok.String(), ident.text)
578                 }
579
580         }
581 }
582
// countNewlines returns the number of newline ('\n') characters in s.
func countNewlines(s string) int {
	// strings.Count replaces the hand-rolled rune loop; since '\n' is a
	// single-byte rune, byte counting and rune counting agree exactly.
	return strings.Count(s, "\n")
}