Prepare moving vdl from v.io/v23 to v.io/core/veyron/... (step 2)
diff --git a/lib/vdl/parse/const.go b/lib/vdl/parse/const.go
new file mode 100644
index 0000000..d141e01
--- /dev/null
+++ b/lib/vdl/parse/const.go
@@ -0,0 +1,156 @@
+package parse
+
+import (
+ "fmt"
+ "math/big"
+ "strconv"
+)
+
+// ConstExpr is the interface for all nodes in an expression.
+type ConstExpr interface {
+ String() string
+ Pos() Pos
+}
+
+// ConstLit represents scalar literals in const expressions. The supported
+// types for Lit are:
+// string - Represents all string constants.
+// *big.Int - Represents all integer constants.
+// *big.Rat - Represents all rational constants.
+// *BigImag - Represents all imaginary constants.
+type ConstLit struct {
+ Lit interface{}
+ P Pos
+}
+
+// BigImag represents a literal imaginary number.
+type BigImag big.Rat
+
+// ConstCompositeLit represents composite literals in const expressions.
+type ConstCompositeLit struct {
+ Type Type
+ KVList []KVLit
+ P Pos
+}
+
+// KVLit represents a key/value literal in composite literals.
+type KVLit struct {
+ Key ConstExpr
+ Value ConstExpr
+}
+
+// ConstNamed represents named references to other consts.
+type ConstNamed struct {
+ Name string
+ P Pos
+}
+
+// ConstIndexed represents an index operation on a composite type.
+type ConstIndexed struct {
+ Expr *ConstNamed
+ IndexExpr ConstExpr
+ P Pos
+}
+
+// ConstTypeConv represents explicit type conversions.
+type ConstTypeConv struct {
+ Type Type
+ Expr ConstExpr
+ P Pos
+}
+
+// ConstTypeObject represents typeobject; a type used as a value.
+type ConstTypeObject struct {
+ Type Type
+ P Pos
+}
+
+// ConstUnaryOp represents all unary operations.
+type ConstUnaryOp struct {
+ Op string
+ Expr ConstExpr
+ P Pos
+}
+
+// ConstBinaryOp represents all binary operations.
+type ConstBinaryOp struct {
+ Op string
+ Lexpr ConstExpr
+ Rexpr ConstExpr
+ P Pos
+}
+
+// ConstDef represents a user-defined named const.
+type ConstDef struct {
+ NamePos
+ Expr ConstExpr
+}
+
+// cvString returns a human-readable string representing the const value.
+func cvString(val interface{}) string {
+ switch tv := val.(type) {
+ case string:
+ return strconv.Quote(tv)
+ case *big.Int:
+ return tv.String()
+ case *big.Rat:
+ if tv.IsInt() {
+ return tv.Num().String() + ".0"
+ }
+ fv, _ := tv.Float64()
+ return strconv.FormatFloat(fv, 'g', -1, 64)
+ case *BigImag:
+ return cvString((*big.Rat)(tv)) + "i"
+ default:
+ panic(fmt.Errorf("vdl: unhandled const type %T value %v", val, val))
+ }
+}
+
+func (c *ConstLit) String() string {
+ return cvString(c.Lit)
+}
+func (c *ConstCompositeLit) String() string {
+ var s string
+ if c.Type != nil {
+ s += c.Type.String()
+ }
+ s += "{"
+ for index, kv := range c.KVList {
+ if index > 0 {
+ s += ", "
+ }
+ if kv.Key != nil {
+ s += kv.Key.String() + ": "
+ }
+ s += kv.Value.String()
+ }
+ return s + "}"
+}
+func (c *ConstNamed) String() string {
+ return c.Name
+}
+func (c *ConstIndexed) String() string {
+ return c.Expr.String() + "[" + c.IndexExpr.String() + "]"
+}
+func (c *ConstTypeConv) String() string {
+ return c.Type.String() + "(" + c.Expr.String() + ")"
+}
+func (c *ConstTypeObject) String() string {
+ return c.Type.String()
+}
+func (c *ConstUnaryOp) String() string {
+ return c.Op + c.Expr.String()
+}
+func (c *ConstBinaryOp) String() string {
+ return "(" + c.Lexpr.String() + c.Op + c.Rexpr.String() + ")"
+}
+func (c *ConstDef) String() string { return fmt.Sprintf("%+v", *c) }
+
+func (c *ConstLit) Pos() Pos { return c.P }
+func (c *ConstCompositeLit) Pos() Pos { return c.P }
+func (c *ConstNamed) Pos() Pos { return c.P }
+func (c *ConstIndexed) Pos() Pos { return c.P }
+func (c *ConstTypeConv) Pos() Pos { return c.P }
+func (c *ConstTypeObject) Pos() Pos { return c.P }
+func (c *ConstUnaryOp) Pos() Pos { return c.P }
+func (c *ConstBinaryOp) Pos() Pos { return c.P }
diff --git a/lib/vdl/parse/grammar.y b/lib/vdl/parse/grammar.y
new file mode 100644
index 0000000..6d19942
--- /dev/null
+++ b/lib/vdl/parse/grammar.y
@@ -0,0 +1,646 @@
+// Yacc grammar file for the veyron VDL language.
+// http://goto/veyron:vdl
+//
+// Similar to Go, the formal grammar uses semicolons ';' as terminators, but
+// idiomatic usage may omit most semicolons using the following rules:
+// 1) During the tokenization phase, semicolons are always auto-inserted at
+// the end of each line after certain tokens. This is implemented in
+// the lexer via the autoSemi function.
+// 2) Semicolons may be omitted before a closing ')' or '}'. This is
+// implemented via the osemi rule below.
+//
+// To generate the grammar.go source file containing the parser, run
+// grammar_gen.sh in this same directory, or run go generate on this package.
+
+////////////////////////////////////////////////////////////////////////
+// Declarations section.
+%{
+// This grammar.y.go file was auto-generated by yacc from grammar.y.
+
+package parse
+
+import (
+ "math/big"
+ "strings"
+)
+
+type intPos struct {
+ int *big.Int
+ pos Pos
+}
+
+type ratPos struct {
+ rat *big.Rat
+ pos Pos
+}
+
+type imagPos struct {
+ imag *BigImag
+ pos Pos
+}
+
+// typeListToStrList converts a slice of Type to a slice of StringPos. Each
+// type must be a TypeNamed with an empty PackageName, otherwise errors are
+// reported, and ok=false is returned.
+func typeListToStrList(yylex yyLexer, typeList []Type) (strList []StringPos, ok bool) {
+ ok = true
+ for _, t := range typeList {
+ var tn *TypeNamed
+ if tn, ok = t.(*TypeNamed); !ok {
+ lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names)", t.String())
+ return
+ }
+ if strings.ContainsRune(tn.Name, '.') {
+ ok = false
+ lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names).", tn.Name)
+ return
+ }
+ strList = append(strList, StringPos{tn.Name, tn.P})
+ }
+ return
+}
+%}
+
+// This union is turned into the struct type yySymType. Most symbols include
+// positional information; this is necessary since Go yacc doesn't support
+// passing positional information, so we need to track it ourselves.
+%union {
+ pos Pos
+ strpos StringPos
+ intpos intPos
+ ratpos ratPos
+ imagpos imagPos
+ namepos NamePos
+ nameposes []NamePos
+ typeexpr Type
+ typeexprs []Type
+ fields []*Field
+ iface *Interface
+ constexpr ConstExpr
+ constexprs []ConstExpr
+ complit *ConstCompositeLit
+ kvlit KVLit
+ kvlits []KVLit
+ errordef ErrorDef
+}
+
+// Terminal tokens. We leave single-char tokens as-is using their ascii code as
+// their id, to make the grammar more readable; multi-char tokens get their own
+// id. The start* tokens are dummy tokens to kick off the parse.
+%token startFileImports startFile startConfigImports startConfig
+%token startExprs
+%token <pos> ';' ':' ',' '.' '(' ')' '[' ']' '{' '}' '<' '>' '='
+%token <pos> '!' '+' '-' '*' '/' '%' '|' '&' '^' '?'
+%token <pos> tOROR tANDAND tLE tGE tNE tEQEQ tLSH tRSH
+%token <pos> tCONST tENUM tERROR tIMPORT tINTERFACE tMAP tPACKAGE
+%token <pos> tSET tSTREAM tSTRUCT tTYPE tTYPEOBJECT tUNION
+%token <strpos> tIDENT tSTRLIT
+%token <intpos> tINTLIT
+%token <ratpos> tRATLIT
+%token <imagpos> tIMAGLIT
+
+// Labeled rules holding typed values.
+%type <strpos> nameref dotnameref
+%type <namepos> label_spec
+%type <nameposes> label_spec_list
+%type <typeexpr> type type_no_typeobject otype
+%type <typeexprs> type_comma_list streamargs
+%type <fields> field_spec_list field_spec named_arg_list inargs outargs
+%type <iface> iface_item_list iface_item
+%type <constexpr> expr unary_expr operand
+%type <constexprs> tags expr_comma_list
+%type <complit> comp_lit
+%type <kvlit> kv_lit
+%type <kvlits> kv_lit_list
+%type <errordef> error_details error_detail_list error_detail
+
+// There are 5 precedence levels for operators, all left-associative, just like
+// Go. Lines are listed in order of increasing precedence.
+%left tOROR
+%left tANDAND
+%left '<' '>' tLE tGE tNE tEQEQ
+%left '+' '-' '|' '^'
+%left '*' '/' '%' '&' tLSH tRSH
+
+%left notPackage notConfig
+
+%start start
+
+%%
+////////////////////////////////////////////////////////////////////////
+// Rules section.
+
+// Note that vdl files and config files use an identical grammar, other than the
+// initial package or config clause respectively. Error checking for config
+// files that include error, type or interface definitions occurs afterwards, to
+// improve error reporting.
+start:
+ startFileImports package imports gen_imports_eof
+| startFile package imports defs
+| startConfigImports config imports gen_imports_eof
+| startConfig config imports defs
+| startExprs expr_comma_list ';'
+ { lexStoreExprs(yylex, $2) }
+
+// Dummy rule to terminate the parse after the imports, regardless of whether
+// there are any defs. Defs always start with either the tTYPE, tCONST or
+// tERROR tokens, and the rule handles all cases - either there's no trailing
+// text (the empty case, which would have resulted in EOF anyways), or there's
+// one or more defs, where we need to force an EOF.
+gen_imports_eof:
+ // Empty.
+ { lexGenEOF(yylex) }
+| tTYPE
+ { lexGenEOF(yylex) }
+| tCONST
+ { lexGenEOF(yylex) }
+| tERROR
+ { lexGenEOF(yylex) }
+
+// PACKAGE
+package:
+ %prec notPackage
+ { lexPosErrorf(yylex, Pos{}, "vdl file must start with package clause") }
+| tPACKAGE tIDENT ';'
+ { lexVDLFile(yylex).PackageDef = NamePos{Name:$2.String, Pos:$2.Pos} }
+
+// CONFIG
+config:
+ %prec notConfig
+ { lexPosErrorf(yylex, Pos{}, "config file must start with config clause") }
+| tIDENT '=' expr ';'
+ {
+ // We allow "config" as an identifier; it is not a keyword. So we check
+ // manually to make sure the syntax is correct.
+ if $1.String != "config" {
+ lexPosErrorf(yylex, $1.Pos, "config file must start with config clause")
+ return 1 // Any non-zero code indicates an error
+ }
+ file := lexVDLFile(yylex)
+ file.PackageDef = NamePos{Name:"config", Pos:$1.Pos}
+ file.ConstDefs = []*ConstDef{{Expr:$3}}
+ }
+
+// IMPORTS
+imports:
+ // Empty.
+| imports import ';'
+
+import:
+ tIMPORT '(' ')'
+| tIMPORT '(' import_spec_list osemi ')'
+| tIMPORT import_spec
+
+import_spec_list:
+ import_spec
+| import_spec_list ';' import_spec
+
+import_spec:
+ tSTRLIT
+ {
+ imps := &lexVDLFile(yylex).Imports
+ *imps = append(*imps, &Import{Path:$1.String, NamePos:NamePos{Pos:$1.Pos}})
+ }
+| tIDENT tSTRLIT
+ {
+ imps := &lexVDLFile(yylex).Imports
+ *imps = append(*imps, &Import{Path:$2.String, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+ }
+
+// DEFINITIONS
+defs:
+ // Empty.
+| defs type_def ';'
+| defs const_def ';'
+| defs error_def ';'
+
+type_def:
+ tTYPE '(' ')'
+| tTYPE '(' type_spec_list osemi ')'
+| tTYPE type_spec
+| tTYPE interface_spec
+
+const_def:
+ tCONST '(' ')'
+| tCONST '(' const_spec_list osemi ')'
+| tCONST const_spec
+
+error_def:
+ tERROR '(' ')'
+| tERROR '(' error_spec_list osemi ')'
+| tERROR error_spec
+
+// TYPE DEFINITIONS
+type_spec_list:
+ type_spec
+| type_spec_list ';' type_spec
+
+type_spec:
+ tIDENT type
+ {
+ tds := &lexVDLFile(yylex).TypeDefs
+ *tds = append(*tds, &TypeDef{Type:$2, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+ }
+
+// The type_no_typeobject rule is necessary to avoid a shift/reduce conflict
+// between type conversions and typeobject const expressions. E.g.
+// type(expr) // type conversion
+// typeobject(type) // typeobject const expression
+//
+// We've chosen similar syntax to make it easier for the user to remember how to
+// use the feature, but since "typeobject" is itself a type, there is a problem.
+// We resolve the conflict by restricting the type conversion to the rule:
+// type_no_typeobject '(' expr ')'
+//
+// Note that if we wanted to add general-purpose functions with the func(expr)
+// syntax, we'll need to pull nameref out of type_no_typeobject, and parse both
+// func(expr) and nameref(expr) into a generic structure. We can't use that
+// same mechanism for typeobject, since the thing inside the parens is a value
+// expression for type conversions, but a type expression for typeobject.
+type_no_typeobject:
+ nameref
+ { $$ = &TypeNamed{Name:$1.String, P:$1.Pos} }
+| tERROR // Special-case to allow the "error" keyword as a named type.
+ { $$ = &TypeNamed{Name:"error", P:$1} }
+| '[' tINTLIT ']' type
+ { $$ = &TypeArray{Len:int($2.int.Int64()), Elem:$4, P:$1} }
+| '[' ']' type
+ { $$ = &TypeList{Elem:$3, P:$1} }
+| tENUM '{' label_spec_list osemi '}'
+ { $$ = &TypeEnum{Labels:$3, P:$1} }
+| tSET '[' type ']'
+ { $$ = &TypeSet{Key:$3, P:$1} }
+| tMAP '[' type ']' type
+ { $$ = &TypeMap{Key:$3, Elem:$5, P:$1} }
+| tSTRUCT '{' field_spec_list osemi '}'
+ { $$ = &TypeStruct{Fields:$3, P:$1} }
+| tSTRUCT '{' '}'
+ { $$ = &TypeStruct{P:$1} }
+| tUNION '{' field_spec_list osemi '}'
+ { $$ = &TypeUnion{Fields:$3, P:$1} }
+| tUNION '{' '}'
+ { $$ = &TypeUnion{P:$1} }
+| '?' type
+ { $$ = &TypeOptional{Base:$2, P:$1} }
+
+// The type rule expands to all the actual types, including typeobject.
+type:
+ type_no_typeobject
+ { $$ = $1}
+| tTYPEOBJECT
+ { $$ = &TypeNamed{Name:"typeobject", P:$1} }
+
+label_spec_list:
+ label_spec
+ { $$ = []NamePos{$1} }
+| label_spec_list ';' label_spec
+ { $$ = append($1, $3) }
+
+label_spec:
+ tIDENT
+ { $$ = NamePos{Name:$1.String, Pos:$1.Pos} }
+
+field_spec_list:
+ field_spec
+ { $$ = $1 }
+| field_spec_list ';' field_spec
+ { $$ = append($1, $3...) }
+
+// The field_spec rule is intended to capture the following patterns:
+// var type
+// var0, var1, var2 type
+// where var* refers to a variable name, and type refers to a type. Each var
+// is expressed as an identifier. An oddity here is that we use a type_list to
+// capture the list of variables rather than using a list of IDENTS. This means
+// the grammar accepts invalid constructions, and we must validate afterwards.
+//
+// We do this to avoid a LALR reduce/reduce conflict with function arguments.
+// The problem is exhibited by the in-args of these two functions, where func1
+// has three args respectively named A, B, C all of type t1, and func2 has three
+// args with name and type t2, t3 and t4 respectively. The func1 style is
+// captured by field_spec in named_arg_list, while the func2 style is captured
+// by type_list in args.
+// func1(A, B, C t1)
+// func2(t2, t3, t4)
+//
+// If we used an ident_list to capture "A, B, C" in func1, but used a type_list
+// to capture "t2, t3, t4" in func2, we'd have a reduce/reduce conflict since
+// yacc cannot determine whether to reduce as an ident_list or as a type_list;
+// we don't know until we've reached token t1 in func1, or token ')' in func2.
+//
+// The fix can be considered both beautiful and a huge hack. To avoid the
+// conflict we force both forms to use type_list to capture both "A, B, C" and
+// "t2, t3, t4". This avoids the conflict since we're now always reducing via
+// type_list, but allows invalid constructions like "[]int, []int []int". So we
+// validate in the action and throw errors.
+//
+// An alternate fix would have been to remove the IDENT case from the type rule,
+// use ident_list to capture both cases, and manually "expand" the grammar to
+// distinguish the cases appropriately. That would ensure we don't allow
+// constructions like "int, int int" in the grammar itself, but would lead to a
+// much more complicated grammar. As a bonus, with the type_list solution we
+// can give better error messages.
+field_spec:
+ type_comma_list type
+ {
+ if names, ok := typeListToStrList(yylex, $1); ok {
+ for _, n := range names {
+ $$ = append($$, &Field{Type:$2, NamePos:NamePos{Name:n.String, Pos:n.Pos}})
+ }
+ } else {
+ lexPosErrorf(yylex, $2.Pos(), "perhaps you forgot a comma before %q?.", $2.String())
+ }
+ }
+
+type_comma_list:
+ type
+ { $$ = []Type{$1} }
+| type_comma_list ',' type
+ { $$ = append($1, $3) }
+
+// INTERFACE DEFINITIONS
+interface_spec:
+ tIDENT tINTERFACE '{' '}'
+ {
+ ifs := &lexVDLFile(yylex).Interfaces
+ *ifs = append(*ifs, &Interface{NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+ }
+| tIDENT tINTERFACE '{' iface_item_list osemi '}'
+ {
+ $4.Name, $4.Pos = $1.String, $1.Pos
+ ifs := &lexVDLFile(yylex).Interfaces
+ *ifs = append(*ifs, $4)
+ }
+
+iface_item_list:
+ iface_item
+ { $$ = $1 }
+| iface_item_list ';' iface_item
+ {
+ $1.Embeds = append($1.Embeds, $3.Embeds...)
+ $1.Methods = append($1.Methods, $3.Methods...)
+ $$ = $1
+ }
+
+iface_item:
+ tIDENT inargs streamargs outargs tags
+ { $$ = &Interface{Methods: []*Method{{InArgs:$2, InStream:$3[0], OutStream:$3[1], OutArgs:$4, Tags:$5, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}}}} }
+| nameref
+ { $$ = &Interface{Embeds: []*NamePos{{Name:$1.String, Pos:$1.Pos}}} }
+
+inargs:
+ '(' ')'
+ { $$ = nil }
+| '(' named_arg_list ocomma ')'
+ { $$ = $2 }
+| '(' type_comma_list ocomma ')'
+ // Just like Go, we allow a list of types without variable names. See the
+ // field_spec rule for a workaround to avoid a reduce/reduce conflict.
+ {
+ for _, t := range $2 {
+ $$ = append($$, &Field{Type:t, NamePos:NamePos{Pos:t.Pos()}})
+ }
+ }
+
+// The named_arg_list rule is just like the field_spec_list, but uses comma ','
+// as a delimiter rather than semicolon ';'.
+named_arg_list:
+ field_spec
+ { $$ = $1 }
+| named_arg_list ',' field_spec
+ { $$ = append($1, $3...) }
+
+// The outargs use special syntax to denote the error associated with each
+// method. For parsing we accept these forms:
+// error
+// (string | error)
+// (a, b string, c bool | error)
+//
+// TODO(toddw): Improve parser syntax errors.
+outargs:
+ tERROR
+ { $$ = nil }
+| '(' named_arg_list ocomma '|' tERROR ')'
+ { $$ = $2 }
+| '(' type_comma_list ocomma '|' tERROR ')'
+ // Just like Go, we allow a list of types without variable names. See the
+ // field_spec rule for a workaround to avoid a reduce/reduce conflict.
+ {
+ for _, t := range $2 {
+ $$ = append($$, &Field{Type:t, NamePos:NamePos{Pos:t.Pos()}})
+ }
+ }
+
+streamargs:
+ // Empty.
+ { $$ = []Type{nil, nil} }
+| tSTREAM '<' '>'
+ { $$ = []Type{nil, nil} }
+| tSTREAM '<' type '>'
+ { $$ = []Type{$3, nil} }
+| tSTREAM '<' type ',' type '>'
+ { $$ = []Type{$3, $5} }
+
+tags:
+ // Empty.
+ { $$ = nil }
+| '{' '}'
+ { $$ = nil }
+| '{' expr_comma_list ocomma '}'
+ { $$ = $2 }
+
+expr_comma_list:
+ expr
+ { $$ = []ConstExpr{$1} }
+| expr_comma_list ',' expr
+ { $$ = append($1, $3) }
+
+// CONST DEFINITIONS
+const_spec_list:
+ const_spec
+| const_spec_list ';' const_spec
+
+const_spec:
+ tIDENT '=' expr
+ {
+ cds := &lexVDLFile(yylex).ConstDefs
+ *cds = append(*cds, &ConstDef{Expr:$3, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+ }
+
+expr:
+ unary_expr
+ { $$ = $1 }
+| expr tOROR expr
+ { $$ = &ConstBinaryOp{"||", $1, $3, $2} }
+| expr tANDAND expr
+ { $$ = &ConstBinaryOp{"&&", $1, $3, $2} }
+| expr '<' expr
+ { $$ = &ConstBinaryOp{"<", $1, $3, $2} }
+| expr '>' expr
+ { $$ = &ConstBinaryOp{">", $1, $3, $2} }
+| expr tLE expr
+ { $$ = &ConstBinaryOp{"<=", $1, $3, $2} }
+| expr tGE expr
+ { $$ = &ConstBinaryOp{">=", $1, $3, $2} }
+| expr tNE expr
+ { $$ = &ConstBinaryOp{"!=", $1, $3, $2} }
+| expr tEQEQ expr
+ { $$ = &ConstBinaryOp{"==", $1, $3, $2} }
+| expr '+' expr
+ { $$ = &ConstBinaryOp{"+", $1, $3, $2} }
+| expr '-' expr
+ { $$ = &ConstBinaryOp{"-", $1, $3, $2} }
+| expr '*' expr
+ { $$ = &ConstBinaryOp{"*", $1, $3, $2} }
+| expr '/' expr
+ { $$ = &ConstBinaryOp{"/", $1, $3, $2} }
+| expr '%' expr
+ { $$ = &ConstBinaryOp{"%", $1, $3, $2} }
+| expr '|' expr
+ { $$ = &ConstBinaryOp{"|", $1, $3, $2} }
+| expr '&' expr
+ { $$ = &ConstBinaryOp{"&", $1, $3, $2} }
+| expr '^' expr
+ { $$ = &ConstBinaryOp{"^", $1, $3, $2} }
+| expr tLSH expr
+ { $$ = &ConstBinaryOp{"<<", $1, $3, $2} }
+| expr tRSH expr
+ { $$ = &ConstBinaryOp{">>", $1, $3, $2} }
+
+unary_expr:
+ operand
+ { $$ = $1 }
+| '!' unary_expr
+ { $$ = &ConstUnaryOp{"!", $2, $1} }
+| '+' unary_expr
+ { $$ = &ConstUnaryOp{"+", $2, $1} }
+| '-' unary_expr
+ { $$ = &ConstUnaryOp{"-", $2, $1} }
+| '^' unary_expr
+ { $$ = &ConstUnaryOp{"^", $2, $1} }
+| type_no_typeobject '(' expr ')'
+ { $$ = &ConstTypeConv{$1, $3, $1.Pos()} }
+| tTYPEOBJECT '(' type ')'
+ { $$ = &ConstTypeObject{$3, $1} }
+// TODO(bprosnitz) Add .real() and .imag() for complex.
+
+operand:
+ tSTRLIT
+ { $$ = &ConstLit{$1.String, $1.Pos} }
+| tINTLIT
+ { $$ = &ConstLit{$1.int, $1.pos} }
+| tRATLIT
+ { $$ = &ConstLit{$1.rat, $1.pos} }
+| tIMAGLIT
+ { $$ = &ConstLit{$1.imag, $1.pos} }
+| nameref
+ { $$ = &ConstNamed{$1.String, $1.Pos} }
+| comp_lit
+ { $$ = $1 }
+| comp_lit '.' tIDENT
+ { lexPosErrorf(yylex, $2, "cannot apply selector operator to unnamed constant")}
+| comp_lit '[' expr ']'
+ { lexPosErrorf(yylex, $2, "cannot apply index operator to unnamed constant")}
+| nameref '[' expr ']'
+ { $$ = &ConstIndexed{&ConstNamed{$1.String, $1.Pos}, $3, $1.Pos} }
+| '(' expr ')'
+ { $$ = $2 }
+
+comp_lit:
+ otype '{' '}'
+ { $$ = &ConstCompositeLit{$1, nil, $2} }
+| otype '{' kv_lit_list ocomma '}'
+ { $$ = &ConstCompositeLit{$1, $3, $2} }
+
+kv_lit_list:
+ kv_lit
+ { $$ = []KVLit{$1} }
+| kv_lit_list ',' kv_lit
+ { $$ = append($1, $3) }
+
+kv_lit:
+ expr
+ { $$ = KVLit{Value:$1} }
+| expr ':' expr
+ { $$ = KVLit{Key:$1, Value:$3} }
+
+// ERROR DEFINITIONS
+error_spec_list:
+ error_spec
+| error_spec_list ';' error_spec
+
+error_spec:
+ tIDENT inargs error_details
+ {
+ // Create *ErrorDef starting with a copy of error_details, filling in the
+ // name and params.
+ ed := $3
+ ed.NamePos = NamePos{Name:$1.String, Pos:$1.Pos}
+ ed.Params = $2
+ eds := &lexVDLFile(yylex).ErrorDefs
+ *eds = append(*eds, &ed)
+ }
+
+error_details:
+ // Empty.
+ { $$ = ErrorDef{} }
+| '{' '}'
+ { $$ = ErrorDef{} }
+| '{' error_detail_list ocomma '}'
+ { $$ = $2 }
+
+error_detail_list:
+ error_detail
+ { $$ = $1 }
+| error_detail_list ',' error_detail
+ {
+ // Merge each ErrorDef in-order to build the final ErrorDef.
+ $$ = $1
+ switch {
+ case len($3.Actions) > 0:
+ $$.Actions = append($$.Actions, $3.Actions...)
+ case len($3.Formats) > 0:
+ $$.Formats = append($$.Formats, $3.Formats...)
+ }
+ }
+
+error_detail:
+ tIDENT
+ { $$ = ErrorDef{Actions: []StringPos{$1}} }
+| tSTRLIT ':' tSTRLIT
+ { $$ = ErrorDef{Formats: []LangFmt{{Lang: $1, Fmt: $3}}} }
+
+// MISC TOKENS
+
+// nameref describes a named reference to another type, interface or const. We
+// allow the following forms:
+// foo
+// foo.bar (and multi-dot variants)
+// "pkg/path".foo
+// "pkg/path".foo.bar (and multi-dot variants)
+nameref:
+ dotnameref
+ { $$ = $1 }
+| tSTRLIT '.' dotnameref
+ { $$ = StringPos{"\""+$1.String+"\"."+$3.String, $1.Pos} }
+
+// dotnameref describes just the dotted portion of nameref.
+dotnameref:
+ tIDENT
+ { $$ = $1 }
+| dotnameref '.' tIDENT
+ { $$ = StringPos{$1.String+"."+$3.String, $1.Pos} }
+
+otype:
+ // Empty.
+ { $$ = nil }
+| type
+ { $$ = $1 }
+
+osemi:
+ // Empty.
+| ';'
+
+ocomma:
+ // Empty.
+| ','
diff --git a/lib/vdl/parse/grammar.y.debug b/lib/vdl/parse/grammar.y.debug
new file mode 100644
index 0000000..a3fb1fc
--- /dev/null
+++ b/lib/vdl/parse/grammar.y.debug
@@ -0,0 +1,4347 @@
+***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
+* The main reason this file has been generated and submitted is to try to ensure
+* we never submit changes that cause shift/reduce or reduce/reduce conflicts.
+* The Go yacc tool doesn't support the %expect directive, and will happily
+* generate a parser even if such conflicts exist; it's up to the developer
+* running the tool to notice that an error message is reported. The bottom of
+* this file contains stats, including the number of conflicts. If you're
+* reviewing a change make sure it says 0 conflicts.
+*
+* If you're updating the grammar, just cut-and-paste this message from the old
+* file to the new one, so that this comment block persists.
+***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
+
+state 0
+ $accept: .start $end
+
+ startFileImports shift 2
+ startFile shift 3
+ startConfigImports shift 4
+ startConfig shift 5
+ startExprs shift 6
+ . error
+
+ start goto 1
+
+state 1
+ $accept: start.$end
+
+ $end accept
+ . error
+
+
+state 2
+ start: startFileImports.package imports gen_imports_eof
+ package: . (10)
+
+ tPACKAGE shift 8
+ . reduce 10 (src line 161)
+
+ package goto 7
+
+state 3
+ start: startFile.package imports defs
+ package: . (10)
+
+ tPACKAGE shift 8
+ . reduce 10 (src line 161)
+
+ package goto 9
+
+state 4
+ start: startConfigImports.config imports gen_imports_eof
+ config: . (12)
+
+ tIDENT shift 11
+ . reduce 12 (src line 168)
+
+ config goto 10
+
+state 5
+ start: startConfig.config imports defs
+ config: . (12)
+
+ tIDENT shift 11
+ . reduce 12 (src line 168)
+
+ config goto 12
+
+state 6
+ start: startExprs.expr_comma_list ';'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 14
+ unary_expr goto 15
+ operand goto 16
+ expr_comma_list goto 13
+ comp_lit goto 28
+
+state 7
+ start: startFileImports package.imports gen_imports_eof
+ imports: . (14)
+
+ . reduce 14 (src line 185)
+
+ imports goto 42
+
+state 8
+ package: tPACKAGE.tIDENT ';'
+
+ tIDENT shift 43
+ . error
+
+
+state 9
+ start: startFile package.imports defs
+ imports: . (14)
+
+ . reduce 14 (src line 185)
+
+ imports goto 44
+
+state 10
+ start: startConfigImports config.imports gen_imports_eof
+ imports: . (14)
+
+ . reduce 14 (src line 185)
+
+ imports goto 45
+
+state 11
+ config: tIDENT.'=' expr ';'
+
+ '=' shift 46
+ . error
+
+
+state 12
+ start: startConfig config.imports defs
+ imports: . (14)
+
+ . reduce 14 (src line 185)
+
+ imports goto 47
+
+state 13
+ start: startExprs expr_comma_list.';'
+ expr_comma_list: expr_comma_list.',' expr
+
+ ';' shift 48
+ ',' shift 49
+ . error
+
+
+state 14
+ expr_comma_list: expr. (83)
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 83 (src line 452)
+
+
+state 15
+ expr: unary_expr. (88)
+
+ . reduce 88 (src line 470)
+
+
+state 16
+ unary_expr: operand. (107)
+
+ . reduce 107 (src line 510)
+
+
+state 17
+ unary_expr: '!'.unary_expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ unary_expr goto 68
+ operand goto 16
+ comp_lit goto 28
+
+state 18
+ unary_expr: '+'.unary_expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ unary_expr goto 69
+ operand goto 16
+ comp_lit goto 28
+
+state 19
+ unary_expr: '-'.unary_expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ unary_expr goto 70
+ operand goto 16
+ comp_lit goto 28
+
+state 20
+ unary_expr: '^'.unary_expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ unary_expr goto 71
+ operand goto 16
+ comp_lit goto 28
+
+state 21
+ type: type_no_typeobject. (52)
+ unary_expr: type_no_typeobject.'(' expr ')'
+
+ '(' shift 72
+ . reduce 52 (src line 287)
+
+
+state 22
+ type: tTYPEOBJECT. (53)
+ unary_expr: tTYPEOBJECT.'(' type ')'
+
+ '(' shift 73
+ . reduce 53 (src line 290)
+
+
+state 23
+ operand: tSTRLIT. (114)
+ nameref: tSTRLIT.'.' dotnameref
+
+ '.' shift 74
+ . reduce 114 (src line 527)
+
+
+state 24
+ operand: tINTLIT. (115)
+
+ . reduce 115 (src line 530)
+
+
+state 25
+ operand: tRATLIT. (116)
+
+ . reduce 116 (src line 532)
+
+
+state 26
+ operand: tIMAGLIT. (117)
+
+ . reduce 117 (src line 534)
+
+
+state 27
+ type_no_typeobject: nameref. (40)
+ operand: nameref. (118)
+ operand: nameref.'[' expr ']'
+
+ '(' reduce 40 (src line 260)
+ '[' shift 75
+ '{' reduce 40 (src line 260)
+ . reduce 118 (src line 536)
+
+
+state 28
+ operand: comp_lit. (119)
+ operand: comp_lit.'.' tIDENT
+ operand: comp_lit.'[' expr ']'
+
+ '.' shift 76
+ '[' shift 77
+ . reduce 119 (src line 538)
+
+
+state 29
+ operand: '('.expr ')'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 78
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 30
+ type_no_typeobject: tERROR. (41)
+
+ . reduce 41 (src line 263)
+
+
+state 31
+ type_no_typeobject: '['.tINTLIT ']' type
+ type_no_typeobject: '['.']' type
+
+ ']' shift 80
+ tINTLIT shift 79
+ . error
+
+
+state 32
+ type_no_typeobject: tENUM.'{' label_spec_list osemi '}'
+
+ '{' shift 81
+ . error
+
+
+state 33
+ type_no_typeobject: tSET.'[' type ']'
+
+ '[' shift 82
+ . error
+
+
+state 34
+ type_no_typeobject: tMAP.'[' type ']' type
+
+ '[' shift 83
+ . error
+
+
+state 35
+ type_no_typeobject: tSTRUCT.'{' field_spec_list osemi '}'
+ type_no_typeobject: tSTRUCT.'{' '}'
+
+ '{' shift 84
+ . error
+
+
+state 36
+ type_no_typeobject: tUNION.'{' field_spec_list osemi '}'
+ type_no_typeobject: tUNION.'{' '}'
+
+ '{' shift 85
+ . error
+
+
+state 37
+ type_no_typeobject: '?'.type
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 86
+ type_no_typeobject goto 87
+
+state 38
+ nameref: dotnameref. (140)
+ dotnameref: dotnameref.'.' tIDENT
+
+ '.' shift 91
+ . reduce 140 (src line 621)
+
+
+state 39
+ comp_lit: otype.'{' '}'
+ comp_lit: otype.'{' kv_lit_list ocomma '}'
+
+ '{' shift 92
+ . error
+
+
+state 40
+ dotnameref: tIDENT. (142)
+
+ . reduce 142 (src line 628)
+
+
+state 41
+ otype: type. (145)
+
+ . reduce 145 (src line 637)
+
+
+state 42
+ start: startFileImports package imports.gen_imports_eof
+ imports: imports.import ';'
+ gen_imports_eof: . (6)
+
+ tCONST shift 96
+ tERROR shift 97
+ tIMPORT shift 98
+ tTYPE shift 95
+ . reduce 6 (src line 150)
+
+ gen_imports_eof goto 93
+ import goto 94
+
+state 43
+ package: tPACKAGE tIDENT.';'
+
+ ';' shift 99
+ . error
+
+
+state 44
+ start: startFile package imports.defs
+ imports: imports.import ';'
+ defs: . (23)
+
+ tIMPORT shift 98
+ . reduce 23 (src line 211)
+
+ defs goto 100
+ import goto 94
+
+state 45
+ start: startConfigImports config imports.gen_imports_eof
+ imports: imports.import ';'
+ gen_imports_eof: . (6)
+
+ tCONST shift 96
+ tERROR shift 97
+ tIMPORT shift 98
+ tTYPE shift 95
+ . reduce 6 (src line 150)
+
+ gen_imports_eof goto 101
+ import goto 94
+
+state 46
+ config: tIDENT '='.expr ';'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 102
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 47
+ start: startConfig config imports.defs
+ imports: imports.import ';'
+ defs: . (23)
+
+ tIMPORT shift 98
+ . reduce 23 (src line 211)
+
+ defs goto 103
+ import goto 94
+
+state 48
+ start: startExprs expr_comma_list ';'. (5)
+
+ . reduce 5 (src line 142)
+
+
+state 49
+ expr_comma_list: expr_comma_list ','.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 104
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 50
+ expr: expr tOROR.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 105
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 51
+ expr: expr tANDAND.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 106
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 52
+ expr: expr '<'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 107
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 53
+ expr: expr '>'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 108
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 54
+ expr: expr tLE.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 109
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 55
+ expr: expr tGE.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 110
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 56
+ expr: expr tNE.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 111
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 57
+ expr: expr tEQEQ.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 112
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 58
+ expr: expr '+'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 113
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 59
+ expr: expr '-'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 114
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 60
+ expr: expr '*'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 115
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 61
+ expr: expr '/'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 116
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 62
+ expr: expr '%'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 117
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 63
+ expr: expr '|'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 118
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 64
+ expr: expr '&'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 119
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 65
+ expr: expr '^'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 120
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 66
+ expr: expr tLSH.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 121
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 67
+ expr: expr tRSH.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 122
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 68
+ unary_expr: '!' unary_expr. (108)
+
+ . reduce 108 (src line 513)
+
+
+state 69
+ unary_expr: '+' unary_expr. (109)
+
+ . reduce 109 (src line 515)
+
+
+state 70
+ unary_expr: '-' unary_expr. (110)
+
+ . reduce 110 (src line 517)
+
+
+state 71
+ unary_expr: '^' unary_expr. (111)
+
+ . reduce 111 (src line 519)
+
+
+state 72
+ unary_expr: type_no_typeobject '('.expr ')'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 123
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 73
+ unary_expr: tTYPEOBJECT '('.type ')'
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 124
+ type_no_typeobject goto 87
+
+state 74
+ nameref: tSTRLIT '.'.dotnameref
+
+ tIDENT shift 40
+ . error
+
+ dotnameref goto 125
+
+state 75
+ operand: nameref '['.expr ']'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 126
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 76
+ operand: comp_lit '.'.tIDENT
+
+ tIDENT shift 127
+ . error
+
+
+state 77
+ operand: comp_lit '['.expr ']'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 128
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 78
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+ operand: '(' expr.')'
+
+ ')' shift 129
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . error
+
+
+state 79
+ type_no_typeobject: '[' tINTLIT.']' type
+
+ ']' shift 130
+ . error
+
+
+state 80
+ type_no_typeobject: '[' ']'.type
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 131
+ type_no_typeobject goto 87
+
+state 81
+ type_no_typeobject: tENUM '{'.label_spec_list osemi '}'
+
+ tIDENT shift 134
+ . error
+
+ label_spec goto 133
+ label_spec_list goto 132
+
+state 82
+ type_no_typeobject: tSET '['.type ']'
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 135
+ type_no_typeobject goto 87
+
+state 83
+ type_no_typeobject: tMAP '['.type ']' type
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 136
+ type_no_typeobject goto 87
+
+state 84
+ type_no_typeobject: tSTRUCT '{'.field_spec_list osemi '}'
+ type_no_typeobject: tSTRUCT '{'.'}'
+
+ '[' shift 31
+ '}' shift 138
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 141
+ type_no_typeobject goto 87
+ type_comma_list goto 140
+ field_spec_list goto 137
+ field_spec goto 139
+
+state 85
+ type_no_typeobject: tUNION '{'.field_spec_list osemi '}'
+ type_no_typeobject: tUNION '{'.'}'
+
+ '[' shift 31
+ '}' shift 143
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 141
+ type_no_typeobject goto 87
+ type_comma_list goto 140
+ field_spec_list goto 142
+ field_spec goto 139
+
+state 86
+ type_no_typeobject: '?' type. (51)
+
+ . reduce 51 (src line 283)
+
+
+state 87
+ type: type_no_typeobject. (52)
+
+ . reduce 52 (src line 287)
+
+
+state 88
+ type: tTYPEOBJECT. (53)
+
+ . reduce 53 (src line 290)
+
+
+state 89
+ type_no_typeobject: nameref. (40)
+
+ . reduce 40 (src line 260)
+
+
+state 90
+ nameref: tSTRLIT.'.' dotnameref
+
+ '.' shift 74
+ . error
+
+
+state 91
+ dotnameref: dotnameref '.'.tIDENT
+
+ tIDENT shift 144
+ . error
+
+
+state 92
+ comp_lit: otype '{'.'}'
+ comp_lit: otype '{'.kv_lit_list ocomma '}'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '}' shift 145
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 148
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+ kv_lit goto 147
+ kv_lit_list goto 146
+
+state 93
+ start: startFileImports package imports gen_imports_eof. (1)
+
+ . reduce 1 (src line 137)
+
+
+state 94
+ imports: imports import.';'
+
+ ';' shift 149
+ . error
+
+
+state 95
+ gen_imports_eof: tTYPE. (7)
+
+ . reduce 7 (src line 153)
+
+
+state 96
+ gen_imports_eof: tCONST. (8)
+
+ . reduce 8 (src line 155)
+
+
+state 97
+ gen_imports_eof: tERROR. (9)
+
+ . reduce 9 (src line 157)
+
+
+state 98
+ import: tIMPORT.'(' ')'
+ import: tIMPORT.'(' import_spec_list osemi ')'
+ import: tIMPORT.import_spec
+
+ '(' shift 150
+ tIDENT shift 153
+ tSTRLIT shift 152
+ . error
+
+ import_spec goto 151
+
+state 99
+ package: tPACKAGE tIDENT ';'. (11)
+
+ . reduce 11 (src line 164)
+
+
+state 100
+ start: startFile package imports defs. (2)
+ defs: defs.type_def ';'
+ defs: defs.const_def ';'
+ defs: defs.error_def ';'
+
+ tCONST shift 158
+ tERROR shift 159
+ tTYPE shift 157
+ . reduce 2 (src line 139)
+
+ type_def goto 154
+ const_def goto 155
+ error_def goto 156
+
+state 101
+ start: startConfigImports config imports gen_imports_eof. (3)
+
+ . reduce 3 (src line 140)
+
+
+state 102
+ config: tIDENT '=' expr.';'
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ ';' shift 160
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . error
+
+
+state 103
+ start: startConfig config imports defs. (4)
+ defs: defs.type_def ';'
+ defs: defs.const_def ';'
+ defs: defs.error_def ';'
+
+ tCONST shift 158
+ tERROR shift 159
+ tTYPE shift 157
+ . reduce 4 (src line 141)
+
+ type_def goto 154
+ const_def goto 155
+ error_def goto 156
+
+state 104
+ expr_comma_list: expr_comma_list ',' expr. (84)
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 84 (src line 455)
+
+
+state 105
+ expr: expr.tOROR expr
+ expr: expr tOROR expr. (89)
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 89 (src line 473)
+
+
+state 106
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr tANDAND expr. (90)
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 90 (src line 475)
+
+
+state 107
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr '<' expr. (91)
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 91 (src line 477)
+
+
+state 108
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr '>' expr. (92)
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 92 (src line 479)
+
+
+state 109
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr tLE expr. (93)
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 93 (src line 481)
+
+
+state 110
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr tGE expr. (94)
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 94 (src line 483)
+
+
+state 111
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr tNE expr. (95)
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 95 (src line 485)
+
+
+state 112
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr tEQEQ expr. (96)
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 96 (src line 487)
+
+
+state 113
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr '+' expr. (97)
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '&' shift 64
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 97 (src line 489)
+
+
+state 114
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr '-' expr. (98)
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '&' shift 64
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 98 (src line 491)
+
+
+state 115
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr '*' expr. (99)
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ . reduce 99 (src line 493)
+
+
+state 116
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr '/' expr. (100)
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ . reduce 100 (src line 495)
+
+
+state 117
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr '%' expr. (101)
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ . reduce 101 (src line 497)
+
+
+state 118
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr '|' expr. (102)
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '&' shift 64
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 102 (src line 499)
+
+
+state 119
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr '&' expr. (103)
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ . reduce 103 (src line 501)
+
+
+state 120
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr '^' expr. (104)
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '&' shift 64
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 104 (src line 503)
+
+
+state 121
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr tLSH expr. (105)
+ expr: expr.tRSH expr
+
+ . reduce 105 (src line 505)
+
+
+state 122
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+ expr: expr tRSH expr. (106)
+
+ . reduce 106 (src line 507)
+
+
+state 123
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+ unary_expr: type_no_typeobject '(' expr.')'
+
+ ')' shift 161
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . error
+
+
+state 124
+ unary_expr: tTYPEOBJECT '(' type.')'
+
+ ')' shift 162
+ . error
+
+
+state 125
+ nameref: tSTRLIT '.' dotnameref. (141)
+ dotnameref: dotnameref.'.' tIDENT
+
+ '.' shift 91
+ . reduce 141 (src line 624)
+
+
+state 126
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+ operand: nameref '[' expr.']'
+
+ ']' shift 163
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . error
+
+
+state 127
+ operand: comp_lit '.' tIDENT. (120)
+
+ . reduce 120 (src line 540)
+
+
+state 128
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+ operand: comp_lit '[' expr.']'
+
+ ']' shift 164
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . error
+
+
+state 129
+ operand: '(' expr ')'. (123)
+
+ . reduce 123 (src line 546)
+
+
+state 130
+ type_no_typeobject: '[' tINTLIT ']'.type
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 165
+ type_no_typeobject goto 87
+
+state 131
+ type_no_typeobject: '[' ']' type. (43)
+
+ . reduce 43 (src line 267)
+
+
+state 132
+ type_no_typeobject: tENUM '{' label_spec_list.osemi '}'
+ label_spec_list: label_spec_list.';' label_spec
+ osemi: . (146)
+
+ ';' shift 167
+ . reduce 146 (src line 640)
+
+ osemi goto 166
+
+state 133
+ label_spec_list: label_spec. (54)
+
+ . reduce 54 (src line 293)
+
+
+state 134
+ label_spec: tIDENT. (56)
+
+ . reduce 56 (src line 299)
+
+
+state 135
+ type_no_typeobject: tSET '[' type.']'
+
+ ']' shift 168
+ . error
+
+
+state 136
+ type_no_typeobject: tMAP '[' type.']' type
+
+ ']' shift 169
+ . error
+
+
+state 137
+ type_no_typeobject: tSTRUCT '{' field_spec_list.osemi '}'
+ field_spec_list: field_spec_list.';' field_spec
+ osemi: . (146)
+
+ ';' shift 171
+ . reduce 146 (src line 640)
+
+ osemi goto 170
+
+state 138
+ type_no_typeobject: tSTRUCT '{' '}'. (48)
+
+ . reduce 48 (src line 277)
+
+
+state 139
+ field_spec_list: field_spec. (57)
+
+ . reduce 57 (src line 303)
+
+
+state 140
+ field_spec: type_comma_list.type
+ type_comma_list: type_comma_list.',' type
+
+ ',' shift 173
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 172
+ type_no_typeobject goto 87
+
+state 141
+ type_comma_list: type. (60)
+
+ . reduce 60 (src line 355)
+
+
+state 142
+ type_no_typeobject: tUNION '{' field_spec_list.osemi '}'
+ field_spec_list: field_spec_list.';' field_spec
+ osemi: . (146)
+
+ ';' shift 171
+ . reduce 146 (src line 640)
+
+ osemi goto 174
+
+state 143
+ type_no_typeobject: tUNION '{' '}'. (50)
+
+ . reduce 50 (src line 281)
+
+
+state 144
+ dotnameref: dotnameref '.' tIDENT. (143)
+
+ . reduce 143 (src line 631)
+
+
+state 145
+ comp_lit: otype '{' '}'. (124)
+
+ . reduce 124 (src line 549)
+
+
+state 146
+ comp_lit: otype '{' kv_lit_list.ocomma '}'
+ kv_lit_list: kv_lit_list.',' kv_lit
+ ocomma: . (148)
+
+ ',' shift 176
+ . reduce 148 (src line 644)
+
+ ocomma goto 175
+
+state 147
+ kv_lit_list: kv_lit. (126)
+
+ . reduce 126 (src line 555)
+
+
+state 148
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+ kv_lit: expr. (128)
+ kv_lit: expr.':' expr
+
+ ':' shift 177
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 128 (src line 561)
+
+
+state 149
+ imports: imports import ';'. (15)
+
+ . reduce 15 (src line 187)
+
+
+state 150
+ import: tIMPORT '('.')'
+ import: tIMPORT '('.import_spec_list osemi ')'
+
+ ')' shift 178
+ tIDENT shift 153
+ tSTRLIT shift 152
+ . error
+
+ import_spec_list goto 179
+ import_spec goto 180
+
+state 151
+ import: tIMPORT import_spec. (18)
+
+ . reduce 18 (src line 192)
+
+
+state 152
+ import_spec: tSTRLIT. (21)
+
+ . reduce 21 (src line 198)
+
+
+state 153
+ import_spec: tIDENT.tSTRLIT
+
+ tSTRLIT shift 181
+ . error
+
+
+state 154
+ defs: defs type_def.';'
+
+ ';' shift 182
+ . error
+
+
+state 155
+ defs: defs const_def.';'
+
+ ';' shift 183
+ . error
+
+
+state 156
+ defs: defs error_def.';'
+
+ ';' shift 184
+ . error
+
+
+state 157
+ type_def: tTYPE.'(' ')'
+ type_def: tTYPE.'(' type_spec_list osemi ')'
+ type_def: tTYPE.type_spec
+ type_def: tTYPE.interface_spec
+
+ '(' shift 185
+ tIDENT shift 188
+ . error
+
+ type_spec goto 186
+ interface_spec goto 187
+
+state 158
+ const_def: tCONST.'(' ')'
+ const_def: tCONST.'(' const_spec_list osemi ')'
+ const_def: tCONST.const_spec
+
+ '(' shift 189
+ tIDENT shift 191
+ . error
+
+ const_spec goto 190
+
+state 159
+ error_def: tERROR.'(' ')'
+ error_def: tERROR.'(' error_spec_list osemi ')'
+ error_def: tERROR.error_spec
+
+ '(' shift 192
+ tIDENT shift 194
+ . error
+
+ error_spec goto 193
+
+state 160
+ config: tIDENT '=' expr ';'. (13)
+
+ . reduce 13 (src line 171)
+
+
+state 161
+ unary_expr: type_no_typeobject '(' expr ')'. (112)
+
+ . reduce 112 (src line 521)
+
+
+state 162
+ unary_expr: tTYPEOBJECT '(' type ')'. (113)
+
+ . reduce 113 (src line 523)
+
+
+state 163
+ operand: nameref '[' expr ']'. (122)
+
+ . reduce 122 (src line 544)
+
+
+state 164
+ operand: comp_lit '[' expr ']'. (121)
+
+ . reduce 121 (src line 542)
+
+
+state 165
+ type_no_typeobject: '[' tINTLIT ']' type. (42)
+
+ . reduce 42 (src line 265)
+
+
+state 166
+ type_no_typeobject: tENUM '{' label_spec_list osemi.'}'
+
+ '}' shift 195
+ . error
+
+
+state 167
+ label_spec_list: label_spec_list ';'.label_spec
+ osemi: ';'. (147)
+
+ tIDENT shift 134
+ . reduce 147 (src line 642)
+
+ label_spec goto 196
+
+state 168
+ type_no_typeobject: tSET '[' type ']'. (45)
+
+ . reduce 45 (src line 271)
+
+
+state 169
+ type_no_typeobject: tMAP '[' type ']'.type
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 197
+ type_no_typeobject goto 87
+
+state 170
+ type_no_typeobject: tSTRUCT '{' field_spec_list osemi.'}'
+
+ '}' shift 198
+ . error
+
+
+state 171
+ field_spec_list: field_spec_list ';'.field_spec
+ osemi: ';'. (147)
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . reduce 147 (src line 642)
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 141
+ type_no_typeobject goto 87
+ type_comma_list goto 140
+ field_spec goto 199
+
+state 172
+ field_spec: type_comma_list type. (59)
+
+ . reduce 59 (src line 343)
+
+
+state 173
+ type_comma_list: type_comma_list ','.type
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 200
+ type_no_typeobject goto 87
+
+state 174
+ type_no_typeobject: tUNION '{' field_spec_list osemi.'}'
+
+ '}' shift 201
+ . error
+
+
+state 175
+ comp_lit: otype '{' kv_lit_list ocomma.'}'
+
+ '}' shift 202
+ . error
+
+
+state 176
+ kv_lit_list: kv_lit_list ','.kv_lit
+ ocomma: ','. (149)
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '}' reduce 149 (src line 646)
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 148
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+ kv_lit goto 203
+
+state 177
+ kv_lit: expr ':'.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 204
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 178
+ import: tIMPORT '(' ')'. (16)
+
+ . reduce 16 (src line 189)
+
+
+state 179
+ import: tIMPORT '(' import_spec_list.osemi ')'
+ import_spec_list: import_spec_list.';' import_spec
+ osemi: . (146)
+
+ ';' shift 206
+ . reduce 146 (src line 640)
+
+ osemi goto 205
+
+state 180
+ import_spec_list: import_spec. (19)
+
+ . reduce 19 (src line 194)
+
+
+state 181
+ import_spec: tIDENT tSTRLIT. (22)
+
+ . reduce 22 (src line 204)
+
+
+state 182
+ defs: defs type_def ';'. (24)
+
+ . reduce 24 (src line 213)
+
+
+state 183
+ defs: defs const_def ';'. (25)
+
+ . reduce 25 (src line 214)
+
+
+state 184
+ defs: defs error_def ';'. (26)
+
+ . reduce 26 (src line 215)
+
+
+state 185
+ type_def: tTYPE '('.')'
+ type_def: tTYPE '('.type_spec_list osemi ')'
+
+ ')' shift 207
+ tIDENT shift 210
+ . error
+
+ type_spec_list goto 208
+ type_spec goto 209
+
+state 186
+ type_def: tTYPE type_spec. (29)
+
+ . reduce 29 (src line 220)
+
+
+state 187
+ type_def: tTYPE interface_spec. (30)
+
+ . reduce 30 (src line 221)
+
+
+state 188
+ type_spec: tIDENT.type
+ interface_spec: tIDENT.tINTERFACE '{' '}'
+ interface_spec: tIDENT.tINTERFACE '{' iface_item_list osemi '}'
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tINTERFACE shift 212
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 211
+ type_no_typeobject goto 87
+
+state 189
+ const_def: tCONST '('.')'
+ const_def: tCONST '('.const_spec_list osemi ')'
+
+ ')' shift 213
+ tIDENT shift 191
+ . error
+
+ const_spec_list goto 214
+ const_spec goto 215
+
+state 190
+ const_def: tCONST const_spec. (33)
+
+ . reduce 33 (src line 226)
+
+
+state 191
+ const_spec: tIDENT.'=' expr
+
+ '=' shift 216
+ . error
+
+
+state 192
+ error_def: tERROR '('.')'
+ error_def: tERROR '('.error_spec_list osemi ')'
+
+ ')' shift 217
+ tIDENT shift 194
+ . error
+
+ error_spec_list goto 218
+ error_spec goto 219
+
+state 193
+ error_def: tERROR error_spec. (36)
+
+ . reduce 36 (src line 231)
+
+
+state 194
+ error_spec: tIDENT.inargs error_details
+
+ '(' shift 221
+ . error
+
+ inargs goto 220
+
+state 195
+ type_no_typeobject: tENUM '{' label_spec_list osemi '}'. (44)
+
+ . reduce 44 (src line 269)
+
+
+state 196
+ label_spec_list: label_spec_list ';' label_spec. (55)
+
+ . reduce 55 (src line 296)
+
+
+state 197
+ type_no_typeobject: tMAP '[' type ']' type. (46)
+
+ . reduce 46 (src line 273)
+
+
+state 198
+ type_no_typeobject: tSTRUCT '{' field_spec_list osemi '}'. (47)
+
+ . reduce 47 (src line 275)
+
+
+state 199
+ field_spec_list: field_spec_list ';' field_spec. (58)
+
+ . reduce 58 (src line 306)
+
+
+state 200
+ type_comma_list: type_comma_list ',' type. (61)
+
+ . reduce 61 (src line 358)
+
+
+state 201
+ type_no_typeobject: tUNION '{' field_spec_list osemi '}'. (49)
+
+ . reduce 49 (src line 279)
+
+
+state 202
+ comp_lit: otype '{' kv_lit_list ocomma '}'. (125)
+
+ . reduce 125 (src line 552)
+
+
+state 203
+ kv_lit_list: kv_lit_list ',' kv_lit. (127)
+
+ . reduce 127 (src line 558)
+
+
+state 204
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+ kv_lit: expr ':' expr. (129)
+
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 129 (src line 564)
+
+
+state 205
+ import: tIMPORT '(' import_spec_list osemi.')'
+
+ ')' shift 222
+ . error
+
+
+state 206
+ import_spec_list: import_spec_list ';'.import_spec
+ osemi: ';'. (147)
+
+ tIDENT shift 153
+ tSTRLIT shift 152
+ . reduce 147 (src line 642)
+
+ import_spec goto 223
+
+state 207
+ type_def: tTYPE '(' ')'. (27)
+
+ . reduce 27 (src line 217)
+
+
+state 208
+ type_def: tTYPE '(' type_spec_list.osemi ')'
+ type_spec_list: type_spec_list.';' type_spec
+ osemi: . (146)
+
+ ';' shift 225
+ . reduce 146 (src line 640)
+
+ osemi goto 224
+
+state 209
+ type_spec_list: type_spec. (37)
+
+ . reduce 37 (src line 234)
+
+
+state 210
+ type_spec: tIDENT.type
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 211
+ type_no_typeobject goto 87
+
+state 211
+ type_spec: tIDENT type. (39)
+
+ . reduce 39 (src line 238)
+
+
+state 212
+ interface_spec: tIDENT tINTERFACE.'{' '}'
+ interface_spec: tIDENT tINTERFACE.'{' iface_item_list osemi '}'
+
+ '{' shift 226
+ . error
+
+
+state 213
+ const_def: tCONST '(' ')'. (31)
+
+ . reduce 31 (src line 223)
+
+
+state 214
+ const_def: tCONST '(' const_spec_list.osemi ')'
+ const_spec_list: const_spec_list.';' const_spec
+ osemi: . (146)
+
+ ';' shift 228
+ . reduce 146 (src line 640)
+
+ osemi goto 227
+
+state 215
+ const_spec_list: const_spec. (85)
+
+ . reduce 85 (src line 459)
+
+
+state 216
+ const_spec: tIDENT '='.expr
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 229
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 217
+ error_def: tERROR '(' ')'. (34)
+
+ . reduce 34 (src line 228)
+
+
+state 218
+ error_def: tERROR '(' error_spec_list.osemi ')'
+ error_spec_list: error_spec_list.';' error_spec
+ osemi: . (146)
+
+ ';' shift 231
+ . reduce 146 (src line 640)
+
+ osemi goto 230
+
+state 219
+ error_spec_list: error_spec. (130)
+
+ . reduce 130 (src line 568)
+
+
+state 220
+ error_spec: tIDENT inargs.error_details
+ error_details: . (133)
+
+ '{' shift 233
+ . reduce 133 (src line 584)
+
+ error_details goto 232
+
+state 221
+ inargs: '('.')'
+ inargs: '('.named_arg_list ocomma ')'
+ inargs: '('.type_comma_list ocomma ')'
+
+ ')' shift 234
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 141
+ type_no_typeobject goto 87
+ type_comma_list goto 236
+ field_spec goto 237
+ named_arg_list goto 235
+
+state 222
+ import: tIMPORT '(' import_spec_list osemi ')'. (17)
+
+ . reduce 17 (src line 191)
+
+
+state 223
+ import_spec_list: import_spec_list ';' import_spec. (20)
+
+ . reduce 20 (src line 196)
+
+
+state 224
+ type_def: tTYPE '(' type_spec_list osemi.')'
+
+ ')' shift 238
+ . error
+
+
+state 225
+ type_spec_list: type_spec_list ';'.type_spec
+ osemi: ';'. (147)
+
+ tIDENT shift 210
+ . reduce 147 (src line 642)
+
+ type_spec goto 239
+
+state 226
+ interface_spec: tIDENT tINTERFACE '{'.'}'
+ interface_spec: tIDENT tINTERFACE '{'.iface_item_list osemi '}'
+
+ '}' shift 240
+ tIDENT shift 243
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 244
+ dotnameref goto 38
+ iface_item_list goto 241
+ iface_item goto 242
+
+state 227
+ const_def: tCONST '(' const_spec_list osemi.')'
+
+ ')' shift 245
+ . error
+
+
+state 228
+ const_spec_list: const_spec_list ';'.const_spec
+ osemi: ';'. (147)
+
+ tIDENT shift 191
+ . reduce 147 (src line 642)
+
+ const_spec goto 246
+
+state 229
+ const_spec: tIDENT '=' expr. (87)
+ expr: expr.tOROR expr
+ expr: expr.tANDAND expr
+ expr: expr.'<' expr
+ expr: expr.'>' expr
+ expr: expr.tLE expr
+ expr: expr.tGE expr
+ expr: expr.tNE expr
+ expr: expr.tEQEQ expr
+ expr: expr.'+' expr
+ expr: expr.'-' expr
+ expr: expr.'*' expr
+ expr: expr.'/' expr
+ expr: expr.'%' expr
+ expr: expr.'|' expr
+ expr: expr.'&' expr
+ expr: expr.'^' expr
+ expr: expr.tLSH expr
+ expr: expr.tRSH expr
+
+ '<' shift 52
+ '>' shift 53
+ '+' shift 58
+ '-' shift 59
+ '*' shift 60
+ '/' shift 61
+ '%' shift 62
+ '|' shift 63
+ '&' shift 64
+ '^' shift 65
+ tOROR shift 50
+ tANDAND shift 51
+ tLE shift 54
+ tGE shift 55
+ tNE shift 56
+ tEQEQ shift 57
+ tLSH shift 66
+ tRSH shift 67
+ . reduce 87 (src line 463)
+
+
+state 230
+ error_def: tERROR '(' error_spec_list osemi.')'
+
+ ')' shift 247
+ . error
+
+
+state 231
+ error_spec_list: error_spec_list ';'.error_spec
+ osemi: ';'. (147)
+
+ tIDENT shift 194
+ . reduce 147 (src line 642)
+
+ error_spec goto 248
+
+state 232
+ error_spec: tIDENT inargs error_details. (132)
+
+ . reduce 132 (src line 572)
+
+
+state 233
+ error_details: '{'.'}'
+ error_details: '{'.error_detail_list ocomma '}'
+
+ '}' shift 249
+ tIDENT shift 252
+ tSTRLIT shift 253
+ . error
+
+ error_detail_list goto 250
+ error_detail goto 251
+
+state 234
+ inargs: '(' ')'. (68)
+
+ . reduce 68 (src line 391)
+
+
+state 235
+ inargs: '(' named_arg_list.ocomma ')'
+ named_arg_list: named_arg_list.',' field_spec
+ ocomma: . (148)
+
+ ',' shift 255
+ . reduce 148 (src line 644)
+
+ ocomma goto 254
+
+state 236
+ field_spec: type_comma_list.type
+ type_comma_list: type_comma_list.',' type
+ inargs: '(' type_comma_list.ocomma ')'
+ ocomma: . (148)
+
+ ',' shift 256
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . reduce 148 (src line 644)
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 172
+ type_no_typeobject goto 87
+ ocomma goto 257
+
+state 237
+ named_arg_list: field_spec. (71)
+
+ . reduce 71 (src line 407)
+
+
+state 238
+ type_def: tTYPE '(' type_spec_list osemi ')'. (28)
+
+ . reduce 28 (src line 219)
+
+
+state 239
+ type_spec_list: type_spec_list ';' type_spec. (38)
+
+ . reduce 38 (src line 236)
+
+
+state 240
+ interface_spec: tIDENT tINTERFACE '{' '}'. (62)
+
+ . reduce 62 (src line 362)
+
+
+state 241
+ interface_spec: tIDENT tINTERFACE '{' iface_item_list.osemi '}'
+ iface_item_list: iface_item_list.';' iface_item
+ osemi: . (146)
+
+ ';' shift 259
+ . reduce 146 (src line 640)
+
+ osemi goto 258
+
+state 242
+ iface_item_list: iface_item. (64)
+
+ . reduce 64 (src line 375)
+
+
+state 243
+ iface_item: tIDENT.inargs streamargs outargs tags
+ dotnameref: tIDENT. (142)
+
+ '(' shift 221
+ . reduce 142 (src line 628)
+
+ inargs goto 260
+
+state 244
+ iface_item: nameref. (67)
+
+ . reduce 67 (src line 388)
+
+
+state 245
+ const_def: tCONST '(' const_spec_list osemi ')'. (32)
+
+ . reduce 32 (src line 225)
+
+
+state 246
+ const_spec_list: const_spec_list ';' const_spec. (86)
+
+ . reduce 86 (src line 461)
+
+
+state 247
+ error_def: tERROR '(' error_spec_list osemi ')'. (35)
+
+ . reduce 35 (src line 230)
+
+
+state 248
+ error_spec_list: error_spec_list ';' error_spec. (131)
+
+ . reduce 131 (src line 570)
+
+
+state 249
+ error_details: '{' '}'. (134)
+
+ . reduce 134 (src line 587)
+
+
+state 250
+ error_details: '{' error_detail_list.ocomma '}'
+ error_detail_list: error_detail_list.',' error_detail
+ ocomma: . (148)
+
+ ',' shift 262
+ . reduce 148 (src line 644)
+
+ ocomma goto 261
+
+state 251
+ error_detail_list: error_detail. (136)
+
+ . reduce 136 (src line 592)
+
+
+state 252
+ error_detail: tIDENT. (138)
+
+ . reduce 138 (src line 607)
+
+
+state 253
+ error_detail: tSTRLIT.':' tSTRLIT
+
+ ':' shift 263
+ . error
+
+
+state 254
+ inargs: '(' named_arg_list ocomma.')'
+
+ ')' shift 264
+ . error
+
+
+state 255
+ named_arg_list: named_arg_list ','.field_spec
+ ocomma: ','. (149)
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . reduce 149 (src line 646)
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 141
+ type_no_typeobject goto 87
+ type_comma_list goto 140
+ field_spec goto 265
+
+state 256
+ type_comma_list: type_comma_list ','.type
+ ocomma: ','. (149)
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . reduce 149 (src line 646)
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 200
+ type_no_typeobject goto 87
+
+state 257
+ inargs: '(' type_comma_list ocomma.')'
+
+ ')' shift 266
+ . error
+
+
+state 258
+ interface_spec: tIDENT tINTERFACE '{' iface_item_list osemi.'}'
+
+ '}' shift 267
+ . error
+
+
+state 259
+ iface_item_list: iface_item_list ';'.iface_item
+ osemi: ';'. (147)
+
+ tIDENT shift 243
+ tSTRLIT shift 90
+ . reduce 147 (src line 642)
+
+ nameref goto 244
+ dotnameref goto 38
+ iface_item goto 268
+
+state 260
+ iface_item: tIDENT inargs.streamargs outargs tags
+ streamargs: . (76)
+
+ tSTREAM shift 270
+ . reduce 76 (src line 434)
+
+ streamargs goto 269
+
+state 261
+ error_details: '{' error_detail_list ocomma.'}'
+
+ '}' shift 271
+ . error
+
+
+state 262
+ error_detail_list: error_detail_list ','.error_detail
+ ocomma: ','. (149)
+
+ tIDENT shift 252
+ tSTRLIT shift 253
+ . reduce 149 (src line 646)
+
+ error_detail goto 272
+
+state 263
+ error_detail: tSTRLIT ':'.tSTRLIT
+
+ tSTRLIT shift 273
+ . error
+
+
+state 264
+ inargs: '(' named_arg_list ocomma ')'. (69)
+
+ . reduce 69 (src line 394)
+
+
+state 265
+ named_arg_list: named_arg_list ',' field_spec. (72)
+
+ . reduce 72 (src line 410)
+
+
+state 266
+ inargs: '(' type_comma_list ocomma ')'. (70)
+
+ . reduce 70 (src line 396)
+
+
+state 267
+ interface_spec: tIDENT tINTERFACE '{' iface_item_list osemi '}'. (63)
+
+ . reduce 63 (src line 368)
+
+
+state 268
+ iface_item_list: iface_item_list ';' iface_item. (65)
+
+ . reduce 65 (src line 378)
+
+
+state 269
+ iface_item: tIDENT inargs streamargs.outargs tags
+
+ '(' shift 276
+ tERROR shift 275
+ . error
+
+ outargs goto 274
+
+state 270
+ streamargs: tSTREAM.'<' '>'
+ streamargs: tSTREAM.'<' type '>'
+ streamargs: tSTREAM.'<' type ',' type '>'
+
+ '<' shift 277
+ . error
+
+
+state 271
+ error_details: '{' error_detail_list ocomma '}'. (135)
+
+ . reduce 135 (src line 589)
+
+
+state 272
+ error_detail_list: error_detail_list ',' error_detail. (137)
+
+ . reduce 137 (src line 595)
+
+
+state 273
+ error_detail: tSTRLIT ':' tSTRLIT. (139)
+
+ . reduce 139 (src line 610)
+
+
+state 274
+ iface_item: tIDENT inargs streamargs outargs.tags
+ tags: . (80)
+
+ '{' shift 279
+ . reduce 80 (src line 444)
+
+ tags goto 278
+
+state 275
+ outargs: tERROR. (73)
+
+ . reduce 73 (src line 420)
+
+
+state 276
+ outargs: '('.named_arg_list ocomma '|' tERROR ')'
+ outargs: '('.type_comma_list ocomma '|' tERROR ')'
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 141
+ type_no_typeobject goto 87
+ type_comma_list goto 281
+ field_spec goto 237
+ named_arg_list goto 280
+
+state 277
+ streamargs: tSTREAM '<'.'>'
+ streamargs: tSTREAM '<'.type '>'
+ streamargs: tSTREAM '<'.type ',' type '>'
+
+ '[' shift 31
+ '>' shift 282
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 283
+ type_no_typeobject goto 87
+
+state 278
+ iface_item: tIDENT inargs streamargs outargs tags. (66)
+
+ . reduce 66 (src line 385)
+
+
+state 279
+ tags: '{'.'}'
+ tags: '{'.expr_comma_list ocomma '}'
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '}' shift 284
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 14
+ unary_expr goto 15
+ operand goto 16
+ expr_comma_list goto 285
+ comp_lit goto 28
+
+state 280
+ named_arg_list: named_arg_list.',' field_spec
+ outargs: '(' named_arg_list.ocomma '|' tERROR ')'
+ ocomma: . (148)
+
+ ',' shift 255
+ . reduce 148 (src line 644)
+
+ ocomma goto 286
+
+state 281
+ field_spec: type_comma_list.type
+ type_comma_list: type_comma_list.',' type
+ outargs: '(' type_comma_list.ocomma '|' tERROR ')'
+ ocomma: . (148)
+
+ ',' shift 256
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . reduce 148 (src line 644)
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 172
+ type_no_typeobject goto 87
+ ocomma goto 287
+
+state 282
+ streamargs: tSTREAM '<' '>'. (77)
+
+ . reduce 77 (src line 437)
+
+
+state 283
+ streamargs: tSTREAM '<' type.'>'
+ streamargs: tSTREAM '<' type.',' type '>'
+
+ ',' shift 289
+ '>' shift 288
+ . error
+
+
+state 284
+ tags: '{' '}'. (81)
+
+ . reduce 81 (src line 447)
+
+
+state 285
+ tags: '{' expr_comma_list.ocomma '}'
+ expr_comma_list: expr_comma_list.',' expr
+ ocomma: . (148)
+
+ ',' shift 291
+ . reduce 148 (src line 644)
+
+ ocomma goto 290
+
+state 286
+ outargs: '(' named_arg_list ocomma.'|' tERROR ')'
+
+ '|' shift 292
+ . error
+
+
+state 287
+ outargs: '(' type_comma_list ocomma.'|' tERROR ')'
+
+ '|' shift 293
+ . error
+
+
+state 288
+ streamargs: tSTREAM '<' type '>'. (78)
+
+ . reduce 78 (src line 439)
+
+
+state 289
+ streamargs: tSTREAM '<' type ','.type '>'
+
+ '[' shift 31
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 88
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 90
+ . error
+
+ nameref goto 89
+ dotnameref goto 38
+ type goto 294
+ type_no_typeobject goto 87
+
+state 290
+ tags: '{' expr_comma_list ocomma.'}'
+
+ '}' shift 295
+ . error
+
+
+state 291
+ expr_comma_list: expr_comma_list ','.expr
+ ocomma: ','. (149)
+ otype: . (144)
+
+ '(' shift 29
+ '[' shift 31
+ '}' reduce 149 (src line 646)
+ '!' shift 17
+ '+' shift 18
+ '-' shift 19
+ '^' shift 20
+ '?' shift 37
+ tENUM shift 32
+ tERROR shift 30
+ tMAP shift 34
+ tSET shift 33
+ tSTRUCT shift 35
+ tTYPEOBJECT shift 22
+ tUNION shift 36
+ tIDENT shift 40
+ tSTRLIT shift 23
+ tINTLIT shift 24
+ tRATLIT shift 25
+ tIMAGLIT shift 26
+ . reduce 144 (src line 634)
+
+ nameref goto 27
+ dotnameref goto 38
+ type goto 41
+ type_no_typeobject goto 21
+ otype goto 39
+ expr goto 104
+ unary_expr goto 15
+ operand goto 16
+ comp_lit goto 28
+
+state 292
+ outargs: '(' named_arg_list ocomma '|'.tERROR ')'
+
+ tERROR shift 296
+ . error
+
+
+state 293
+ outargs: '(' type_comma_list ocomma '|'.tERROR ')'
+
+ tERROR shift 297
+ . error
+
+
+state 294
+ streamargs: tSTREAM '<' type ',' type.'>'
+
+ '>' shift 298
+ . error
+
+
+state 295
+ tags: '{' expr_comma_list ocomma '}'. (82)
+
+ . reduce 82 (src line 449)
+
+
+state 296
+ outargs: '(' named_arg_list ocomma '|' tERROR.')'
+
+ ')' shift 299
+ . error
+
+
+state 297
+ outargs: '(' type_comma_list ocomma '|' tERROR.')'
+
+ ')' shift 300
+ . error
+
+
+state 298
+ streamargs: tSTREAM '<' type ',' type '>'. (79)
+
+ . reduce 79 (src line 441)
+
+
+state 299
+ outargs: '(' named_arg_list ocomma '|' tERROR ')'. (74)
+
+ . reduce 74 (src line 423)
+
+
+state 300
+ outargs: '(' type_comma_list ocomma '|' tERROR ')'. (75)
+
+ . reduce 75 (src line 425)
+
+
+59 terminals, 49 nonterminals
+150 grammar rules, 301/2000 states
+0 shift/reduce, 0 reduce/reduce conflicts reported
+98 working sets used
+memory: parser 590/30000
+142 extra closures
+1361 shift entries, 5 exceptions
+192 goto entries
+303 entries saved by goto default
+Optimizer space used: output 800/30000
+800 table entries, 148 zero
+maximum spread: 57, maximum offset: 291
diff --git a/lib/vdl/parse/grammar.y.go b/lib/vdl/parse/grammar.y.go
new file mode 100644
index 0000000..effcb3a
--- /dev/null
+++ b/lib/vdl/parse/grammar.y.go
@@ -0,0 +1,1289 @@
+//line grammar.y:18
+
+// This grammar.y.go file was auto-generated by yacc from grammar.y.
+
+package parse
+
+import __yyfmt__ "fmt"
+
+//line grammar.y:20
+import (
+ "math/big"
+ "strings"
+)
+
+type intPos struct {
+ int *big.Int
+ pos Pos
+}
+
+type ratPos struct {
+ rat *big.Rat
+ pos Pos
+}
+
+type imagPos struct {
+ imag *BigImag
+ pos Pos
+}
+
+// typeListToStrList converts a slice of Type to a slice of StringPos. Each
+// type must be a TypeNamed with an empty PackageName, otherwise errors are
+// reported, and ok=false is returned.
+func typeListToStrList(yylex yyLexer, typeList []Type) (strList []StringPos, ok bool) {
+ ok = true
+ for _, t := range typeList {
+ var tn *TypeNamed
+ if tn, ok = t.(*TypeNamed); !ok {
+ lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names)", t.String())
+ return
+ }
+ if strings.ContainsRune(tn.Name, '.') {
+ ok = false
+ lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names).", tn.Name)
+ return
+ }
+ strList = append(strList, StringPos{tn.Name, tn.P})
+ }
+ return
+}
+
+//line grammar.y:67
+type yySymType struct {
+ yys int
+ pos Pos
+ strpos StringPos
+ intpos intPos
+ ratpos ratPos
+ imagpos imagPos
+ namepos NamePos
+ nameposes []NamePos
+ typeexpr Type
+ typeexprs []Type
+ fields []*Field
+ iface *Interface
+ constexpr ConstExpr
+ constexprs []ConstExpr
+ complit *ConstCompositeLit
+ kvlit KVLit
+ kvlits []KVLit
+ errordef ErrorDef
+}
+
+const startFileImports = 57346
+const startFile = 57347
+const startConfigImports = 57348
+const startConfig = 57349
+const startExprs = 57350
+const tOROR = 57351
+const tANDAND = 57352
+const tLE = 57353
+const tGE = 57354
+const tNE = 57355
+const tEQEQ = 57356
+const tLSH = 57357
+const tRSH = 57358
+const tCONST = 57359
+const tENUM = 57360
+const tERROR = 57361
+const tIMPORT = 57362
+const tINTERFACE = 57363
+const tMAP = 57364
+const tPACKAGE = 57365
+const tSET = 57366
+const tSTREAM = 57367
+const tSTRUCT = 57368
+const tTYPE = 57369
+const tTYPEOBJECT = 57370
+const tUNION = 57371
+const tIDENT = 57372
+const tSTRLIT = 57373
+const tINTLIT = 57374
+const tRATLIT = 57375
+const tIMAGLIT = 57376
+const notPackage = 57377
+const notConfig = 57378
+
+var yyToknames = []string{
+ "startFileImports",
+ "startFile",
+ "startConfigImports",
+ "startConfig",
+ "startExprs",
+ "';'",
+ "':'",
+ "','",
+ "'.'",
+ "'('",
+ "')'",
+ "'['",
+ "']'",
+ "'{'",
+ "'}'",
+ "'<'",
+ "'>'",
+ "'='",
+ "'!'",
+ "'+'",
+ "'-'",
+ "'*'",
+ "'/'",
+ "'%'",
+ "'|'",
+ "'&'",
+ "'^'",
+ "'?'",
+ "tOROR",
+ "tANDAND",
+ "tLE",
+ "tGE",
+ "tNE",
+ "tEQEQ",
+ "tLSH",
+ "tRSH",
+ "tCONST",
+ "tENUM",
+ "tERROR",
+ "tIMPORT",
+ "tINTERFACE",
+ "tMAP",
+ "tPACKAGE",
+ "tSET",
+ "tSTREAM",
+ "tSTRUCT",
+ "tTYPE",
+ "tTYPEOBJECT",
+ "tUNION",
+ "tIDENT",
+ "tSTRLIT",
+ "tINTLIT",
+ "tRATLIT",
+ "tIMAGLIT",
+ "notPackage",
+ "notConfig",
+}
+var yyStatenames = []string{}
+
+const yyEofCode = 1
+const yyErrCode = 2
+const yyMaxDepth = 200
+
+//line yacctab:1
+var yyExca = []int{
+ -1, 1,
+ 1, -1,
+ -2, 0,
+ -1, 27,
+ 13, 40,
+ 17, 40,
+ -2, 118,
+ -1, 176,
+ 18, 149,
+ -2, 144,
+ -1, 291,
+ 18, 149,
+ -2, 144,
+}
+
+const yyNprod = 150
+const yyPrivate = 57344
+
+var yyTokenNames []string
+var yyStates []string
+
+const yyLast = 800
+
+var yyAct = []int{
+
+ 14, 27, 13, 140, 139, 242, 251, 235, 220, 151,
+ 186, 193, 175, 137, 147, 38, 190, 252, 253, 133,
+ 166, 243, 90, 153, 152, 273, 181, 194, 191, 210,
+ 78, 21, 134, 217, 192, 249, 144, 127, 80, 89,
+ 40, 43, 213, 11, 240, 270, 8, 102, 98, 178,
+ 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
+ 114, 115, 116, 117, 118, 119, 120, 121, 122, 87,
+ 252, 253, 194, 123, 194, 89, 126, 79, 128, 243,
+ 90, 191, 89, 150, 89, 89, 89, 89, 153, 152,
+ 125, 207, 297, 148, 189, 185, 158, 276, 159, 142,
+ 41, 296, 93, 293, 292, 87, 157, 216, 100, 46,
+ 289, 29, 87, 31, 87, 87, 87, 87, 295, 288,
+ 17, 18, 19, 153, 152, 298, 275, 277, 20, 37,
+ 210, 271, 89, 267, 191, 188, 202, 201, 86, 32,
+ 30, 198, 89, 34, 195, 33, 279, 35, 101, 22,
+ 36, 40, 23, 24, 25, 26, 103, 233, 170, 226,
+ 180, 92, 87, 174, 85, 84, 60, 61, 62, 169,
+ 64, 89, 87, 89, 124, 89, 199, 148, 204, 66,
+ 67, 131, 81, 135, 136, 141, 141, 196, 168, 15,
+ 89, 203, 96, 130, 97, 98, 209, 83, 82, 75,
+ 205, 87, 95, 87, 219, 87, 215, 68, 69, 70,
+ 71, 76, 89, 221, 77, 300, 223, 229, 299, 266,
+ 87, 264, 247, 89, 245, 236, 237, 238, 244, 224,
+ 222, 165, 162, 73, 72, 227, 239, 91, 89, 230,
+ 74, 172, 87, 248, 48, 246, 49, 291, 254, 257,
+ 255, 262, 260, 87, 176, 263, 259, 89, 89, 231,
+ 265, 244, 258, 261, 228, 268, 225, 206, 87, 272,
+ 197, 184, 141, 183, 200, 182, 171, 167, 89, 89,
+ 281, 237, 285, 89, 280, 218, 149, 87, 87, 211,
+ 99, 89, 104, 286, 287, 214, 187, 208, 290, 58,
+ 59, 60, 61, 62, 63, 64, 65, 42, 87, 87,
+ 10, 211, 156, 87, 66, 67, 12, 44, 45, 155,
+ 47, 87, 141, 2, 3, 4, 5, 6, 154, 7,
+ 29, 179, 31, 9, 94, 284, 1, 172, 250, 17,
+ 18, 19, 232, 146, 28, 278, 16, 20, 37, 241,
+ 274, 269, 39, 132, 0, 0, 141, 200, 32, 30,
+ 0, 0, 34, 0, 33, 0, 35, 0, 22, 36,
+ 40, 23, 24, 25, 26, 0, 0, 141, 283, 0,
+ 0, 0, 172, 29, 0, 31, 0, 0, 145, 0,
+ 294, 0, 17, 18, 19, 0, 0, 0, 256, 0,
+ 20, 37, 31, 0, 0, 0, 0, 0, 0, 0,
+ 0, 32, 30, 0, 0, 34, 0, 33, 37, 35,
+ 0, 22, 36, 40, 23, 24, 25, 26, 32, 30,
+ 31, 0, 34, 0, 33, 282, 35, 0, 88, 36,
+ 40, 90, 0, 0, 234, 31, 37, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 32, 30, 0, 0,
+ 34, 37, 33, 0, 35, 0, 88, 36, 40, 90,
+ 31, 32, 30, 0, 0, 34, 0, 33, 0, 35,
+ 0, 88, 36, 40, 90, 173, 37, 0, 0, 31,
+ 0, 0, 0, 0, 0, 0, 32, 30, 0, 212,
+ 34, 0, 33, 0, 35, 37, 88, 36, 40, 90,
+ 0, 0, 0, 0, 31, 32, 30, 143, 0, 34,
+ 0, 33, 0, 35, 0, 88, 36, 40, 90, 31,
+ 37, 0, 138, 0, 0, 0, 0, 0, 0, 0,
+ 32, 30, 0, 0, 34, 37, 33, 0, 35, 0,
+ 88, 36, 40, 90, 31, 32, 30, 0, 0, 34,
+ 0, 33, 0, 35, 0, 88, 36, 40, 90, 0,
+ 37, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 32, 30, 0, 0, 34, 177, 33, 0, 35, 0,
+ 88, 36, 40, 90, 52, 53, 0, 0, 58, 59,
+ 60, 61, 62, 63, 64, 65, 0, 50, 51, 54,
+ 55, 56, 57, 66, 67, 164, 0, 0, 52, 53,
+ 0, 0, 58, 59, 60, 61, 62, 63, 64, 65,
+ 0, 50, 51, 54, 55, 56, 57, 66, 67, 163,
+ 0, 0, 52, 53, 0, 0, 58, 59, 60, 61,
+ 62, 63, 64, 65, 0, 50, 51, 54, 55, 56,
+ 57, 66, 67, 161, 0, 0, 0, 0, 52, 53,
+ 0, 0, 58, 59, 60, 61, 62, 63, 64, 65,
+ 160, 50, 51, 54, 55, 56, 57, 66, 67, 0,
+ 52, 53, 0, 0, 58, 59, 60, 61, 62, 63,
+ 64, 65, 0, 50, 51, 54, 55, 56, 57, 66,
+ 67, 129, 0, 0, 0, 0, 52, 53, 0, 0,
+ 58, 59, 60, 61, 62, 63, 64, 65, 0, 50,
+ 51, 54, 55, 56, 57, 66, 67, 52, 53, 0,
+ 0, 58, 59, 60, 61, 62, 63, 64, 65, 0,
+ 50, 51, 54, 55, 56, 57, 66, 67, 52, 53,
+ 0, 0, 58, 59, 60, 61, 62, 63, 64, 65,
+ 0, 0, 51, 54, 55, 56, 57, 66, 67, 52,
+ 53, 0, 0, 58, 59, 60, 61, 62, 63, 64,
+ 65, 0, 0, 0, 54, 55, 56, 57, 66, 67,
+}
+var yyPact = []int{
+
+ 319, -1000, 0, 0, -10, -10, 98, -1000, -12, -1000,
+ -1000, 88, -1000, 235, 718, -1000, -1000, 98, 98, 98,
+ 98, 221, 220, 228, -1000, -1000, -1000, 184, 199, 98,
+ -1000, 22, 165, 183, 182, 148, 147, 539, 225, 144,
+ -1000, -1000, 152, 281, 5, 152, 98, 5, -1000, 98,
+ 98, 98, 98, 98, 98, 98, 98, 98, 98, 98,
+ 98, 98, 98, 98, 98, 98, 98, 98, -1000, -1000,
+ -1000, -1000, 98, 539, -13, 98, -16, 98, 697, 177,
+ 539, -21, 539, 539, 514, 499, -1000, -1000, -1000, -1000,
+ 228, -17, 370, -1000, 277, -1000, -1000, -1000, 70, -1000,
+ 56, -1000, 671, 56, 718, 739, 760, 276, 276, 276,
+ 276, 276, 276, 141, 141, -1000, -1000, -1000, 141, -1000,
+ 141, -1000, -1000, 649, 218, 225, 623, -1000, 599, -1000,
+ 539, -1000, 268, -1000, -1000, 172, 153, 267, -1000, -1000,
+ 474, -1000, 267, -1000, -1000, -1000, 243, -1000, 575, -1000,
+ 35, -1000, -1000, -28, 266, 264, 262, 82, 81, 21,
+ -1000, -1000, -1000, -1000, -1000, -1000, 126, -21, -1000, 539,
+ 123, 539, -1000, 539, 119, 118, 98, 98, -1000, 258,
+ -1000, -1000, -1000, -1000, -1000, 77, -1000, -1000, 455, 28,
+ -1000, 86, 19, -1000, 200, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, 718, 216, -30, -1000, 257, -1000,
+ 539, -1000, 142, -1000, 255, -1000, 98, -1000, 250, -1000,
+ 140, 430, -1000, -1000, 213, -24, 26, 210, -25, 718,
+ 208, -26, -1000, 17, -1000, 239, 387, -1000, -1000, -1000,
+ -1000, 247, -1000, 200, -1000, -1000, -1000, -1000, -1000, -1000,
+ 240, -1000, -1000, 245, 207, 539, 539, 205, 115, -32,
+ -3, 113, -36, -29, -1000, -1000, -1000, -1000, -1000, 84,
+ 108, -1000, -1000, -1000, 129, -1000, 539, 415, -1000, 317,
+ 239, 387, -1000, 99, -1000, 236, 76, 75, -1000, 539,
+ 100, 98, 59, 50, 105, -1000, 204, 201, -1000, -1000,
+ -1000,
+}
+var yyPgo = []int{
+
+ 0, 1, 15, 19, 353, 100, 31, 352, 3, 351,
+ 13, 4, 7, 8, 350, 349, 5, 0, 189, 346,
+ 345, 2, 344, 14, 343, 342, 338, 6, 336, 329,
+ 307, 102, 108, 310, 334, 331, 20, 9, 328, 319,
+ 312, 297, 10, 296, 295, 16, 285, 11, 12,
+}
+var yyR1 = []int{
+
+ 0, 28, 28, 28, 28, 28, 31, 31, 31, 31,
+ 29, 29, 33, 33, 30, 30, 34, 34, 34, 35,
+ 35, 37, 37, 32, 32, 32, 32, 38, 38, 38,
+ 38, 39, 39, 39, 40, 40, 40, 41, 41, 42,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 5, 5, 4, 4, 3, 10, 10, 11,
+ 8, 8, 43, 43, 15, 15, 16, 16, 13, 13,
+ 13, 12, 12, 14, 14, 14, 9, 9, 9, 9,
+ 20, 20, 20, 21, 21, 44, 44, 45, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 18, 18, 18,
+ 18, 18, 18, 18, 19, 19, 19, 19, 19, 19,
+ 19, 19, 19, 19, 22, 22, 24, 24, 23, 23,
+ 46, 46, 47, 25, 25, 25, 26, 26, 27, 27,
+ 1, 1, 2, 2, 7, 7, 36, 36, 48, 48,
+}
+var yyR2 = []int{
+
+ 0, 4, 4, 4, 4, 3, 0, 1, 1, 1,
+ 0, 3, 0, 4, 0, 3, 3, 5, 2, 1,
+ 3, 1, 2, 0, 3, 3, 3, 3, 5, 2,
+ 2, 3, 5, 2, 3, 5, 2, 1, 3, 2,
+ 1, 1, 4, 3, 5, 4, 5, 5, 3, 5,
+ 3, 2, 1, 1, 1, 3, 1, 1, 3, 2,
+ 1, 3, 4, 6, 1, 3, 5, 1, 2, 4,
+ 4, 1, 3, 1, 6, 6, 0, 3, 4, 6,
+ 0, 2, 4, 1, 3, 1, 3, 3, 1, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 1, 2, 2,
+ 2, 2, 4, 4, 1, 1, 1, 1, 1, 1,
+ 3, 4, 4, 3, 3, 5, 1, 3, 1, 3,
+ 1, 3, 3, 0, 2, 4, 1, 3, 1, 3,
+ 1, 3, 1, 3, 0, 1, 0, 1, 0, 1,
+}
+var yyChk = []int{
+
+ -1000, -28, 4, 5, 6, 7, 8, -29, 46, -29,
+ -33, 53, -33, -21, -17, -18, -19, 22, 23, 24,
+ 30, -6, 51, 54, 55, 56, 57, -1, -22, 13,
+ 42, 15, 41, 47, 45, 49, 52, 31, -2, -7,
+ 53, -5, -30, 53, -30, -30, 21, -30, 9, 11,
+ 32, 33, 19, 20, 34, 35, 36, 37, 23, 24,
+ 25, 26, 27, 28, 29, 30, 38, 39, -18, -18,
+ -18, -18, 13, 13, 12, 15, 12, 15, -17, 55,
+ 16, 17, 15, 15, 17, 17, -5, -6, 51, -1,
+ 54, 12, 17, -31, -34, 50, 40, 42, 43, 9,
+ -32, -31, -17, -32, -17, -17, -17, -17, -17, -17,
+ -17, -17, -17, -17, -17, -17, -17, -17, -17, -17,
+ -17, -17, -17, -17, -5, -2, -17, 53, -17, 14,
+ 16, -5, -4, -3, 53, -5, -5, -10, 18, -11,
+ -8, -5, -10, 18, 53, 18, -24, -23, -17, 9,
+ 13, -37, 54, 53, -38, -39, -40, 50, 40, 42,
+ 9, 14, 14, 16, 16, -5, -36, 9, 16, 16,
+ -36, 9, -5, 11, -36, -48, 11, 10, 14, -35,
+ -37, 54, 9, 9, 9, 13, -42, -43, 53, 13,
+ -45, 53, 13, -47, 53, 18, -3, -5, 18, -11,
+ -5, 18, 18, -23, -17, -36, 9, 14, -41, -42,
+ 53, -5, 44, 14, -44, -45, 21, 14, -46, -47,
+ -13, 13, 14, -37, -36, 9, 17, -36, 9, -17,
+ -36, 9, -25, 17, 14, -12, -8, -11, 14, -42,
+ 18, -15, -16, 53, -1, 14, -45, 14, -47, 18,
+ -26, -27, 53, 54, -48, 11, 11, -48, -36, 9,
+ -13, -48, 11, 10, 14, -11, 14, 18, -16, -9,
+ 48, 18, -27, 54, -14, 42, 13, 19, -20, 17,
+ -12, -8, 20, -5, 18, -21, -48, -48, 20, 11,
+ -48, 11, 28, 28, -5, 18, 42, 42, 20, 14,
+ 14,
+}
+var yyDef = []int{
+
+ 0, -2, 10, 10, 12, 12, 144, 14, 0, 14,
+ 14, 0, 14, 0, 83, 88, 107, 144, 144, 144,
+ 144, 52, 53, 114, 115, 116, 117, -2, 119, 144,
+ 41, 0, 0, 0, 0, 0, 0, 0, 140, 0,
+ 142, 145, 6, 0, 23, 6, 144, 23, 5, 144,
+ 144, 144, 144, 144, 144, 144, 144, 144, 144, 144,
+ 144, 144, 144, 144, 144, 144, 144, 144, 108, 109,
+ 110, 111, 144, 0, 0, 144, 0, 144, 0, 0,
+ 0, 0, 0, 0, 0, 0, 51, 52, 53, 40,
+ 0, 0, 144, 1, 0, 7, 8, 9, 0, 11,
+ 2, 3, 0, 4, 84, 89, 90, 91, 92, 93,
+ 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
+ 104, 105, 106, 0, 0, 141, 0, 120, 0, 123,
+ 0, 43, 146, 54, 56, 0, 0, 146, 48, 57,
+ 0, 60, 146, 50, 143, 124, 148, 126, 128, 15,
+ 0, 18, 21, 0, 0, 0, 0, 0, 0, 0,
+ 13, 112, 113, 122, 121, 42, 0, 147, 45, 0,
+ 0, 147, 59, 0, 0, 0, -2, 144, 16, 146,
+ 19, 22, 24, 25, 26, 0, 29, 30, 0, 0,
+ 33, 0, 0, 36, 0, 44, 55, 46, 47, 58,
+ 61, 49, 125, 127, 129, 0, 147, 27, 146, 37,
+ 0, 39, 0, 31, 146, 85, 144, 34, 146, 130,
+ 133, 0, 17, 20, 0, 147, 0, 0, 147, 87,
+ 0, 147, 132, 0, 68, 148, 148, 71, 28, 38,
+ 62, 146, 64, 142, 67, 32, 86, 35, 131, 134,
+ 148, 136, 138, 0, 0, 149, 149, 0, 0, 147,
+ 76, 0, 149, 0, 69, 72, 70, 63, 65, 0,
+ 0, 135, 137, 139, 80, 73, 0, 0, 66, 144,
+ 148, 148, 77, 0, 81, 148, 0, 0, 78, 0,
+ 0, -2, 0, 0, 0, 82, 0, 0, 79, 74,
+ 75,
+}
+var yyTok1 = []int{
+
+ 1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 22, 3, 3, 3, 27, 29, 3,
+ 13, 14, 25, 23, 11, 24, 12, 26, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 10, 9,
+ 19, 21, 20, 31, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 15, 3, 16, 30, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 17, 28, 18,
+}
+var yyTok2 = []int{
+
+ 2, 3, 4, 5, 6, 7, 8, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59,
+}
+var yyTok3 = []int{
+ 0,
+}
+
+//line yaccpar:1
+
+/* parser for yacc output */
+
+var yyDebug = 0
+
+type yyLexer interface {
+ Lex(lval *yySymType) int
+ Error(s string)
+}
+
+const yyFlag = -1000
+
+func yyTokname(c int) string {
+ // 4 is TOKSTART above
+ if c >= 4 && c-4 < len(yyToknames) {
+ if yyToknames[c-4] != "" {
+ return yyToknames[c-4]
+ }
+ }
+ return __yyfmt__.Sprintf("tok-%v", c)
+}
+
+func yyStatname(s int) string {
+ if s >= 0 && s < len(yyStatenames) {
+ if yyStatenames[s] != "" {
+ return yyStatenames[s]
+ }
+ }
+ return __yyfmt__.Sprintf("state-%v", s)
+}
+
+func yylex1(lex yyLexer, lval *yySymType) int {
+ c := 0
+ char := lex.Lex(lval)
+ if char <= 0 {
+ c = yyTok1[0]
+ goto out
+ }
+ if char < len(yyTok1) {
+ c = yyTok1[char]
+ goto out
+ }
+ if char >= yyPrivate {
+ if char < yyPrivate+len(yyTok2) {
+ c = yyTok2[char-yyPrivate]
+ goto out
+ }
+ }
+ for i := 0; i < len(yyTok3); i += 2 {
+ c = yyTok3[i+0]
+ if c == char {
+ c = yyTok3[i+1]
+ goto out
+ }
+ }
+
+out:
+ if c == 0 {
+ c = yyTok2[1] /* unknown char */
+ }
+ if yyDebug >= 3 {
+ __yyfmt__.Printf("lex %s(%d)\n", yyTokname(c), uint(char))
+ }
+ return c
+}
+
+func yyParse(yylex yyLexer) int {
+ var yyn int
+ var yylval yySymType
+ var yyVAL yySymType
+ yyS := make([]yySymType, yyMaxDepth)
+
+ Nerrs := 0 /* number of errors */
+ Errflag := 0 /* error recovery flag */
+ yystate := 0
+ yychar := -1
+ yyp := -1
+ goto yystack
+
+ret0:
+ return 0
+
+ret1:
+ return 1
+
+yystack:
+ /* put a state and value onto the stack */
+ if yyDebug >= 4 {
+ __yyfmt__.Printf("char %v in %v\n", yyTokname(yychar), yyStatname(yystate))
+ }
+
+ yyp++
+ if yyp >= len(yyS) {
+ nyys := make([]yySymType, len(yyS)*2)
+ copy(nyys, yyS)
+ yyS = nyys
+ }
+ yyS[yyp] = yyVAL
+ yyS[yyp].yys = yystate
+
+yynewstate:
+ yyn = yyPact[yystate]
+ if yyn <= yyFlag {
+ goto yydefault /* simple state */
+ }
+ if yychar < 0 {
+ yychar = yylex1(yylex, &yylval)
+ }
+ yyn += yychar
+ if yyn < 0 || yyn >= yyLast {
+ goto yydefault
+ }
+ yyn = yyAct[yyn]
+ if yyChk[yyn] == yychar { /* valid shift */
+ yychar = -1
+ yyVAL = yylval
+ yystate = yyn
+ if Errflag > 0 {
+ Errflag--
+ }
+ goto yystack
+ }
+
+yydefault:
+ /* default state action */
+ yyn = yyDef[yystate]
+ if yyn == -2 {
+ if yychar < 0 {
+ yychar = yylex1(yylex, &yylval)
+ }
+
+ /* look through exception table */
+ xi := 0
+ for {
+ if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate {
+ break
+ }
+ xi += 2
+ }
+ for xi += 2; ; xi += 2 {
+ yyn = yyExca[xi+0]
+ if yyn < 0 || yyn == yychar {
+ break
+ }
+ }
+ yyn = yyExca[xi+1]
+ if yyn < 0 {
+ goto ret0
+ }
+ }
+ if yyn == 0 {
+ /* error ... attempt to resume parsing */
+ switch Errflag {
+ case 0: /* brand new error */
+ yylex.Error("syntax error")
+ Nerrs++
+ if yyDebug >= 1 {
+ __yyfmt__.Printf("%s", yyStatname(yystate))
+ __yyfmt__.Printf(" saw %s\n", yyTokname(yychar))
+ }
+ fallthrough
+
+ case 1, 2: /* incompletely recovered error ... try again */
+ Errflag = 3
+
+ /* find a state where "error" is a legal shift action */
+ for yyp >= 0 {
+ yyn = yyPact[yyS[yyp].yys] + yyErrCode
+ if yyn >= 0 && yyn < yyLast {
+ yystate = yyAct[yyn] /* simulate a shift of "error" */
+ if yyChk[yystate] == yyErrCode {
+ goto yystack
+ }
+ }
+
+ /* the current p has no shift on "error", pop stack */
+ if yyDebug >= 2 {
+ __yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys)
+ }
+ yyp--
+ }
+ /* there is no state on the stack with an error shift ... abort */
+ goto ret1
+
+ case 3: /* no shift yet; clobber input char */
+ if yyDebug >= 2 {
+ __yyfmt__.Printf("error recovery discards %s\n", yyTokname(yychar))
+ }
+ if yychar == yyEofCode {
+ goto ret1
+ }
+ yychar = -1
+ goto yynewstate /* try again in the same state */
+ }
+ }
+
+ /* reduction by production yyn */
+ if yyDebug >= 2 {
+ __yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate))
+ }
+
+ yynt := yyn
+ yypt := yyp
+ _ = yypt // guard against "declared and not used"
+
+ yyp -= yyR2[yyn]
+ yyVAL = yyS[yyp+1]
+
+ /* consult goto table to find next state */
+ yyn = yyR1[yyn]
+ yyg := yyPgo[yyn]
+ yyj := yyg + yyS[yyp].yys + 1
+
+ if yyj >= yyLast {
+ yystate = yyAct[yyg]
+ } else {
+ yystate = yyAct[yyj]
+ if yyChk[yystate] != -yyn {
+ yystate = yyAct[yyg]
+ }
+ }
+ // dummy call; replaced with literal code
+ switch yynt {
+
+ case 5:
+ //line grammar.y:143
+ {
+ lexStoreExprs(yylex, yyS[yypt-1].constexprs)
+ }
+ case 6:
+ //line grammar.y:152
+ {
+ lexGenEOF(yylex)
+ }
+ case 7:
+ //line grammar.y:154
+ {
+ lexGenEOF(yylex)
+ }
+ case 8:
+ //line grammar.y:156
+ {
+ lexGenEOF(yylex)
+ }
+ case 9:
+ //line grammar.y:158
+ {
+ lexGenEOF(yylex)
+ }
+ case 10:
+ //line grammar.y:163
+ {
+ lexPosErrorf(yylex, Pos{}, "vdl file must start with package clause")
+ }
+ case 11:
+ //line grammar.y:165
+ {
+ lexVDLFile(yylex).PackageDef = NamePos{Name: yyS[yypt-1].strpos.String, Pos: yyS[yypt-1].strpos.Pos}
+ }
+ case 12:
+ //line grammar.y:170
+ {
+ lexPosErrorf(yylex, Pos{}, "config file must start with config clause")
+ }
+ case 13:
+ //line grammar.y:172
+ {
+ // We allow "config" as an identifier; it is not a keyword. So we check
+ // manually to make sure the syntax is correct.
+ if yyS[yypt-3].strpos.String != "config" {
+ lexPosErrorf(yylex, yyS[yypt-3].strpos.Pos, "config file must start with config clause")
+ return 1 // Any non-zero code indicates an error
+ }
+ file := lexVDLFile(yylex)
+ file.PackageDef = NamePos{Name: "config", Pos: yyS[yypt-3].strpos.Pos}
+ file.ConstDefs = []*ConstDef{{Expr: yyS[yypt-1].constexpr}}
+ }
+ case 21:
+ //line grammar.y:200
+ {
+ imps := &lexVDLFile(yylex).Imports
+ *imps = append(*imps, &Import{Path: yyS[yypt-0].strpos.String, NamePos: NamePos{Pos: yyS[yypt-0].strpos.Pos}})
+ }
+ case 22:
+ //line grammar.y:205
+ {
+ imps := &lexVDLFile(yylex).Imports
+ *imps = append(*imps, &Import{Path: yyS[yypt-0].strpos.String, NamePos: NamePos{Name: yyS[yypt-1].strpos.String, Pos: yyS[yypt-1].strpos.Pos}})
+ }
+ case 39:
+ //line grammar.y:240
+ {
+ tds := &lexVDLFile(yylex).TypeDefs
+ *tds = append(*tds, &TypeDef{Type: yyS[yypt-0].typeexpr, NamePos: NamePos{Name: yyS[yypt-1].strpos.String, Pos: yyS[yypt-1].strpos.Pos}})
+ }
+ case 40:
+ //line grammar.y:262
+ {
+ yyVAL.typeexpr = &TypeNamed{Name: yyS[yypt-0].strpos.String, P: yyS[yypt-0].strpos.Pos}
+ }
+ case 41:
+ //line grammar.y:264
+ {
+ yyVAL.typeexpr = &TypeNamed{Name: "error", P: yyS[yypt-0].pos}
+ }
+ case 42:
+ //line grammar.y:266
+ {
+ yyVAL.typeexpr = &TypeArray{Len: int(yyS[yypt-2].intpos.int.Int64()), Elem: yyS[yypt-0].typeexpr, P: yyS[yypt-3].pos}
+ }
+ case 43:
+ //line grammar.y:268
+ {
+ yyVAL.typeexpr = &TypeList{Elem: yyS[yypt-0].typeexpr, P: yyS[yypt-2].pos}
+ }
+ case 44:
+ //line grammar.y:270
+ {
+ yyVAL.typeexpr = &TypeEnum{Labels: yyS[yypt-2].nameposes, P: yyS[yypt-4].pos}
+ }
+ case 45:
+ //line grammar.y:272
+ {
+ yyVAL.typeexpr = &TypeSet{Key: yyS[yypt-1].typeexpr, P: yyS[yypt-3].pos}
+ }
+ case 46:
+ //line grammar.y:274
+ {
+ yyVAL.typeexpr = &TypeMap{Key: yyS[yypt-2].typeexpr, Elem: yyS[yypt-0].typeexpr, P: yyS[yypt-4].pos}
+ }
+ case 47:
+ //line grammar.y:276
+ {
+ yyVAL.typeexpr = &TypeStruct{Fields: yyS[yypt-2].fields, P: yyS[yypt-4].pos}
+ }
+ case 48:
+ //line grammar.y:278
+ {
+ yyVAL.typeexpr = &TypeStruct{P: yyS[yypt-2].pos}
+ }
+ case 49:
+ //line grammar.y:280
+ {
+ yyVAL.typeexpr = &TypeUnion{Fields: yyS[yypt-2].fields, P: yyS[yypt-4].pos}
+ }
+ case 50:
+ //line grammar.y:282
+ {
+ yyVAL.typeexpr = &TypeUnion{P: yyS[yypt-2].pos}
+ }
+ case 51:
+ //line grammar.y:284
+ {
+ yyVAL.typeexpr = &TypeOptional{Base: yyS[yypt-0].typeexpr, P: yyS[yypt-1].pos}
+ }
+ case 52:
+ //line grammar.y:289
+ {
+ yyVAL.typeexpr = yyS[yypt-0].typeexpr
+ }
+ case 53:
+ //line grammar.y:291
+ {
+ yyVAL.typeexpr = &TypeNamed{Name: "typeobject", P: yyS[yypt-0].pos}
+ }
+ case 54:
+ //line grammar.y:295
+ {
+ yyVAL.nameposes = []NamePos{yyS[yypt-0].namepos}
+ }
+ case 55:
+ //line grammar.y:297
+ {
+ yyVAL.nameposes = append(yyS[yypt-2].nameposes, yyS[yypt-0].namepos)
+ }
+ case 56:
+ //line grammar.y:301
+ {
+ yyVAL.namepos = NamePos{Name: yyS[yypt-0].strpos.String, Pos: yyS[yypt-0].strpos.Pos}
+ }
+ case 57:
+ //line grammar.y:305
+ {
+ yyVAL.fields = yyS[yypt-0].fields
+ }
+ case 58:
+ //line grammar.y:307
+ {
+ yyVAL.fields = append(yyS[yypt-2].fields, yyS[yypt-0].fields...)
+ }
+ case 59:
+ //line grammar.y:345
+ {
+ if names, ok := typeListToStrList(yylex, yyS[yypt-1].typeexprs); ok {
+ for _, n := range names {
+ yyVAL.fields = append(yyVAL.fields, &Field{Type: yyS[yypt-0].typeexpr, NamePos: NamePos{Name: n.String, Pos: n.Pos}})
+ }
+ } else {
+ lexPosErrorf(yylex, yyS[yypt-0].typeexpr.Pos(), "perhaps you forgot a comma before %q?.", yyS[yypt-0].typeexpr.String())
+ }
+ }
+ case 60:
+ //line grammar.y:357
+ {
+ yyVAL.typeexprs = []Type{yyS[yypt-0].typeexpr}
+ }
+ case 61:
+ //line grammar.y:359
+ {
+ yyVAL.typeexprs = append(yyS[yypt-2].typeexprs, yyS[yypt-0].typeexpr)
+ }
+ case 62:
+ //line grammar.y:364
+ {
+ ifs := &lexVDLFile(yylex).Interfaces
+ *ifs = append(*ifs, &Interface{NamePos: NamePos{Name: yyS[yypt-3].strpos.String, Pos: yyS[yypt-3].strpos.Pos}})
+ }
+ case 63:
+ //line grammar.y:369
+ {
+ yyS[yypt-2].iface.Name, yyS[yypt-2].iface.Pos = yyS[yypt-5].strpos.String, yyS[yypt-5].strpos.Pos
+ ifs := &lexVDLFile(yylex).Interfaces
+ *ifs = append(*ifs, yyS[yypt-2].iface)
+ }
+ case 64:
+ //line grammar.y:377
+ {
+ yyVAL.iface = yyS[yypt-0].iface
+ }
+ case 65:
+ //line grammar.y:379
+ {
+ yyS[yypt-2].iface.Embeds = append(yyS[yypt-2].iface.Embeds, yyS[yypt-0].iface.Embeds...)
+ yyS[yypt-2].iface.Methods = append(yyS[yypt-2].iface.Methods, yyS[yypt-0].iface.Methods...)
+ yyVAL.iface = yyS[yypt-2].iface
+ }
+ case 66:
+ //line grammar.y:387
+ {
+ yyVAL.iface = &Interface{Methods: []*Method{{InArgs: yyS[yypt-3].fields, InStream: yyS[yypt-2].typeexprs[0], OutStream: yyS[yypt-2].typeexprs[1], OutArgs: yyS[yypt-1].fields, Tags: yyS[yypt-0].constexprs, NamePos: NamePos{Name: yyS[yypt-4].strpos.String, Pos: yyS[yypt-4].strpos.Pos}}}}
+ }
+ case 67:
+ //line grammar.y:389
+ {
+ yyVAL.iface = &Interface{Embeds: []*NamePos{{Name: yyS[yypt-0].strpos.String, Pos: yyS[yypt-0].strpos.Pos}}}
+ }
+ case 68:
+ //line grammar.y:393
+ {
+ yyVAL.fields = nil
+ }
+ case 69:
+ //line grammar.y:395
+ {
+ yyVAL.fields = yyS[yypt-2].fields
+ }
+ case 70:
+ //line grammar.y:399
+ {
+ for _, t := range yyS[yypt-2].typeexprs {
+ yyVAL.fields = append(yyVAL.fields, &Field{Type: t, NamePos: NamePos{Pos: t.Pos()}})
+ }
+ }
+ case 71:
+ //line grammar.y:409
+ {
+ yyVAL.fields = yyS[yypt-0].fields
+ }
+ case 72:
+ //line grammar.y:411
+ {
+ yyVAL.fields = append(yyS[yypt-2].fields, yyS[yypt-0].fields...)
+ }
+ case 73:
+ //line grammar.y:422
+ {
+ yyVAL.fields = nil
+ }
+ case 74:
+ //line grammar.y:424
+ {
+ yyVAL.fields = yyS[yypt-4].fields
+ }
+ case 75:
+ //line grammar.y:428
+ {
+ for _, t := range yyS[yypt-4].typeexprs {
+ yyVAL.fields = append(yyVAL.fields, &Field{Type: t, NamePos: NamePos{Pos: t.Pos()}})
+ }
+ }
+ case 76:
+ //line grammar.y:436
+ {
+ yyVAL.typeexprs = []Type{nil, nil}
+ }
+ case 77:
+ //line grammar.y:438
+ {
+ yyVAL.typeexprs = []Type{nil, nil}
+ }
+ case 78:
+ //line grammar.y:440
+ {
+ yyVAL.typeexprs = []Type{yyS[yypt-1].typeexpr, nil}
+ }
+ case 79:
+ //line grammar.y:442
+ {
+ yyVAL.typeexprs = []Type{yyS[yypt-3].typeexpr, yyS[yypt-1].typeexpr}
+ }
+ case 80:
+ //line grammar.y:446
+ {
+ yyVAL.constexprs = nil
+ }
+ case 81:
+ //line grammar.y:448
+ {
+ yyVAL.constexprs = nil
+ }
+ case 82:
+ //line grammar.y:450
+ {
+ yyVAL.constexprs = yyS[yypt-2].constexprs
+ }
+ case 83:
+ //line grammar.y:454
+ {
+ yyVAL.constexprs = []ConstExpr{yyS[yypt-0].constexpr}
+ }
+ case 84:
+ //line grammar.y:456
+ {
+ yyVAL.constexprs = append(yyS[yypt-2].constexprs, yyS[yypt-0].constexpr)
+ }
+ case 87:
+ //line grammar.y:465
+ {
+ cds := &lexVDLFile(yylex).ConstDefs
+ *cds = append(*cds, &ConstDef{Expr: yyS[yypt-0].constexpr, NamePos: NamePos{Name: yyS[yypt-2].strpos.String, Pos: yyS[yypt-2].strpos.Pos}})
+ }
+ case 88:
+ //line grammar.y:472
+ {
+ yyVAL.constexpr = yyS[yypt-0].constexpr
+ }
+ case 89:
+ //line grammar.y:474
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"||", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 90:
+ //line grammar.y:476
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"&&", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 91:
+ //line grammar.y:478
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"<", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 92:
+ //line grammar.y:480
+ {
+ yyVAL.constexpr = &ConstBinaryOp{">", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 93:
+ //line grammar.y:482
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"<=", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 94:
+ //line grammar.y:484
+ {
+ yyVAL.constexpr = &ConstBinaryOp{">=", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 95:
+ //line grammar.y:486
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"!=", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 96:
+ //line grammar.y:488
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"==", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 97:
+ //line grammar.y:490
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"+", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 98:
+ //line grammar.y:492
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"-", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 99:
+ //line grammar.y:494
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"*", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 100:
+ //line grammar.y:496
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"/", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 101:
+ //line grammar.y:498
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"%", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 102:
+ //line grammar.y:500
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"|", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 103:
+ //line grammar.y:502
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"&", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 104:
+ //line grammar.y:504
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"^", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 105:
+ //line grammar.y:506
+ {
+ yyVAL.constexpr = &ConstBinaryOp{"<<", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 106:
+ //line grammar.y:508
+ {
+ yyVAL.constexpr = &ConstBinaryOp{">>", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 107:
+ //line grammar.y:512
+ {
+ yyVAL.constexpr = yyS[yypt-0].constexpr
+ }
+ case 108:
+ //line grammar.y:514
+ {
+ yyVAL.constexpr = &ConstUnaryOp{"!", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 109:
+ //line grammar.y:516
+ {
+ yyVAL.constexpr = &ConstUnaryOp{"+", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 110:
+ //line grammar.y:518
+ {
+ yyVAL.constexpr = &ConstUnaryOp{"-", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 111:
+ //line grammar.y:520
+ {
+ yyVAL.constexpr = &ConstUnaryOp{"^", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+ }
+ case 112:
+ //line grammar.y:522
+ {
+ yyVAL.constexpr = &ConstTypeConv{yyS[yypt-3].typeexpr, yyS[yypt-1].constexpr, yyS[yypt-3].typeexpr.Pos()}
+ }
+ case 113:
+ //line grammar.y:524
+ {
+ yyVAL.constexpr = &ConstTypeObject{yyS[yypt-1].typeexpr, yyS[yypt-3].pos}
+ }
+ case 114:
+ //line grammar.y:529
+ {
+ yyVAL.constexpr = &ConstLit{yyS[yypt-0].strpos.String, yyS[yypt-0].strpos.Pos}
+ }
+ case 115:
+ //line grammar.y:531
+ {
+ yyVAL.constexpr = &ConstLit{yyS[yypt-0].intpos.int, yyS[yypt-0].intpos.pos}
+ }
+ case 116:
+ //line grammar.y:533
+ {
+ yyVAL.constexpr = &ConstLit{yyS[yypt-0].ratpos.rat, yyS[yypt-0].ratpos.pos}
+ }
+ case 117:
+ //line grammar.y:535
+ {
+ yyVAL.constexpr = &ConstLit{yyS[yypt-0].imagpos.imag, yyS[yypt-0].imagpos.pos}
+ }
+ case 118:
+ //line grammar.y:537
+ {
+ yyVAL.constexpr = &ConstNamed{yyS[yypt-0].strpos.String, yyS[yypt-0].strpos.Pos}
+ }
+ case 119:
+ //line grammar.y:539
+ {
+ yyVAL.constexpr = yyS[yypt-0].complit
+ }
+ case 120:
+ //line grammar.y:541
+ {
+ lexPosErrorf(yylex, yyS[yypt-1].pos, "cannot apply selector operator to unnamed constant")
+ }
+ case 121:
+ //line grammar.y:543
+ {
+ lexPosErrorf(yylex, yyS[yypt-2].pos, "cannot apply index operator to unnamed constant")
+ }
+ case 122:
+ //line grammar.y:545
+ {
+ yyVAL.constexpr = &ConstIndexed{&ConstNamed{yyS[yypt-3].strpos.String, yyS[yypt-3].strpos.Pos}, yyS[yypt-1].constexpr, yyS[yypt-3].strpos.Pos}
+ }
+ case 123:
+ //line grammar.y:547
+ {
+ yyVAL.constexpr = yyS[yypt-1].constexpr
+ }
+ case 124:
+ //line grammar.y:551
+ {
+ yyVAL.complit = &ConstCompositeLit{yyS[yypt-2].typeexpr, nil, yyS[yypt-1].pos}
+ }
+ case 125:
+ //line grammar.y:553
+ {
+ yyVAL.complit = &ConstCompositeLit{yyS[yypt-4].typeexpr, yyS[yypt-2].kvlits, yyS[yypt-3].pos}
+ }
+ case 126:
+ //line grammar.y:557
+ {
+ yyVAL.kvlits = []KVLit{yyS[yypt-0].kvlit}
+ }
+ case 127:
+ //line grammar.y:559
+ {
+ yyVAL.kvlits = append(yyS[yypt-2].kvlits, yyS[yypt-0].kvlit)
+ }
+ case 128:
+ //line grammar.y:563
+ {
+ yyVAL.kvlit = KVLit{Value: yyS[yypt-0].constexpr}
+ }
+ case 129:
+ //line grammar.y:565
+ {
+ yyVAL.kvlit = KVLit{Key: yyS[yypt-2].constexpr, Value: yyS[yypt-0].constexpr}
+ }
+ case 132:
+ //line grammar.y:574
+ {
+ // Create *ErrorDef starting with a copy of error_details, filling in the
+ // name and params
+ ed := yyS[yypt-0].errordef
+ ed.NamePos = NamePos{Name: yyS[yypt-2].strpos.String, Pos: yyS[yypt-2].strpos.Pos}
+ ed.Params = yyS[yypt-1].fields
+ eds := &lexVDLFile(yylex).ErrorDefs
+ *eds = append(*eds, &ed)
+ }
+ case 133:
+ //line grammar.y:586
+ {
+ yyVAL.errordef = ErrorDef{}
+ }
+ case 134:
+ //line grammar.y:588
+ {
+ yyVAL.errordef = ErrorDef{}
+ }
+ case 135:
+ //line grammar.y:590
+ {
+ yyVAL.errordef = yyS[yypt-2].errordef
+ }
+ case 136:
+ //line grammar.y:594
+ {
+ yyVAL.errordef = yyS[yypt-0].errordef
+ }
+ case 137:
+ //line grammar.y:596
+ {
+ // Merge each ErrorDef in-order to build the final ErrorDef.
+ yyVAL.errordef = yyS[yypt-2].errordef
+ switch {
+ case len(yyS[yypt-0].errordef.Actions) > 0:
+ yyVAL.errordef.Actions = append(yyVAL.errordef.Actions, yyS[yypt-0].errordef.Actions...)
+ case len(yyS[yypt-0].errordef.Formats) > 0:
+ yyVAL.errordef.Formats = append(yyVAL.errordef.Formats, yyS[yypt-0].errordef.Formats...)
+ }
+ }
+ case 138:
+ //line grammar.y:609
+ {
+ yyVAL.errordef = ErrorDef{Actions: []StringPos{yyS[yypt-0].strpos}}
+ }
+ case 139:
+ //line grammar.y:611
+ {
+ yyVAL.errordef = ErrorDef{Formats: []LangFmt{{Lang: yyS[yypt-2].strpos, Fmt: yyS[yypt-0].strpos}}}
+ }
+ case 140:
+ //line grammar.y:623
+ {
+ yyVAL.strpos = yyS[yypt-0].strpos
+ }
+ case 141:
+ //line grammar.y:625
+ {
+ yyVAL.strpos = StringPos{"\"" + yyS[yypt-2].strpos.String + "\"." + yyS[yypt-0].strpos.String, yyS[yypt-2].strpos.Pos}
+ }
+ case 142:
+ //line grammar.y:630
+ {
+ yyVAL.strpos = yyS[yypt-0].strpos
+ }
+ case 143:
+ //line grammar.y:632
+ {
+ yyVAL.strpos = StringPos{yyS[yypt-2].strpos.String + "." + yyS[yypt-0].strpos.String, yyS[yypt-2].strpos.Pos}
+ }
+ case 144:
+ //line grammar.y:636
+ {
+ yyVAL.typeexpr = nil
+ }
+ case 145:
+ //line grammar.y:638
+ {
+ yyVAL.typeexpr = yyS[yypt-0].typeexpr
+ }
+ }
+ goto yystack /* stack new state and value */
+}
diff --git a/lib/vdl/parse/grammar_gen.sh b/lib/vdl/parse/grammar_gen.sh
new file mode 100755
index 0000000..e2efbbe
--- /dev/null
+++ b/lib/vdl/parse/grammar_gen.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+# Generate the grammar.go source file, which contains the parser, by running
+# this shell script in the same directory, or by running go generate. This also
+# generates grammar.y.debug, which contains a list of all states produced for
+# the parser, and some stats.
+
+# Abort on the first failing command so we never leave partial output behind.
+set -e
+
+# -o names the generated parser source; -v writes the state/debug listing.
+go tool yacc -o grammar.y.go -v grammar.y.debug.tmp grammar.y
+# Normalize the generated parser so it is gofmt-clean.
+gofmt -l -w grammar.y.go
+# Prepend the warning header below to the generated debug listing.
+cat - grammar.y.debug.tmp > grammar.y.debug <<EOF
+***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
+* The main reason this file has been generated and submitted is to try to ensure
+* we never submit changes that cause shift/reduce or reduce/reduce conflicts.
+* The Go yacc tool doesn't support the %expect directive, and will happily
+* generate a parser even if such conflicts exist; it's up to the developer
+* running the tool to notice that an error message is reported. The bottom of
+* this file contains stats, including the number of conflicts. If you're
+* reviewing a change make sure it says 0 conflicts.
+*
+* If you're updating the grammar, just cut-and-paste this message from the old
+* file to the new one, so that this comment block persists.
+***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
+EOF
+
+rm grammar.y.debug.tmp
diff --git a/lib/vdl/parse/parse.go b/lib/vdl/parse/parse.go
new file mode 100644
index 0000000..c544f97
--- /dev/null
+++ b/lib/vdl/parse/parse.go
@@ -0,0 +1,679 @@
+// Package parse provides utilities to parse vdl files into a parse tree. The
+// Parse function is the main entry point.
+package parse
+
+//go:generate ./grammar_gen.sh
+
+// This is the only file in this package that uses the yacc-generated parser
+// with entrypoint yyParse. The result of the parse is the simple parse.File
+// representation, which is used by the compilation stage.
+//
+// TODO(toddw): The yacc-generated parser returns pretty lousy error messages;
+// basically "syntax error" is the only string returned. Improve them.
+import (
+ "fmt"
+ "io"
+ "log"
+ "math/big"
+ "path"
+ "strconv"
+ "strings"
+ "text/scanner"
+
+ "v.io/v23/vdl/vdlutil"
+)
+
+// Opts specifies vdl parsing options.
+//
+// When ImportsOnly is set, ParseFile and ParseConfig use the imports-only
+// start tokens, so the parse stops after the import section.
+type Opts struct {
+ ImportsOnly bool // Only parse imports; skip everything else.
+}
+
+// ParseFile takes a file name, the contents of the vdl file src, and the
+// accumulated errors, and parses the vdl into a parse.File containing the
+// parse tree. Returns nil if any errors are encountered, with errs containing
+// more information. Otherwise returns the parsed File.
+func ParseFile(fileName string, src io.Reader, opts Opts, errs *vdlutil.Errors) *File {
+ if opts.ImportsOnly {
+  // Fast path: stop parsing after the import section.
+  return parse(fileName, src, startFileImports, errs)
+ }
+ return parse(fileName, src, startFile, errs)
+}
+
+// ParseConfig takes a file name, the contents of the config file src, and the
+// accumulated errors, and parses the config into a parse.Config containing the
+// parse tree. Returns nil if any errors are encountered, with errs containing
+// more information. Otherwise returns the parsed Config.
+func ParseConfig(fileName string, src io.Reader, opts Opts, errs *vdlutil.Errors) *Config {
+ start := startConfig
+ if opts.ImportsOnly {
+  start = startConfigImports
+ }
+ // Since the syntax is so similar between config files and vdl files, we just
+ // parse it as a vdl file and populate Config afterwards.
+ file := parse(fileName, src, start, errs)
+ if file == nil {
+  return nil
+ }
+ if len(file.ErrorDefs) > 0 || len(file.TypeDefs) > 0 || len(file.Interfaces) > 0 {
+  errs.Errorf("%s: config files may not contain error, type or interface definitions", fileName)
+  return nil
+ }
+ config := &Config{
+  FileName:  fileName,
+  ConfigDef: file.PackageDef,
+  Imports:   file.Imports,
+ }
+ if opts.ImportsOnly {
+  // The imports-only parse skips the config expression and const defs, so
+  // Config and ConstDefs stay nil.
+  return config
+ }
+ // The first const def holds the config clause expression.  Guard the index
+ // so a malformed or truncated file reports an error instead of panicking.
+ if len(file.ConstDefs) == 0 {
+  errs.Errorf("%s: config file must contain a config clause", fileName)
+  return nil
+ }
+ config.Config = file.ConstDefs[0].Expr
+ if rest := file.ConstDefs[1:]; len(rest) > 0 {
+  config.ConstDefs = rest
+ }
+ return config
+}
+
+// parse runs the yacc parser over src with the given start token, returning
+// the parsed File, or nil if any new errors were added to errs.
+func parse(fileName string, src io.Reader, startTok int, errs *vdlutil.Errors) *File {
+ if errs == nil {
+  log.Fatal("Nil errors specified for Parse")
+ }
+ before := errs.NumErrors()
+ lex := newLexer(fileName, src, startTok, errs)
+ if code := yyParse(lex); code != 0 {
+  errs.Errorf("%s: yyParse returned error code %v", fileName, code)
+ }
+ lex.attachComments()
+ switch startTok {
+ case startFile, startConfig:
+  vdlutil.Vlog.Printf("PARSE RESULTS\n\n%v\n\n", lex.vdlFile)
+ }
+ if errs.NumErrors() != before {
+  return nil
+ }
+ return lex.vdlFile
+}
+
+// ParseExprs parses data into a slice of parsed const expressions. The input
+// data is specified in VDL syntax, with commas separating multiple expressions.
+// There must be at least one expression specified in data. Errors are returned
+// in errs.
+func ParseExprs(data string, errs *vdlutil.Errors) []ConstExpr {
+ const name = "exprs"
+ lex := newLexer(name, strings.NewReader(data), startExprs, errs)
+ errCode := yyParse(lex)
+ if errCode != 0 {
+  errs.Errorf("vdl: yyParse returned error code %d", errCode)
+ }
+ return lex.exprs
+}
+
+// lexer implements the yyLexer interface for the yacc-generated parser.
+//
+// An oddity: lexer also holds the result of the parse.  Most yacc examples hold
+// parse results in package-scoped (global) variables, but doing that would mean
+// we wouldn't be able to run separate parses concurrently.  To enable that we'd
+// need each invocation of yyParse to mutate its own result, but unfortunately
+// the Go yacc tool doesn't provide any way to pass extra arguments to yyParse.
+//
+// So we cheat and hold the parse result in the lexer, and in the yacc rules we
+// call lexVDLFile(yylex) to convert from the yyLexer interface back to the
+// concrete lexer type, and retrieve a pointer to the parse result.
+type lexer struct {
+ // Fields for lexing / scanning the input source file.
+ name string // file name, used to prefix error messages
+ scanner scanner.Scanner
+ errs *vdlutil.Errors
+ startTok int // One of our dummy start tokens.
+ started bool // Has the dummy start token already been emitted?
+ sawEOF bool // Have we already seen the end-of-file?
+ prevTok token // Previous token, used for auto-semicolons and errors.
+
+ // Fields holding the result of file and config parsing.
+ comments commentMap // comment blocks, attached to items after the parse
+ vdlFile *File
+
+ // Field holding the result of expr parsing.
+ exprs []ConstExpr
+}
+
+// newLexer returns a lexer over src that will emit startTok as its first
+// token.  The scanner is configured so that newlines and comments reach the
+// lexer, which the auto-semicolon and comment-attachment logic depend on.
+func newLexer(fileName string, src io.Reader, startTok int, errs *vdlutil.Errors) *lexer {
+ l := &lexer{name: fileName, errs: errs, startTok: startTok, vdlFile: &File{BaseName: path.Base(fileName)}}
+ l.comments.init()
+ l.scanner.Init(src)
+ // Don't produce character literal tokens, but do scan comments.
+ l.scanner.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments
+ // Don't treat '\n' as whitespace, so we can auto-insert semicolons.
+ l.scanner.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
+ // Route scanner errors through our own error accumulator.
+ l.scanner.Error = func(s *scanner.Scanner, msg string) {
+  l.Error(msg)
+ }
+ return l
+}
+
+// token is a single scanned token: its scanner classification (or literal
+// rune), its raw text, and its source position.
+type token struct {
+ t rune // a scanner.* token class, or the rune itself for punctuation
+ text string
+ pos Pos
+}
+
+// String formats the token for debugging output.
+func (t token) String() string {
+ return fmt.Sprintf("%v %U %s", t.pos, t.t, t.text)
+}
+
+// The lex* functions below all convert the yyLexer input arg into a concrete
+// lexer as their first step. The type conversion is always safe since we're
+// the ones who called yyParse, and thus know the concrete type is always lexer.
+
+// lexVDLFile retrieves the File parse result from the yyLexer interface. This
+// is called in the yacc rules to fill in the parse result.
+func lexVDLFile(yylex yyLexer) *File {
+ return yylex.(*lexer).vdlFile
+}
+
+// lexPosErrorf adds an error with positional information, on a type
+// implementing the yyLexer interface. This is called in the yacc rules to
+// throw errors.
+func lexPosErrorf(yylex yyLexer, pos Pos, format string, v ...interface{}) {
+ yylex.(*lexer).posErrorf(pos, format, v...)
+}
+
+// lexGenEOF tells the lexer to generate EOF tokens from now on, as if the end
+// of file had been seen. This is called in the yacc rules to terminate the
+// parse even if the file still has tokens.
+func lexGenEOF(yylex yyLexer) {
+ yylex.(*lexer).sawEOF = true
+}
+
+// lexStoreExprs stores the parsed exprs in the lexer, so that ParseExprs can
+// return them.
+func lexStoreExprs(yylex yyLexer, exprs []ConstExpr) {
+ yylex.(*lexer).exprs = exprs
+}
+
+// keywords maps reserved identifier text to the corresponding yacc token id;
+// any other identifier is passed through as tIDENT (see translateToken).
+var keywords = map[string]int{
+ "const": tCONST,
+ "enum": tENUM,
+ "error": tERROR,
+ "import": tIMPORT,
+ "interface": tINTERFACE,
+ "map": tMAP,
+ "package": tPACKAGE,
+ "set": tSET,
+ "stream": tSTREAM,
+ "struct": tSTRUCT,
+ "type": tTYPE,
+ "typeobject": tTYPEOBJECT,
+ "union": tUNION,
+}
+
+// nextRune describes the second rune of a 2-rune punctuation combo, and the
+// token id to emit when it immediately follows the first rune.
+type nextRune struct {
+ t rune
+ id int
+}
+
+// knownPunct is a map of our known punctuation. We support 1 and 2 rune
+// combinations, where 2 rune combos must be immediately adjacent with no
+// intervening whitespace. The 2-rune combos always take precedence over the
+// 1-rune combos. Every entry is a valid 1-rune combo, which is returned as-is
+// without a special token id; the ascii value represents itself.
+var knownPunct = map[rune][]nextRune{
+ ';': nil,
+ ':': nil,
+ ',': nil,
+ '.': nil,
+ '*': nil,
+ '(': nil,
+ ')': nil,
+ '[': nil,
+ ']': nil,
+ '{': nil,
+ '}': nil,
+ '+': nil,
+ '-': nil,
+ '/': nil,
+ '%': nil,
+ '^': nil,
+ '?': nil,
+ '!': {{'=', tNE}},
+ '=': {{'=', tEQEQ}},
+ '<': {{'=', tLE}, {'<', tLSH}},
+ '>': {{'=', tGE}, {'>', tRSH}},
+ '|': {{'|', tOROR}},
+ '&': {{'&', tANDAND}},
+}
+
+// autoSemi determines whether to automatically add a semicolon, based on the
+// rule that semicolons are always added at the end of each line after certain
+// tokens. The Go auto-semicolon rule is described here:
+// http://golang.org/ref/spec#Semicolons
+func autoSemi(prevTok token) bool {
+ if !prevTok.pos.IsValid() {
+  // No previous token has been scanned yet.
+  return false
+ }
+ return prevAutoSemi[prevTok.t]
+}
+
+// prevAutoSemi is the set of token types that trigger auto-semicolon
+// insertion when they end a line; see autoSemi.
+var prevAutoSemi = map[rune]bool{
+ scanner.Ident: true,
+ scanner.Int: true,
+ scanner.Float: true,
+ scanner.String: true,
+ scanner.RawString: true,
+ ')': true,
+ ']': true,
+ '}': true,
+ '>': true,
+}
+
+const yaccEOF int = 0 // yacc interprets 0 as the end-of-file marker
+
+// init configures the yacc parser's debug verbosity.
+func init() {
+ // yyDebug is defined in the yacc-generated grammar.go file. Setting it to 1
+ // only produces output on syntax errors; set it to 4 to generate full debug
+ // output. Sadly yacc doesn't give position information describing the error.
+ yyDebug = 1
+}
+
+// A note on the comment-tracking strategy. During lexing we generate
+// commentBlocks, defined as a sequence of adjacent or abutting comments (either
+// // or /**/) with no intervening tokens. Adjacent means that the previous
+// comment ends on the line immediately before the next one starts, and abutting
+// means that the previous comment ends on the same line as the next one starts.
+//
+// At the end of the parse we try to attach comment blocks to parse tree items.
+// We use a heuristic that works for common cases, but isn't perfect - it
+// mis-associates some styles of comments, and we don't ensure all comment
+// blocks will be associated to an item.
+
+// commentBlock is a run of adjacent or abutting comments accumulated as a
+// single unit of text; see the strategy note above.
+type commentBlock struct {
+ text string // accumulated comment text, with embedded newlines
+ firstLine int // line on which the block starts
+ lastLine int // line on which the block ends (>= firstLine)
+}
+
+// update returns true and adds tok to this block if tok is adjacent or
+// abutting, otherwise it returns false without mutating the block. Since
+// newlines are handled explicitly in the lexer, comment tokens never carry
+// trailing newlines; embedded newlines can appear via /**/ style comments.
+func (cb *commentBlock) update(tok token) bool {
+ if cb.text == "" {
+  // Empty block: tok becomes the first comment.
+  cb.text = tok.text
+  cb.firstLine = tok.pos.Line
+  cb.lastLine = tok.pos.Line + strings.Count(tok.text, "\n")
+  return true
+ }
+ if cb.lastLine < tok.pos.Line-1 {
+  // There's a gap of at least one blank line; tok starts a new block.
+  return false
+ }
+ if cb.lastLine == tok.pos.Line-1 {
+  // Adjacent (next line): join with an explicit newline.
+  cb.text += "\n"
+  cb.lastLine++
+ }
+ cb.text += tok.text
+ cb.lastLine += strings.Count(tok.text, "\n")
+ return true
+}
+
+// commentMap keeps track of blocks of comments in a file. We store comment
+// blocks in maps by first line, and by last line. Note that technically there
+// could be more than one commentBlock ending on the same line, due to /**/
+// style comments. We ignore this rare case and just keep the first one.
+type commentMap struct {
+ byFirst map[int]commentBlock
+ byLast map[int]commentBlock
+ cur commentBlock // block currently being accumulated
+ prevTokenPos Pos // position of the last non-comment token seen
+}
+
+// init allocates the maps; it must be called before any comments are added.
+func (cm *commentMap) init() {
+ cm.byFirst = make(map[int]commentBlock)
+ cm.byLast = make(map[int]commentBlock)
+}
+
+// addComment adds a comment token to the map, either appending to the current
+// block or ending the current block and starting a new one.
+func (cm *commentMap) addComment(tok token) {
+ if !cm.cur.update(tok) {
+  cm.endBlock()
+  // update always succeeds on a freshly-reset (empty) block, so this panic
+  // guards an invariant rather than a reachable path.
+  if !cm.cur.update(tok) {
+   panic(fmt.Errorf("vdl: couldn't update current comment block with token %v", tok))
+  }
+ }
+ // Here's an example of why we need the special case endBlock logic.
+ //
+ // type Foo struct {
+ // // doc1
+ // A int // doc2
+ // // doc3
+ // B int
+ // }
+ //
+ // The problem is that without the special-case, we'd group doc2 and doc3
+ // together into the same block. That may actually be correct some times, but
+ // it's more common for doc3 to be semantically associated with field B. Thus
+ // if we've already seen any token on the same line as this comment block, we
+ // end the block immediately. This means that comments appearing on the same
+ // line as any other token are forced to be a single comment block.
+ if cm.prevTokenPos.Line == tok.pos.Line {
+  cm.endBlock()
+ }
+}
+
+// handleToken is called for each real (non-comment) token: it terminates the
+// current comment block and records the token position, which addComment uses
+// to detect comments sharing a line with code.
+func (cm *commentMap) handleToken(tok token) {
+ cm.endBlock()
+ cm.prevTokenPos = tok.pos
+}
+
+// endBlock adds the current comment block to the map, and resets it in
+// preparation for new comments to be added. In the rare case where we see
+// comment blocks that either start or end on the same line, we just keep the
+// first comment block that was inserted.
+func (cm *commentMap) endBlock() {
+ if cm.cur.text != "" {
+  _, dupFirst := cm.byFirst[cm.cur.firstLine]
+  _, dupLast := cm.byLast[cm.cur.lastLine]
+  if !dupFirst && !dupLast {
+   cm.byFirst[cm.cur.firstLine] = cm.cur
+   cm.byLast[cm.cur.lastLine] = cm.cur
+  }
+ }
+ // Reset all fields for the next block.
+ cm.cur = commentBlock{}
+}
+
+// getDoc returns the documentation string associated with pos. Our rule is the
+// last line of the documentation must end on the line immediately before pos.
+// A returned comment block is consumed: it is deleted from the map and can't
+// be attached to any other item.
+//
+// The returned string is either empty, or is newline terminated.
+func (cm *commentMap) getDoc(pos Pos) string {
+ block := cm.byLast[pos.Line-1]
+ if block.text == "" {
+  return ""
+ }
+ delete(cm.byFirst, block.firstLine)
+ delete(cm.byLast, block.lastLine)
+ return block.text + "\n"
+}
+
+// getDocSuffix returns the suffix documentation associated with pos. Our rule
+// is the first line of the documentation must be on the same line as pos. A
+// returned comment block is consumed: it is deleted from the map and can't be
+// attached to any other item.
+//
+// The returned string is either empty, or has a leading space.
+func (cm *commentMap) getDocSuffix(pos Pos) string {
+ block := cm.byFirst[pos.Line]
+ if block.text == "" {
+  return ""
+ }
+ delete(cm.byFirst, block.firstLine)
+ delete(cm.byLast, block.lastLine)
+ return " " + block.text
+}
+
+// attachTypeComments recursively attaches doc (suffix=false) or doc-suffix
+// (suffix=true) comments to the commentable elements of type t: enum labels
+// and struct/union fields. The recursion terminates at named type references.
+func attachTypeComments(t Type, cm *commentMap, suffix bool) {
+ switch tu := t.(type) {
+ case *TypeEnum:
+  // Labels is a slice of NamePos values, so we must index into the slice to
+  // mutate the stored elements. Ranging by value would assign the doc to a
+  // copy, silently discarding it (and still consuming the comment block).
+  for i := range tu.Labels {
+   if suffix {
+    tu.Labels[i].DocSuffix = cm.getDocSuffix(tu.Labels[i].Pos)
+   } else {
+    tu.Labels[i].Doc = cm.getDoc(tu.Labels[i].Pos)
+   }
+  }
+ case *TypeArray:
+  attachTypeComments(tu.Elem, cm, suffix)
+ case *TypeList:
+  attachTypeComments(tu.Elem, cm, suffix)
+ case *TypeSet:
+  attachTypeComments(tu.Key, cm, suffix)
+ case *TypeMap:
+  attachTypeComments(tu.Key, cm, suffix)
+  attachTypeComments(tu.Elem, cm, suffix)
+ case *TypeStruct:
+  // Fields holds pointers, so ranging by value mutates the shared Field.
+  for _, field := range tu.Fields {
+   if suffix {
+    field.DocSuffix = cm.getDocSuffix(field.Pos)
+   } else {
+    field.Doc = cm.getDoc(field.Pos)
+   }
+   attachTypeComments(field.Type, cm, suffix)
+  }
+ case *TypeUnion:
+  for _, field := range tu.Fields {
+   if suffix {
+    field.DocSuffix = cm.getDocSuffix(field.Pos)
+   } else {
+    field.Doc = cm.getDoc(field.Pos)
+   }
+   attachTypeComments(field.Type, cm, suffix)
+  }
+ case *TypeOptional:
+  attachTypeComments(tu.Base, cm, suffix)
+ case *TypeNamed:
+  // Terminate the recursion at named types.
+ default:
+  panic(fmt.Errorf("vdl: unhandled type %#v", t))
+ }
+}
+
+// attachComments causes all comments collected during the parse to be attached
+// to the appropriate parse tree items. This should only be called after the
+// parse has completed.
+//
+// Suffix docs are attached before regular docs on purpose: getDocSuffix and
+// getDoc each consume (delete) the block they return, so same-line comments
+// take priority over line-before comments for the same block.
+func (l *lexer) attachComments() {
+ f := l.vdlFile
+ // First attach all suffix docs - these occur on the same line.
+ f.PackageDef.DocSuffix = l.comments.getDocSuffix(f.PackageDef.Pos)
+ for _, x := range f.Imports {
+  x.DocSuffix = l.comments.getDocSuffix(x.Pos)
+ }
+ for _, x := range f.ErrorDefs {
+  x.DocSuffix = l.comments.getDocSuffix(x.Pos)
+ }
+ for _, x := range f.TypeDefs {
+  x.DocSuffix = l.comments.getDocSuffix(x.Pos)
+  attachTypeComments(x.Type, &l.comments, true)
+ }
+ for _, x := range f.ConstDefs {
+  x.DocSuffix = l.comments.getDocSuffix(x.Pos)
+ }
+ for _, x := range f.Interfaces {
+  x.DocSuffix = l.comments.getDocSuffix(x.Pos)
+  for _, y := range x.Embeds {
+   y.DocSuffix = l.comments.getDocSuffix(y.Pos)
+  }
+  for _, y := range x.Methods {
+   y.DocSuffix = l.comments.getDocSuffix(y.Pos)
+  }
+ }
+ // Now attach the docs - these occur on the line immediately before.
+ f.PackageDef.Doc = l.comments.getDoc(f.PackageDef.Pos)
+ for _, x := range f.Imports {
+  x.Doc = l.comments.getDoc(x.Pos)
+ }
+ for _, x := range f.ErrorDefs {
+  x.Doc = l.comments.getDoc(x.Pos)
+ }
+ for _, x := range f.TypeDefs {
+  x.Doc = l.comments.getDoc(x.Pos)
+  attachTypeComments(x.Type, &l.comments, false)
+ }
+ for _, x := range f.ConstDefs {
+  x.Doc = l.comments.getDoc(x.Pos)
+ }
+ for _, x := range f.Interfaces {
+  x.Doc = l.comments.getDoc(x.Pos)
+  for _, y := range x.Embeds {
+   y.Doc = l.comments.getDoc(y.Pos)
+  }
+  for _, y := range x.Methods {
+   y.Doc = l.comments.getDoc(y.Pos)
+  }
+ }
+}
+
+// nextToken uses the text/scanner package to scan the input for the next token.
+func (l *lexer) nextToken() token {
+ t := l.scanner.Scan()
+ // Both Pos and scanner.Position start line and column numbering at 1.
+ return token{
+  t:    t,
+  text: l.scanner.TokenText(),
+  pos:  Pos{Line: l.scanner.Position.Line, Col: l.scanner.Position.Column},
+ }
+}
+
+// handleImag handles imaginary literals "[number]i" by peeking ahead. If the
+// rune immediately following tok is 'i', it is consumed, lval is filled in
+// with the imaginary value and true is returned; otherwise the scanner is left
+// untouched and false is returned.
+func (l *lexer) handleImag(tok token, lval *yySymType) bool {
+ if l.scanner.Peek() != 'i' {
+  return false
+ }
+ // Consume the 'i' suffix.
+ l.scanner.Next()
+
+ rat := new(big.Rat)
+ if _, ok := rat.SetString(tok.text); !ok {
+  l.posErrorf(tok.pos, "can't convert token [%v] to imaginary literal", tok)
+ }
+ lval.imagpos.pos = tok.pos
+ lval.imagpos.imag = (*BigImag)(rat)
+ return true
+}
+
+// translateToken takes the token we just scanned, and translates it into a
+// token usable by yacc (lval and id). The done return arg is true when a real
+// yacc token was generated, or false if we need another next/translate pass.
+func (l *lexer) translateToken(tok token, lval *yySymType) (id int, done bool) {
+ switch tok.t {
+ case scanner.EOF:
+  // EOF may still trigger one final auto-semicolon before the parser sees
+  // the end-of-file marker; Lex returns EOF thereafter via sawEOF.
+  l.sawEOF = true
+  if autoSemi(l.prevTok) {
+   return ';', true
+  }
+  return yaccEOF, true
+
+ case '\n':
+  if autoSemi(l.prevTok) {
+   return ';', true
+  }
+  // Returning done=false ensures next/translate will be called again so that
+  // this newline is skipped; id=yaccEOF is a dummy value that's ignored.
+  return yaccEOF, false
+
+ case scanner.String, scanner.RawString:
+  var err error
+  lval.strpos.Pos = tok.pos
+  lval.strpos.String, err = strconv.Unquote(tok.text)
+  if err != nil {
+   l.posErrorf(tok.pos, "can't convert token [%v] to string literal", tok)
+  }
+  return tSTRLIT, true
+
+ case scanner.Int:
+  // An Int followed immediately by 'i' is an imaginary literal.
+  if l.handleImag(tok, lval) {
+   return tIMAGLIT, true
+  }
+  lval.intpos.pos = tok.pos
+  lval.intpos.int = new(big.Int)
+  if _, ok := lval.intpos.int.SetString(tok.text, 0); !ok {
+   l.posErrorf(tok.pos, "can't convert token [%v] to integer literal", tok)
+  }
+  return tINTLIT, true
+
+ case scanner.Float:
+  if l.handleImag(tok, lval) {
+   return tIMAGLIT, true
+  }
+  lval.ratpos.pos = tok.pos
+  lval.ratpos.rat = new(big.Rat)
+  if _, ok := lval.ratpos.rat.SetString(tok.text); !ok {
+   l.posErrorf(tok.pos, "can't convert token [%v] to float literal", tok)
+  }
+  return tRATLIT, true
+
+ case scanner.Ident:
+  // Either the identifier is a known keyword, or we pass it through as IDENT.
+  if keytok, ok := keywords[tok.text]; ok {
+   lval.pos = tok.pos
+   return keytok, true
+  }
+  lval.strpos.Pos = tok.pos
+  lval.strpos.String = tok.text
+  return tIDENT, true
+
+ case scanner.Comment:
+  l.comments.addComment(tok)
+  // Comments aren't considered tokens, just like the '\n' case.
+  return yaccEOF, false
+
+ default:
+  // Either the rune is in our known punctuation whitelist, or we've hit a
+  // syntax error.
+  if nextRunes, ok := knownPunct[tok.t]; ok {
+   // Peek at the next rune and compare against our list of next runes. If
+   // we find a match we return the id in next, otherwise just return the
+   // original rune. This means that 2-rune tokens always take precedence
+   // over 1-rune tokens. Either way the pos is set to the original rune.
+   lval.pos = tok.pos
+   peek := l.scanner.Peek()
+   for _, next := range nextRunes {
+    if peek == next.t {
+     l.scanner.Next()
+     return next.id, true
+    }
+   }
+   return int(tok.t), true
+  }
+  l.posErrorf(tok.pos, "unexpected token [%v]", tok)
+  l.sawEOF = true
+  return yaccEOF, true
+ }
+}
+
+// Lex is part of the yyLexer interface, called by the yacc-generated parser.
+func (l *lexer) Lex(lval *yySymType) int {
+ // Emit a dummy start token indicating what type of parse we're performing.
+ if !l.started {
+  l.started = true
+  switch l.startTok {
+  case startFileImports, startFile, startConfigImports, startConfig, startExprs:
+   return l.startTok
+  default:
+   panic(fmt.Errorf("vdl: unhandled parse start token %d", l.startTok))
+  }
+ }
+ // Always return EOF after we've scanned it. This ensures we emit EOF on the
+ // next Lex call after scanning EOF and adding an auto-semicolon.
+ if l.sawEOF {
+  return yaccEOF
+ }
+ // Run next/translate in a loop to handle newline-triggered auto-semicolons;
+ // nextToken needs to generate newline tokens so that we can trigger the
+ // auto-semicolon logic, but if the newline doesn't generate an auto-semicolon
+ // we should skip the token and move on to the next one.
+ for {
+  tok := l.nextToken()
+  if id, done := l.translateToken(tok, lval); done {
+   // Record the token for auto-semicolon decisions and error positions, and
+   // let the comment map know a real token appeared on this line.
+   l.prevTok = tok
+   l.comments.handleToken(tok)
+   return id
+  }
+ }
+}
+
+// Error is part of the yyLexer interface, called by the yacc-generated parser.
+// Unfortunately yacc doesn't give good error information - we dump the position
+// of the previous scanned token as an approximation of where the error is.
+func (l *lexer) Error(s string) {
+ l.posErrorf(l.prevTok.pos, "%s", s)
+}
+
+// posErrorf generates an error with file and pos info. When pos is invalid
+// (e.g. no token has been scanned yet) the position portion is left empty.
+func (l *lexer) posErrorf(pos Pos, format string, v ...interface{}) {
+ var posstr string
+ if pos.IsValid() {
+  posstr = pos.String()
+ }
+ l.errs.Errorf(l.name+":"+posstr+" "+format, v...)
+}
diff --git a/lib/vdl/parse/parse_test.go b/lib/vdl/parse/parse_test.go
new file mode 100644
index 0000000..ca4f798
--- /dev/null
+++ b/lib/vdl/parse/parse_test.go
@@ -0,0 +1,1389 @@
+package parse_test
+
+// TODO(toddw): Add tests for imaginary literals.
+
+import (
+ "math/big"
+ "reflect"
+ "strings"
+ "testing"
+
+ "v.io/v23/vdl/parse"
+ "v.io/v23/vdl/vdltest"
+ "v.io/v23/vdl/vdlutil"
+)
+
+// pos is shorthand for constructing a parse.Pos.
+func pos(line, col int) parse.Pos {
+ return parse.Pos{line, col}
+}
+
+// sp is shorthand for constructing a parse.StringPos.
+func sp(str string, line, col int) parse.StringPos {
+ return parse.StringPos{String: str, Pos: pos(line, col)}
+}
+
+// lf is shorthand for constructing a parse.LangFmt.
+func lf(l, f parse.StringPos) parse.LangFmt {
+ return parse.LangFmt{Lang: l, Fmt: f}
+}
+
+// np is shorthand for constructing a parse.NamePos.
+func np(name string, line, col int) parse.NamePos {
+ return parse.NamePos{Name: name, Pos: pos(line, col)}
+}
+
+// npptr is like np, but returns a pointer to the NamePos.
+func npptr(name string, line, col int) *parse.NamePos {
+ ret := np(name, line, col)
+ return &ret
+}
+
+// tn is shorthand for constructing a *parse.TypeNamed.
+func tn(name string, line, col int) *parse.TypeNamed {
+ return &parse.TypeNamed{Name: name, P: pos(line, col)}
+}
+
+// cn is shorthand for constructing a *parse.ConstNamed.
+func cn(name string, line, col int) *parse.ConstNamed {
+ return &parse.ConstNamed{Name: name, P: pos(line, col)}
+}
+
+// cl is shorthand for constructing a *parse.ConstLit.
+func cl(lit interface{}, line, col int) *parse.ConstLit {
+ return &parse.ConstLit{Lit: lit, P: pos(line, col)}
+}
+
+// Tests of vdl imports and file parsing.
+type vdlTest struct {
+ name string // test name, reported on failure
+ src string // vdl source fed to the parser
+ expect *parse.File // expected parse tree; nil when errors are expected
+ errors []string // expected error substrings
+}
+
+// testParseVDL runs a single vdlTest case under the given parse options,
+// comparing both the reported errors and the resulting parse tree.
+func testParseVDL(t *testing.T, test vdlTest, opts parse.Opts) {
+ errs := vdlutil.NewErrors(-1)
+ actual := parse.ParseFile("testfile", strings.NewReader(test.src), opts, errs)
+ vdltest.ExpectResult(t, errs, test.name, test.errors...)
+ if !reflect.DeepEqual(test.expect, actual) {
+  t.Errorf("%v\nEXPECT %+v\nACTUAL %+v", test.name, test.expect, actual)
+ }
+}
+
+func TestParseVDLImports(t *testing.T) {
+ for _, test := range vdlImportsTests {
+ testParseVDL(t, test, parse.Opts{ImportsOnly: true})
+ }
+ for _, test := range vdlFileTests {
+ // We only run the success tests from vdlFileTests on the imports only
+ // parser, since the failure tests are testing failures in stuff after the
+ // imports, which won't cause failures in the imports only parser.
+ //
+ // The imports-only parser isn't supposed to fill in fields after the
+ // imports, so we clear them from the expected result. We must copy the
+ // file to ensure the actual vdlTests isn't overwritten since the
+ // full-parser tests needs the full expectations. The test itself doesn't
+ // need to be copied, since it's already copied in the range-for.
+ if test.expect != nil {
+ copyFile := *test.expect
+ test.expect = ©File
+ test.expect.TypeDefs = nil
+ test.expect.ConstDefs = nil
+ test.expect.ErrorDefs = nil
+ test.expect.Interfaces = nil
+ testParseVDL(t, test, parse.Opts{ImportsOnly: true})
+ }
+ }
+}
+
+// TestParseVDLFile runs every test case (imports and full-file) through the
+// full parser.
+func TestParseVDLFile(t *testing.T) {
+ for _, test := range append(vdlImportsTests, vdlFileTests...) {
+  testParseVDL(t, test, parse.Opts{ImportsOnly: false})
+ }
+}
+
+// Tests of config imports and file parsing.
+type configTest struct {
+ name string // test name, reported on failure
+ src string // config source fed to the parser
+ expect *parse.Config // expected parse tree; nil when errors are expected
+ errors []string // expected error substrings
+}
+
+// testParseConfig runs a single configTest case under the given parse options,
+// comparing both the reported errors and the resulting parse tree.
+func testParseConfig(t *testing.T, test configTest, opts parse.Opts) {
+ errs := vdlutil.NewErrors(-1)
+ actual := parse.ParseConfig("testfile", strings.NewReader(test.src), opts, errs)
+ vdltest.ExpectResult(t, errs, test.name, test.errors...)
+ if !reflect.DeepEqual(test.expect, actual) {
+  t.Errorf("%v\nEXPECT %+v\nACTUAL %+v", test.name, test.expect, actual)
+ }
+}
+
+func TestParseConfigImports(t *testing.T) {
+ for _, test := range configTests {
+ // We only run the success tests from configTests on the imports only
+ // parser, since the failure tests are testing failures in stuff after the
+ // imports, which won't cause failures in the imports only parser.
+ //
+ // The imports-only parser isn't supposed to fill in fields after the
+ // imports, so we clear them from the expected result. We must copy the
+ // file to ensure the actual configTests isn't overwritten since the
+ // full-parser tests needs the full expectations. The test itself doesn't
+ // need to be copied, since it's already copied in the range-for.
+ if test.expect != nil {
+ copyConfig := *test.expect
+ test.expect = ©Config
+ test.expect.Config = nil
+ test.expect.ConstDefs = nil
+ testParseConfig(t, test, parse.Opts{ImportsOnly: true})
+ }
+ }
+}
+
+// TestParseConfig runs every config test case through the full parser.
+func TestParseConfig(t *testing.T) {
+ for _, test := range configTests {
+  testParseConfig(t, test, parse.Opts{ImportsOnly: false})
+ }
+}
+
+// vdlImportsTests contains tests of stuff up to and including the imports.
+// Cases whose name starts with "FAIL" expect a nil parse tree plus the listed
+// error substrings; all others expect a successful parse.
+var vdlImportsTests = []vdlTest{
+ // Empty file isn't allowed (need at least a package clause).
+ {
+  "FAILEmptyFile",
+  "",
+  nil,
+  []string{"vdl file must start with package clause"}},
+
+ // Comment tests.
+ {
+  "PackageDocOneLiner",
+  `// One liner
+// Another line
+package testpkg`,
+  &parse.File{BaseName: "testfile", PackageDef: parse.NamePos{Name: "testpkg", Pos: pos(3, 9), Doc: `// One liner
+// Another line
+`}},
+  nil},
+ {
+  "PackageDocMultiLiner",
+  `/* Multi liner
+Another line
+*/
+package testpkg`,
+  &parse.File{BaseName: "testfile", PackageDef: parse.NamePos{Name: "testpkg", Pos: pos(4, 9), Doc: `/* Multi liner
+Another line
+*/
+`}},
+  nil},
+ {
+  "NotPackageDoc",
+  `// Extra newline, not package doc
+
+package testpkg`,
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 3, 9)},
+  nil},
+ {
+  "FAILUnterminatedComment",
+  `/* Unterminated
+Another line
+package testpkg`,
+  nil,
+  []string{"comment not terminated"}},
+
+ // Package tests.
+ {
+  "Package",
+  "package testpkg;",
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+  nil},
+ {
+  "PackageNoSemi",
+  "package testpkg",
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+  nil},
+ {
+  "FAILBadPackageName",
+  "package foo.bar",
+  nil,
+  []string{"testfile:1:12 syntax error"}},
+
+ // Import tests.
+ {
+  "EmptyImport",
+  `package testpkg;
+import (
+)`,
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+  nil},
+ {
+  "OneImport",
+  `package testpkg;
+import "foo/bar";`,
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+   Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 2, 8)}}},
+  nil},
+ {
+  "OneImportLocalNameNoSemi",
+  `package testpkg
+import baz "foo/bar"`,
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+   Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("baz", 2, 8)}}},
+  nil},
+ {
+  "OneImportParens",
+  `package testpkg
+import (
+  "foo/bar";
+)`,
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+   Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}}},
+  nil},
+ {
+  "OneImportParensNoSemi",
+  `package testpkg
+import (
+  "foo/bar"
+)`,
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+   Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}}},
+  nil},
+ {
+  "MixedImports",
+  `package testpkg
+import "foo/bar"
+import (
+  "baz";"a/b"
+  "c/d"
+)
+import "z"`,
+  &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+   Imports: []*parse.Import{
+    {Path: "foo/bar", NamePos: np("", 2, 8)},
+    {Path: "baz", NamePos: np("", 4, 3)},
+    {Path: "a/b", NamePos: np("", 4, 9)},
+    {Path: "c/d", NamePos: np("", 5, 3)},
+    {Path: "z", NamePos: np("", 7, 8)}}},
+  nil},
+ {
+  "FAILImportParensNotClosed",
+  `package testpkg
+import (
+  "foo/bar"`,
+  nil,
+  []string{"testfile:3:12 syntax error"}},
+}
+
+// vdlFileTests contains tests of the file contents after the imports:
+// type, const, error and interface definitions.  Every expected position
+// (via the np/tn/cn/cl/pos/sp/lf helpers, presumably line/col builders
+// defined elsewhere in this file) is hand-computed against the literal
+// test source, so the source strings and expected trees must be kept in
+// exact sync.  Cases whose names start with FAIL expect the listed errors.
+var vdlFileTests = []vdlTest{
+ // Data type tests.
+ {
+ "TypeNamed",
+ `package testpkg
+type foo bar`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: tn("bar", 2, 10)}}},
+ nil},
+ {
+ "TypeNamedQualified",
+ `package testpkg
+type foo bar.baz`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: tn("bar.baz", 2, 10)}}},
+ nil},
+ {
+ "TypeNamedQualifiedPath",
+ `package testpkg
+type foo "a/b/c/bar".baz`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: tn(`"a/b/c/bar".baz`, 2, 10)}}},
+ nil},
+ {
+ "TypeEnum",
+ `package testpkg
+type foo enum{A;B;C}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeEnum{
+ Labels: []parse.NamePos{np("A", 2, 15), np("B", 2, 17), np("C", 2, 19)},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeEnumNewlines",
+ `package testpkg
+type foo enum {
+ A
+ B
+ C
+}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeEnum{
+ Labels: []parse.NamePos{np("A", 3, 3), np("B", 4, 3), np("C", 5, 3)},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeArray",
+ `package testpkg
+type foo [2]bar`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeArray{
+ Len: 2, Elem: tn("bar", 2, 13), P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeList",
+ `package testpkg
+type foo []bar`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeList{
+ Elem: tn("bar", 2, 12), P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeSet",
+ `package testpkg
+type foo set[bar]`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeSet{
+ Key: tn("bar", 2, 14), P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeMap",
+ `package testpkg
+type foo map[bar]baz`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeMap{
+ Key: tn("bar", 2, 14), Elem: tn("baz", 2, 18), P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeStructOneField",
+ `package testpkg
+type foo struct{a b;}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+ Fields: []*parse.Field{{NamePos: np("a", 2, 17), Type: tn("b", 2, 19)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeStructOneFieldNoSemi",
+ `package testpkg
+type foo struct{a b}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+ Fields: []*parse.Field{{NamePos: np("a", 2, 17), Type: tn("b", 2, 19)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeStructOneFieldNewline",
+ `package testpkg
+type foo struct{
+ a b;
+}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+ Fields: []*parse.Field{{NamePos: np("a", 3, 3), Type: tn("b", 3, 5)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeStructOneFieldNewlineNoSemi",
+ `package testpkg
+type foo struct{
+ a b
+}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+ Fields: []*parse.Field{{NamePos: np("a", 3, 3), Type: tn("b", 3, 5)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeStructOneFieldList",
+ `package testpkg
+type foo struct{a,b,c d}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+ Fields: []*parse.Field{
+ {NamePos: np("a", 2, 17), Type: tn("d", 2, 23)},
+ {NamePos: np("b", 2, 19), Type: tn("d", 2, 23)},
+ {NamePos: np("c", 2, 21), Type: tn("d", 2, 23)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeStructMixed",
+ `package testpkg
+type foo struct{
+ a b;c,d e
+ f,g h
+}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+ Fields: []*parse.Field{
+ {NamePos: np("a", 3, 3), Type: tn("b", 3, 5)},
+ {NamePos: np("c", 3, 7), Type: tn("e", 3, 11)},
+ {NamePos: np("d", 3, 9), Type: tn("e", 3, 11)},
+ {NamePos: np("f", 4, 3), Type: tn("h", 4, 7)},
+ {NamePos: np("g", 4, 5), Type: tn("h", 4, 7)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeUnion",
+ `package testpkg
+type foo union{A a;B b;C c}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+ Fields: []*parse.Field{
+ {NamePos: np("A", 2, 16), Type: tn("a", 2, 18)},
+ {NamePos: np("B", 2, 20), Type: tn("b", 2, 22)},
+ {NamePos: np("C", 2, 24), Type: tn("c", 2, 26)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeUnionNewlines",
+ `package testpkg
+type foo union{
+ A a
+ B b
+ C c
+}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+ Fields: []*parse.Field{
+ {NamePos: np("A", 3, 3), Type: tn("a", 3, 5)},
+ {NamePos: np("B", 4, 3), Type: tn("b", 4, 5)},
+ {NamePos: np("C", 5, 3), Type: tn("c", 5, 5)}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeOptional",
+ `package testpkg
+type foo union{A a;B ?b;C ?c}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+ Fields: []*parse.Field{
+ {NamePos: np("A", 2, 16), Type: tn("a", 2, 18)},
+ {NamePos: np("B", 2, 20),
+ Type: &parse.TypeOptional{Base: tn("b", 2, 23), P: pos(2, 22)}},
+ {NamePos: np("C", 2, 25),
+ Type: &parse.TypeOptional{Base: tn("c", 2, 28), P: pos(2, 27)}}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "TypeOptionalNewlines",
+ `package testpkg
+type foo union{
+ A a
+ B ?b
+ C ?c
+}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ TypeDefs: []*parse.TypeDef{
+ {NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+ Fields: []*parse.Field{
+ {NamePos: np("A", 3, 3), Type: tn("a", 3, 5)},
+ {NamePos: np("B", 4, 3),
+ Type: &parse.TypeOptional{Base: tn("b", 4, 6), P: pos(4, 5)}},
+ {NamePos: np("C", 5, 3),
+ Type: &parse.TypeOptional{Base: tn("c", 5, 6), P: pos(5, 5)}}},
+ P: pos(2, 10)}}}},
+ nil},
+ {
+ "FAILTypeStructNotClosed",
+ `package testpkg
+type foo struct{
+ a b`,
+ nil,
+ []string{"testfile:3:6 syntax error"}},
+ {
+ "FAILTypeStructUnnamedField",
+ `package testpkg
+type foo struct{a}`,
+ nil,
+ []string{"testfile:2:18 syntax error"}},
+ {
+ "FAILTypeStructUnnamedFieldList",
+ `package testpkg
+type foo struct{a, b}`,
+ nil,
+ []string{"testfile:2:21 syntax error"}},
+
+ // Const definition tests.
+ {
+ "BoolConst",
+ `package testpkg
+const foo = true
+const bar = false`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: cn("true", 2, 13)},
+ {NamePos: np("bar", 3, 7), Expr: cn("false", 3, 13)}}},
+ nil},
+ {
+ "StringConst",
+ "package testpkg\nconst foo = \"abc\"\nconst bar = `def`",
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: cl("abc", 2, 13)},
+ {NamePos: np("bar", 3, 7), Expr: cl("def", 3, 13)}}},
+ nil},
+ {
+ "IntegerConst",
+ `package testpkg
+const foo = 123`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: cl(big.NewInt(123), 2, 13)}}},
+ nil},
+ {
+ "FloatConst",
+ `package testpkg
+const foo = 1.5`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: cl(big.NewRat(3, 2), 2, 13)}}},
+ nil},
+ {
+ "NamedConst",
+ `package testpkg
+const foo = baz
+const bar = pkg.box`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: cn("baz", 2, 13)},
+ {NamePos: np("bar", 3, 7), Expr: cn("pkg.box", 3, 13)}}},
+ nil},
+ {
+ "NamedConstQualified",
+ `package testpkg
+const foo = bar.baz`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: cn("bar.baz", 2, 13)}}},
+ nil},
+ {
+ "NamedConstQualifiedPath",
+ `package testpkg
+const foo = "a/b/c/bar".baz`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: cn(`"a/b/c/bar".baz`, 2, 13)}}},
+ nil},
+ {
+ "CompLitConst",
+ `package testpkg
+const foo = {"a","b"}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+ KVList: []parse.KVLit{
+ {Value: cl("a", 2, 14)},
+ {Value: cl("b", 2, 18)}},
+ P: pos(2, 13)}}}},
+ nil},
+ {
+ "CompLitKVConst",
+ `package testpkg
+const foo = {"a":1,"b":2}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+ KVList: []parse.KVLit{
+ {cl("a", 2, 14), cl(big.NewInt(1), 2, 18)},
+ {cl("b", 2, 20), cl(big.NewInt(2), 2, 24)}},
+ P: pos(2, 13)}}}},
+ nil},
+ {
+ "CompLitTypedConst",
+ `package testpkg
+const foo = bar{"a","b"}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+ Type: tn("bar", 2, 13),
+ KVList: []parse.KVLit{
+ {Value: cl("a", 2, 17)},
+ {Value: cl("b", 2, 21)}},
+ P: pos(2, 16)}}}},
+ nil},
+ {
+ "CompLitKVTypedConst",
+ `package testpkg
+const foo = bar{"a":1,"b":2}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+ Type: tn("bar", 2, 13),
+ KVList: []parse.KVLit{
+ {cl("a", 2, 17), cl(big.NewInt(1), 2, 21)},
+ {cl("b", 2, 23), cl(big.NewInt(2), 2, 27)}},
+ P: pos(2, 16)}}}},
+ nil},
+ {
+ "UnaryOpConst",
+ `package testpkg
+const foo = !false
+const bar = +1
+const baz = -2
+const box = ^3`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: &parse.ConstUnaryOp{"!",
+ cn("false", 2, 14), pos(2, 13)}},
+ {NamePos: np("bar", 3, 7), Expr: &parse.ConstUnaryOp{"+",
+ cl(big.NewInt(1), 3, 14), pos(3, 13)}},
+ {NamePos: np("baz", 4, 7), Expr: &parse.ConstUnaryOp{"-",
+ cl(big.NewInt(2), 4, 14), pos(4, 13)}},
+ {NamePos: np("box", 5, 7), Expr: &parse.ConstUnaryOp{"^",
+ cl(big.NewInt(3), 5, 14), pos(5, 13)}}}},
+ nil},
+ {
+ "TypeConvConst",
+ `package testpkg
+const foo = baz(true)
+const bar = pkg.box(false)`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: &parse.ConstTypeConv{tn("baz", 2, 13),
+ cn("true", 2, 17), pos(2, 13)}},
+ {NamePos: np("bar", 3, 7), Expr: &parse.ConstTypeConv{tn("pkg.box", 3, 13),
+ cn("false", 3, 21), pos(3, 13)}}}},
+ nil},
+ {
+ "TypeObjectConst",
+ `package testpkg
+const foo = typeobject(bool)
+const bar = typeobject(pkg.box)`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("foo", 2, 7), Expr: &parse.ConstTypeObject{tn("bool", 2, 24),
+ pos(2, 13)}},
+ {NamePos: np("bar", 3, 7), Expr: &parse.ConstTypeObject{tn("pkg.box", 3, 24),
+ pos(3, 13)}}}},
+ nil},
+ {
+ "BinaryOpConst",
+ `package testpkg
+const a = true || false
+const b = true && false
+const c = 1 < 2
+const d = 3 > 4
+const e = 5 <= 6
+const f = 7 >= 8
+const g = 9 != 8
+const h = 7 == 6
+const i = 5 + 4
+const j = 3 - 2
+const k = 1 * 2
+const l = 3 / 4
+const m = 5 % 6
+const n = 7 | 8
+const o = 9 & 8
+const p = 7 ^ 6
+const q = 5 << 4
+const r = 3 >> 2`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("a", 2, 7),
+ Expr: &parse.ConstBinaryOp{
+ "||", cn("true", 2, 11), cn("false", 2, 19), pos(2, 16)}},
+ {NamePos: np("b", 3, 7),
+ Expr: &parse.ConstBinaryOp{
+ "&&", cn("true", 3, 11), cn("false", 3, 19), pos(3, 16)}},
+ {NamePos: np("c", 4, 7),
+ Expr: &parse.ConstBinaryOp{"<", cl(big.NewInt(1), 4, 11),
+ cl(big.NewInt(2), 4, 15), pos(4, 13)}},
+ {NamePos: np("d", 5, 7),
+ Expr: &parse.ConstBinaryOp{">", cl(big.NewInt(3), 5, 11),
+ cl(big.NewInt(4), 5, 15), pos(5, 13)}},
+ {NamePos: np("e", 6, 7),
+ Expr: &parse.ConstBinaryOp{"<=", cl(big.NewInt(5), 6, 11),
+ cl(big.NewInt(6), 6, 16), pos(6, 13)}},
+ {NamePos: np("f", 7, 7),
+ Expr: &parse.ConstBinaryOp{">=", cl(big.NewInt(7), 7, 11),
+ cl(big.NewInt(8), 7, 16), pos(7, 13)}},
+ {NamePos: np("g", 8, 7),
+ Expr: &parse.ConstBinaryOp{"!=", cl(big.NewInt(9), 8, 11),
+ cl(big.NewInt(8), 8, 16), pos(8, 13)}},
+ {NamePos: np("h", 9, 7),
+ Expr: &parse.ConstBinaryOp{"==", cl(big.NewInt(7), 9, 11),
+ cl(big.NewInt(6), 9, 16), pos(9, 13)}},
+ {NamePos: np("i", 10, 7),
+ Expr: &parse.ConstBinaryOp{"+", cl(big.NewInt(5), 10, 11),
+ cl(big.NewInt(4), 10, 15), pos(10, 13)}},
+ {NamePos: np("j", 11, 7),
+ Expr: &parse.ConstBinaryOp{"-", cl(big.NewInt(3), 11, 11),
+ cl(big.NewInt(2), 11, 15), pos(11, 13)}},
+ {NamePos: np("k", 12, 7),
+ Expr: &parse.ConstBinaryOp{"*", cl(big.NewInt(1), 12, 11),
+ cl(big.NewInt(2), 12, 15), pos(12, 13)}},
+ {NamePos: np("l", 13, 7),
+ Expr: &parse.ConstBinaryOp{"/", cl(big.NewInt(3), 13, 11),
+ cl(big.NewInt(4), 13, 15), pos(13, 13)}},
+ {NamePos: np("m", 14, 7),
+ Expr: &parse.ConstBinaryOp{"%", cl(big.NewInt(5), 14, 11),
+ cl(big.NewInt(6), 14, 15), pos(14, 13)}},
+ {NamePos: np("n", 15, 7),
+ Expr: &parse.ConstBinaryOp{"|", cl(big.NewInt(7), 15, 11),
+ cl(big.NewInt(8), 15, 15), pos(15, 13)}},
+ {NamePos: np("o", 16, 7),
+ Expr: &parse.ConstBinaryOp{"&", cl(big.NewInt(9), 16, 11),
+ cl(big.NewInt(8), 16, 15), pos(16, 13)}},
+ {NamePos: np("p", 17, 7),
+ Expr: &parse.ConstBinaryOp{"^", cl(big.NewInt(7), 17, 11),
+ cl(big.NewInt(6), 17, 15), pos(17, 13)}},
+ {NamePos: np("q", 18, 7),
+ Expr: &parse.ConstBinaryOp{"<<", cl(big.NewInt(5), 18, 11),
+ cl(big.NewInt(4), 18, 16), pos(18, 13)}},
+ {NamePos: np("r", 19, 7),
+ Expr: &parse.ConstBinaryOp{">>", cl(big.NewInt(3), 19, 11),
+ cl(big.NewInt(2), 19, 16), pos(19, 13)}}}},
+ nil},
+ {
+ "FAILConstOnlyName",
+ `package testpkg
+const foo`,
+ nil,
+ []string{"testfile:2:10 syntax error"}},
+ {
+ "FAILConstNoEquals",
+ `package testpkg
+const foo bar`,
+ nil,
+ []string{"testfile:2:11 syntax error"}},
+ {
+ "FAILConstNoValue",
+ `package testpkg
+const foo =`,
+ nil,
+ []string{"testfile:2:12 syntax error"}},
+
+ // Error definition tests.
+ {
+ "ErrorEmpty",
+ `package testpkg
+error()`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+ nil},
+ {
+ "ErrorDefNoParamsNoDetails1",
+ `package testpkg
+error ErrFoo()`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{NamePos: np("ErrFoo", 2, 7)}}},
+ nil},
+ {
+ "ErrorDefNoParamsNoDetails2",
+ `package testpkg
+error ErrFoo() {}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{NamePos: np("ErrFoo", 2, 7)}}},
+ nil},
+ {
+ "ErrorDefNoParamsWithDetails1",
+ `package testpkg
+error ErrFoo() {NoRetry}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Actions: []parse.StringPos{sp("NoRetry", 2, 17)}}}},
+ nil},
+ {
+ "ErrorDefNoParamsWithDetails2",
+ `package testpkg
+error ErrFoo() {"en":"a"}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Formats: []parse.LangFmt{lf(sp("en", 2, 17), sp("a", 2, 22))}}}},
+ nil},
+ {
+ "ErrorDefNoParamsWithDetails3",
+ `package testpkg
+error ErrFoo() {NoRetry, "en":"a", "zh":"b"}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Actions: []parse.StringPos{sp("NoRetry", 2, 17)},
+ Formats: []parse.LangFmt{
+ lf(sp("en", 2, 26), sp("a", 2, 31)),
+ lf(sp("zh", 2, 36), sp("b", 2, 41)),
+ }}}},
+ nil},
+ {
+ "ErrorDefWithParamsNoDetails1",
+ `package testpkg
+error ErrFoo(x int, y bool)`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Params: []*parse.Field{
+ {NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+ {NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}}}}},
+ nil},
+ {
+ "ErrorDefWithParamsNoDetails2",
+ `package testpkg
+error ErrFoo(x int, y bool) {}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Params: []*parse.Field{
+ {NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+ {NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}}}}},
+ nil},
+ {
+ "ErrorDefWithParamsWithDetails1",
+ `package testpkg
+error ErrFoo(x int, y bool) {NoRetry}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Params: []*parse.Field{
+ {NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+ {NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}},
+ Actions: []parse.StringPos{sp("NoRetry", 2, 30)}}}},
+ nil},
+ {
+ "ErrorDefWithParamsWithDetails2",
+ `package testpkg
+error ErrFoo(x int, y bool) {"en":"a"}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Params: []*parse.Field{
+ {NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+ {NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}},
+ Formats: []parse.LangFmt{lf(sp("en", 2, 30), sp("a", 2, 35))}}}},
+ nil},
+ {
+ "ErrorDefWithParamsWithDetails3",
+ `package testpkg
+error ErrFoo(x int, y bool) {NoRetry, "en":"a", "zh":"b"}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{{
+ NamePos: np("ErrFoo", 2, 7),
+ Params: []*parse.Field{
+ {NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+ {NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}},
+ Actions: []parse.StringPos{sp("NoRetry", 2, 30)},
+ Formats: []parse.LangFmt{
+ lf(sp("en", 2, 39), sp("a", 2, 44)),
+ lf(sp("zh", 2, 49), sp("b", 2, 54)),
+ }}}},
+ nil},
+ {
+ "ErrorDefMulti",
+ `package testpkg
+error (
+ ErrFoo()
+ ErrBar() {NoRetry, "en":"a", "zh":"b"}
+ ErrBaz(x int, y bool) {NoRetry, "en":"a", "zh":"b"}
+)`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ ErrorDefs: []*parse.ErrorDef{
+ {
+ NamePos: np("ErrFoo", 3, 3),
+ },
+ {
+ NamePos: np("ErrBar", 4, 3),
+ Actions: []parse.StringPos{sp("NoRetry", 4, 13)},
+ Formats: []parse.LangFmt{
+ lf(sp("en", 4, 22), sp("a", 4, 27)),
+ lf(sp("zh", 4, 32), sp("b", 4, 37)),
+ },
+ },
+ {
+ NamePos: np("ErrBaz", 5, 3),
+ Params: []*parse.Field{
+ {NamePos: np("x", 5, 10), Type: tn("int", 5, 12)},
+ {NamePos: np("y", 5, 17), Type: tn("bool", 5, 19)}},
+ Actions: []parse.StringPos{sp("NoRetry", 5, 26)},
+ Formats: []parse.LangFmt{
+ lf(sp("en", 5, 35), sp("a", 5, 40)),
+ lf(sp("zh", 5, 45), sp("b", 5, 50)),
+ },
+ },
+ }},
+ nil},
+
+ // Interface tests.
+ {
+ "InterfaceEmpty",
+ `package testpkg
+type foo interface{}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6)}}},
+ nil},
+ {
+ "InterfaceOneMethodOneInUnnamedOut",
+ `package testpkg
+type foo interface{meth1(a b) (c | error)}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+ Methods: []*parse.Method{{NamePos: np("meth1", 2, 20),
+ InArgs: []*parse.Field{{NamePos: np("a", 2, 26), Type: tn("b", 2, 28)}},
+ OutArgs: []*parse.Field{{NamePos: np("", 2, 32), Type: tn("c", 2, 32)}}}}}}},
+ nil},
+ {
+ "InterfaceErrors",
+ `package testpkg
+type foo interface{meth1(err error) error}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+ Methods: []*parse.Method{{NamePos: np("meth1", 2, 20),
+ InArgs: []*parse.Field{{NamePos: np("err", 2, 26), Type: tn("error", 2, 30)}}}}}}},
+ nil},
+ {
+ "InterfaceMixedMethods",
+ `package testpkg
+type foo interface{
+ meth1(a b) (c | error);meth2() error
+ meth3(e f, g, h i) (j k, l, m n | error)
+}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+ Methods: []*parse.Method{
+ {NamePos: np("meth1", 3, 3),
+ InArgs: []*parse.Field{{NamePos: np("a", 3, 9), Type: tn("b", 3, 11)}},
+ OutArgs: []*parse.Field{{NamePos: np("", 3, 15), Type: tn("c", 3, 15)}}},
+ {NamePos: np("meth2", 3, 26)},
+ {NamePos: np("meth3", 4, 3),
+ InArgs: []*parse.Field{
+ {NamePos: np("e", 4, 9), Type: tn("f", 4, 11)},
+ {NamePos: np("g", 4, 14), Type: tn("i", 4, 19)},
+ {NamePos: np("h", 4, 17), Type: tn("i", 4, 19)}},
+ OutArgs: []*parse.Field{
+ {NamePos: np("j", 4, 23), Type: tn("k", 4, 25)},
+ {NamePos: np("l", 4, 28), Type: tn("n", 4, 33)},
+ {NamePos: np("m", 4, 31), Type: tn("n", 4, 33)}}}}}}},
+ nil},
+ {
+ "InterfaceEmbed",
+ `package testpkg
+type foo interface{bar}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+ Embeds: []*parse.NamePos{npptr("bar", 2, 20)}}}},
+ nil},
+ {
+ "InterfaceEmbedQualified",
+ `package testpkg
+type foo interface{bar.baz}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+ Embeds: []*parse.NamePos{npptr("bar.baz", 2, 20)}}}},
+ nil},
+ {
+ "InterfaceEmbedQualifiedPath",
+ `package testpkg
+type foo interface{"a/b/c/bar".baz}`,
+ &parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+ Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+ Embeds: []*parse.NamePos{npptr(`"a/b/c/bar".baz`, 2, 20)}}}},
+ nil},
+ {
+ "FAILInterfaceUnclosedInterface",
+ `package testpkg
+type foo interface{
+ meth1()`,
+ nil,
+ []string{"testfile:3:10 syntax error"}},
+ {
+ "FAILInterfaceUnclosedArgs",
+ `package testpkg
+type foo interface{
+ meth1(
+}`,
+ nil,
+ []string{"testfile:4:1 syntax error"}},
+ {
+ "FAILInterfaceVariableNames",
+ `package testpkg
+type foo interface{
+ meth1([]a, []b []c)
+}`,
+ nil,
+ []string{"expected one or more variable names",
+ "testfile:3:18 perhaps you forgot a comma"}},
+}
+
+// configTests contains tests of config files: the leading config clause
+// (with or without doc comments), imports, inline and out-of-line config
+// expressions, and the rule that config files may not define errors,
+// types or interfaces.  Expected positions are hand-computed against the
+// literal source; FAIL-prefixed cases expect the listed errors.
+var configTests = []configTest{
+ // Empty file isn't allowed (need at least a package clause).
+ {
+ "FAILEmptyFile",
+ "",
+ nil,
+ []string{"config file must start with config clause"}},
+
+ // Comment tests.
+ {
+ "ConfigDocOneLiner",
+ `// One liner
+// Another line
+config = true`,
+ &parse.Config{FileName: "testfile", ConfigDef: parse.NamePos{Name: "config", Pos: pos(3, 1), Doc: `// One liner
+// Another line
+`},
+ Config: cn("true", 3, 10)},
+ nil},
+ {
+ "ConfigDocMultiLiner",
+ `/* Multi liner
+Another line
+*/
+config = true`,
+ &parse.Config{FileName: "testfile", ConfigDef: parse.NamePos{Name: "config", Pos: pos(4, 1), Doc: `/* Multi liner
+Another line
+*/
+`},
+ Config: cn("true", 4, 10)},
+ nil},
+ {
+ "NotConfigDoc",
+ `// Extra newline, not config doc
+
+config = true`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 3, 1),
+ Config: cn("true", 3, 10)},
+ nil},
+ {
+ "FAILUnterminatedComment",
+ `/* Unterminated
+Another line
+config = true`,
+ nil,
+ []string{"comment not terminated"}},
+
+ // Config tests.
+ {
+ "Config",
+ "config = true;",
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cn("true", 1, 10)},
+ nil},
+ {
+ "ConfigNoSemi",
+ "config = true",
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cn("true", 1, 10)},
+ nil},
+ {
+ "ConfigNamedConfig",
+ "config = config",
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cn("config", 1, 10)},
+ nil},
+ {
+ "FAILConfigNoEqual",
+ "config true",
+ nil,
+ []string{"testfile:1:8 syntax error"}},
+
+ // Import tests.
+ {
+ "EmptyImport",
+ `config = foo
+import (
+)`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cn("foo", 1, 10)},
+ nil},
+ {
+ "OneImport",
+ `config = foo
+import "foo/bar";`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 2, 8)}},
+ Config: cn("foo", 1, 10)},
+ nil},
+ {
+ "OneImportLocalNameNoSemi",
+ `config = foo
+import baz "foo/bar"`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("baz", 2, 8)}},
+ Config: cn("foo", 1, 10)},
+ nil},
+ {
+ "OneImportParens",
+ `config = foo
+import (
+ "foo/bar";
+)`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}},
+ Config: cn("foo", 1, 10)},
+ nil},
+ {
+ "OneImportParensNoSemi",
+ `config = foo
+import (
+ "foo/bar"
+)`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}},
+ Config: cn("foo", 1, 10)},
+ nil},
+ {
+ "OneImportParensNamed",
+ `config = foo
+import (
+ baz "foo/bar"
+)`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("baz", 3, 3)}},
+ Config: cn("foo", 1, 10)},
+ nil},
+ {
+ "MixedImports",
+ `config = foo
+import "foo/bar"
+import (
+ "baz";"a/b"
+ "c/d"
+)
+import "z"`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{
+ {Path: "foo/bar", NamePos: np("", 2, 8)},
+ {Path: "baz", NamePos: np("", 4, 3)},
+ {Path: "a/b", NamePos: np("", 4, 9)},
+ {Path: "c/d", NamePos: np("", 5, 3)},
+ {Path: "z", NamePos: np("", 7, 8)}},
+ Config: cn("foo", 1, 10)},
+ nil},
+ {
+ "FAILImportParensNotClosed",
+ `config = foo
+import (
+ "foo/bar"`,
+ nil,
+ []string{"testfile:3:12 syntax error"}},
+
+ // Inline config tests.
+ {
+ "BoolConst",
+ `config = true`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cn("true", 1, 10)},
+ nil},
+ {
+ "StringConst",
+ `config = "abc"`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cl("abc", 1, 10)},
+ nil},
+ {
+ "IntegerConst",
+ `config = 123`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cl(big.NewInt(123), 1, 10)},
+ nil},
+ {
+ "FloatConst",
+ `config = 1.5`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cl(big.NewRat(3, 2), 1, 10)},
+ nil},
+ {
+ "NamedConst",
+ `config = pkg.foo`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: cn("pkg.foo", 1, 10)},
+ nil},
+ {
+ "CompLitConst",
+ `config = {"a","b"}`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: &parse.ConstCompositeLit{
+ KVList: []parse.KVLit{
+ {Value: cl("a", 1, 11)},
+ {Value: cl("b", 1, 15)}},
+ P: pos(1, 10)}},
+ nil},
+ {
+ "CompLitKVConst",
+ `config = {"a":1,"b":2}`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: &parse.ConstCompositeLit{
+ KVList: []parse.KVLit{
+ {cl("a", 1, 11), cl(big.NewInt(1), 1, 15)},
+ {cl("b", 1, 17), cl(big.NewInt(2), 1, 21)}},
+ P: pos(1, 10)}},
+ nil},
+ {
+ "CompLitTypedConst",
+ `config = foo{"a","b"}`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: &parse.ConstCompositeLit{
+ Type: tn("foo", 1, 10),
+ KVList: []parse.KVLit{
+ {Value: cl("a", 1, 14)},
+ {Value: cl("b", 1, 18)}},
+ P: pos(1, 13)}},
+ nil},
+ {
+ "CompLitKVTypedConst",
+ `config = foo{"a":1,"b":2}`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Config: &parse.ConstCompositeLit{
+ Type: tn("foo", 1, 10),
+ KVList: []parse.KVLit{
+ {cl("a", 1, 14), cl(big.NewInt(1), 1, 18)},
+ {cl("b", 1, 20), cl(big.NewInt(2), 1, 24)}},
+ P: pos(1, 13)}},
+ nil},
+ {
+ "FAILConstNoEquals",
+ `config 123`,
+ nil,
+ []string{"testfile:1:8 syntax error"}},
+ {
+ "FAILConstNoValue",
+ `config =`,
+ nil,
+ []string{"testfile:1:9 syntax error"}},
+
+ // Out-of-line config tests.
+ {
+ "BoolOutOfLineConfig",
+ `config = config
+import "foo"
+const config = true`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{{Path: "foo", NamePos: np("", 2, 8)}},
+ Config: cn("config", 1, 10),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("config", 3, 7), Expr: cn("true", 3, 16)}}},
+ nil},
+ {
+ "BoolOutOfLineBar",
+ `config = bar
+import "foo"
+const bar = true`,
+ &parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+ Imports: []*parse.Import{{Path: "foo", NamePos: np("", 2, 8)}},
+ Config: cn("bar", 1, 10),
+ ConstDefs: []*parse.ConstDef{
+ {NamePos: np("bar", 3, 7), Expr: cn("true", 3, 13)}}},
+ nil},
+
+ // Errors, types and interfaces return error
+ {
+ "FAILError",
+ `config = true
+error foo()`,
+ nil,
+ []string{"config files may not contain error, type or interface definitions"}},
+ {
+ "FAILType",
+ `config = true
+type foo bool`,
+ nil,
+ []string{"config files may not contain error, type or interface definitions"}},
+ {
+ "FAILInterface",
+ `config = true
+type foo interface{}`,
+ nil,
+ []string{"config files may not contain error, type or interface definitions"}},
+}
+
+// configImports is a test helper that builds a parse.Config whose
+// Imports field holds one entry per given path, in the given order.
+// All other fields of the returned Config are left at their zero value.
+func configImports(imports ...string) *parse.Config {
+	cfg := &parse.Config{}
+	for _, path := range imports {
+		cfg.Imports = append(cfg.Imports, &parse.Import{Path: path})
+	}
+	return cfg
+}
+
+// TestConfigHasImport verifies that Config.HasImport matches registered
+// import paths exactly: prefixes, suffixes and unknown paths must not match.
+func TestConfigHasImport(t *testing.T) {
+	config := configImports("a", "b/c")
+	tests := []struct {
+		Path string
+		Want bool
+	}{
+		{"a", true},
+		{"b/c", true},
+		{"b", false},
+		{"c", false},
+		{"d", false},
+	}
+	for _, test := range tests {
+		got := config.HasImport(test.Path)
+		if got != test.Want {
+			t.Errorf("HasImport(%q) got %v, want %v", test.Path, got, test.Want)
+		}
+	}
+}
+
+// TestConfigAddImports verifies that Config.AddImports appends only paths
+// not already present, preserving the order of the existing imports.
+func TestConfigAddImports(t *testing.T) {
+	tests := []struct {
+		Base    *parse.Config
+		Imports []string
+		Want    *parse.Config
+	}{
+		{configImports(), []string{"a", "b/c"}, configImports("a", "b/c")},
+		{configImports("a"), []string{"a", "b/c"}, configImports("a", "b/c")},
+		{configImports("a", "b/c"), []string{"a", "b/c"}, configImports("a", "b/c")},
+		{configImports("a", "b/c"), []string{"a", "b/c", "d"}, configImports("a", "b/c", "d")},
+	}
+	for _, test := range tests {
+		test.Base.AddImports(test.Imports...)
+		if !reflect.DeepEqual(test.Base, test.Want) {
+			t.Errorf("AddImports(%q) got %v, want %v", test.Imports, test.Base, test.Want)
+		}
+	}
+}
+
+// TestParseExprs checks parse.ParseExprs on standalone const expressions:
+// literals, named references, composite literals, a binary op, and a
+// comma-separated list of expressions.  Expected positions are relative to
+// the start of Data.  An empty Err means the parse must succeed.
+func TestParseExprs(t *testing.T) {
+ tests := []struct {
+ Data string
+ Exprs []parse.ConstExpr
+ Err string
+ }{
+ {``, nil, "syntax error"},
+ {`true`, []parse.ConstExpr{cn("true", 1, 1)}, ""},
+ {`false`, []parse.ConstExpr{cn("false", 1, 1)}, ""},
+ {`abc`, []parse.ConstExpr{cn("abc", 1, 1)}, ""},
+ {`"abc"`, []parse.ConstExpr{cl("abc", 1, 1)}, ""},
+ {`1`, []parse.ConstExpr{cl(big.NewInt(1), 1, 1)}, ""},
+ {`123`, []parse.ConstExpr{cl(big.NewInt(123), 1, 1)}, ""},
+ {`1.0`, []parse.ConstExpr{cl(big.NewRat(1, 1), 1, 1)}, ""},
+ {`1.5`, []parse.ConstExpr{cl(big.NewRat(3, 2), 1, 1)}, ""},
+ {`{1,2}`, []parse.ConstExpr{
+ &parse.ConstCompositeLit{
+ KVList: []parse.KVLit{
+ {Value: cl(big.NewInt(1), 1, 2)},
+ {Value: cl(big.NewInt(2), 1, 4)},
+ },
+ P: pos(1, 1),
+ },
+ }, ""},
+ {`1+2`, []parse.ConstExpr{
+ &parse.ConstBinaryOp{"+",
+ cl(big.NewInt(1), 1, 1),
+ cl(big.NewInt(2), 1, 3),
+ pos(1, 2),
+ },
+ }, ""},
+ {`1,"abc"`, []parse.ConstExpr{
+ cl(big.NewInt(1), 1, 1),
+ cl("abc", 1, 3),
+ }, ""},
+ }
+ for _, test := range tests {
+ errs := vdlutil.NewErrors(-1)
+ exprs := parse.ParseExprs(test.Data, errs)
+ vdltest.ExpectResult(t, errs, test.Data, test.Err)
+ if got, want := exprs, test.Exprs; !reflect.DeepEqual(got, want) {
+ t.Errorf("%s got %v, want %v", test.Data, got, want)
+ }
+ }
+}
diff --git a/lib/vdl/parse/result.go b/lib/vdl/parse/result.go
new file mode 100644
index 0000000..7fd0e37
--- /dev/null
+++ b/lib/vdl/parse/result.go
@@ -0,0 +1,186 @@
+package parse
+
+import (
+ "fmt"
+ "path"
+ "strconv"
+ "strings"
+
+ "v.io/v23/vdl/vdlutil"
+)
+
// Pos captures positional information during parsing.
type Pos struct {
	Line int // Line number, starting at 1
	Col  int // Column number (character count), starting at 1
}

// StringPos holds a string and a Pos.
type StringPos struct {
	String string
	Pos    Pos
}

// IsValid reports whether this Pos has been initialized.  The zero Pos is
// invalid.
func (p Pos) IsValid() bool {
	return p.Line > 0 && p.Col > 0
}

// String returns "line:col" for a valid Pos, and "[no pos]" otherwise.
func (p Pos) String() string {
	if !p.IsValid() {
		return "[no pos]"
	}
	return fmt.Sprintf("%v:%v", p.Line, p.Col)
}
+
+// InferPackageName returns the package name from a group of files. Every file
+// must specify the same package name, otherwise an error is reported in errs.
+func InferPackageName(files []*File, errs *vdlutil.Errors) (pkgName string) {
+ var firstFile string
+ for _, f := range files {
+ switch {
+ case pkgName == "":
+ firstFile = f.BaseName
+ pkgName = f.PackageDef.Name
+ case pkgName != f.PackageDef.Name:
+ errs.Errorf("Files in the same directory must be in the same package; %v has package %v, but %v has package %v", firstFile, pkgName, f.BaseName, f.PackageDef.Name)
+ }
+ }
+ return
+}
+
// Representation of the components of a vdl file. These data types represent
// the parse tree generated by the parser.

// File represents a parsed vdl file.
type File struct {
	BaseName   string       // Base name of the vdl file, e.g. "foo.vdl"
	PackageDef NamePos      // Name, position and docs of the "package" clause
	Imports    []*Import    // Imports listed in this file
	ErrorDefs  []*ErrorDef  // Errors defined in this file
	TypeDefs   []*TypeDef   // Types defined in this file
	ConstDefs  []*ConstDef  // Consts defined in this file
	Interfaces []*Interface // Interfaces defined in this file
}
+
// Config represents a parsed config file. Config files use a similar syntax as
// vdl files, with similar concepts.
type Config struct {
	FileName  string      // Config file name, e.g. "a/b/foo.config"
	ConfigDef NamePos     // Name, position and docs of the "config" clause
	Imports   []*Import   // Imports listed in this file
	Config    ConstExpr   // Const expression exported from this config
	ConstDefs []*ConstDef // Consts defined in this file
}
+
+// AddImports adds the path imports that don't already exist to c.
+func (c *Config) AddImports(path ...string) {
+ for _, p := range path {
+ if !c.HasImport(p) {
+ c.Imports = append(c.Imports, &Import{Path: p})
+ }
+ }
+}
+
+// HasImport returns true iff path exists in c.Imports.
+func (c *Config) HasImport(path string) bool {
+ for _, imp := range c.Imports {
+ if imp.Path == path {
+ return true
+ }
+ }
+ return false
+}
+
+// Import represents an import definition, which is used to import other
+// packages into an vdl file. An example of the syntax in the vdl file:
+// import foo "some/package/path"
+type Import struct {
+ NamePos // e.g. foo (from above), or typically empty
+ Path string // e.g. "some/package/path" (from above)
+}
+
+// LocalName returns the name used locally within the File to refer to the
+// imported package.
+func (i *Import) LocalName() string {
+ if i.Name != "" {
+ return i.Name
+ }
+ return path.Base(i.Path)
+}
+
// ErrorDef represents an error definition.
type ErrorDef struct {
	NamePos             // error name, pos and doc
	Params  []*Field    // list of positional parameters
	Actions []StringPos // list of action code identifiers
	Formats []LangFmt   // list of language / format pairs
}

// LangFmt represents a language / format string pair.
type LangFmt struct {
	Lang StringPos // IETF language tag
	Fmt  StringPos // i18n format string in the given language
}
+
+// Pos returns the position of the LangFmt.
+func (x LangFmt) Pos() Pos {
+ if x.Lang.Pos.IsValid() {
+ return x.Lang.Pos
+ }
+ return x.Fmt.Pos
+}
+
// Interface represents a set of embedded interfaces and methods.
type Interface struct {
	NamePos            // interface name, pos and doc
	Embeds  []*NamePos // names of embedded interfaces
	Methods []*Method  // list of methods
}

// Method represents a method in an interface.
type Method struct {
	NamePos              // method name, pos and doc
	InArgs    []*Field   // list of positional in-args
	OutArgs   []*Field   // list of positional out-args
	InStream  Type       // in-stream type, may be nil
	OutStream Type       // out-stream type, may be nil
	Tags      []ConstExpr // list of method tags
}

// Field represents fields in structs as well as method arguments.
type Field struct {
	NamePos      // field name, pos and doc
	Type    Type // field type, never nil
}

// NamePos represents a name, its associated position and documentation.
type NamePos struct {
	Name      string
	Pos       Pos    // position of first character in name
	Doc       string // docs that occur before the item
	DocSuffix string // docs that occur on the same line after the item
}
+
// Debugging stringers: each dumps the receiver's fields via %+v.
func (x *File) String() string      { return fmt.Sprintf("%+v", *x) }
func (x *Import) String() string    { return fmt.Sprintf("%+v", *x) }
func (x *ErrorDef) String() string  { return fmt.Sprintf("%+v", *x) }
func (x *Interface) String() string { return fmt.Sprintf("%+v", *x) }
func (x *Method) String() string    { return fmt.Sprintf("%+v", *x) }
func (x *Field) String() string     { return fmt.Sprintf("%+v", *x) }
func (x *NamePos) String() string   { return fmt.Sprintf("%+v", *x) }
+
// QuoteStripDoc takes a Doc string, which includes comment markers /**/ and
// double-slash, and returns a raw-quoted string.
//
// TODO(toddw): This should remove comment markers. This is non-trivial, since
// we should handle removing leading whitespace "rectangles", and might want to
// retain inline /**/ or adjacent /**/ on the same line. For now we just leave
// them in the output.
func QuoteStripDoc(doc string) string {
	// Strip surrounding newlines, then decide how to quote the trimmed text.
	// Check CanBackquote on trimmed (not the original doc): the surrounding
	// newlines were just removed, so they must not disqualify the raw-quoted
	// form for an otherwise backquotable single-line doc.
	trimmed := strings.Trim(doc, "\n")
	if strconv.CanBackquote(trimmed) {
		return "`" + trimmed + "`"
	}
	return strconv.Quote(trimmed)
}
diff --git a/lib/vdl/parse/type.go b/lib/vdl/parse/type.go
new file mode 100644
index 0000000..8c2a9e3
--- /dev/null
+++ b/lib/vdl/parse/type.go
@@ -0,0 +1,139 @@
+package parse
+
+import (
+ "fmt"
+)
+
// Type is an interface representing symbolic occurrences of types in VDL
// files.  The concrete implementations below (TypeNamed, TypeEnum, ...) each
// record the position of the occurrence in P.
type Type interface {
	// String returns a human-readable description of the type.
	String() string
	// Kind returns a short human-readable string describing the kind of type.
	Kind() string
	// Pos returns the position of the first character in the type.
	Pos() Pos
}
+
// TypeNamed captures named references to other types. Both built-in primitives
// and user-defined named types use this representation.
type TypeNamed struct {
	Name string // referenced type name, e.g. "int32" or "foo.Bar"
	P    Pos
}

// TypeEnum represents enum types.
type TypeEnum struct {
	Labels []NamePos // enum labels, in declaration order
	P      Pos
}

// TypeArray represents array types.
type TypeArray struct {
	Len  int  // fixed array length
	Elem Type // element type
	P    Pos
}

// TypeList represents list types.
type TypeList struct {
	Elem Type // element type
	P    Pos
}

// TypeSet represents set types.
type TypeSet struct {
	Key Type // key type of the set
	P   Pos
}

// TypeMap represents map types.
type TypeMap struct {
	Key  Type // key type
	Elem Type // element type
	P    Pos
}

// TypeStruct represents struct types.
type TypeStruct struct {
	Fields []*Field // struct fields, in declaration order
	P      Pos
}

// TypeUnion represents union types.
type TypeUnion struct {
	Fields []*Field // union fields, in declaration order
	P      Pos
}

// TypeOptional represents optional types.
type TypeOptional struct {
	Base Type // the type being made optional
	P    Pos
}

// TypeDef represents a user-defined named type.
type TypeDef struct {
	NamePos // name assigned by the user, pos and doc
	Type Type // the underlying type of the type definition.
}
+
// Pos implementations: each returns the position of the first character of
// the type occurrence, satisfying the Type interface.
func (t *TypeNamed) Pos() Pos { return t.P }
func (t *TypeEnum) Pos() Pos { return t.P }
func (t *TypeArray) Pos() Pos { return t.P }
func (t *TypeList) Pos() Pos { return t.P }
func (t *TypeSet) Pos() Pos { return t.P }
func (t *TypeMap) Pos() Pos { return t.P }
func (t *TypeStruct) Pos() Pos { return t.P }
func (t *TypeUnion) Pos() Pos { return t.P }
func (t *TypeOptional) Pos() Pos { return t.P }
+
// Kind implementations: each returns a short fixed label for the kind of
// type, satisfying the Type interface.
func (t *TypeNamed) Kind() string { return "named" }
func (t *TypeEnum) Kind() string { return "enum" }
func (t *TypeArray) Kind() string { return "array" }
func (t *TypeList) Kind() string { return "list" }
func (t *TypeSet) Kind() string { return "set" }
func (t *TypeMap) Kind() string { return "map" }
func (t *TypeStruct) Kind() string { return "struct" }
func (t *TypeUnion) Kind() string { return "union" }
func (t *TypeOptional) Kind() string { return "optional" }
+
+func (t *TypeNamed) String() string { return t.Name }
+func (t *TypeEnum) String() string {
+ result := "enum{"
+ for index, label := range t.Labels {
+ if index > 0 {
+ result += ";"
+ }
+ result += label.Name
+ }
+ return result + "}"
+}
// String implementations for the fixed-shape composite types, e.g. "[3]int32",
// "[]string", "set[string]", "map[string]int32".
func (t *TypeArray) String() string { return fmt.Sprintf("[%v]%v", t.Len, t.Elem) }
func (t *TypeList) String() string { return fmt.Sprintf("[]%v", t.Elem) }
func (t *TypeSet) String() string { return fmt.Sprintf("set[%v]", t.Key) }
func (t *TypeMap) String() string { return fmt.Sprintf("map[%v]%v", t.Key, t.Elem) }
+func (t *TypeStruct) String() string {
+ result := "struct{"
+ for index, field := range t.Fields {
+ if index > 0 {
+ result += ";"
+ }
+ result += field.Name + " " + field.Type.String()
+ }
+ return result + "}"
+}
+func (t *TypeUnion) String() string {
+ result := "union{"
+ for index, field := range t.Fields {
+ if index > 0 {
+ result += ";"
+ }
+ result += field.Name + " " + field.Type.String()
+ }
+ return result + "}"
+}
+func (t *TypeOptional) String() string { return fmt.Sprintf("?%v", t.Base) }
+
// String returns a debugging description of the type definition in the form
// "(pos name type)"; Pos and Name are promoted fields of the embedded NamePos.
func (t *TypeDef) String() string {
	return fmt.Sprintf("(%v %v %v)", t.Pos, t.Name, t.Type)
}