Prepare moving vdl from v.io/v23 to v.io/core/veyron/... (step 2)
diff --git a/lib/vdl/build/build.go b/lib/vdl/build/build.go
new file mode 100644
index 0000000..c59a350
--- /dev/null
+++ b/lib/vdl/build/build.go
@@ -0,0 +1,849 @@
+// Package build provides utilities to collect VDL build information, and
+// helpers to kick off the parser and compiler.
+//
+// VDL Packages
+//
+// VDL is organized into packages, where a package is a collection of one or
+// more source files.  The files in a package collectively define the types,
+// constants, services and errors belonging to the package; these are called
+// package elements.
+//
+// The package elements in package P may be used in another package Q.  First
+// package Q must import package P, and then refer to the package elements in P.
+// Imports define the package dependency graph, which must be acyclic.
+//
+// Build Strategy
+//
+// The steps to building a VDL package P:
+//   1) Compute the transitive closure of P's dependencies DEPS.
+//   2) Sort DEPS in dependency order.
+//   3) Build each package D in DEPS.
+//   4) Build package P.
+//
+// Building a package P requires that all elements used by P are understood,
+// including elements defined outside of P.  The only way for a change to
+// package Q to affect the build of P is if Q is in the transitive closure of
+// P's package dependencies.  However there may be false positives; the change
+// to Q might not actually affect P.
+//
+// The build process may perform more work than is strictly necessary, because
+// of these false positives.  However it is simple and correct.
+//
+// The TransitivePackages* functions implement build steps 1 and 2.
+//
+// The Build* functions implement build steps 3 and 4.
+//
+// Other functions provide related build information and utilities.
+package build
+
+import (
+	"fmt"
+	"io"
+	"io/ioutil"
+	"os"
+	"path"
+	"path/filepath"
+	"reflect"
+	"regexp"
+	"sort"
+	"strings"
+
+	"v.io/lib/toposort"
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/parse"
+	"v.io/v23/vdl/vdlutil"
+	"v.io/v23/vdlroot/vdltool"
+)
+
// vdlrootImportPrefix is the import prefix of the standard vdl packages.  User
// import paths must not start with this prefix; it is added back to the gen
// path when a package is found under the VDLROOT dir.
const vdlrootImportPrefix = "v.io/v23/vdlroot"
+
// Package represents the build information for a vdl package.
type Package struct {
	// Name is the name of the package, specified in the vdl files.
	// E.g. "bar"
	Name string
	// Path is the package path; the path used in VDL import clauses.
	// E.g. "foo/bar".
	Path string
	// GenPath is the package path to use for code generation.  It is typically
	// the same as Path, except for vdlroot standard packages.
	// E.g. "v.io/v23/vdlroot/time"
	GenPath string
	// Dir is the absolute directory containing the package files.
	// E.g. "/home/user/veyron/vdl/src/foo/bar"
	Dir string
	// BaseFileNames is the list of sorted base vdl file names for this package.
	// Join these with Dir to get absolute file names.
	BaseFileNames []string
	// Config is the configuration for this package, specified by an optional
	// "vdl.config" file in the package directory.  If no "vdl.config" file
	// exists, the zero value of Config is used.
	Config vdltool.Config

	// OpenFilesFunc is a function that opens the files with the given filenames,
	// and returns a map from base file name to file contents.  It defaults to
	// openFiles, which reads from the local filesystem.
	OpenFilesFunc func(filenames []string) (map[string]io.ReadCloser, error)

	// openedFiles tracks files returned by OpenFilesFunc that still need to be
	// closed via CloseFiles.
	openedFiles []io.Closer // files that need to be closed
}
+
// UnknownPathMode specifies the behavior when an unknown path is encountered.
type UnknownPathMode int

// The two modes: either silently skip unknown paths, or report them as errors.
const (
	UnknownPathIsIgnored UnknownPathMode = iota // Silently ignore unknown paths
	UnknownPathIsError                          // Produce error for unknown paths
)
+
+func (m UnknownPathMode) String() string {
+	switch m {
+	case UnknownPathIsIgnored:
+		return "UnknownPathIsIgnored"
+	case UnknownPathIsError:
+		return "UnknownPathIsError"
+	default:
+		return fmt.Sprintf("UnknownPathMode(%d)", m)
+	}
+}
+
+func (m UnknownPathMode) logOrErrorf(errs *vdlutil.Errors, format string, v ...interface{}) {
+	if m == UnknownPathIsIgnored {
+		vdlutil.Vlog.Printf(format, v...)
+	} else {
+		errs.Errorf(format, v...)
+	}
+}
+
// pathPrefixDotOrDotDot returns true iff the path starts with a "." or ".."
// element; note that "..." returns false.
func pathPrefixDotOrDotDot(path string) bool {
	spath := filepath.ToSlash(path)
	switch {
	case path == "." || path == "..":
		return true
	case strings.HasPrefix(spath, "./") || strings.HasPrefix(spath, "../"):
		return true
	}
	return false
}
+
+func ignorePathElem(elem string) bool {
+	return (strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_")) &&
+		!pathPrefixDotOrDotDot(elem)
+}
+
+// validPackagePath returns true iff the path is valid; i.e. if none of the path
+// elems is ignored.
+func validPackagePath(path string) bool {
+	for _, elem := range strings.Split(path, "/") {
+		if ignorePathElem(elem) {
+			return false
+		}
+	}
+	return true
+}
+
// newPackage returns a new Package with the given paths and dir, initializing
// BaseFileNames and Config from the contents of dir.  Returns nil (after
// logging or recording an error, per mode) if dir doesn't hold a valid
// package.
//
// New packages always start with an empty Name, which is filled in when we call
// ds.addPackageAndDeps.
func newPackage(path, genPath, dir string, mode UnknownPathMode, opts Opts, vdlenv *compile.Env) *Package {
	pkg := &Package{Path: path, GenPath: genPath, Dir: dir, OpenFilesFunc: openFiles}
	if err := pkg.initBaseFileNames(opts.exts()); err != nil {
		mode.logOrErrorf(vdlenv.Errors, "%s: bad package dir (%v)", pkg.Dir, err)
		return nil
	}
	// TODO(toddw): Add a mechanism in vdlutil.Errors to distinguish categories of
	// errors, so that it's more obvious when errors are coming from vdl.config
	// files vs *.vdl files.
	origErrors := vdlenv.Errors.NumErrors()
	// initVDLConfig reports failure via vdlenv.Errors rather than a return
	// value, so detect it by comparing error counts before and after the call.
	if pkg.initVDLConfig(opts, vdlenv); origErrors != vdlenv.Errors.NumErrors() {
		return nil
	}
	return pkg
}
+
// initBaseFileNames initializes p.BaseFileNames from the contents of p.Dir.
// Subdirectories, ignored names (leading "." or "_") and files whose extension
// isn't in exts are skipped.  Returns an error if the dir can't be read or
// contains no vdl files.
func (p *Package) initBaseFileNames(exts map[string]bool) error {
	// ioutil.ReadDir returns entries sorted by name, so BaseFileNames ends up
	// sorted as documented on the Package struct.
	infos, err := ioutil.ReadDir(p.Dir)
	if err != nil {
		return err
	}
	for _, info := range infos {
		if info.IsDir() {
			continue
		}
		if ignorePathElem(info.Name()) || !exts[filepath.Ext(info.Name())] {
			vdlutil.Vlog.Printf("%s: ignoring file", filepath.Join(p.Dir, info.Name()))
			continue
		}
		vdlutil.Vlog.Printf("%s: adding vdl file", filepath.Join(p.Dir, info.Name()))
		p.BaseFileNames = append(p.BaseFileNames, info.Name())
	}
	if len(p.BaseFileNames) == 0 {
		return fmt.Errorf("no vdl files")
	}
	return nil
}
+
// initVDLConfig initializes p.Config based on the optional vdl.config file.
// A missing vdl.config file is not an error; p.Config simply keeps its zero
// value.  Open and close failures are recorded in vdlenv.Errors.
func (p *Package) initVDLConfig(opts Opts, vdlenv *compile.Env) {
	// name is the slash-separated package path of the config file, used only
	// for error reporting; the actual file is opened via the OS-specific path.
	name := path.Join(p.Path, opts.vdlConfigName())
	configData, err := os.Open(filepath.Join(p.Dir, opts.vdlConfigName()))
	switch {
	case os.IsNotExist(err):
		return
	case err != nil:
		vdlenv.Errors.Errorf("%s: couldn't open (%v)", name, err)
		return
	}
	// Build the vdl.config file with an implicit "vdltool" import.  Note that the
	// actual "vdltool" package has already been populated into vdlenv.
	BuildConfigValue(name, configData, []string{"vdltool"}, vdlenv, &p.Config)
	if err := configData.Close(); err != nil {
		vdlenv.Errors.Errorf("%s: couldn't close (%v)", name, err)
	}
}
+
// OpenFiles opens all files in the package and returns a map from base file
// name to file contents.  CloseFiles must be called to close the files.
func (p *Package) OpenFiles() (map[string]io.Reader, error) {
	var filenames []string
	for _, baseName := range p.BaseFileNames {
		filenames = append(filenames, filepath.Join(p.Dir, baseName))
	}
	files, err := p.OpenFilesFunc(filenames)
	if err != nil {
		// Close anything the func opened before failing, in case it returned a
		// partially-populated map along with the error.
		for _, c := range files {
			c.Close()
		}
		return nil, err
	}
	// Convert map elem type from io.ReadCloser to io.Reader.
	res := make(map[string]io.Reader, len(files))
	for n, f := range files {
		res[n] = f
		// Remember each closer so CloseFiles can close it later.
		p.openedFiles = append(p.openedFiles, f)
	}
	return res, nil
}
+
+func openFiles(filenames []string) (map[string]io.ReadCloser, error) {
+	files := make(map[string]io.ReadCloser, len(filenames))
+	for _, filename := range filenames {
+		file, err := os.Open(filename)
+		if err != nil {
+			for _, c := range files {
+				c.Close()
+			}
+			return nil, err
+		}
+		files[path.Base(filename)] = file
+	}
+	return files, nil
+}
+
+// CloseFiles closes all files returned by OpenFiles.  Returns nil if all files
+// were closed successfully, otherwise returns one of the errors, dropping the
+// others.  Regardless of whether an error is returned, Close will be called on
+// all files.
+func (p *Package) CloseFiles() error {
+	var err error
+	for _, c := range p.openedFiles {
+		if err2 := c.Close(); err == nil {
+			err = err2
+		}
+	}
+	p.openedFiles = nil
+	return err
+}
+
// SrcDirs returns a list of package root source directories, based on the
// VDLPATH, VDLROOT and VANADIUM_ROOT environment variables.
//
// VDLPATH is a list of directories separated by filepath.ListSeparator;
// e.g. the separator is ":" on UNIX, and ";" on Windows.  Each VDLPATH
// directory must have a "src/" directory that holds vdl source code.  The path
// below "src/" determines the import path.
//
// VDLROOT is a single directory specifying the location of the standard vdl
// packages.  It has the same requirements as VDLPATH components.  If VDLROOT is
// empty, we use VANADIUM_ROOT to construct the VDLROOT.  An error is reported if
// neither VDLROOT nor VANADIUM_ROOT is specified.
//
// The VDLROOT dir (when it resolves) is always first in the returned slice;
// deducePackagePath relies on this ordering to detect standard packages.
func SrcDirs(errs *vdlutil.Errors) []string {
	var srcDirs []string
	if root := vdlRootDir(errs); root != "" {
		srcDirs = append(srcDirs, root)
	}
	return append(srcDirs, vdlPathSrcDirs(errs)...)
}
+
// vdlRootDir returns the absolute path of the VDLROOT dir, taken from the
// VDLROOT environment variable, or constructed from VANADIUM_ROOT as a
// fallback.  Returns "" after recording an error if neither variable is set,
// or the dir can't be made absolute.
func vdlRootDir(errs *vdlutil.Errors) string {
	vdlroot := os.Getenv("VDLROOT")
	if vdlroot == "" {
		// Try to construct VDLROOT out of VANADIUM_ROOT.
		vroot := os.Getenv("VANADIUM_ROOT")
		if vroot == "" {
			errs.Error("Either VDLROOT or VANADIUM_ROOT must be set")
			return ""
		}
		vdlroot = filepath.Join(vroot, "release", "go", "src", "v.io", "v23", "vdlroot")
	}
	abs, err := filepath.Abs(vdlroot)
	if err != nil {
		errs.Errorf("VDLROOT %q can't be made absolute (%v)", vdlroot, err)
		return ""
	}
	return abs
}
+
// vdlPathSrcDirs returns the absolute "src" subdirectory of each non-empty
// entry in the VDLPATH environment variable.  Per-entry failures are recorded
// in errs without stopping the scan; an error is also recorded if no valid
// src dirs are found at all.
func vdlPathSrcDirs(errs *vdlutil.Errors) []string {
	var srcDirs []string
	for _, dir := range filepath.SplitList(os.Getenv("VDLPATH")) {
		if dir != "" {
			src := filepath.Join(dir, "src")
			abs, err := filepath.Abs(src)
			if err != nil {
				errs.Errorf("VDLPATH src dir %q can't be made absolute (%v)", src, err)
				continue // keep going to collect all errors
			}
			srcDirs = append(srcDirs, abs)
		}
	}
	if len(srcDirs) == 0 {
		errs.Error("No src dirs; set VDLPATH to a valid value")
		return nil
	}
	return srcDirs
}
+
+// IsDirPath returns true iff the path is absolute, or begins with a . or
+// .. element.  The path denotes the package in that directory.
+func IsDirPath(path string) bool {
+	return filepath.IsAbs(path) || pathPrefixDotOrDotDot(path)
+}
+
// IsImportPath returns true iff !IsDirPath.  The path P denotes the package in
// directory DIR/src/P, for some DIR listed in SrcDirs.
func IsImportPath(path string) bool {
	return !IsDirPath(path)
}
+
// depSorter does the main work of collecting and sorting packages and their
// dependencies.  The full syntax from the go cmdline tool is supported; we
// allow both dirs and import paths, as well as the "all" and "..." wildcards.
//
// This is slightly complicated because of dirs, and the potential for symlinks.
// E.g. let's say we have two directories, one a symlink to the other:
//   /home/user/release/go/src/veyron/rt/base
//   /home/user/release/go/src/veyron/rt2     symlink to rt
//
// The problem is that if the user has cwd pointing at one of the two "base"
// dirs and specifies a relative directory ".." it's ambiguous which absolute
// dir we'll end up with; file paths form a graph rather than a tree.  For more
// details see http://plan9.bell-labs.com/sys/doc/lexnames.html
//
// This means that if the user builds a dir (rather than an import path), we
// might not be able to deduce the package path.  Note that the error definition
// mechanism relies on the package path to create implicit error ids, and this
// must be known at the time the package is compiled.  To handle this we call
// deducePackagePath and attempt to deduce the package path even if the user
// builds a directory, and return errors if this fails.
//
// TODO(toddw): If we care about performance we could serialize the compiled
// compile.Package information and write it out as compiler-generated artifacts,
// similar to how the regular go tool generates *.a files under the top-level
// pkg directory.
type depSorter struct {
	opts    Opts                // options controlling file extensions and config name
	srcDirs []string            // package root src dirs; VDLROOT dir first when set
	pathMap map[string]*Package // resolved packages keyed by package path
	dirMap  map[string]*Package // resolved packages keyed by absolute dir
	sorter  *toposort.Sorter    // holds package nodes and dependency edges
	errs    *vdlutil.Errors     // sink for all resolution and build errors
	vdlenv  *compile.Env        // env with "vdltool" pre-built, for vdl.config files
}
+
// newDepSorter returns an empty depSorter whose vdlenv has the "vdltool"
// package (and its transitive deps) pre-built, so that vdl.config files can be
// compiled as packages are added.
func newDepSorter(opts Opts, errs *vdlutil.Errors) *depSorter {
	ds := &depSorter{
		opts:    opts,
		srcDirs: SrcDirs(errs),
		errs:    errs,
		vdlenv:  compile.NewEnvWithErrors(errs),
	}
	ds.reset()
	// Resolve the "vdltool" import and build all transitive packages into vdlenv.
	// This special env is used when building "vdl.config" files, which have the
	// implicit "vdltool" import.
	if !ds.ResolvePath("vdltool", UnknownPathIsError) {
		errs.Errorf(`Can't resolve "vdltool" package`)
	}
	for _, pkg := range ds.Sort() {
		BuildPackage(pkg, ds.vdlenv)
	}
	// We must reset back to an empty depSorter, to ensure the transitive packages
	// returned by the depSorter don't include "vdltool".
	ds.reset()
	return ds
}
+
+func (ds *depSorter) reset() {
+	ds.pathMap = make(map[string]*Package)
+	ds.dirMap = make(map[string]*Package)
+	ds.sorter = new(toposort.Sorter)
+}
+
// errorf records a printf-style error in ds.errs.
func (ds *depSorter) errorf(format string, v ...interface{}) {
	ds.errs.Errorf(format, v...)
}
+
+// ResolvePath resolves path into package(s) and adds them to the sorter.
+// Returns true iff path could be resolved.
+func (ds *depSorter) ResolvePath(path string, mode UnknownPathMode) bool {
+	if path == "all" {
+		// Special-case "all", with the same behavior as Go.
+		path = "..."
+	}
+	isDirPath := IsDirPath(path)
+	dots := strings.Index(path, "...")
+	switch {
+	case dots >= 0:
+		return ds.resolveWildcardPath(isDirPath, path[:dots], path[dots:])
+	case isDirPath:
+		return ds.resolveDirPath(path, mode) != nil
+	default:
+		return ds.resolveImportPath(path, mode) != nil
+	}
+}
+
// resolveWildcardPath resolves wildcards for both dir and import paths.  The
// prefix is everything before the first "...", and the suffix is everything
// including and after the first "..."; note that multiple "..." wildcards may
// occur within the suffix.  Returns true iff any packages were resolved.
//
// The strategy is to compute one or more root directories that contain
// everything that could possibly be matched, along with a filename pattern to
// match against.  Then we walk through each root directory, matching against
// the pattern.
func (ds *depSorter) resolveWildcardPath(isDirPath bool, prefix, suffix string) bool {
	type dirAndSrc struct {
		dir, src string // src is the srcDir the root came from; "" for dir paths
	}
	var rootDirs []dirAndSrc // root directories to walk through
	var pattern string       // pattern to match against, starting after root dir
	switch {
	case isDirPath:
		// prefix and suffix are directory paths.
		dir, pre := filepath.Split(prefix)
		pattern = filepath.Clean(pre + suffix)
		rootDirs = append(rootDirs, dirAndSrc{filepath.Clean(dir), ""})
	default:
		// prefix and suffix are slash-separated import paths.  An import path
		// may match under any of the src dirs, so each yields a root.
		slashDir, pre := path.Split(prefix)
		pattern = filepath.Clean(pre + filepath.FromSlash(suffix))
		dir := filepath.FromSlash(slashDir)
		for _, srcDir := range ds.srcDirs {
			rootDirs = append(rootDirs, dirAndSrc{filepath.Join(srcDir, dir), srcDir})
		}
	}
	matcher, err := createMatcher(pattern)
	if err != nil {
		ds.errorf("%v", err)
		return false
	}
	// Walk through root dirs and subdirs, looking for matches.  Walk's return
	// value is ignored; a root dir that doesn't exist simply yields no matches.
	resolvedAny := false
	for _, root := range rootDirs {
		filepath.Walk(root.dir, func(rootAndPath string, info os.FileInfo, err error) error {
			// Ignore errors and non-directory elements.
			if err != nil || !info.IsDir() {
				return nil
			}
			// Skip the dir and subdirs if the elem should be ignored.
			_, elem := filepath.Split(rootAndPath)
			if ignorePathElem(elem) {
				vdlutil.Vlog.Printf("%s: ignoring dir", rootAndPath)
				return filepath.SkipDir
			}
			// Special-case to skip packages with the vdlroot import prefix.  These
			// packages should only appear at the root of the package path space.
			if root.src != "" {
				pkgPath := strings.TrimPrefix(rootAndPath, root.src)
				pkgPath = strings.TrimPrefix(pkgPath, "/")
				if strings.HasPrefix(pkgPath, vdlrootImportPrefix) {
					return filepath.SkipDir
				}
			}
			// Ignore the dir if it doesn't match our pattern.  We still process the
			// subdirs since they still might match.
			//
			// TODO(toddw): We could add an optimization to skip subdirs that can't
			// possibly match the matcher.  E.g. given pattern "a..." we can skip
			// the subdirs if the dir doesn't start with "a".
			matchPath := rootAndPath[len(root.dir):]
			if strings.HasPrefix(matchPath, pathSeparator) {
				matchPath = matchPath[len(pathSeparator):]
			}
			if !matcher.MatchString(matchPath) {
				return nil
			}
			// Finally resolve the dir.
			if ds.resolveDirPath(rootAndPath, UnknownPathIsIgnored) != nil {
				resolvedAny = true
			}
			return nil
		})
	}
	return resolvedAny
}
+
+const pathSeparator = string(filepath.Separator)
+
+// createMatcher creates a regexp matcher out of the file pattern.
+func createMatcher(pattern string) (*regexp.Regexp, error) {
+	rePat := regexp.QuoteMeta(pattern)
+	rePat = strings.Replace(rePat, `\.\.\.`, `.*`, -1)
+	// Add special-case so that x/... also matches x.
+	slashDotStar := regexp.QuoteMeta(pathSeparator) + ".*"
+	if strings.HasSuffix(rePat, slashDotStar) {
+		rePat = rePat[:len(rePat)-len(slashDotStar)] + "(" + slashDotStar + ")?"
+	}
+	rePat = `^` + rePat + `$`
+	matcher, err := regexp.Compile(rePat)
+	if err != nil {
+		return nil, fmt.Errorf("Can't compile package path regexp %s: %v", rePat, err)
+	}
+	return matcher, nil
+}
+
+// resolveDirPath resolves dir into a Package.  Returns the package, or nil if
+// it can't be resolved.
+func (ds *depSorter) resolveDirPath(dir string, mode UnknownPathMode) *Package {
+	// If the package already exists in our dir map, we can just return it.
+	absDir, err := filepath.Abs(dir)
+	if err != nil {
+		ds.errorf("%s: can't make absolute (%v)", dir, err)
+	}
+	if pkg := ds.dirMap[absDir]; pkg != nil {
+		return pkg
+	}
+	// Deduce the package path, and ensure it corresponds to exactly one package.
+	// We always deduce the package path from the package directory, even if we
+	// originally resolved from an import path, and thus already "know" the
+	// package path.  This is to ensure we correctly handle vdl standard packages.
+	// E.g. if we're given "v.io/v23/vdlroot/vdltool" as an import path, the
+	// resulting package path must be "vdltool".
+	pkgPath, genPath, err := ds.deducePackagePath(absDir)
+	if err != nil {
+		ds.errorf("%s: can't deduce package path (%v)", absDir, err)
+		return nil
+	}
+	if !validPackagePath(pkgPath) {
+		mode.logOrErrorf(ds.errs, "%s: package path %q is invalid", absDir, pkgPath)
+		return nil
+	}
+	if pkg := ds.pathMap[pkgPath]; pkg != nil {
+		mode.logOrErrorf(ds.errs, "%s: package path %q already resolved from %s", absDir, pkgPath, pkg.Dir)
+		return nil
+	}
+	// Make sure the directory really exists, and add the package and deps.
+	fileInfo, err := os.Stat(absDir)
+	if err != nil {
+		mode.logOrErrorf(ds.errs, "%v", err)
+		return nil
+	}
+	if !fileInfo.IsDir() {
+		mode.logOrErrorf(ds.errs, "%s: package isn't a directory", absDir)
+		return nil
+	}
+	return ds.addPackageAndDeps(pkgPath, genPath, absDir, mode)
+}
+
// resolveImportPath resolves pkgPath into a Package.  Returns the package, or
// nil if it can't be resolved.
func (ds *depSorter) resolveImportPath(pkgPath string, mode UnknownPathMode) *Package {
	pkgPath = path.Clean(pkgPath)
	if pkg := ds.pathMap[pkgPath]; pkg != nil {
		return pkg
	}
	if !validPackagePath(pkgPath) {
		mode.logOrErrorf(ds.errs, "Import path %q is invalid", pkgPath)
		return nil
	}
	// Special-case to disallow packages under the vdlroot dir.
	if strings.HasPrefix(pkgPath, vdlrootImportPrefix) {
		mode.logOrErrorf(ds.errs, "Import path %q is invalid (packages under vdlroot must be specified without the vdlroot prefix)", pkgPath)
		return nil
	}
	// Look through srcDirs in-order until we find a valid package dir.
	var dirs []string
	for _, srcDir := range ds.srcDirs {
		dir := filepath.Join(srcDir, filepath.FromSlash(pkgPath))
		if pkg := ds.resolveDirPath(dir, UnknownPathIsIgnored); pkg != nil {
			vdlutil.Vlog.Printf("%s: resolved import path %q", pkg.Dir, pkgPath)
			return pkg
		}
		// Remember every candidate dir, so a failure can report them all.
		dirs = append(dirs, dir)
	}
	// We can't find a valid dir corresponding to this import path.
	detail := "   " + strings.Join(dirs, "\n   ")
	mode.logOrErrorf(ds.errs, "Can't resolve import path %q in any of:\n%s", pkgPath, detail)
	return nil
}
+
// addPackageAndDeps adds the pkg and its dependencies to the sorter.  Returns
// the new package, or nil if it couldn't be created.
func (ds *depSorter) addPackageAndDeps(path, genPath, dir string, mode UnknownPathMode) *Package {
	pkg := newPackage(path, genPath, dir, mode, ds.opts, ds.vdlenv)
	if pkg == nil {
		return nil
	}
	vdlutil.Vlog.Printf("%s: resolved package path %q", pkg.Dir, pkg.Path)
	ds.dirMap[pkg.Dir] = pkg
	ds.pathMap[pkg.Path] = pkg
	ds.sorter.AddNode(pkg)
	// Parse only the imports here; the full parse happens later in BuildPackage.
	pfiles := ParsePackage(pkg, parse.Opts{ImportsOnly: true}, ds.errs)
	pkg.Name = parse.InferPackageName(pfiles, ds.errs)
	for _, pf := range pfiles {
		ds.addImportDeps(pkg, pf.Imports)
	}
	return pkg
}
+
+// addImportDeps adds transitive dependencies represented by imports to the
+// sorter.  If the pkg is non-nil, an edge is added between the pkg and its
+// dependencies; otherwise each dependency is added as an independent node.
+func (ds *depSorter) addImportDeps(pkg *Package, imports []*parse.Import) {
+	for _, imp := range imports {
+		if dep := ds.resolveImportPath(imp.Path, UnknownPathIsError); dep != nil {
+			if pkg != nil {
+				ds.sorter.AddEdge(pkg, dep)
+			} else {
+				ds.sorter.AddNode(dep)
+			}
+		}
+	}
+}
+
+// AddConfigDeps takes a config file represented by its file name and src data,
+// and adds all transitive dependencies to the sorter.
+func (ds *depSorter) AddConfigDeps(fileName string, src io.Reader) {
+	if pconfig := parse.ParseConfig(fileName, src, parse.Opts{ImportsOnly: true}, ds.errs); pconfig != nil {
+		ds.addImportDeps(nil, pconfig.Imports)
+	}
+}
+
+// deducePackagePath deduces the package path for dir, by looking for prefix
+// matches against the src dirs.  The resulting package path may be incorrect
+// even if no errors are reported; see the depSorter comment for details.
+func (ds *depSorter) deducePackagePath(dir string) (string, string, error) {
+	for ix, srcDir := range ds.srcDirs {
+		if strings.HasPrefix(dir, srcDir) {
+			relPath, err := filepath.Rel(srcDir, dir)
+			if err != nil {
+				return "", "", err
+			}
+			pkgPath := path.Clean(filepath.ToSlash(relPath))
+			genPath := pkgPath
+			if ix == 0 {
+				// We matched against the first srcDir, which is the VDLROOT dir.  The
+				// genPath needs to include the vdlroot prefix.
+				genPath = path.Join(vdlrootImportPrefix, pkgPath)
+			}
+			return pkgPath, genPath, nil
+		}
+	}
+	return "", "", fmt.Errorf("no matching SrcDirs")
+}
+
+// Sort sorts all targets and returns the resulting list of Packages.
+func (ds *depSorter) Sort() []*Package {
+	sorted, cycles := ds.sorter.Sort()
+	if len(cycles) > 0 {
+		cycleStr := toposort.DumpCycles(cycles, printPackagePath)
+		ds.errorf("Cyclic package dependency: %v", cycleStr)
+		return nil
+	}
+	if len(sorted) == 0 {
+		return nil
+	}
+	targets := make([]*Package, len(sorted))
+	for ix, iface := range sorted {
+		targets[ix] = iface.(*Package)
+	}
+	return targets
+}
+
// printPackagePath returns the package path of v, which must be a *Package;
// it's used to render dependency cycles in Sort.
func printPackagePath(v interface{}) string {
	return v.(*Package).Path
}
+
// Opts specifies additional options for collecting build information.  The
// zero value is valid, and yields the ".vdl" extension and "vdl.config" name.
type Opts struct {
	// Extensions specifies the file name extensions for valid vdl files.  If
	// empty we use ".vdl" by default.
	Extensions []string

	// VDLConfigName specifies the name of the optional config file in each vdl
	// source package.  If empty we use "vdl.config" by default.
	VDLConfigName string
}
+
+func (o Opts) exts() map[string]bool {
+	ret := make(map[string]bool)
+	for _, e := range o.Extensions {
+		ret[e] = true
+	}
+	if len(ret) == 0 {
+		ret[".vdl"] = true
+	}
+	return ret
+}
+
+func (o Opts) vdlConfigName() string {
+	if o.VDLConfigName != "" {
+		return o.VDLConfigName
+	}
+	return "vdl.config"
+}
+
// TransitivePackages takes a list of paths, and returns the corresponding
// packages and transitive dependencies, ordered by dependency.  Each path may
// either be a directory (IsDirPath) or an import (IsImportPath).
//
// A path is a pattern if it includes one or more "..." wildcards, each of which
// can match any string, including the empty string and strings containing
// slashes.  Such a pattern expands to all packages found in SrcDirs with names
// matching the pattern.  As a special-case, x/... matches x as well as x's
// subdirectories.
//
// The special-case "all" is a synonym for "...", and denotes all packages found
// in SrcDirs.
//
// Import path elements and file names are not allowed to begin with "." or "_";
// such paths are ignored in wildcard matches, and return errors if specified
// explicitly.
//
// The mode specifies whether we should ignore or produce errors for paths that
// don't resolve to any packages.  The opts arg specifies additional options.
func TransitivePackages(paths []string, mode UnknownPathMode, opts Opts, errs *vdlutil.Errors) []*Package {
	ds := newDepSorter(opts, errs)
	for _, path := range paths {
		if !ds.ResolvePath(path, mode) {
			mode.logOrErrorf(errs, "Can't resolve %q to any packages", path)
		}
	}
	// Sort returns the resolved packages in dependency order.
	return ds.Sort()
}
+
// TransitivePackagesForConfig takes a config file represented by its file name
// and src data, and returns all package dependencies in transitive order.
// Errors are reported in errs.
//
// The opts arg specifies additional options.
func TransitivePackagesForConfig(fileName string, src io.Reader, opts Opts, errs *vdlutil.Errors) []*Package {
	ds := newDepSorter(opts, errs)
	ds.AddConfigDeps(fileName, src)
	return ds.Sort()
}
+
// ParsePackage parses the given pkg with the given parse opts, and returns a
// slice of parsed files, sorted by base name.  Errors are reported in errs.
func ParsePackage(pkg *Package, opts parse.Opts, errs *vdlutil.Errors) (pfiles []*parse.File) {
	vdlutil.Vlog.Printf("Parsing package %s %q, dir %s", pkg.Name, pkg.Path, pkg.Dir)
	files, err := pkg.OpenFiles()
	if err != nil {
		errs.Errorf("Can't open vdl files %v, %v", pkg.BaseFileNames, err)
		return nil
	}
	for filename, src := range files {
		// The name passed to ParseFile is the slash-separated package path plus
		// base name; it's used for error reporting within the parser.
		if pf := parse.ParseFile(path.Join(pkg.Path, filename), src, opts, errs); pf != nil {
			pfiles = append(pfiles, pf)
		}
	}
	sort.Sort(byBaseName(pfiles))
	// NOTE(review): any CloseFiles error is dropped here; the files have already
	// been fully read, so a close failure is unlikely to matter.
	pkg.CloseFiles()
	return
}
+
// byBaseName implements sort.Interface, ordering parsed files by base name.
type byBaseName []*parse.File

func (b byBaseName) Len() int           { return len(b) }
func (b byBaseName) Less(i, j int) bool { return b[i].BaseName < b[j].BaseName }
func (b byBaseName) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }
+
// BuildPackage parses and compiles the given pkg, updates env with the compiled
// package and returns it.  Errors are reported in env.
//
// All imports that pkg depend on must have already been compiled and populated
// into env.
func BuildPackage(pkg *Package, env *compile.Env) *compile.Package {
	// Full parse of every file; earlier dependency-collection passes only used
	// parse.Opts{ImportsOnly: true}.
	pfiles := ParsePackage(pkg, parse.Opts{}, env.Errors)
	return compile.CompilePackage(pkg.Path, pkg.GenPath, pfiles, pkg.Config, env)
}
+
// BuildConfig parses and compiles the given config src and returns it.  Errors
// are reported in env; fileName is only used for error reporting.
//
// The implicit type is applied to the exported config const; untyped consts and
// composite literals with no explicit type assume the implicit type.  Errors
// are reported if the implicit type isn't assignable from the final value.  If
// the implicit type is nil, the exported config const must be explicitly typed.
//
// All packages that the config src depends on must have already been compiled
// and populated into env.  The imports are injected into the parsed src,
// behaving as if the src had listed the imports explicitly.
func BuildConfig(fileName string, src io.Reader, implicit *vdl.Type, imports []string, env *compile.Env) *vdl.Value {
	pconfig := parse.ParseConfig(fileName, src, parse.Opts{}, env.Errors)
	if pconfig != nil {
		// Inject the imports, as if the src had listed them explicitly.
		pconfig.AddImports(imports...)
	}
	return compile.CompileConfig(implicit, pconfig, env)
}
+
+// BuildConfigValue is a convenience function that runs BuildConfig, and then
+// converts the result into value.  The implicit type used by BuildConfig is
+// inferred from the value.
+func BuildConfigValue(fileName string, src io.Reader, imports []string, env *compile.Env, value interface{}) {
+	rv := reflect.ValueOf(value)
+	tt, err := vdl.TypeFromReflect(rv.Type())
+	if err != nil {
+		env.Errors.Errorf(err.Error())
+		return
+	}
+	if tt.Kind() == vdl.Optional {
+		// The value is typically a Go pointer, which translates into VDL optional.
+		// Remove the optional when determining the implicit type for BuildConfig.
+		tt = tt.Elem()
+	}
+	vconfig := BuildConfig(fileName, src, tt, imports, env)
+	if vconfig == nil {
+		return
+	}
+	target, err := vdl.ReflectTarget(rv)
+	if err != nil {
+		env.Errors.Errorf("Can't create reflect target for %T (%v)", value, err)
+		return
+	}
+	if err := vdl.FromValue(target, vconfig); err != nil {
+		env.Errors.Errorf("Can't convert to %T from %v (%v)", value, vconfig, err)
+		return
+	}
+}
+
// BuildExprs parses and compiles the given data into a slice of values.  The
// input data is specified in VDL syntax, with commas separating multiple
// expressions.  There must be at least one expression specified in data.
// Errors are reported in env.
//
// The given types specify the type of each returned value with the same slice
// position.  If there are more types than returned values, the extra types are
// ignored.  If there are fewer types than returned values, the last type is
// used for all remaining values.  Nil entries in types are allowed, and
// indicate that the expression itself must be fully typed.
//
// All imports that the input data depends on must have already been compiled
// and populated into env.
func BuildExprs(data string, types []*vdl.Type, env *compile.Env) []*vdl.Value {
	var values []*vdl.Value
	var t *vdl.Type
	for ix, pexpr := range parse.ParseExprs(data, env.Errors) {
		// t keeps its previous value when types runs out, so the last type is
		// applied to all remaining expressions.
		if ix < len(types) {
			t = types[ix]
		}
		values = append(values, compile.CompileExpr(t, pexpr, env))
	}
	return values
}
diff --git a/lib/vdl/build/build_test.go b/lib/vdl/build/build_test.go
new file mode 100644
index 0000000..5f234b1
--- /dev/null
+++ b/lib/vdl/build/build_test.go
@@ -0,0 +1,628 @@
+package build_test
+
+import (
+	"fmt"
+	"os"
+	"path"
+	"path/filepath"
+	"reflect"
+	"strings"
+	"testing"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/testdata/base"
+	"v.io/v23/vdl/vdltest"
+	"v.io/v23/vdl/vdlutil"
+	"v.io/v23/vdlroot/vdltool"
+)
+
+// init is kept as a convenient hook for enabling verbose vdl logging while
+// debugging these tests.
+func init() {
+	// Uncomment this to enable verbose logs for debugging.
+	//vdlutil.SetVerbose()
+}
+
+// The cwd is set to the directory containing this file.  Currently we have the
+// following directory structure:
+//   .../release/go/src/v.io/v23/vdl/build/build_test.go
+// We want to end up with the following:
+//   VDLROOT = .../release/go/src/v.io/v23/vdlroot
+//   VDLPATH = .../release/go
+//
+// TODO(toddw): Put a full VDLPATH tree under ../testdata and only use that.
+const (
+	// defaultVDLRoot locates the checked-in vdlroot, relative to this dir.
+	defaultVDLRoot = "../../vdlroot"
+	// defaultVDLPath locates the Go workspace root, relative to this dir.
+	defaultVDLPath = "../../../../.."
+)
+
+// setEnvironment sets the VDLROOT and VDLPATH environment variables, reporting
+// any failure via t.Errorf.  It returns true iff both variables were set
+// successfully.
+func setEnvironment(t *testing.T, vdlroot, vdlpath string) bool {
+	ok := true
+	if err := os.Setenv("VDLROOT", vdlroot); err != nil {
+		t.Errorf("Setenv(VDLROOT, %q) failed: %v", vdlroot, err)
+		ok = false
+	}
+	if err := os.Setenv("VDLPATH", vdlpath); err != nil {
+		t.Errorf("Setenv(VDLPATH, %q) failed: %v", vdlpath, err)
+		ok = false
+	}
+	return ok
+}
+
+// setVanadiumRoot sets the VANADIUM_ROOT environment variable, reporting any
+// failure via t.Errorf.  It returns true iff the variable was set successfully.
+func setVanadiumRoot(t *testing.T, root string) bool {
+	err := os.Setenv("VANADIUM_ROOT", root)
+	if err == nil {
+		return true
+	}
+	t.Errorf("Setenv(VANADIUM_ROOT, %q) failed: %v", root, err)
+	return false
+}
+
+// TestSrcDirsVdlRoot tests the VDLROOT part of SrcDirs().
+func TestSrcDirsVdlRoot(t *testing.T) {
+	cwd, err := os.Getwd()
+	if err != nil {
+		t.Fatalf("Getwd() failed: %v", err)
+	}
+	// abs turns a relative path into an absolute path rooted at cwd.
+	abs := func(relative string) string {
+		return filepath.Join(cwd, relative)
+	}
+	tests := []struct {
+		VDLRoot      string
+		VanadiumRoot string
+		Want         []string
+		ErrRE        string
+	}{
+		// Neither variable set: expect an error.
+		{"", "", nil, "Either VDLROOT or VANADIUM_ROOT must be set"},
+		{"/a", "", []string{"/a"}, ""},
+		{"/a/b/c", "", []string{"/a/b/c"}, ""},
+		// Only VANADIUM_ROOT set: the vdlroot dir is derived from it.
+		{"", "/veyron", []string{"/veyron/release/go/src/v.io/v23/vdlroot"}, ""},
+		{"", "/a/b/c", []string{"/a/b/c/release/go/src/v.io/v23/vdlroot"}, ""},
+		// If both VDLROOT and VANADIUM_ROOT are specified, VDLROOT takes precedence.
+		{"/a", "/veyron", []string{"/a"}, ""},
+		{"/a/b/c", "/x/y/z", []string{"/a/b/c"}, ""},
+	}
+	for _, test := range tests {
+		if !setEnvironment(t, test.VDLRoot, defaultVDLPath) || !setVanadiumRoot(t, test.VanadiumRoot) {
+			continue
+		}
+		name := fmt.Sprintf("%+v", test)
+		errs := vdlutil.NewErrors(-1)
+		got := build.SrcDirs(errs)
+		vdltest.ExpectResult(t, errs, name, test.ErrRE)
+		// Every result will have our valid VDLPATH srcdir.
+		vdlpathsrc := filepath.Join(abs(defaultVDLPath), "src")
+		want := append(test.Want, vdlpathsrc)
+		if !reflect.DeepEqual(got, want) {
+			t.Errorf("SrcDirs(%s) got %v, want %v", name, got, want)
+		}
+	}
+}
+
+// TestSrcDirsVdlPath tests the VDLPATH part of SrcDirs().
+func TestSrcDirsVdlPath(t *testing.T) {
+	cwd, err := os.Getwd()
+	if err != nil {
+		t.Fatalf("Getwd() failed: %v", err)
+	}
+	// abs turns a relative path into an absolute path rooted at cwd.
+	abs := func(relative string) string {
+		return filepath.Join(cwd, relative)
+	}
+	tests := []struct {
+		VDLPath string
+		Want    []string
+	}{
+		// Empty VDLPATH yields no src dirs, and is reported as an error below.
+		{"", nil},
+		// Test absolute paths.
+		{"/a", []string{"/a/src"}},
+		{"/a/b", []string{"/a/b/src"}},
+		{"/a:/b", []string{"/a/src", "/b/src"}},
+		{"/a/1:/b/2", []string{"/a/1/src", "/b/2/src"}},
+		{"/a/1:/b/2:/c/3", []string{"/a/1/src", "/b/2/src", "/c/3/src"}},
+		{":::/a/1::::/b/2::::/c/3:::", []string{"/a/1/src", "/b/2/src", "/c/3/src"}},
+		// Test relative paths.
+		{"a", []string{abs("a/src")}},
+		{"a/b", []string{abs("a/b/src")}},
+		{"a:b", []string{abs("a/src"), abs("b/src")}},
+		{"a/1:b/2", []string{abs("a/1/src"), abs("b/2/src")}},
+		{"a/1:b/2:c/3", []string{abs("a/1/src"), abs("b/2/src"), abs("c/3/src")}},
+		{":::a/1::::b/2::::c/3:::", []string{abs("a/1/src"), abs("b/2/src"), abs("c/3/src")}},
+		// Test mixed absolute / relative paths.
+		{"a:/b", []string{abs("a/src"), "/b/src"}},
+		{"/a/1:b/2", []string{"/a/1/src", abs("b/2/src")}},
+		{"/a/1:b/2:/c/3", []string{"/a/1/src", abs("b/2/src"), "/c/3/src"}},
+		{":::/a/1::::b/2::::/c/3:::", []string{"/a/1/src", abs("b/2/src"), "/c/3/src"}},
+	}
+	for _, test := range tests {
+		if !setEnvironment(t, defaultVDLRoot, test.VDLPath) {
+			continue
+		}
+		name := fmt.Sprintf("SrcDirs(%q)", test.VDLPath)
+		errs := vdlutil.NewErrors(-1)
+		got := build.SrcDirs(errs)
+		var errRE string
+		if test.Want == nil {
+			errRE = "No src dirs; set VDLPATH to a valid value"
+		}
+		vdltest.ExpectResult(t, errs, name, errRE)
+		// Every result will have our valid VDLROOT srcdir.
+		want := append([]string{abs(defaultVDLRoot)}, test.Want...)
+		if !reflect.DeepEqual(got, want) {
+			t.Errorf("%s got %v, want %v", name, got, want)
+		}
+	}
+}
+
+// TestIsDirImportPath tests Is{Dir,Import}Path; every path is exactly one of
+// the two, so IsImportPath is checked against the negation of IsDir.
+func TestIsDirImportPath(t *testing.T) {
+	tests := []struct {
+		Path  string
+		IsDir bool
+	}{
+		// Import paths.
+		{"", false},
+		{"...", false},
+		{".../", false},
+		{"all", false},
+		{"foo", false},
+		{"foo/", false},
+		{"foo...", false},
+		{"foo/...", false},
+		{"a/b/c", false},
+		{"a/b/c/", false},
+		{"a/b/c...", false},
+		{"a/b/c/...", false},
+		{"...a/b/c...", false},
+		{"...a/b/c/...", false},
+		{".../a/b/c/...", false},
+		{".../a/b/c...", false},
+		// Dir paths.
+		{".", true},
+		{"..", true},
+		{"./", true},
+		{"../", true},
+		{"./...", true},
+		{"../...", true},
+		{".././.././...", true},
+		{"/", true},
+		{"/.", true},
+		{"/..", true},
+		{"/...", true},
+		{"/./...", true},
+		{"/foo", true},
+		{"/foo/", true},
+		{"/foo...", true},
+		{"/foo/...", true},
+		{"/a/b/c", true},
+		{"/a/b/c/", true},
+		{"/a/b/c...", true},
+		{"/a/b/c/...", true},
+		{"/a/b/c/../../...", true},
+	}
+	for _, test := range tests {
+		// The original failure messages dropped the Path and got arguments,
+		// producing %!v(MISSING); include all three values here.
+		if got, want := build.IsDirPath(test.Path), test.IsDir; got != want {
+			t.Errorf("IsDirPath(%q) got %v, want %v", test.Path, got, want)
+		}
+		if got, want := build.IsImportPath(test.Path), !test.IsDir; got != want {
+			t.Errorf("IsImportPath(%q) got %v, want %v", test.Path, got, want)
+		}
+	}
+}
+
+// allModes lists every UnknownPathMode, so tests can run the same cases under
+// both the ignore and error modes.
+var allModes = []build.UnknownPathMode{
+	build.UnknownPathIsIgnored,
+	build.UnknownPathIsError,
+}
+
+// TestTransitivePackages tests TransitivePackages success cases.
+func TestTransitivePackages(t *testing.T) {
+	if !setEnvironment(t, defaultVDLRoot, defaultVDLPath) {
+		t.Fatalf("Couldn't setEnvironment")
+	}
+	// allTestdata is the transitive closure of every testdata package, in
+	// dependency order.  It is the expected result for all of the wildcard
+	// cases below, so it's hoisted here rather than repeated per case; it is
+	// never mutated.
+	allTestdata := []string{
+		"v.io/v23/vdl/testdata/arith/exp",
+		"v.io/v23/vdl/testdata/base",
+		"v.io/v23/vdl/testdata/arith",
+		"v.io/v23/vdl/testdata/nativetest",
+		"v.io/v23/vdl/testdata/nativedep",
+		"v.io/v23/vdl/testdata/nativedep2",
+		"v.io/v23/vdl/testdata/testconfig",
+	}
+	tests := []struct {
+		InPaths  []string // Input paths to TransitivePackages call
+		OutPaths []string // Wanted paths from build.Package.Path.
+		GenPaths []string // Wanted paths from build.Package.GenPath, same as OutPaths if nil.
+	}{
+		{nil, nil, nil},
+		{[]string{}, nil, nil},
+		// Single-package, both import and dir path.
+		{
+			[]string{"v.io/v23/vdl/testdata/base"},
+			[]string{"v.io/v23/vdl/testdata/base"},
+			nil,
+		},
+		{
+			[]string{"../testdata/base"},
+			[]string{"v.io/v23/vdl/testdata/base"},
+			nil,
+		},
+		// Single-package with wildcard, both import and dir path.
+		{
+			[]string{"v.io/v23/vdl/testdata/base..."},
+			[]string{"v.io/v23/vdl/testdata/base"},
+			nil,
+		},
+		{
+			[]string{"v.io/v23/vdl/testdata/base/..."},
+			[]string{"v.io/v23/vdl/testdata/base"},
+			nil,
+		},
+		{
+			[]string{"../testdata/base..."},
+			[]string{"v.io/v23/vdl/testdata/base"},
+			nil,
+		},
+		{
+			[]string{"../testdata/base/..."},
+			[]string{"v.io/v23/vdl/testdata/base"},
+			nil,
+		},
+		// Redundant specification as both import and dir path.
+		{
+			[]string{"v.io/v23/vdl/testdata/base", "../testdata/base"},
+			[]string{"v.io/v23/vdl/testdata/base"},
+			nil,
+		},
+		{
+			[]string{"v.io/v23/vdl/testdata/arith", "../testdata/arith"},
+			[]string{
+				"v.io/v23/vdl/testdata/arith/exp",
+				"v.io/v23/vdl/testdata/base",
+				"v.io/v23/vdl/testdata/arith",
+			},
+			nil,
+		},
+		// Wildcards as both import and dir path.
+		{[]string{"v.io/v23/vdl/testdata..."}, allTestdata, nil},
+		{[]string{"v.io/v23/vdl/testdata/..."}, allTestdata, nil},
+		{[]string{"../testdata..."}, allTestdata, nil},
+		{[]string{"../testdata/..."}, allTestdata, nil},
+		// Multi-Wildcards as both import and dir path.
+		{[]string{"v...vdl/testdata/..."}, allTestdata, nil},
+		{[]string{"../../...vdl/testdata/..."}, allTestdata, nil},
+		// Multi-Wildcards matching a single package.
+		{
+			[]string{"v...vdl/testdata/...exp"},
+			[]string{"v.io/v23/vdl/testdata/arith/exp"},
+			nil,
+		},
+		{
+			[]string{"../../...vdl/testdata/...exp"},
+			[]string{"v.io/v23/vdl/testdata/arith/exp"},
+			nil,
+		},
+		// Standard vdl package, as both import and dir path.
+		{
+			[]string{"vdltool"},
+			[]string{"vdltool"},
+			[]string{"v.io/v23/vdlroot/vdltool"},
+		},
+		{
+			[]string{"../../vdlroot/vdltool"},
+			[]string{"vdltool"},
+			[]string{"v.io/v23/vdlroot/vdltool"},
+		},
+	}
+	for _, test := range tests {
+		// All modes should result in the same successful output.
+		for _, mode := range allModes {
+			name := fmt.Sprintf("%v %v", mode, test.InPaths)
+			errs := vdlutil.NewErrors(-1)
+			pkgs := build.TransitivePackages(test.InPaths, mode, build.Opts{}, errs)
+			vdltest.ExpectResult(t, errs, name, "")
+			var paths []string
+			for _, pkg := range pkgs {
+				paths = append(paths, pkg.Path)
+			}
+			if got, want := paths, test.OutPaths; !reflect.DeepEqual(got, want) {
+				t.Errorf("%v got path %v, want %v", name, got, want)
+			}
+			wantGen := test.GenPaths
+			if wantGen == nil {
+				wantGen = test.OutPaths
+			}
+			paths = nil
+			for _, pkg := range pkgs {
+				paths = append(paths, pkg.GenPath)
+			}
+			if got, want := paths, wantGen; !reflect.DeepEqual(got, want) {
+				t.Errorf("%v got gen path %v, want %v", name, got, want)
+			}
+		}
+	}
+}
+
+// TestTransitivePackagesUnknownPathError tests TransitivePackages error cases.
+func TestTransitivePackagesUnknownPathError(t *testing.T) {
+	if !setEnvironment(t, defaultVDLRoot, defaultVDLPath) {
+		t.Fatalf("Couldn't setEnvironment")
+	}
+	tests := []struct {
+		InPaths []string
+		ErrRE   string
+	}{
+		// Non-existent as both import and dir path.
+		{
+			[]string{"noexist"},
+			`Can't resolve "noexist" to any packages`,
+		},
+		{
+			[]string{"./noexist"},
+			`Can't resolve "./noexist" to any packages`,
+		},
+		// Invalid package path, as both import and dir path.
+		{
+			[]string{".foo"},
+			`Import path ".foo" is invalid`,
+		},
+		{
+			[]string{"foo/.bar"},
+			`Import path "foo/.bar" is invalid`,
+		},
+		{
+			[]string{"_foo"},
+			`Import path "_foo" is invalid`,
+		},
+		{
+			[]string{"foo/_bar"},
+			`Import path "foo/_bar" is invalid`,
+		},
+		{
+			[]string{"../../../../.foo"},
+			`package path ".foo" is invalid`,
+		},
+		{
+			[]string{"../../../../foo/.bar"},
+			`package path "foo/.bar" is invalid`,
+		},
+		{
+			[]string{"../../../../_foo"},
+			`package path "_foo" is invalid`,
+		},
+		{
+			[]string{"../../../../foo/_bar"},
+			`package path "foo/_bar" is invalid`,
+		},
+		// Special-case error for packages under vdlroot, which can't be imported
+		// using the vdlroot prefix.
+		{
+			[]string{"v.io/v23/vdlroot/vdltool"},
+			`packages under vdlroot must be specified without the vdlroot prefix`,
+		},
+		{
+			[]string{"v.io/v23/vdlroot/..."},
+			`Can't resolve "v.io/v23/vdlroot/..." to any packages`,
+		},
+	}
+	for _, test := range tests {
+		for _, mode := range allModes {
+			name := fmt.Sprintf("%v %v", mode, test.InPaths)
+			errs := vdlutil.NewErrors(-1)
+			pkgs := build.TransitivePackages(test.InPaths, mode, build.Opts{}, errs)
+			errRE := test.ErrRE
+			if mode == build.UnknownPathIsIgnored {
+				// Ignore mode returns success, while error mode returns error.
+				errRE = ""
+			}
+			vdltest.ExpectResult(t, errs, name, errRE)
+			// In both modes no packages should be resolved.
+			if pkgs != nil {
+				t.Errorf("%v got unexpected packages %v", name, pkgs)
+			}
+		}
+	}
+}
+
+// TestPackageConfig tests vdl.config file support.
+func TestPackageConfig(t *testing.T) {
+	if !setEnvironment(t, defaultVDLRoot, defaultVDLPath) {
+		t.Fatalf("Couldn't setEnvironment")
+	}
+	tests := []struct {
+		Path   string
+		Config vdltool.Config
+	}{
+		// base has no vdl.config, so we expect the zero Config.
+		{"v.io/v23/vdl/testdata/base", vdltool.Config{}},
+		{
+			"v.io/v23/vdl/testdata/testconfig",
+			vdltool.Config{
+				GenLanguages: map[vdltool.GenLanguage]struct{}{vdltool.GenLanguageGo: struct{}{}},
+			},
+		},
+	}
+	for _, test := range tests {
+		name := path.Base(test.Path)
+		env := compile.NewEnv(-1)
+		deps := build.TransitivePackages([]string{test.Path}, build.UnknownPathIsError, build.Opts{}, env.Errors)
+		vdltest.ExpectResult(t, env.Errors, name, "")
+		// Each test names a leaf package, so exactly one dep is expected.
+		if len(deps) != 1 {
+			t.Fatalf("TransitivePackages(%q) got %v, want 1 dep", name, deps)
+		}
+		if got, want := deps[0].Name, name; got != want {
+			t.Errorf("TransitivePackages(%q) got Name %q, want %q", name, got, want)
+		}
+		if got, want := deps[0].Path, test.Path; got != want {
+			t.Errorf("TransitivePackages(%q) got Path %q, want %q", name, got, want)
+		}
+		if got, want := deps[0].Config, test.Config; !reflect.DeepEqual(got, want) {
+			t.Errorf("TransitivePackages(%q) got Config %+v, want %+v", name, got, want)
+		}
+	}
+}
+
+// TestBuildConfig tests BuildConfig, BuildConfigValue and
+// TransitivePackagesForConfig.
+func TestBuildConfig(t *testing.T) {
+	if !setEnvironment(t, defaultVDLRoot, defaultVDLPath) {
+		t.Fatalf("Couldn't setEnvironment")
+	}
+	tests := []struct {
+		Src   string
+		Value interface{}
+	}{
+		{
+			`config = x;import "v.io/v23/vdl/testdata/base";const x = base.NamedBool(true)`,
+			base.NamedBool(true),
+		},
+		{
+			`config = x;import "v.io/v23/vdl/testdata/base";const x = base.NamedString("abc")`,
+			base.NamedString("abc"),
+		},
+		{
+			`config = x;import "v.io/v23/vdl/testdata/base";const x = base.Args{1, 2}`,
+			base.Args{1, 2},
+		},
+	}
+	for _, test := range tests {
+		// Build import package dependencies.
+		env := compile.NewEnv(-1)
+		deps := build.TransitivePackagesForConfig("file", strings.NewReader(test.Src), build.Opts{}, env.Errors)
+		for _, dep := range deps {
+			build.BuildPackage(dep, env)
+		}
+		vdltest.ExpectResult(t, env.Errors, test.Src, "")
+		// Test BuildConfig
+		wantV := vdl.ZeroValue(vdl.TypeOf(test.Value))
+		if err := vdl.Convert(wantV, test.Value); err != nil {
+			t.Errorf("Convert(%v) got error %v, want nil", test.Value, err)
+		}
+		gotV := build.BuildConfig("file", strings.NewReader(test.Src), nil, nil, env)
+		if !vdl.EqualValue(gotV, wantV) {
+			t.Errorf("BuildConfig(%v) got %v, want %v", test.Src, gotV, wantV)
+		}
+		vdltest.ExpectResult(t, env.Errors, test.Src, "")
+		// TestBuildConfigValue
+		gotRV := reflect.New(reflect.TypeOf(test.Value))
+		build.BuildConfigValue("file", strings.NewReader(test.Src), nil, env, gotRV.Interface())
+		if got, want := gotRV.Elem().Interface(), test.Value; !reflect.DeepEqual(got, want) {
+			t.Errorf("BuildConfigValue(%v) got %v, want %v", test.Src, got, want)
+		}
+		vdltest.ExpectResult(t, env.Errors, test.Src, "")
+	}
+}
+
+// ts and vs are shorthand aliases used by the TestBuildExprs table below.
+type ts []*vdl.Type
+type vs []*vdl.Value
+
+// TestBuildExprs tests BuildExprs: multiple comma-separated expressions, the
+// last-type-repeats rule, and error cases.
+func TestBuildExprs(t *testing.T) {
+	ttArray := vdl.ArrayType(2, vdl.Int32Type)
+	ttStruct := vdl.StructType(vdl.Field{"A", vdl.Int32Type}, vdl.Field{"B", vdl.StringType})
+	vvArray := vdl.ZeroValue(ttArray)
+	vvArray.Index(0).AssignInt(1)
+	vvArray.Index(1).AssignInt(-2)
+	vvStruct := vdl.ZeroValue(ttStruct)
+	vvStruct.StructField(0).AssignInt(1)
+	vvStruct.StructField(1).AssignString("abc")
+	tests := []struct {
+		Data  string
+		Types ts
+		Want  vs
+		Err   string
+	}{
+		{``, nil, nil, "syntax error"},
+		{`true`, nil, vs{vdl.BoolValue(true)}, ""},
+		{`false`, nil, vs{vdl.BoolValue(false)}, ""},
+		{`"abc"`, nil, vs{vdl.StringValue("abc")}, ""},
+		{`1`, nil, vs{nil}, "1 must be assigned a type"},
+		{`1`, ts{vdl.Int64Type}, vs{vdl.Int64Value(1)}, ""},
+		{`1.0`, ts{vdl.Int64Type}, vs{vdl.Int64Value(1)}, ""},
+		{`1.5`, ts{vdl.Int64Type}, vs{nil}, "loses precision"},
+		{`1.0`, ts{vdl.Float64Type}, vs{vdl.Float64Value(1.0)}, ""},
+		{`1.5`, ts{vdl.Float64Type}, vs{vdl.Float64Value(1.5)}, ""},
+		{`1+2`, ts{vdl.Int64Type}, vs{vdl.Int64Value(3)}, ""},
+		{`1+2,"abc"`, ts{vdl.Int64Type, nil}, vs{vdl.Int64Value(3), vdl.StringValue("abc")}, ""},
+		// The single Int64Type entry applies to all three expressions.
+		{`1,2,3`, ts{vdl.Int64Type}, vs{vdl.Int64Value(1), vdl.Int64Value(2), vdl.Int64Value(3)}, ""},
+		{`{1,-2}`, ts{ttArray}, vs{vvArray}, ""},
+		{`{0+1,1-3}`, ts{ttArray}, vs{vvArray}, ""},
+		{`{1,"abc"}`, ts{ttStruct}, vs{vvStruct}, ""},
+		{`{A:1,B:"abc"}`, ts{ttStruct}, vs{vvStruct}, ""},
+		{`{B:"abc",A:1}`, ts{ttStruct}, vs{vvStruct}, ""},
+		{`{B:"a"+"bc",A:1*1}`, ts{ttStruct}, vs{vvStruct}, ""},
+	}
+	for _, test := range tests {
+		env := compile.NewEnv(-1)
+		values := build.BuildExprs(test.Data, test.Types, env)
+		vdltest.ExpectResult(t, env.Errors, test.Data, test.Err)
+		if got, want := len(values), len(test.Want); got != want {
+			t.Errorf("%s got len %d, want %d", test.Data, got, want)
+		}
+		for ix, want := range test.Want {
+			var got *vdl.Value
+			if ix < len(values) {
+				got = values[ix]
+			}
+			if !vdl.EqualValue(got, want) {
+				t.Errorf("%s got value #%d %v, want %v", test.Data, ix, got, want)
+			}
+		}
+	}
+}
diff --git a/lib/vdl/codegen/doc.go b/lib/vdl/codegen/doc.go
new file mode 100644
index 0000000..19ccead
--- /dev/null
+++ b/lib/vdl/codegen/doc.go
@@ -0,0 +1,3 @@
+// Package codegen implements utilities useful for all vdl code generators.
+// Code generators for specific languages live in sub-directories.
+package codegen
diff --git a/lib/vdl/codegen/golang/const.go b/lib/vdl/codegen/golang/const.go
new file mode 100644
index 0000000..027ba4f
--- /dev/null
+++ b/lib/vdl/codegen/golang/const.go
@@ -0,0 +1,223 @@
+package golang
+
+import (
+	"fmt"
+	"strconv"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/parse"
+)
+
+// constDefGo generates the Go source for const definition def, using "const"
+// or "var" depending on whether the value's kind supports a Go constant.
+func constDefGo(data goData, def *compile.ConstDef) string {
+	v := def.Value
+	return fmt.Sprintf("%s%s %s = %s%s", def.Doc, constOrVar(v.Kind()), def.Name, typedConst(data, v), def.DocSuffix)
+}
+
+// constOrVar returns "const" for kinds whose Go representation can be a
+// constant, and "var" for everything else (composites, any, typeobject, ...).
+func constOrVar(k vdl.Kind) string {
+	switch k {
+	case vdl.Bool, vdl.Byte, vdl.Uint16, vdl.Uint32, vdl.Uint64, vdl.Int16, vdl.Int32, vdl.Int64, vdl.Float32, vdl.Float64, vdl.Complex64, vdl.Complex128, vdl.String, vdl.Enum:
+		return "const"
+	default:
+		return "var"
+	}
+}
+
+// isByteList reports whether t is a list of bytes, i.e. []byte.
+func isByteList(t *vdl.Type) bool {
+	if t.Kind() != vdl.List {
+		return false
+	}
+	return t.Elem().Kind() == vdl.Byte
+}
+
+// tagValue returns the Go expression for tag value v, wrapped as an any value.
+func tagValue(data goData, v *vdl.Value) string {
+	return typedConst(data, vdl.AnyValue(v))
+}
+
+// TODO(bprosnitz): Generate the full tag name e.g. security.Read instead of
+// security.Label(1)
+//
+// TODO(toddw): This doesn't work at all if v.Type() is a native type, or has
+// subtypes that are native types.  It's also broken for optional types that
+// can't be represented using a composite literal (e.g. optional primitives).
+//
+// https://github.com/veyron/release-issues/issues/1017
+// typedConst returns the explicitly-typed Go expression for value v,
+// delegating to untypedConst for the raw value and adding the type qualifier
+// where Go requires one.
+func typedConst(data goData, v *vdl.Value) string {
+	k, t := v.Kind(), v.Type()
+	if k == vdl.Optional {
+		if elem := v.Elem(); elem != nil {
+			return "&" + typedConst(data, elem)
+		}
+		return "(" + typeGo(data, t) + ")(nil)" // results in (*Foo)(nil)
+	}
+	valstr := untypedConst(data, v)
+	// Enum, TypeObject and Any already include the type in their values.
+	// Built-in bool and string are implicitly convertible from literals.
+	if k == vdl.Enum || k == vdl.TypeObject || k == vdl.Any || t == vdl.BoolType || t == vdl.StringType {
+		return valstr
+	}
+	// Everything else requires an explicit type.
+	typestr := typeGo(data, t)
+	// { } are used instead of ( ) for composites
+	switch k {
+	case vdl.Array, vdl.Struct:
+		return typestr + valstr
+	case vdl.List, vdl.Set, vdl.Map:
+		// Special-case []byte, which we generate as a type conversion from string,
+		// and empty variable-length collections, which we generate as a type
+		// conversion from nil.
+		if !isByteList(t) && !v.IsZero() {
+			return typestr + valstr
+		}
+	}
+	return typestr + "(" + valstr + ")"
+}
+
+// untypedConst returns the untyped Go expression for value v; callers add an
+// explicit type qualifier via typedConst when Go requires one.
+func untypedConst(data goData, v *vdl.Value) string {
+	k, t := v.Kind(), v.Type()
+	if isByteList(t) {
+		if v.IsZero() {
+			return "nil"
+		}
+		return strconv.Quote(string(v.Bytes()))
+	}
+	switch k {
+	case vdl.Any:
+		if elem := v.Elem(); elem != nil {
+			// We need to generate a Go expression of type *vdl.Value that represents
+			// elem.  Since the rest of our logic can already generate the Go code for
+			// any value, we just wrap it in vdl.ValueOf to produce the final result.
+			//
+			// This may seem like a strange roundtrip, but results in less generator
+			// and generated code.
+			return data.Pkg("v.io/v23/vdl") + "ValueOf(" + typedConst(data, elem) + ")"
+		}
+		return "(*" + data.Pkg("v.io/v23/vdl") + "Value)(nil)"
+	case vdl.Optional:
+		if elem := v.Elem(); elem != nil {
+			return untypedConst(data, elem)
+		}
+		return "nil"
+	case vdl.TypeObject:
+		// We special-case Any and TypeObject, since they cannot be named by the
+		// user, and are simple to return statically.
+		switch v.TypeObject().Kind() {
+		case vdl.Any:
+			return data.Pkg("v.io/v23/vdl") + "AnyType"
+		case vdl.TypeObject:
+			return data.Pkg("v.io/v23/vdl") + "TypeObjectType"
+		}
+		// We need to generate a Go expression of type *vdl.Type that represents the
+		// type.  Since the rest of our logic can already generate the Go code for
+		// any value, we just wrap it in vdl.TypeOf to produce the final result.
+		//
+		// This may seem like a strange roundtrip, but results in less generator
+		// and generated code.
+		zero := vdl.ZeroValue(v.TypeObject())
+		return data.Pkg("v.io/v23/vdl") + "TypeOf(" + typedConst(data, zero) + ")"
+	case vdl.Bool:
+		return strconv.FormatBool(v.Bool())
+	case vdl.Byte:
+		return strconv.FormatUint(uint64(v.Byte()), 10)
+	case vdl.Uint16, vdl.Uint32, vdl.Uint64:
+		return strconv.FormatUint(v.Uint(), 10)
+	case vdl.Int16, vdl.Int32, vdl.Int64:
+		return strconv.FormatInt(v.Int(), 10)
+	case vdl.Float32, vdl.Float64:
+		return formatFloat(v.Float(), k)
+	case vdl.Complex64, vdl.Complex128:
+		switch re, im := real(v.Complex()), imag(v.Complex()); {
+		case im > 0:
+			return formatFloat(re, k) + "+" + formatFloat(im, k) + "i"
+		case im < 0:
+			return formatFloat(re, k) + formatFloat(im, k) + "i"
+		default:
+			return formatFloat(re, k)
+		}
+	case vdl.String:
+		return strconv.Quote(v.RawString())
+	case vdl.Enum:
+		return typeGo(data, t) + v.EnumLabel()
+	case vdl.Array:
+		if v.IsZero() && !t.ContainsKind(vdl.WalkInline, vdl.TypeObject, vdl.Union) {
+			// We can't rely on the golang zero-value array if t contains inline
+			// typeobject or union, since the golang zero-value for these types is
+			// different from the vdl zero-value for these types.
+			return "{}"
+		}
+		s := "{"
+		for ix := 0; ix < v.Len(); ix++ {
+			s += "\n" + untypedConst(data, v.Index(ix)) + ","
+		}
+		return s + "\n}"
+	case vdl.List:
+		if v.IsZero() {
+			return "nil"
+		}
+		s := "{"
+		for ix := 0; ix < v.Len(); ix++ {
+			s += "\n" + untypedConst(data, v.Index(ix)) + ","
+		}
+		return s + "\n}"
+	case vdl.Set, vdl.Map:
+		if v.IsZero() {
+			return "nil"
+		}
+		s := "{"
+		for _, key := range vdl.SortValuesAsString(v.Keys()) {
+			s += "\n" + subConst(data, key)
+			if k == vdl.Set {
+				s += ": struct{}{},"
+			} else {
+				s += ": " + untypedConst(data, v.MapIndex(key)) + ","
+			}
+		}
+		return s + "\n}"
+	case vdl.Struct:
+		s := "{"
+		hasFields := false
+		for ix := 0; ix < t.NumField(); ix++ {
+			vf := v.StructField(ix)
+			if !vf.IsZero() || vf.Type().ContainsKind(vdl.WalkInline, vdl.TypeObject, vdl.Union) {
+				// We can't rely on the golang zero-value for this field, even if it's a
+				// vdl zero value, if the field contains inline typeobject or union,
+				// since the golang zero-value for these types is different from the vdl
+				// zero-value for these types.
+				s += "\n" + t.Field(ix).Name + ": " + subConst(data, vf) + ","
+				hasFields = true
+			}
+		}
+		if hasFields {
+			s += "\n"
+		}
+		return s + "}"
+	case vdl.Union:
+		ix, field := v.UnionField()
+		return typeGo(data, t) + t.Field(ix).Name + "{" + typedConst(data, field) + "}"
+	default:
+		// The original format string had three %v verbs but only two args,
+		// producing %!v(MISSING) in the diagnostic; verbs and args now match.
+		data.Env.Errorf(data.File, parse.Pos{}, "untypedConst not implemented for %v %v", k, t)
+		return "INVALID"
+	}
+}
+
+// subConst deals with a quirk regarding Go composite literals.  Go allows us to
+// elide the type from composite literal Y when the type is implied; basically
+// when Y is contained in another composite literal X.  However it requires the
+// type for Y when X is a struct, and when X is a map and Y is the key.  As such
+// subConst is called for map keys and struct fields.
+func subConst(data goData, v *vdl.Value) string {
+	switch v.Kind() {
+	case vdl.Array, vdl.List, vdl.Set, vdl.Map, vdl.Struct, vdl.Optional:
+		// Composite (and optional) values need the explicit type in these
+		// positions, so use the typed form.
+		return typedConst(data, v)
+	}
+	return untypedConst(data, v)
+}
+
+func formatFloat(x float64, kind vdl.Kind) string {
+	var bitSize int
+	switch kind {
+	case vdl.Float32, vdl.Complex64:
+		bitSize = 32
+	case vdl.Float64, vdl.Complex128:
+		bitSize = 64
+	default:
+		panic(fmt.Errorf("vdl: formatFloat unhandled kind: %v", kind))
+	}
+	return strconv.FormatFloat(x, 'g', -1, bitSize)
+}
diff --git a/lib/vdl/codegen/golang/const_test.go b/lib/vdl/codegen/golang/const_test.go
new file mode 100644
index 0000000..d02e291
--- /dev/null
+++ b/lib/vdl/codegen/golang/const_test.go
@@ -0,0 +1,127 @@
+package golang
+
+import (
+	"testing"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// TestConst verifies the Go expressions generated by typedConst for a
+// representative value of every kind.
+func TestConst(t *testing.T) {
+	// testingMode makes Pkg return path.Base qualifiers without an import map.
+	testingMode = true
+	tests := []struct {
+		Name string
+		V    *vdl.Value
+		Want string
+	}{
+		{"True", vdl.BoolValue(true), `true`},
+		{"False", vdl.BoolValue(false), `false`},
+		{"String", vdl.StringValue("abc"), `"abc"`},
+		{"Bytes", vdl.BytesValue([]byte("abc")), `[]byte("abc")`},
+		{"Byte", vdl.ByteValue(111), `byte(111)`},
+		{"Uint16", vdl.Uint16Value(222), `uint16(222)`},
+		{"Uint32", vdl.Uint32Value(333), `uint32(333)`},
+		{"Uint64", vdl.Uint64Value(444), `uint64(444)`},
+		{"Int16", vdl.Int16Value(-555), `int16(-555)`},
+		{"Int32", vdl.Int32Value(-666), `int32(-666)`},
+		{"Int64", vdl.Int64Value(-777), `int64(-777)`},
+		{"Float32", vdl.Float32Value(1.5), `float32(1.5)`},
+		{"Float64", vdl.Float64Value(2.5), `float64(2.5)`},
+		{"Complex64", vdl.Complex64Value(1 + 2i), `complex64(1+2i)`},
+		{"Complex128", vdl.Complex128Value(3 + 4i), `complex128(3+4i)`},
+		{"Enum", vdl.ZeroValue(tEnum).AssignEnumLabel("B"), `TestEnumB`},
+		{"EmptyArray", vEmptyArray, "[3]string{}"},
+		{"EmptyList", vEmptyList, "[]string(nil)"},
+		{"EmptySet", vEmptySet, "map[string]struct{}(nil)"},
+		{"EmptyMap", vEmptyMap, "map[string]int64(nil)"},
+		{"EmptyStruct", vEmptyStruct, "TestStruct{}"},
+		{"Array", vArray, `[3]string{
+"A",
+"B",
+"C",
+}`},
+		{"List", vList, `[]string{
+"A",
+"B",
+"C",
+}`},
+		{"Set", vSet, `map[string]struct{}{
+"A": struct{}{},
+}`},
+		{"Map", vMap, `map[string]int64{
+"A": 1,
+}`},
+		{"Struct", vStruct, `TestStruct{
+A: "foo",
+B: 123,
+}`},
+		{"UnionABC", vUnionABC, `TestUnion(TestUnionA{"abc"})`},
+		{"Union123", vUnion123, `TestUnion(TestUnionB{int64(123)})`},
+		{"AnyABC", vAnyABC, `vdl.ValueOf("abc")`},
+		{"Any123", vAny123, `vdl.ValueOf(int64(123))`},
+		{"TypeObjectBool", vdl.TypeObjectValue(vdl.BoolType), `vdl.TypeOf(false)`},
+		{"TypeObjectString", vdl.TypeObjectValue(vdl.StringType), `vdl.TypeOf("")`},
+		{"TypeObjectBytes", vdl.TypeObjectValue(vdl.ListType(vdl.ByteType)), `vdl.TypeOf([]byte(nil))`},
+		{"TypeObjectByte", vdl.TypeObjectValue(vdl.ByteType), `vdl.TypeOf(byte(0))`},
+		{"TypeObjectUint16", vdl.TypeObjectValue(vdl.Uint16Type), `vdl.TypeOf(uint16(0))`},
+		{"TypeObjectInt16", vdl.TypeObjectValue(vdl.Int16Type), `vdl.TypeOf(int16(0))`},
+		{"TypeObjectFloat32", vdl.TypeObjectValue(vdl.Float32Type), `vdl.TypeOf(float32(0))`},
+		{"TypeObjectComplex64", vdl.TypeObjectValue(vdl.Complex64Type), `vdl.TypeOf(complex64(0))`},
+		{"TypeObjectEnum", vdl.TypeObjectValue(tEnum), `vdl.TypeOf(TestEnumA)`},
+		{"TypeObjectArray", vdl.TypeObjectValue(tArray), `vdl.TypeOf([3]string{})`},
+		{"TypeObjectList", vdl.TypeObjectValue(tList), `vdl.TypeOf([]string(nil))`},
+		{"TypeObjectSet", vdl.TypeObjectValue(tSet), `vdl.TypeOf(map[string]struct{}(nil))`},
+		{"TypeObjectMap", vdl.TypeObjectValue(tMap), `vdl.TypeOf(map[string]int64(nil))`},
+		{"TypeObjectStruct", vdl.TypeObjectValue(tStruct), `vdl.TypeOf(TestStruct{})`},
+		{"TypeObjectUnion", vdl.TypeObjectValue(tUnion), `vdl.TypeOf(TestUnion(TestUnionA{""}))`},
+		{"TypeObjectAny", vdl.TypeObjectValue(vdl.AnyType), `vdl.AnyType`},
+		{"TypeObjectTypeObject", vdl.TypeObjectValue(vdl.TypeObjectType), `vdl.TypeObjectType`},
+		// TODO(toddw): Add tests for optional types.
+	}
+	data := goData{Env: compile.NewEnv(-1)}
+	for _, test := range tests {
+		if got, want := typedConst(data, test.V), test.Want; got != want {
+			t.Errorf("%s\n GOT %s\nWANT %s", test.Name, got, want)
+		}
+	}
+}
+
+// Test values used by TestConst; the vEmpty* values stay zero, while the
+// others are populated in init below.
+var (
+	vEmptyArray  = vdl.ZeroValue(tArray)
+	vEmptyList   = vdl.ZeroValue(tList)
+	vEmptySet    = vdl.ZeroValue(tSet)
+	vEmptyMap    = vdl.ZeroValue(tMap)
+	vEmptyStruct = vdl.ZeroValue(tStruct)
+
+	vArray    = vdl.ZeroValue(tArray)
+	vList     = vdl.ZeroValue(tList)
+	vSet      = vdl.ZeroValue(tSet)
+	vMap      = vdl.ZeroValue(tMap)
+	vStruct   = vdl.ZeroValue(tStruct)
+	vUnionABC = vdl.ZeroValue(tUnion)
+	vUnion123 = vdl.ZeroValue(tUnion)
+	vAnyABC   = vdl.ZeroValue(vdl.AnyType)
+	vAny123   = vdl.ZeroValue(vdl.AnyType)
+)
+
+// init populates the non-empty test values declared above.
+func init() {
+	vArray.Index(0).AssignString("A")
+	vArray.Index(1).AssignString("B")
+	vArray.Index(2).AssignString("C")
+	vList.AssignLen(3)
+	vList.Index(0).AssignString("A")
+	vList.Index(1).AssignString("B")
+	vList.Index(2).AssignString("C")
+	// TODO(toddw): Assign more items once the ordering is fixed.
+	vSet.AssignSetKey(vdl.StringValue("A"))
+	vMap.AssignMapIndex(vdl.StringValue("A"), vdl.Int64Value(1))
+
+	vStruct.StructField(0).AssignString("foo")
+	vStruct.StructField(1).AssignInt(123)
+
+	vUnionABC.AssignUnionField(0, vdl.StringValue("abc"))
+	vUnion123.AssignUnionField(1, vdl.Int64Value(123))
+
+	vAnyABC.Assign(vdl.StringValue("abc"))
+	vAny123.Assign(vdl.Int64Value(123))
+}
diff --git a/lib/vdl/codegen/golang/gen.go b/lib/vdl/codegen/golang/gen.go
new file mode 100644
index 0000000..221d877
--- /dev/null
+++ b/lib/vdl/codegen/golang/gen.go
@@ -0,0 +1,799 @@
+// Package golang implements Go code generation from compiled VDL packages.
+package golang
+
+import (
+	"bytes"
+	"fmt"
+	"go/format"
+	"path"
+	"strings"
+	"text/template"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/parse"
+	"v.io/v23/vdl/vdlutil"
+	"v.io/v23/vdlroot/vdltool"
+)
+
+// goData is the root data structure executed against the code-generation
+// template, and is also passed to the template helper functions.
+type goData struct {
+	File    *compile.File // the VDL file code is being generated for
+	Env     *compile.Env  // compile environment, used to report errors
+	Imports *goImports    // imports required by the generated file
+}
+
+// testingMode is only set to true in tests, to make testing simpler: when set,
+// Pkg qualifies identifiers by the package's base name instead of consulting
+// the import map.
+var testingMode = false
+
+// Pkg returns the qualifier (e.g. "vdl.") to prepend when referencing
+// identifiers defined in pkgPath, or "" when no qualifier is needed.  If
+// pkgPath isn't in the import map, an error is reported against the current
+// file and "" is returned.
+func (data goData) Pkg(pkgPath string) string {
+	if testingMode {
+		return path.Base(pkgPath) + "."
+	}
+	// Special-case to avoid adding package qualifiers if we're generating code
+	// for that package.
+	if data.File.Package.GenPath == pkgPath {
+		return ""
+	}
+	if local := data.Imports.LookupLocal(pkgPath); local != "" {
+		return local + "."
+	}
+	data.Env.Errorf(data.File, parse.Pos{}, "missing package %q", pkgPath)
+	return ""
+}
+
+// Generate takes a populated compile.File and returns a byte slice containing
+// the generated Go source code.  It panics if the template fails to execute or
+// produces unparseable Go code, since either indicates a bug in this package.
+func Generate(file *compile.File, env *compile.Env) []byte {
+	validateGoConfig(file, env)
+	data := goData{
+		File:    file,
+		Env:     env,
+		Imports: newImports(file, env),
+	}
+	// The implementation uses the template mechanism from text/template and
+	// executes the template against the goData instance.
+	var buf bytes.Buffer
+	if err := goTemplate.Execute(&buf, data); err != nil {
+		// We shouldn't see an error; it means our template is buggy.
+		panic(fmt.Errorf("vdl: couldn't execute template: %v", err))
+	}
+	// Use gofmt to format the generated source.
+	pretty, err := format.Source(buf.Bytes())
+	if err != nil {
+		// We shouldn't see an error; it means we generated invalid code.
+		// Dump the raw source first to make the bug easier to debug.
+		fmt.Printf("%s", buf.Bytes())
+		panic(fmt.Errorf("vdl: generated invalid Go code: %v", err))
+	}
+	return pretty
+}
+
+// The native types feature is hard to use correctly.  E.g. the package
+// containing the wire type must be imported into your Go binary in order for
+// the wire<->native registration to work, which is hard to ensure.  E.g.
+//
+//   package base    // VDL package
+//   type Wire int   // has native type native.Int
+//
+//   package dep     // VDL package
+//   import "base"
+//   type Foo struct {
+//     X base.Wire
+//   }
+//
+// The Go code for package "dep" imports "native", rather than "base":
+//
+//   package dep     // Go package generated from VDL package
+//   import "native"
+//   type Foo struct {
+//     X native.Int
+//   }
+//
+// Note that when you import the "dep" package in your own code, you always use
+// native.Int, rather than base.Wire; the base.Wire representation is only used
+// as the wire format, but doesn't appear in generated code.  But in order for
+// this to work correctly, the "base" package must be imported.  This is tricky.
+//
+// Restrict the feature to these whitelisted VDL packages for now.
+var nativeTypePackageWhitelist = map[string]bool{
+	"time": true,
+	"v.io/v23/vdl/testdata/nativetest": true,
+}
+
+// validateGoConfig reports errors against env for invalid Go-specific entries
+// in the package's vdl.config, in particular the Go.WireToNativeTypes map.
+func validateGoConfig(file *compile.File, env *compile.Env) {
+	pkg := file.Package
+	vdlconfig := path.Join(pkg.GenPath, "vdl.config")
+	// Validate native type configuration.  Since native types are hard to use, we
+	// restrict them to a built-in whitelist of packages for now.
+	if len(pkg.Config.Go.WireToNativeTypes) > 0 && !nativeTypePackageWhitelist[pkg.Path] {
+		env.Errors.Errorf("%s: Go.WireToNativeTypes is restricted to whitelisted VDL packages", vdlconfig)
+	}
+	// Make sure each wire type is actually defined in the package, and required
+	// fields are all filled in.
+	for wire, native := range pkg.Config.Go.WireToNativeTypes {
+		if def := pkg.ResolveType(wire); def == nil {
+			env.Errors.Errorf("%s: type %s specified in Go.WireToNativeTypes undefined", vdlconfig, wire)
+		}
+		if native.Type == "" {
+			env.Errors.Errorf("%s: type %s specified in Go.WireToNativeTypes invalid (empty GoType.Type)", vdlconfig, wire)
+		}
+		for _, imp := range native.Imports {
+			if imp.Path == "" || imp.Name == "" {
+				env.Errors.Errorf("%s: type %s specified in Go.WireToNativeTypes invalid (empty GoImport.Path or Name)", vdlconfig, wire)
+				continue
+			}
+			// NOTE(review): each declared import must appear in the native type
+			// expression -- presumably to guarantee the generated import is
+			// actually used; confirm against the import-generation logic.
+			importPrefix := imp.Name + "."
+			if !strings.Contains(native.Type, importPrefix) {
+				env.Errors.Errorf("%s: type %s specified in Go.WireToNativeTypes invalid (native type %q doesn't contain import prefix %q)", vdlconfig, wire, native.Type, importPrefix)
+			}
+		}
+	}
+}
+
+var goTemplate *template.Template
+
+// The template mechanism is great at high-level formatting and simple
+// substitution, but is bad at more complicated logic.  We define some functions
+// that we can use in the template so that when things get complicated we back
+// off to a regular function.
+//
+// The template is parsed exactly once at package init; template.Must panics on
+// any syntax error in genGo.
+func init() {
+	funcMap := template.FuncMap{
+		"firstRuneToExport":       vdlutil.FirstRuneToExportCase,
+		"firstRuneToUpper":        vdlutil.FirstRuneToUpper,
+		"firstRuneToLower":        vdlutil.FirstRuneToLower,
+		"errorName":               errorName,
+		"nativeIdent":             nativeIdent,
+		"typeGo":                  typeGo,
+		"typeDefGo":               typeDefGo,
+		"constDefGo":              constDefGo,
+		"tagValue":                tagValue,
+		"embedGo":                 embedGo,
+		"isStreamingMethod":       isStreamingMethod,
+		"hasStreamingMethods":     hasStreamingMethods,
+		"docBreak":                docBreak,
+		"quoteStripDoc":           parse.QuoteStripDoc,
+		"argNames":                argNames,
+		"argTypes":                argTypes,
+		"argNameTypes":            argNameTypes,
+		"argParens":               argParens,
+		"uniqueName":              uniqueName,
+		"uniqueNameImpl":          uniqueNameImpl,
+		"serverContextType":       serverContextType,
+		"serverContextStubType":   serverContextStubType,
+		"outArgsClient":           outArgsClient,
+		"clientStubImpl":          clientStubImpl,
+		"clientFinishImpl":        clientFinishImpl,
+		"serverStubImpl":          serverStubImpl,
+		"reInitStreamValue":       reInitStreamValue,
+		"nativeConversionsInFile": nativeConversionsInFile,
+	}
+	goTemplate = template.Must(template.New("genGo").Funcs(funcMap).Parse(genGo))
+}
+
+// errorName returns the name of the generated error variable for def:
+// "Err<Name>" for exported errors, "err<Name>" otherwise.  The file argument
+// is currently unused.
+func errorName(def *compile.ErrorDef, file *compile.File) string {
+	switch {
+	case def.Exported:
+		return "Err" + def.Name
+	default:
+		return "err" + vdlutil.FirstRuneToUpper(def.Name)
+	}
+}
+
+// isStreamingMethod returns true iff method has an in-stream or an out-stream.
+func isStreamingMethod(method *compile.Method) bool {
+	return method.InStream != nil || method.OutStream != nil
+}
+
+// hasStreamingMethods returns true iff any method in methods is streaming.
+func hasStreamingMethods(methods []*compile.Method) bool {
+	for _, method := range methods {
+		if isStreamingMethod(method) {
+			return true
+		}
+	}
+	return false
+}
+
+// docBreak adds a "//\n" break to separate previous comment lines from doc.
+// If doc is empty it returns the empty string, so no stray break is emitted.
+func docBreak(doc string) string {
+	if doc == "" {
+		return ""
+	}
+	return "//\n" + doc
+}
+
+// argTypes returns a comma-separated list of each type from args.  The first
+// and last entries are included verbatim when non-empty, bracketing the list.
+func argTypes(first, last string, data goData, args []*compile.Field) string {
+	var result []string
+	if first != "" {
+		result = append(result, first)
+	}
+	for _, arg := range args {
+		result = append(result, typeGo(data, arg.Type))
+	}
+	if last != "" {
+		result = append(result, last)
+	}
+	return strings.Join(result, ", ")
+}
+
+// argNames returns a comma-separated list of each name from args.  If argPrefix
+// is empty, the name specified in args is used; otherwise the name is prefixD,
+// where D is the position of the argument.  Error-typed args additionally get
+// boxPrefix prepended (e.g. "&" to box them), and the first and last entries
+// bracket the list verbatim when non-empty.
+func argNames(boxPrefix, argPrefix, first, last string, args []*compile.Field) string {
+	var result []string
+	if first != "" {
+		result = append(result, first)
+	}
+	for ix, arg := range args {
+		name := arg.Name
+		if argPrefix != "" {
+			name = fmt.Sprintf("%s%d", argPrefix, ix)
+		}
+		if arg.Type == vdl.ErrorType {
+			// TODO(toddw): Also need to box user-defined external interfaces.  Or can
+			// we remove this special-case now?
+			name = boxPrefix + name
+		}
+		result = append(result, name)
+	}
+	if last != "" {
+		result = append(result, last)
+	}
+	return strings.Join(result, ", ")
+}
+
+// argNameTypes returns a comma-separated list of "name type" from args.  If
+// argPrefix is empty, the name specified in args is used; otherwise the name is
+// prefixD, where D is the position of the argument.  If argPrefix is empty and
+// no names are specified in args, no names will be output; in that case the
+// names are also stripped from the verbatim first and last entries.
+func argNameTypes(argPrefix, first, last string, data goData, args []*compile.Field) string {
+	noNames := argPrefix == "" && !hasArgNames(args)
+	var result []string
+	if first != "" {
+		result = append(result, maybeStripArgName(first, noNames))
+	}
+	for ax, arg := range args {
+		var name string
+		switch {
+		case noNames:
+			break
+		case argPrefix == "":
+			name = arg.Name + " "
+		default:
+			name = fmt.Sprintf("%s%d ", argPrefix, ax)
+		}
+		result = append(result, name+typeGo(data, arg.Type))
+	}
+	if last != "" {
+		result = append(result, maybeStripArgName(last, noNames))
+	}
+	return strings.Join(result, ", ")
+}
+
+// hasArgNames returns true iff the args carry explicit names.
+func hasArgNames(args []*compile.Field) bool {
+	// VDL guarantees that either all args are named, or none of them are.
+	return len(args) > 0 && args[0].Name != ""
+}
+
+// maybeStripArgName strips away the first space-terminated token from arg, only
+// if strip is true.  Args without a space (i.e. without a name) are unchanged.
+func maybeStripArgName(arg string, strip bool) string {
+	if index := strings.Index(arg, " "); index != -1 && strip {
+		return arg[index+1:]
+	}
+	return arg
+}
+
+// argParens takes a list of 0 or more arguments, and adds parens only when
+// necessary; if args contains any commas or spaces, we must add parens.
+func argParens(argList string) string {
+	if strings.IndexAny(argList, ", ") > -1 {
+		return "(" + argList + ")"
+	}
+	return argList
+}
+
+// uniqueName returns a unique name based on the interface, method and suffix,
+// by simply concatenating the three pieces.
+func uniqueName(iface *compile.Interface, method *compile.Method, suffix string) string {
+	return iface.Name + method.Name + suffix
+}
+
+// uniqueNameImpl returns uniqueName with an "impl" prefix, which also makes
+// the resulting identifier unexported.
+func uniqueNameImpl(iface *compile.Interface, method *compile.Method, suffix string) string {
+	return "impl" + uniqueName(iface, method, suffix)
+}
+
+// serverContextType returns the type of the first arg of a server method,
+// with prefix prepended verbatim.  The first arg of every server method is a
+// context; the type is either a typed context for streams, or
+// ipc.ServerContext for non-streams.
+func serverContextType(prefix string, data goData, iface *compile.Interface, method *compile.Method) string {
+	if isStreamingMethod(method) {
+		return prefix + uniqueName(iface, method, "Context")
+	}
+	return prefix + data.Pkg("v.io/v23/ipc") + "ServerContext"
+}
+
+// serverContextStubType returns the type of the first arg of a server stub
+// method, with prefix prepended verbatim.  The type is either a pointer to a
+// typed context stub for streams, or ipc.ServerContext for non-streams.
+func serverContextStubType(prefix string, data goData, iface *compile.Interface, method *compile.Method) string {
+	if isStreamingMethod(method) {
+		return prefix + "*" + uniqueName(iface, method, "ContextStub")
+	}
+	return prefix + data.Pkg("v.io/v23/ipc") + "ServerContext"
+}
+
+// outArgsClient returns the out args of an interface method on the client,
+// wrapped in parens if necessary.  The client side always returns a final
+// error, in addition to the regular out-args.  For streaming methods the
+// regular out-args are replaced by the typed call object.
+func outArgsClient(argPrefix string, data goData, iface *compile.Interface, method *compile.Method) string {
+	first, args := "", method.OutArgs
+	if isStreamingMethod(method) {
+		first, args = "ocall "+uniqueName(iface, method, "Call"), nil
+	}
+	return argParens(argNameTypes(argPrefix, first, "err error", data, args))
+}
+
+// clientStubImpl returns the interface method client stub implementation: it
+// starts the call, then either wraps the call in a typed stream stub
+// (streaming methods) or finishes the call immediately (non-streaming).
+func clientStubImpl(data goData, iface *compile.Interface, method *compile.Method) string {
+	var buf bytes.Buffer
+	// Pack the in-args into a boxed []interface{} slice, or pass nil if empty.
+	inargs := "nil"
+	if len(method.InArgs) > 0 {
+		inargs = "[]interface{}{" + argNames("&", "i", "", "", method.InArgs) + "}"
+	}
+	fmt.Fprint(&buf, "\tvar call "+data.Pkg("v.io/v23/ipc")+"Call\n")
+	fmt.Fprintf(&buf, "\tif call, err = c.c(ctx).StartCall(ctx, c.name, %q, %s, opts...); err != nil {\n\t\treturn\n\t}\n", method.Name, inargs)
+	switch {
+	case isStreamingMethod(method):
+		fmt.Fprintf(&buf, "ocall = &%s{Call: call}\n", uniqueNameImpl(iface, method, "Call"))
+	default:
+		fmt.Fprintf(&buf, "%s\n", clientFinishImpl("call", method))
+	}
+	fmt.Fprint(&buf, "\treturn")
+	return buf.String() // the caller writes the trailing newline
+}
+
+// clientFinishImpl returns the client finish implementation for method: a
+// Finish call on varname that unpacks the out-args by address.
+func clientFinishImpl(varname string, method *compile.Method) string {
+	outargs := argNames("", "&o", "", "", method.OutArgs)
+	return fmt.Sprintf("\terr = %s.Finish(%s)", varname, outargs)
+}
+
+// serverStubImpl returns the interface method server stub implementation,
+// which simply delegates to the underlying impl.  The data and iface
+// arguments are currently unused.
+func serverStubImpl(data goData, iface *compile.Interface, method *compile.Method) string {
+	var buf bytes.Buffer
+	inargs := argNames("", "i", "ctx", "", method.InArgs)
+	fmt.Fprintf(&buf, "\treturn s.impl.%s(%s)", method.Name, inargs)
+	return buf.String() // the caller writes the trailing newline
+}
+
+// reInitStreamValue returns a statement that resets name before it is reused
+// as a stream receive buffer: structs are reset to their zero struct and any
+// values to nil.  Other kinds need no reset, so "" is returned.
+func reInitStreamValue(data goData, t *vdl.Type, name string) string {
+	switch t.Kind() {
+	case vdl.Struct:
+		return name + " = " + typeGo(data, t) + "{}\n"
+	case vdl.Any:
+		return name + " = nil\n"
+	}
+	return ""
+}
+
+// nativeConversionsInFile returns the map between wire and native types for
+// wire types defined in file, filtering the package-wide configuration down
+// to the type definitions present in this particular file.
+func nativeConversionsInFile(file *compile.File) map[string]vdltool.GoType {
+	all := file.Package.Config.Go.WireToNativeTypes
+	infile := make(map[string]vdltool.GoType)
+	for wire, gotype := range all {
+		for _, tdef := range file.TypeDefs {
+			if tdef.Name == wire {
+				infile[wire] = gotype
+				break
+			}
+		}
+	}
+	return infile
+}
+
+// genGo is the template that we execute against a goData instance to generate
+// our code.  Most of it is fairly straightforward substitution and ranges;
+// more complicated logic is delegated to the helper functions registered in
+// the FuncMap above.
+//
+// We try to generate code that has somewhat reasonable formatting, and leave
+// the fine-tuning to the go/format package.  Note that go/format won't fix
+// some instances of spurious newlines, so we try to keep it reasonable.
+const genGo = `
+{{$data := .}}
+{{$file := $data.File}}
+// This file was auto-generated by the veyron vdl tool.
+// Source: {{$file.BaseName}}
+
+{{$file.PackageDef.Doc}}package {{$file.PackageDef.Name}}{{$file.PackageDef.DocSuffix}}
+
+{{if or $data.Imports.System $data.Imports.User}}
+import ( {{if $data.Imports.System}}
+	// VDL system imports{{range $imp := $data.Imports.System}}
+	{{if $imp.Name}}{{$imp.Name}} {{end}}"{{$imp.Path}}"{{end}}{{end}}
+{{if $data.Imports.User}}
+	// VDL user imports{{range $imp := $data.Imports.User}}
+	{{if $imp.Name}}{{$imp.Name}} {{end}}"{{$imp.Path}}"{{end}}{{end}}
+){{end}}
+
+{{if $file.TypeDefs}}
+{{range $tdef := $file.TypeDefs}}
+{{typeDefGo $data $tdef}}
+{{end}}
+{{$nativeConversions := nativeConversionsInFile $file}}
+func init() { {{range $wire, $native := $nativeConversions}}{{$lwire := firstRuneToLower $wire}}
+	{{$data.Pkg "v.io/v23/vdl"}}RegisterNative({{$lwire}}ToNative, {{$lwire}}FromNative){{end}}{{range $tdef := $file.TypeDefs}}
+	{{$data.Pkg "v.io/v23/vdl"}}Register((*{{$tdef.Name}})(nil)){{end}}
+}
+{{range $wire, $native := $nativeConversions}}{{$lwire := firstRuneToLower $wire}}{{$nat := nativeIdent $data $native}}
+// Type-check {{$wire}} conversion functions.
+var _ func({{$wire}}, *{{$nat}}) error = {{$lwire}}ToNative
+var _ func(*{{$wire}}, {{$nat}}) error = {{$lwire}}FromNative
+{{end}}
+{{end}}
+
+{{range $cdef := $file.ConstDefs}}
+{{constDefGo $data $cdef}}
+{{end}}
+
+{{if $file.ErrorDefs}}var ( {{range $edef := $file.ErrorDefs}}
+	{{$edef.Doc}}{{errorName $edef $file}} = {{$data.Pkg "v.io/v23/verror"}}Register("{{$edef.ID}}", {{$data.Pkg "v.io/v23/verror"}}{{$edef.RetryCode}}, "{{$edef.English}}"){{end}}
+)
+
+{{/* TODO(toddw): Don't set "en-US" or "en" again, since it's already set by Register */}}
+func init() { {{range $edef := $file.ErrorDefs}}{{range $lf := $edef.Formats}}
+	{{$data.Pkg "v.io/v23/i18n"}}Cat().SetWithBase({{$data.Pkg "v.io/v23/i18n"}}LangID("{{$lf.Lang}}"), {{$data.Pkg "v.io/v23/i18n"}}MsgID({{errorName $edef $file}}.ID), "{{$lf.Fmt}}"){{end}}{{end}}
+}
+{{range $edef := $file.ErrorDefs}}
+{{$errName := errorName $edef $file}}
+{{$newErr := print (firstRuneToExport "New" $edef.Exported) (firstRuneToUpper $errName)}}
+// {{$newErr}} returns an error with the {{$errName}} ID.
+func {{$newErr}}(ctx {{argNameTypes "" (print "*" ($data.Pkg "v.io/v23/context") "T") "" $data $edef.Params}}) error {
+	return {{$data.Pkg "v.io/v23/verror"}}New({{$errName}}, {{argNames "" "" "ctx" "" $edef.Params}})
+}
+{{end}}{{end}}
+
+{{range $iface := $file.Interfaces}}
+{{$ifaceStreaming := hasStreamingMethods $iface.AllMethods}}
+{{$ipc_ := $data.Pkg "v.io/v23/ipc"}}
+{{$ctxArg := print "ctx *" ($data.Pkg "v.io/v23/context") "T"}}
+{{$optsArg := print "opts ..." $ipc_ "CallOpt"}}
+// {{$iface.Name}}ClientMethods is the client interface
+// containing {{$iface.Name}} methods.
+{{docBreak $iface.Doc}}type {{$iface.Name}}ClientMethods interface { {{range $embed := $iface.Embeds}}
+	{{$embed.Doc}}{{embedGo $data $embed}}ClientMethods{{$embed.DocSuffix}}{{end}}{{range $method := $iface.Methods}}
+	{{$method.Doc}}{{$method.Name}}({{argNameTypes "" $ctxArg $optsArg $data $method.InArgs}}) {{outArgsClient "" $data $iface $method}}{{$method.DocSuffix}}{{end}}
+}
+
+// {{$iface.Name}}ClientStub adds universal methods to {{$iface.Name}}ClientMethods.
+type {{$iface.Name}}ClientStub interface {
+	{{$iface.Name}}ClientMethods
+	{{$ipc_}}UniversalServiceMethods
+}
+
+// {{$iface.Name}}Client returns a client stub for {{$iface.Name}}.
+func {{$iface.Name}}Client(name string, opts ...{{$ipc_}}BindOpt) {{$iface.Name}}ClientStub {
+	var client {{$ipc_}}Client
+	for _, opt := range opts {
+		if clientOpt, ok := opt.({{$ipc_}}Client); ok {
+			client = clientOpt
+		}
+	}
+	return impl{{$iface.Name}}ClientStub{ name, client{{range $embed := $iface.Embeds}}, {{embedGo $data $embed}}Client(name, client){{end}} }
+}
+
+type impl{{$iface.Name}}ClientStub struct {
+	name   string
+	client {{$ipc_}}Client
+{{range $embed := $iface.Embeds}}
+	{{embedGo $data $embed}}ClientStub{{end}}
+}
+
+func (c impl{{$iface.Name}}ClientStub) c({{$ctxArg}}) {{$ipc_}}Client {
+	if c.client != nil {
+		return c.client
+	}
+	return {{$data.Pkg "v.io/v23"}}GetClient(ctx)
+}
+
+{{range $method := $iface.Methods}}
+func (c impl{{$iface.Name}}ClientStub) {{$method.Name}}({{argNameTypes "i" $ctxArg $optsArg $data $method.InArgs}}) {{outArgsClient "o" $data $iface $method}} {
+{{clientStubImpl $data $iface $method}}
+}
+{{end}}
+
+{{range $method := $iface.Methods}}{{if isStreamingMethod $method}}
+{{$clientStream := uniqueName $iface $method "ClientStream"}}
+{{$clientCall := uniqueName $iface $method "Call"}}
+{{$clientCallImpl := uniqueNameImpl $iface $method "Call"}}
+{{$clientRecvImpl := uniqueNameImpl $iface $method "CallRecv"}}
+{{$clientSendImpl := uniqueNameImpl $iface $method "CallSend"}}
+
+// {{$clientStream}} is the client stream for {{$iface.Name}}.{{$method.Name}}.
+type {{$clientStream}} interface { {{if $method.OutStream}}
+	// RecvStream returns the receiver side of the {{$iface.Name}}.{{$method.Name}} client stream.
+	RecvStream() interface {
+		// Advance stages an item so that it may be retrieved via Value.  Returns
+		// true iff there is an item to retrieve.  Advance must be called before
+		// Value is called.  May block if an item is not available.
+		Advance() bool
+		// Value returns the item that was staged by Advance.  May panic if Advance
+		// returned false or was not called.  Never blocks.
+		Value() {{typeGo $data $method.OutStream}}
+		// Err returns any error encountered by Advance.  Never blocks.
+		Err() error
+	} {{end}}{{if $method.InStream}}
+	// SendStream returns the send side of the {{$iface.Name}}.{{$method.Name}} client stream.
+	SendStream() interface {
+		// Send places the item onto the output stream.  Returns errors
+		// encountered while sending, or if Send is called after Close or
+		// the stream has been canceled.  Blocks if there is no buffer
+		// space; will unblock when buffer space is available or after
+		// the stream has been canceled.
+		Send(item {{typeGo $data $method.InStream}}) error
+		// Close indicates to the server that no more items will be sent;
+		// server Recv calls will receive io.EOF after all sent items.
+		// This is an optional call - e.g. a client might call Close if it
+		// needs to continue receiving items from the server after it's
+		// done sending.  Returns errors encountered while closing, or if
+		// Close is called after the stream has been canceled.  Like Send,
+		// blocks if there is no buffer space available.
+		Close() error
+	} {{end}}
+}
+
+// {{$clientCall}} represents the call returned from {{$iface.Name}}.{{$method.Name}}.
+type {{$clientCall}} interface {
+	{{$clientStream}} {{if $method.InStream}}
+	// Finish performs the equivalent of SendStream().Close, then blocks until
+	// the server is done, and returns the positional return values for the call.{{else}}
+	// Finish blocks until the server is done, and returns the positional return
+	// values for call.{{end}}
+	//
+	// Finish returns immediately if the call has been canceled; depending on the
+	// timing the output could either be an error signaling cancelation, or the
+	// valid positional return values from the server.
+	//
+	// Calling Finish is mandatory for releasing stream resources, unless the call
+	// has been canceled or any of the other methods return an error.  Finish should
+	// be called at most once.
+	Finish() {{argParens (argNameTypes "" "" "err error" $data $method.OutArgs)}}
+}
+
+type {{$clientCallImpl}} struct {
+	{{$ipc_}}Call{{if $method.OutStream}}
+	valRecv {{typeGo $data $method.OutStream}}
+	errRecv error{{end}}
+}
+
+{{if $method.OutStream}}func (c *{{$clientCallImpl}}) RecvStream() interface {
+	Advance() bool
+	Value() {{typeGo $data $method.OutStream}}
+	Err() error
+} {
+	return {{$clientRecvImpl}}{c}
+}
+
+type {{$clientRecvImpl}} struct {
+	c *{{$clientCallImpl}}
+}
+
+func (c {{$clientRecvImpl}}) Advance() bool {
+	{{reInitStreamValue $data $method.OutStream "c.c.valRecv"}}c.c.errRecv = c.c.Recv(&c.c.valRecv)
+	return c.c.errRecv == nil
+}
+func (c {{$clientRecvImpl}}) Value() {{typeGo $data $method.OutStream}} {
+	return c.c.valRecv
+}
+func (c {{$clientRecvImpl}}) Err() error {
+	if c.c.errRecv == {{$data.Pkg "io"}}EOF {
+		return nil
+	}
+	return c.c.errRecv
+}
+{{end}}{{if $method.InStream}}func (c *{{$clientCallImpl}}) SendStream() interface {
+	Send(item {{typeGo $data $method.InStream}}) error
+	Close() error
+} {
+	return {{$clientSendImpl}}{c}
+}
+
+type {{$clientSendImpl}} struct {
+	c *{{$clientCallImpl}}
+}
+
+func (c {{$clientSendImpl}}) Send(item {{typeGo $data $method.InStream}}) error {
+	return c.c.Send(item)
+}
+func (c {{$clientSendImpl}}) Close() error {
+	return c.c.CloseSend()
+}
+{{end}}func (c *{{$clientCallImpl}}) Finish() {{argParens (argNameTypes "o" "" "err error" $data $method.OutArgs)}} {
+{{clientFinishImpl "c.Call" $method}}
+	return
+}
+{{end}}{{end}}
+
+// {{$iface.Name}}ServerMethods is the interface a server writer
+// implements for {{$iface.Name}}.
+{{docBreak $iface.Doc}}type {{$iface.Name}}ServerMethods interface { {{range $embed := $iface.Embeds}}
+	{{$embed.Doc}}{{embedGo $data $embed}}ServerMethods{{$embed.DocSuffix}}{{end}}{{range $method := $iface.Methods}}
+	{{$method.Doc}}{{$method.Name}}({{argNameTypes "" (serverContextType "ctx " $data $iface $method) "" $data $method.InArgs}}) {{argParens (argNameTypes "" "" "err error" $data $method.OutArgs)}}{{$method.DocSuffix}}{{end}}
+}
+
+// {{$iface.Name}}ServerStubMethods is the server interface containing
+// {{$iface.Name}} methods, as expected by ipc.Server.{{if $ifaceStreaming}}
+// The only difference between this interface and {{$iface.Name}}ServerMethods
+// is the streaming methods.{{else}}
+// There is no difference between this interface and {{$iface.Name}}ServerMethods
+// since there are no streaming methods.{{end}}
+type {{$iface.Name}}ServerStubMethods {{if $ifaceStreaming}}interface { {{range $embed := $iface.Embeds}}
+	{{$embed.Doc}}{{embedGo $data $embed}}ServerStubMethods{{$embed.DocSuffix}}{{end}}{{range $method := $iface.Methods}}
+	{{$method.Doc}}{{$method.Name}}({{argNameTypes "" (serverContextStubType "ctx " $data $iface $method) "" $data $method.InArgs}}) {{argParens (argNameTypes "" "" "err error" $data $method.OutArgs)}}{{$method.DocSuffix}}{{end}}
+}
+{{else}}{{$iface.Name}}ServerMethods
+{{end}}
+
+// {{$iface.Name}}ServerStub adds universal methods to {{$iface.Name}}ServerStubMethods.
+type {{$iface.Name}}ServerStub interface {
+	{{$iface.Name}}ServerStubMethods
+	// Describe the {{$iface.Name}} interfaces.
+	Describe__() []{{$ipc_}}InterfaceDesc
+}
+
+// {{$iface.Name}}Server returns a server stub for {{$iface.Name}}.
+// It converts an implementation of {{$iface.Name}}ServerMethods into
+// an object that may be used by ipc.Server.
+func {{$iface.Name}}Server(impl {{$iface.Name}}ServerMethods) {{$iface.Name}}ServerStub {
+	stub := impl{{$iface.Name}}ServerStub{
+		impl: impl,{{range $embed := $iface.Embeds}}
+		{{$embed.Name}}ServerStub: {{embedGo $data $embed}}Server(impl),{{end}}
+	}
+	// Initialize GlobState; always check the stub itself first, to handle the
+	// case where the user has the Glob method defined in their VDL source.
+	if gs := {{$ipc_}}NewGlobState(stub); gs != nil {
+		stub.gs = gs
+	} else if gs := {{$ipc_}}NewGlobState(impl); gs != nil {
+		stub.gs = gs
+	}
+	return stub
+}
+
+type impl{{$iface.Name}}ServerStub struct {
+	impl {{$iface.Name}}ServerMethods{{range $embed := $iface.Embeds}}
+	{{embedGo $data $embed}}ServerStub{{end}}
+	gs *{{$ipc_}}GlobState
+}
+
+{{range $method := $iface.Methods}}
+func (s impl{{$iface.Name}}ServerStub) {{$method.Name}}({{argNameTypes "i" (serverContextStubType "ctx " $data $iface $method) "" $data $method.InArgs}}) {{argParens (argTypes "" "error" $data $method.OutArgs)}} {
+{{serverStubImpl $data $iface $method}}
+}
+{{end}}
+
+func (s impl{{$iface.Name}}ServerStub) Globber() *{{$ipc_}}GlobState {
+	return s.gs
+}
+
+func (s impl{{$iface.Name}}ServerStub) Describe__() []{{$ipc_}}InterfaceDesc {
+	return []{{$ipc_}}InterfaceDesc{ {{$iface.Name}}Desc{{range $embed := $iface.TransitiveEmbeds}}, {{embedGo $data $embed}}Desc{{end}} }
+}
+
+// {{$iface.Name}}Desc describes the {{$iface.Name}} interface.
+var {{$iface.Name}}Desc {{$ipc_}}InterfaceDesc = desc{{$iface.Name}}
+
+// desc{{$iface.Name}} hides the desc to keep godoc clean.
+var desc{{$iface.Name}} = {{$ipc_}}InterfaceDesc{ {{if $iface.Name}}
+	Name: "{{$iface.Name}}",{{end}}{{if $iface.File.Package.Path}}
+	PkgPath: "{{$iface.File.Package.Path}}",{{end}}{{if $iface.Doc}}
+	Doc: {{quoteStripDoc $iface.Doc}},{{end}}{{if $iface.Embeds}}
+	Embeds: []{{$ipc_}}EmbedDesc{ {{range $embed := $iface.Embeds}}
+		{ "{{$embed.Name}}", "{{$embed.File.Package.Path}}", {{quoteStripDoc $embed.Doc}} },{{end}}
+	},{{end}}{{if $iface.Methods}}
+	Methods: []{{$ipc_}}MethodDesc{ {{range $method := $iface.Methods}}
+		{ {{if $method.Name}}
+			Name: "{{$method.Name}}",{{end}}{{if $method.Doc}}
+			Doc: {{quoteStripDoc $method.Doc}},{{end}}{{if $method.InArgs}}
+			InArgs: []{{$ipc_}}ArgDesc{ {{range $arg := $method.InArgs}}
+				{ "{{$arg.Name}}", {{quoteStripDoc $arg.Doc}} }, // {{typeGo $data $arg.Type}}{{end}}
+			},{{end}}{{if $method.OutArgs}}
+			OutArgs: []{{$ipc_}}ArgDesc{ {{range $arg := $method.OutArgs}}
+				{ "{{$arg.Name}}", {{quoteStripDoc $arg.Doc}} }, // {{typeGo $data $arg.Type}}{{end}}
+			},{{end}}{{if $method.Tags}}
+			Tags: []*{{$data.Pkg "v.io/v23/vdl"}}Value{ {{range $tag := $method.Tags}}{{tagValue $data $tag}} ,{{end}} },{{end}}
+		},{{end}}
+	},{{end}}
+}
+
+{{range $method := $iface.Methods}}
+{{if isStreamingMethod $method}}
+{{$serverStream := uniqueName $iface $method "ServerStream"}}
+{{$serverContext := uniqueName $iface $method "Context"}}
+{{$serverContextStub := uniqueName $iface $method "ContextStub"}}
+{{$serverRecvImpl := uniqueNameImpl $iface $method "ContextRecv"}}
+{{$serverSendImpl := uniqueNameImpl $iface $method "ContextSend"}}
+
+// {{$serverStream}} is the server stream for {{$iface.Name}}.{{$method.Name}}.
+type {{$serverStream}} interface { {{if $method.InStream}}
+	// RecvStream returns the receiver side of the {{$iface.Name}}.{{$method.Name}} server stream.
+	RecvStream() interface {
+		// Advance stages an item so that it may be retrieved via Value.  Returns
+		// true iff there is an item to retrieve.  Advance must be called before
+		// Value is called.  May block if an item is not available.
+		Advance() bool
+		// Value returns the item that was staged by Advance.  May panic if Advance
+		// returned false or was not called.  Never blocks.
+		Value() {{typeGo $data $method.InStream}}
+		// Err returns any error encountered by Advance.  Never blocks.
+		Err() error
+	} {{end}}{{if $method.OutStream}}
+	// SendStream returns the send side of the {{$iface.Name}}.{{$method.Name}} server stream.
+	SendStream() interface {
+		// Send places the item onto the output stream.  Returns errors encountered
+		// while sending.  Blocks if there is no buffer space; will unblock when
+		// buffer space is available.
+		Send(item {{typeGo $data $method.OutStream}}) error
+	} {{end}}
+}
+
+// {{$serverContext}} represents the context passed to {{$iface.Name}}.{{$method.Name}}.
+type {{$serverContext}} interface {
+	{{$ipc_}}ServerContext
+	{{$serverStream}}
+}
+
+// {{$serverContextStub}} is a wrapper that converts ipc.ServerCall into
+// a typesafe stub that implements {{$serverContext}}.
+type {{$serverContextStub}} struct {
+	{{$ipc_}}ServerCall{{if $method.InStream}}
+	valRecv {{typeGo $data $method.InStream}}
+	errRecv error{{end}}
+}
+
+// Init initializes {{$serverContextStub}} from ipc.ServerCall.
+func (s *{{$serverContextStub}}) Init(call {{$ipc_}}ServerCall) {
+	s.ServerCall = call
+}
+
+{{if $method.InStream}}// RecvStream returns the receiver side of the {{$iface.Name}}.{{$method.Name}} server stream.
+func (s  *{{$serverContextStub}}) RecvStream() interface {
+	Advance() bool
+	Value() {{typeGo $data $method.InStream}}
+	Err() error
+} {
+	return {{$serverRecvImpl}}{s}
+}
+
+type {{$serverRecvImpl}} struct {
+	s *{{$serverContextStub}}
+}
+
+func (s {{$serverRecvImpl}}) Advance() bool {
+	{{reInitStreamValue $data $method.InStream "s.s.valRecv"}}s.s.errRecv = s.s.Recv(&s.s.valRecv)
+	return s.s.errRecv == nil
+}
+func (s {{$serverRecvImpl}}) Value() {{typeGo $data $method.InStream}} {
+	return s.s.valRecv
+}
+func (s {{$serverRecvImpl}}) Err() error {
+	if s.s.errRecv == {{$data.Pkg "io"}}EOF {
+		return nil
+	}
+	return s.s.errRecv
+}
+{{end}}{{if $method.OutStream}}// SendStream returns the send side of the {{$iface.Name}}.{{$method.Name}} server stream.
+func (s *{{$serverContextStub}}) SendStream() interface {
+	Send(item {{typeGo $data $method.OutStream}}) error
+} {
+	return {{$serverSendImpl}}{s}
+}
+
+type {{$serverSendImpl}} struct {
+	s *{{$serverContextStub}}
+}
+
+func (s {{$serverSendImpl}}) Send(item {{typeGo $data $method.OutStream}}) error {
+	return s.s.Send(item)
+}
+{{end}}{{end}}{{end}}
+
+{{end}}
+`
diff --git a/lib/vdl/codegen/golang/import.go b/lib/vdl/codegen/golang/import.go
new file mode 100644
index 0000000..9c3d314
--- /dev/null
+++ b/lib/vdl/codegen/golang/import.go
@@ -0,0 +1,356 @@
+package golang
+
+// TODO(toddw): Add tests for this logic.
+
+import (
+	"sort"
+	"strconv"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// goImport represents a single import in the generated Go file.
+//   Example A: import     "v.io/core/abc"
+//   Example B: import foo "v.io/core/abc"
+type goImport struct {
+	// Name of the import; empty when the package name is usable as-is, "_" for
+	// forced imports kept only for their side effects.
+	//   Example A: ""
+	//   Example B: "foo"
+	Name string
+	// Path of the import.
+	//   Example A: "v.io/core/abc"
+	//   Example B: "v.io/core/abc"
+	Path string
+	// Local identifier within the generated go file to reference the imported
+	// package.  Local equals Name when Name is non-empty, otherwise the
+	// package name inferred from Path.
+	//   Example A: "abc"
+	//   Example B: "foo"
+	Local string
+}
+
+// goImports holds all imports for a generated Go file, splitting into two
+// groups "system" and "user".  The splitting is just for slightly nicer output,
+// and to ensure we prefer system over user imports when dealing with package
+// name collisions.  Instances are constructed via newImports; both slices are
+// sorted by package path.
+type goImports struct {
+	System, User []goImport
+}
+
+// newImports computes all imports needed by the generated file, split into the
+// system and user groups.  A single shared "seen" set keeps local identifiers
+// unique across both groups; sorting system imports first means they win any
+// package-name collision.
+func newImports(file *compile.File, env *compile.Env) *goImports {
+	deps, user := computeDeps(file, env)
+	seen := make(map[string]bool)
+	ret := new(goImports)
+	ret.System = systemImports(deps, file).Sort(seen)
+	ret.User = user.Sort(seen)
+	return ret
+}
+
+// importMap maps from package path to package name.  It's used to collect
+// package import information.  The special name "_" marks a forced import
+// (see AddForcedPackage).
+type importMap map[string]string
+
+// AddPackage adds a regular dependency on pkg; some block of generated code
+// will reference the pkg.  It overwrites any forced "_" entry recorded earlier
+// for the same path.
+func (im importMap) AddPackage(pkg *compile.Package) {
+	im[pkg.GenPath] = pkg.Name
+}
+
+// AddForcedPackage adds a "forced" dependency on pkg.  This means that we need
+// to import pkg even if no other block of generated code references the pkg.
+// A regular AddPackage for the same path takes precedence, whether it happens
+// before or after this call.
+func (im importMap) AddForcedPackage(pkg *compile.Package) {
+	// Only set the "_" marker when no (non-empty) name is recorded yet.
+	if im[pkg.GenPath] == "" {
+		im[pkg.GenPath] = "_"
+	}
+}
+
+// DeletePackage removes any recorded dependency on pkg.
+func (im importMap) DeletePackage(pkg *compile.Package) {
+	delete(im, pkg.GenPath)
+}
+
+// Sort returns the imports ordered by package path, assigning each one a
+// unique local identifier.  The seen set is carried across calls so that
+// system and user imports never collide with each other.
+func (im importMap) Sort(seen map[string]bool) []goImport {
+	paths := make([]string, 0, len(im))
+	for p := range im {
+		paths = append(paths, p)
+	}
+	sort.Strings(paths)
+	var imports []goImport
+	for _, p := range paths {
+		imports = append(imports, uniqueImport(im[p], p, seen))
+	}
+	return imports
+}
+
+// uniqueImport returns an import for pkgPath whose local identifier does not
+// collide with any name already in seen; collisions are resolved by appending
+// a "_N" suffix with N starting at 2.  Examples:
+//   uniqueImport("a", "v.io/a", {})           -> goImport{"", "v.io/a", "a"}
+//   uniqueImport("z", "v.io/a", {})           -> goImport{"", "v.io/a", "z"}
+//   uniqueImport("a", "v.io/a", {"a"})        -> goImport{"a_2", "v.io/a", "a_2"}
+//   uniqueImport("a", "v.io/a", {"a", "a_2"}) -> goImport{"a_3", "v.io/a", "a_3"}
+//   uniqueImport("_", "v.io/a", {})           -> goImport{"_", "v.io/a", ""}
+//   uniqueImport("_", "v.io/a", {"a"})        -> goImport{"_", "v.io/a", ""}
+//   uniqueImport("_", "v.io/a", {"a", "a_2"}) -> goImport{"_", "v.io/a", ""}
+func uniqueImport(pkgName, pkgPath string, seen map[string]bool) goImport {
+	if pkgName == "_" {
+		// Forced imports always keep the "_" name and have no local identifier.
+		return goImport{"_", pkgPath, ""}
+	}
+	for iter, name := 1, ""; ; iter++ {
+		local := pkgName
+		if iter > 1 {
+			local = pkgName + "_" + strconv.Itoa(iter)
+			name = local
+		}
+		if !seen[local] {
+			// Found a unique local name - return the import.
+			seen[local] = true
+			return goImport{name, pkgPath, local}
+		}
+	}
+}
+
+// LookupLocal returns the local identifier within the generated go file that
+// identifies the given pkgPath, checking system imports before user imports.
+func (x *goImports) LookupLocal(pkgPath string) string {
+	local := lookupLocal(pkgPath, x.System)
+	if local == "" {
+		local = lookupLocal(pkgPath, x.User)
+	}
+	return local
+}
+
+// lookupLocal binary-searches the path-sorted imports for pkgPath, returning
+// the matching local identifier, or "" when pkgPath is not imported.
+func lookupLocal(pkgPath string, imports []goImport) string {
+	ix := sort.Search(len(imports), func(i int) bool {
+		return imports[i].Path >= pkgPath
+	})
+	if ix == len(imports) || imports[ix].Path != pkgPath {
+		return ""
+	}
+	return imports[ix].Local
+}
+
+// deps records which built-in facilities the generated file relies on, so that
+// systemImports can emit the corresponding system imports.
+type deps struct {
+	any              bool // file uses the any type
+	typeObject       bool // file uses the typeobject type
+	enumTypeDef      bool // file defines an enum type
+	streamingMethods bool // file has methods with in/out streams
+	methodTags       bool // file has methods with tags
+}
+
+// computeDeps walks every definition in file, returning the system-facility
+// deps and the importMap of user packages the generated file will reference.
+func computeDeps(file *compile.File, env *compile.Env) (deps, importMap) {
+	d, user := &deps{}, make(importMap)
+	// TypeDef.Type is always defined in our package; add deps on the base type.
+	for _, tdef := range file.TypeDefs {
+		addTypeDeps(tdef.BaseType, env, d, user)
+		if tdef.Type.Kind() == vdl.Enum {
+			d.enumTypeDef = true
+		}
+	}
+	// Consts contribute their value types.
+	for _, cdef := range file.ConstDefs {
+		addValueTypeDeps(cdef.Value, env, d, user)
+	}
+	// Interfaces contribute their arg types and tag values, as well as embedded
+	// interfaces.
+	for _, iface := range file.Interfaces {
+		for _, embed := range iface.TransitiveEmbeds() {
+			user.AddPackage(embed.File.Package)
+		}
+		for _, method := range iface.Methods {
+			for _, arg := range method.InArgs {
+				addTypeDeps(arg.Type, env, d, user)
+			}
+			for _, arg := range method.OutArgs {
+				addTypeDeps(arg.Type, env, d, user)
+			}
+			if in := method.InStream; in != nil {
+				addTypeDeps(in, env, d, user)
+				d.streamingMethods = true
+			}
+			if out := method.OutStream; out != nil {
+				addTypeDeps(out, env, d, user)
+				d.streamingMethods = true
+			}
+			for _, tag := range method.Tags {
+				addValueTypeDeps(tag, env, d, user)
+				d.methodTags = true
+			}
+		}
+	}
+	// Errors contribute their param types.
+	for _, edef := range file.ErrorDefs {
+		for _, param := range edef.Params {
+			addTypeDeps(param.Type, env, d, user)
+		}
+	}
+	// Native types contribute their imports, for the auto-generated native
+	// conversion function type assertion.
+	for _, native := range file.Package.Config.Go.WireToNativeTypes {
+		for _, imp := range native.Imports {
+			user[imp.Path] = imp.Name
+		}
+	}
+	// Every package can use itself and the built-in package without importing
+	// them, so drop both from the user imports.
+	user.DeletePackage(file.Package)
+	user.DeletePackage(compile.BuiltInPackage)
+	return *d, user
+}
+
+// addTypeDepIfDefined adds package deps iff t is a defined (named) type,
+// reporting whether it was defined.
+func addTypeDepIfDefined(t *vdl.Type, env *compile.Env, deps *deps, user importMap) bool {
+	switch t {
+	case vdl.AnyType:
+		deps.any = true
+	case vdl.TypeObjectType:
+		deps.typeObject = true
+	}
+	def := env.FindTypeDef(t)
+	if def == nil {
+		return false
+	}
+	pkg := def.File.Package
+	native, ok := pkg.Config.Go.WireToNativeTypes[def.Name]
+	if !ok {
+		// There's no native type configured for this defined type.  Add the
+		// imports corresponding to the VDL package.
+		user.AddPackage(pkg)
+		return true
+	}
+	// There is a native type configured for this defined type.  Add the
+	// imports corresponding to the native type.
+	for _, imp := range native.Imports {
+		user[imp.Path] = imp.Name
+	}
+	// Also add a "forced" import on the regular VDL package, to ensure the
+	// wire type is registered, to establish the wire<->native mapping.
+	user.AddForcedPackage(pkg)
+	return true
+}
+
+// addTypeDeps adds immediate package deps for t and subtypes of t.  Defined
+// (named) types terminate the recursion; only immediate deps are tracked.
+func addTypeDeps(t *vdl.Type, env *compile.Env, deps *deps, user importMap) {
+	if !addTypeDepIfDefined(t, env, deps, user) {
+		// Not all types have TypeDefs; e.g. unnamed lists have no corresponding
+		// TypeDef, so we need to traverse those recursively.
+		addSubTypeDeps(t, env, deps, user)
+	}
+}
+
+// addSubTypeDeps adds immediate package deps for the component types of t.
+func addSubTypeDeps(t *vdl.Type, env *compile.Env, deps *deps, user importMap) {
+	switch t.Kind() {
+	case vdl.Array, vdl.List, vdl.Optional:
+		addTypeDeps(t.Elem(), env, deps, user)
+	case vdl.Set:
+		addTypeDeps(t.Key(), env, deps, user)
+	case vdl.Map:
+		addTypeDeps(t.Key(), env, deps, user)
+		addTypeDeps(t.Elem(), env, deps, user)
+	case vdl.Struct, vdl.Union:
+		for fx := 0; fx < t.NumField(); fx++ {
+			addTypeDeps(t.Field(fx).Type, env, deps, user)
+		}
+	}
+}
+
+// addValueTypeDeps adds immediate package deps for v.Type(), and recursively
+// for all subvalues.  We must traverse the value (not just its type) to know
+// which types are actually used; e.g. an empty struct doesn't have a
+// dependency on its field types.  This identifies the package and type deps
+// for const and tag values.
+func addValueTypeDeps(v *vdl.Value, env *compile.Env, deps *deps, user importMap) {
+	t := v.Type()
+	addTypeDepIfDefined(t, env, deps, user)
+	// Track transitive dependencies, by traversing subvalues recursively.
+	switch t.Kind() {
+	case vdl.Array, vdl.List:
+		for ix, n := 0, v.Len(); ix < n; ix++ {
+			addValueTypeDeps(v.Index(ix), env, deps, user)
+		}
+	case vdl.Set:
+		for _, key := range v.Keys() {
+			addValueTypeDeps(key, env, deps, user)
+		}
+	case vdl.Map:
+		for _, key := range v.Keys() {
+			addValueTypeDeps(key, env, deps, user)
+			addValueTypeDeps(v.MapIndex(key), env, deps, user)
+		}
+	case vdl.Struct:
+		if v.IsZero() {
+			// Zero-valued structs are printed as {}, so their fields contribute
+			// nothing; stop the traversal here.
+			return
+		}
+		for fx := 0; fx < t.NumField(); fx++ {
+			addValueTypeDeps(v.StructField(fx), env, deps, user)
+		}
+	case vdl.Union:
+		_, field := v.UnionField()
+		addValueTypeDeps(field, env, deps, user)
+	case vdl.Any, vdl.Optional:
+		if elem := v.Elem(); elem != nil {
+			addValueTypeDeps(elem, env, deps, user)
+		}
+	case vdl.TypeObject:
+		// TypeObject has dependencies on everything its zero value depends on.
+		addValueTypeDeps(vdl.ZeroValue(v.TypeObject()), env, deps, user)
+	}
+}
+
+// systemImports returns the vdl system imports for the given file and deps.
+//
+// We can't simply capture imports during code generation and dump them all out
+// afterwards, because package name collisions change the local identifier used
+// to reference an imported package.  An example:
+//
+//   package pkg
+//
+//   import       "a/foo"
+//   import foo_2 "b/foo"
+//
+//   type X foo.T
+//   type Y foo_2.T
+//
+// Generating code for X and Y requires knowing the local identifiers, and
+// those are only known after all imports have been collected and collisions
+// resolved - hence this up-front computation.
+func systemImports(deps deps, file *compile.File) importMap {
+	system := make(importMap)
+	if deps.any || deps.typeObject || deps.methodTags || len(file.TypeDefs) > 0 {
+		// System import for vdl.Value, vdl.Type and vdl.Register.
+		system["v.io/v23/vdl"] = "vdl"
+	}
+	if deps.enumTypeDef {
+		system["fmt"] = "fmt"
+	}
+	if len(file.Interfaces) > 0 {
+		system["v.io/v23"] = "v23"
+		system["v.io/v23/context"] = "context"
+		system["v.io/v23/ipc"] = "ipc"
+	}
+	if deps.streamingMethods {
+		system["io"] = "io"
+	}
+	if len(file.ErrorDefs) > 0 {
+		system["v.io/v23/context"] = "context"
+		system["v.io/v23/i18n"] = "i18n"
+		// Errors normally require "v.io/v23/verror".  However vdl
+		// code-generation is allowed inside the verror package itself, to
+		// specify common errors; special-case that to avoid a self-cyclic
+		// package dependency.
+		if file.Package.Path != "v.io/v23/verror" {
+			system["v.io/v23/verror"] = "verror"
+		}
+	}
+	// A package never imports itself.
+	system.DeletePackage(file.Package)
+	return system
+}
diff --git a/lib/vdl/codegen/golang/type.go b/lib/vdl/codegen/golang/type.go
new file mode 100644
index 0000000..596fe73
--- /dev/null
+++ b/lib/vdl/codegen/golang/type.go
@@ -0,0 +1,219 @@
+package golang
+
+import (
+	"fmt"
+	"strconv"
+	"strings"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdlroot/vdltool"
+)
+
+// localIdent returns the identifier used within the generated file to refer to
+// ident, which is defined in file's package.
+func localIdent(data goData, file *compile.File, ident string) string {
+	if testingMode {
+		return ident
+	}
+	return data.Pkg(file.Package.GenPath) + ident
+}
+
+// nativeIdent translates the configured native type expression into the form
+// used in the generated file, swapping each "pkgname." qualifier for the local
+// package identifier.
+func nativeIdent(data goData, native vdltool.GoType) string {
+	ident := native.Type
+	for _, imp := range native.Imports {
+		// E.g. for native type "foo.Type" with import "path/foo", replace
+		// "foo." with the local identifier of "path/foo" (data.Pkg yields the
+		// identifier including its trailing dot).
+		ident = strings.Replace(ident, imp.Name+".", data.Pkg(imp.Path), -1)
+	}
+	return ident
+}
+
+// packageIdent returns ident qualified by its package name, e.g. "pkg.Ident".
+func packageIdent(file *compile.File, ident string) string {
+	if testingMode {
+		return ident
+	}
+	return file.Package.Name + "." + ident
+}
+
+// qualifiedIdent returns ident qualified by its package's qualified name.
+func qualifiedIdent(file *compile.File, ident string) string {
+	if testingMode {
+		return ident
+	}
+	return file.Package.QualifiedName(ident)
+}
+
+// typeGo translates a vdl.Type into its Go type syntax.
+func typeGo(data goData, t *vdl.Type) string {
+	if testingMode && t.Name() != "" {
+		return t.Name()
+	}
+	// Defined types - both user-defined (enum, struct, union) and built-in
+	// types - terminate the recursion.
+	if def := data.Env.FindTypeDef(t); def != nil {
+		switch {
+		case t == vdl.AnyType:
+			return "*" + data.Pkg("v.io/v23/vdl") + "Value"
+		case t == vdl.TypeObjectType:
+			return "*" + data.Pkg("v.io/v23/vdl") + "Type"
+		case def.File == compile.BuiltInFile:
+			// Built-in primitives just use their name.
+			return def.Name
+		}
+		if native, ok := def.File.Package.Config.Go.WireToNativeTypes[def.Name]; ok {
+			// There is a Go native type configured for this defined type.
+			return nativeIdent(data, native)
+		}
+		return localIdent(data, def.File, def.Name)
+	}
+	// Unnamed composite types recurse through their component types.
+	switch t.Kind() {
+	case vdl.Optional:
+		return "*" + typeGo(data, t.Elem())
+	case vdl.Array:
+		return "[" + strconv.Itoa(t.Len()) + "]" + typeGo(data, t.Elem())
+	case vdl.List:
+		return "[]" + typeGo(data, t.Elem())
+	case vdl.Set:
+		return "map[" + typeGo(data, t.Key()) + "]struct{}"
+	case vdl.Map:
+		return "map[" + typeGo(data, t.Key()) + "]" + typeGo(data, t.Elem())
+	default:
+		panic(fmt.Errorf("vdl: typeGo unhandled type %v %v", t.Kind(), t))
+	}
+}
+
+// typeDefGo prints the Go type definition for the given type, including the
+// __VDLReflect helper method that describes the type for vdl reflection.
+func typeDefGo(data goData, def *compile.TypeDef) string {
+	s := fmt.Sprintf("%stype %s ", def.Doc, def.Name)
+	switch t := def.Type; t.Kind() {
+	case vdl.Enum:
+		// Enums become an int with one constant per label, plus FromString,
+		// Set and String helpers.
+		s += fmt.Sprintf("int%s\nconst (", def.DocSuffix)
+		for ix := 0; ix < t.NumEnumLabel(); ix++ {
+			s += fmt.Sprintf("\n\t%s%s%s", def.LabelDoc[ix], def.Name, t.EnumLabel(ix))
+			if ix == 0 {
+				s += fmt.Sprintf(" %s = iota", def.Name)
+			}
+			s += def.LabelDocSuffix[ix]
+		}
+		s += fmt.Sprintf("\n)"+
+			"\n\n// %[1]sAll holds all labels for %[1]s."+
+			"\nvar %[1]sAll = []%[1]s{%[2]s}"+
+			"\n\n// %[1]sFromString creates a %[1]s from a string label."+
+			"\nfunc %[1]sFromString(label string) (x %[1]s, err error) {"+
+			"\n\terr = x.Set(label)"+
+			"\n\treturn"+
+			"\n}"+
+			"\n\n// Set assigns label to x."+
+			"\nfunc (x *%[1]s) Set(label string) error {"+
+			"\n\tswitch label {",
+			def.Name,
+			commaEnumLabels(def.Name, t))
+		// Set matches both the exact label and its all-lowercase form.
+		for ix := 0; ix < t.NumEnumLabel(); ix++ {
+			s += fmt.Sprintf("\n\tcase %[2]q, %[3]q:"+
+				"\n\t\t*x = %[1]s%[2]s"+
+				"\n\t\treturn nil", def.Name, t.EnumLabel(ix), strings.ToLower(t.EnumLabel(ix)))
+		}
+		s += fmt.Sprintf("\n\t}"+
+			"\n\t*x = -1"+
+			"\n\treturn "+data.Pkg("fmt")+"Errorf(\"unknown label %%q in %[2]s\", label)"+
+			"\n}"+
+			"\n\n// String returns the string label of x."+
+			"\nfunc (x %[1]s) String() string {"+
+			"\n\tswitch x {", def.Name, packageIdent(def.File, def.Name))
+		for ix := 0; ix < t.NumEnumLabel(); ix++ {
+			s += fmt.Sprintf("\n\tcase %[1]s%[2]s:"+
+				"\n\t\treturn %[2]q", def.Name, t.EnumLabel(ix))
+		}
+		s += fmt.Sprintf("\n\t}"+
+			"\n\treturn \"\""+
+			"\n}"+
+			"\n\nfunc (%[1]s) __VDLReflect(struct{"+
+			"\n\tName string %[3]q"+
+			"\n\tEnum struct{ %[2]s string }"+
+			"\n}) {"+
+			"\n}",
+			def.Name, commaEnumLabels("", t), qualifiedIdent(def.File, def.Name))
+		return s
+	case vdl.Struct:
+		// Structs map field-for-field, preserving per-field doc comments.
+		s += "struct {"
+		for ix := 0; ix < t.NumField(); ix++ {
+			f := t.Field(ix)
+			s += "\n\t" + def.FieldDoc[ix] + f.Name + " "
+			s += typeGo(data, f.Type) + def.FieldDocSuffix[ix]
+		}
+		s += "\n}" + def.DocSuffix
+		s += fmt.Sprintf("\n"+
+			"\nfunc (%[1]s) __VDLReflect(struct{"+
+			"\n\tName string %[2]q"+
+			"\n}) {"+
+			"\n}",
+			def.Name, qualifiedIdent(def.File, def.Name))
+		return s
+	case vdl.Union:
+		// Unions become an interface plus one single-field struct per field,
+		// tied together by the generated __XReflect description type.  Note
+		// this discards the "type X " prefix built above and restarts s.
+		s = fmt.Sprintf("type ("+
+			"\n\t// %[1]s represents any single field of the %[1]s union type."+
+			"\n\t%[2]s%[1]s interface {"+
+			"\n\t\t// Index returns the field index."+
+			"\n\t\tIndex() int"+
+			"\n\t\t// Interface returns the field value as an interface."+
+			"\n\t\tInterface() interface{}"+
+			"\n\t\t// Name returns the field name."+
+			"\n\t\tName() string"+
+			"\n\t\t// __VDLReflect describes the %[1]s union type."+
+			"\n\t\t__VDLReflect(__%[1]sReflect)"+
+			"\n\t}%[3]s", def.Name, docBreak(def.Doc), def.DocSuffix)
+		for ix := 0; ix < t.NumField(); ix++ {
+			f := t.Field(ix)
+			s += fmt.Sprintf("\n\t// %[1]s%[2]s represents field %[2]s of the %[1]s union type."+
+				"\n\t%[4]s%[1]s%[2]s struct{ Value %[3]s }%[5]s",
+				def.Name, f.Name, typeGo(data, f.Type),
+				docBreak(def.FieldDoc[ix]), def.FieldDocSuffix[ix])
+		}
+		s += fmt.Sprintf("\n\t// __%[1]sReflect describes the %[1]s union type."+
+			"\n\t__%[1]sReflect struct {"+
+			"\n\t\tName string %[2]q"+
+			"\n\t\tType %[1]s"+
+			"\n\t\tUnion struct {", def.Name, qualifiedIdent(def.File, def.Name))
+		for ix := 0; ix < t.NumField(); ix++ {
+			s += fmt.Sprintf("\n\t\t\t%[2]s %[1]s%[2]s", def.Name, t.Field(ix).Name)
+		}
+		s += fmt.Sprintf("\n\t\t}\n\t}\n)")
+		for ix := 0; ix < t.NumField(); ix++ {
+			s += fmt.Sprintf("\n\nfunc (x %[1]s%[2]s) Index() int { return %[3]d }"+
+				"\nfunc (x %[1]s%[2]s) Interface() interface{} { return x.Value }"+
+				"\nfunc (x %[1]s%[2]s) Name() string { return \"%[2]s\" }"+
+				"\nfunc (x %[1]s%[2]s) __VDLReflect(__%[1]sReflect) {}",
+				def.Name, t.Field(ix).Name, ix)
+		}
+		return s
+	default:
+		// All other kinds alias their base type.
+		s += typeGo(data, def.BaseType) + def.DocSuffix
+		s += fmt.Sprintf("\n"+
+			"\nfunc (%[1]s) __VDLReflect(struct{"+
+			"\n\tName string %[2]q"+
+			"\n}) {"+
+			"\n}",
+			def.Name, qualifiedIdent(def.File, def.Name))
+		return s
+	}
+}
+
+// commaEnumLabels returns the enum labels of t, each prefixed by prefix,
+// joined by ", ".  E.g. prefix "X" with labels A, B yields "XA, XB".
+func commaEnumLabels(prefix string, t *vdl.Type) string {
+	labels := make([]string, t.NumEnumLabel())
+	for ix := range labels {
+		labels[ix] = prefix + t.EnumLabel(ix)
+	}
+	return strings.Join(labels, ", ")
+}
+
+// embedGo returns the Go identifier that refers to the embedded interface.
+func embedGo(data goData, embed *compile.Interface) string {
+	return localIdent(data, embed.File, embed.Name)
+}
diff --git a/lib/vdl/codegen/golang/type_test.go b/lib/vdl/codegen/golang/type_test.go
new file mode 100644
index 0000000..289725e
--- /dev/null
+++ b/lib/vdl/codegen/golang/type_test.go
@@ -0,0 +1,183 @@
+package golang
+
+import (
+	"testing"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// TestType checks that typeGo renders each vdl type as the expected Go type
+// syntax.  testingMode makes typeGo return named types by their vdl name and
+// skips local package qualification.
+func TestType(t *testing.T) {
+	testingMode = true
+	tests := []struct {
+		T    *vdl.Type
+		Want string
+	}{
+		{vdl.AnyType, `*vdl.Value`},
+		{vdl.TypeObjectType, `*vdl.Type`},
+		{vdl.BoolType, `bool`},
+		{vdl.StringType, `string`},
+		{vdl.ListType(vdl.ByteType), `[]byte`},
+		{vdl.ByteType, `byte`},
+		{vdl.Uint16Type, `uint16`},
+		{vdl.Uint32Type, `uint32`},
+		{vdl.Uint64Type, `uint64`},
+		{vdl.Int16Type, `int16`},
+		{vdl.Int32Type, `int32`},
+		{vdl.Int64Type, `int64`},
+		{vdl.Float32Type, `float32`},
+		{vdl.Float64Type, `float64`},
+		{vdl.Complex64Type, `complex64`},
+		{vdl.Complex128Type, `complex128`},
+		{tArray, `[3]string`},
+		{tList, `[]string`},
+		{tSet, `map[string]struct{}`},
+		{tMap, `map[string]int64`},
+	}
+	data := goData{Env: compile.NewEnv(-1)}
+	for _, test := range tests {
+		if got, want := typeGo(data, test.T), test.Want; got != want {
+			t.Errorf("%s\nGOT %s\nWANT %s", test.T, got, want)
+		}
+	}
+}
+
+// TestTypeDef checks typeDefGo against golden output for an enum, a struct
+// and a union definition.  The TypeDef fixtures are built by hand below with
+// empty doc slices, since only the generated code shape is under test.
+func TestTypeDef(t *testing.T) {
+	testingMode = true
+	tests := []struct {
+		T    *vdl.Type
+		Want string
+	}{
+		{tEnum, `type TestEnum int
+const (
+	TestEnumA TestEnum = iota
+	TestEnumB
+	TestEnumC
+)
+
+// TestEnumAll holds all labels for TestEnum.
+var TestEnumAll = []TestEnum{TestEnumA, TestEnumB, TestEnumC}
+
+// TestEnumFromString creates a TestEnum from a string label.
+func TestEnumFromString(label string) (x TestEnum, err error) {
+	err = x.Set(label)
+	return
+}
+
+// Set assigns label to x.
+func (x *TestEnum) Set(label string) error {
+	switch label {
+	case "A", "a":
+		*x = TestEnumA
+		return nil
+	case "B", "b":
+		*x = TestEnumB
+		return nil
+	case "C", "c":
+		*x = TestEnumC
+		return nil
+	}
+	*x = -1
+	return fmt.Errorf("unknown label %q in TestEnum", label)
+}
+
+// String returns the string label of x.
+func (x TestEnum) String() string {
+	switch x {
+	case TestEnumA:
+		return "A"
+	case TestEnumB:
+		return "B"
+	case TestEnumC:
+		return "C"
+	}
+	return ""
+}
+
+func (TestEnum) __VDLReflect(struct{
+	Name string "TestEnum"
+	Enum struct{ A, B, C string }
+}) {
+}`},
+		{tStruct, `type TestStruct struct {
+	A string
+	B int64
+}
+
+func (TestStruct) __VDLReflect(struct{
+	Name string "TestStruct"
+}) {
+}`},
+		{tUnion, `type (
+	// TestUnion represents any single field of the TestUnion union type.
+	TestUnion interface {
+		// Index returns the field index.
+		Index() int
+		// Interface returns the field value as an interface.
+		Interface() interface{}
+		// Name returns the field name.
+		Name() string
+		// __VDLReflect describes the TestUnion union type.
+		__VDLReflect(__TestUnionReflect)
+	}
+	// TestUnionA represents field A of the TestUnion union type.
+	TestUnionA struct{ Value string }
+	// TestUnionB represents field B of the TestUnion union type.
+	TestUnionB struct{ Value int64 }
+	// __TestUnionReflect describes the TestUnion union type.
+	__TestUnionReflect struct {
+		Name string "TestUnion"
+		Type TestUnion
+		Union struct {
+			A TestUnionA
+			B TestUnionB
+		}
+	}
+)
+
+func (x TestUnionA) Index() int { return 0 }
+func (x TestUnionA) Interface() interface{} { return x.Value }
+func (x TestUnionA) Name() string { return "A" }
+func (x TestUnionA) __VDLReflect(__TestUnionReflect) {}
+
+func (x TestUnionB) Index() int { return 1 }
+func (x TestUnionB) Interface() interface{} { return x.Value }
+func (x TestUnionB) Name() string { return "B" }
+func (x TestUnionB) __VDLReflect(__TestUnionReflect) {}`},
+	}
+	data := goData{Env: compile.NewEnv(-1)}
+	for _, test := range tests {
+		// Build a minimal TypeDef; typeDefGo indexes the doc slices, so they
+		// must be allocated to the right length even though they're empty.
+		def := &compile.TypeDef{
+			NamePos:  compile.NamePos{Name: test.T.Name()},
+			Type:     test.T,
+			Exported: compile.ValidExportedIdent(test.T.Name(), compile.ReservedNormal) == nil,
+		}
+		switch test.T.Kind() {
+		case vdl.Enum:
+			def.LabelDoc = make([]string, test.T.NumEnumLabel())
+			def.LabelDocSuffix = make([]string, test.T.NumEnumLabel())
+		case vdl.Struct, vdl.Union:
+			def.FieldDoc = make([]string, test.T.NumField())
+			def.FieldDocSuffix = make([]string, test.T.NumField())
+		}
+		if got, want := typeDefGo(data, def), test.Want; got != want {
+			t.Errorf("%s\n GOT %s\nWANT %s", test.T, got, want)
+		}
+	}
+}
+
+// Test fixtures: named and unnamed vdl types exercised by the tests above.
+var (
+	tEnum   = vdl.NamedType("TestEnum", vdl.EnumType("A", "B", "C"))
+	tArray  = vdl.ArrayType(3, vdl.StringType)
+	tList   = vdl.ListType(vdl.StringType)
+	tSet    = vdl.SetType(vdl.StringType)
+	tMap    = vdl.MapType(vdl.StringType, vdl.Int64Type)
+	tStruct = vdl.NamedType("TestStruct", vdl.StructType(
+		vdl.Field{"A", vdl.StringType},
+		vdl.Field{"B", vdl.Int64Type},
+	))
+	tUnion = vdl.NamedType("TestUnion", vdl.UnionType(
+		vdl.Field{"A", vdl.StringType},
+		vdl.Field{"B", vdl.Int64Type},
+	))
+)
diff --git a/lib/vdl/codegen/import.go b/lib/vdl/codegen/import.go
new file mode 100644
index 0000000..2c1bd07
--- /dev/null
+++ b/lib/vdl/codegen/import.go
@@ -0,0 +1,146 @@
+package codegen
+
+import (
+	"path"
+	"sort"
+	"strconv"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// TODO(toddw): Remove this file, after all code generators have been updated to
+// compute their own import dependencies.
+
+// Import represents a single package import.  See the TODO at the top of this
+// file: this legacy representation is slated for removal once generators
+// compute their own imports.
+type Import struct {
+	Name string // Name of the import; may be empty.
+	Path string // Path of the imported package; e.g. "veyron/vdl"
+
+	// Local name that refers to the imported package; either the non-empty import
+	// name, or the name of the imported package.
+	Local string
+}
+
+// Imports is a collection of package imports.
+// REQUIRED: The imports must be sorted by path; LookupLocal binary-searches.
+type Imports []Import
+
+// LookupLocal returns the local name that identifies the given pkgPath, or ""
+// when pkgPath is not among the imports.
+func (x Imports) LookupLocal(pkgPath string) string {
+	ix := sort.Search(len(x), func(i int) bool { return x[i].Path >= pkgPath })
+	if ix == len(x) || x[ix].Path != pkgPath {
+		return ""
+	}
+	return x[ix].Local
+}
+
+// uniqueImport returns an Import for pkgPath whose local name is unique within
+// seen; collisions are resolved by appending a "_N" suffix, with N starting
+// at 2 and incrementing.
+func uniqueImport(pkgName, pkgPath string, seen map[string]bool) Import {
+	for iter, name := 1, ""; ; iter++ {
+		local := pkgName
+		if iter > 1 {
+			local = pkgName + "_" + strconv.Itoa(iter)
+			name = local
+		}
+		if !seen[local] {
+			// Found a unique local name - return the import.
+			seen[local] = true
+			return Import{name, pkgPath, local}
+		}
+	}
+}
+
+// pkgSorter implements sort.Interface, ordering packages by vdl package path.
+type pkgSorter []*compile.Package
+
+func (s pkgSorter) Len() int           { return len(s) }
+func (s pkgSorter) Less(i, j int) bool { return s[i].Path < s[j].Path }
+func (s pkgSorter) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
+
+// ImportsForFiles returns the imports required for the given files, sorted by
+// package path with unique local names.
+func ImportsForFiles(files ...*compile.File) Imports {
+	var pkgs pkgSorter
+	seenPath := make(map[string]bool)
+	for _, file := range files {
+		for _, dep := range file.PackageDeps {
+			if !seenPath[dep.Path] {
+				seenPath[dep.Path] = true
+				pkgs = append(pkgs, dep)
+			}
+		}
+	}
+	sort.Sort(pkgs)
+
+	var ret Imports
+	seenName := make(map[string]bool)
+	for _, dep := range pkgs {
+		ret = append(ret, uniqueImport(dep.Name, dep.Path, seenName))
+	}
+	return ret
+}
+
+// ImportsForValue returns the imports required to represent the given value v,
+// from within the given pkgPath.  It requires that type names used in v are of
+// the form "PKGPATH.NAME".
+func ImportsForValue(v *vdl.Value, pkgPath string) Imports {
+	deps := pkgDeps{}
+	deps.MergeValue(v)
+	seen := make(map[string]bool)
+	var ret Imports
+	for _, p := range deps.SortedPkgPaths() {
+		if p == pkgPath {
+			continue // never self-import
+		}
+		ret = append(ret, uniqueImport(path.Base(p), p, seen))
+	}
+	return ret
+}
+
+// pkgDeps maintains a set of package path dependencies; the map value is
+// always true for present entries.
+type pkgDeps map[string]bool
+
+// insertIdent records the package path of a qualified "PKGPATH.NAME" ident;
+// unqualified idents contribute nothing.
+func (deps pkgDeps) insertIdent(ident string) {
+	pkgPath, _ := vdl.SplitIdent(ident)
+	if pkgPath != "" {
+		deps[pkgPath] = true
+	}
+}
+
+// MergeValue merges the package paths required to represent v into deps.  Note
+// that for composite kinds only one level of key/elem type names is merged;
+// deeper unnamed nesting is not traversed.
+func (deps pkgDeps) MergeValue(v *vdl.Value) {
+	deps.insertIdent(v.Type().Name())
+	switch v.Kind() {
+	case vdl.Any, vdl.Union, vdl.Optional:
+		// NOTE(review): assumes Elem is valid on union values in this vdl API
+		// version - confirm against vdl.Value.Elem.
+		elem := v.Elem()
+		if elem != nil {
+			deps.MergeValue(elem)
+		}
+	case vdl.Array, vdl.List:
+		deps.insertIdent(v.Type().Elem().Name())
+	case vdl.Set:
+		deps.insertIdent(v.Type().Key().Name())
+	case vdl.Map:
+		deps.insertIdent(v.Type().Key().Name())
+		deps.insertIdent(v.Type().Elem().Name())
+	case vdl.Struct:
+		for fx := 0; fx < v.Type().NumField(); fx++ {
+			deps.insertIdent(v.Type().Field(fx).Type.Name())
+		}
+	}
+}
+
+// SortedPkgPaths returns the deps as a slice of package paths, sorted in
+// ascending order.
+func (deps pkgDeps) SortedPkgPaths() []string {
+	var ret []string
+	for pkgPath := range deps {
+		ret = append(ret, pkgPath)
+	}
+	sort.Strings(ret)
+	return ret
+}
diff --git a/lib/vdl/codegen/java/file_array.go b/lib/vdl/codegen/java/file_array.go
new file mode 100644
index 0000000..722deb7
--- /dev/null
+++ b/lib/vdl/codegen/java/file_array.go
@@ -0,0 +1,119 @@
+package java
+
+import (
+	"bytes"
+	"fmt"
+	"log"
+	"strings"
+
+	"v.io/v23/vdl/compile"
+)
+
+// arrayTmpl is the text/template used to generate the Java class for a named
+// VDL array type; its fields are supplied by genJavaArrayFile below.
+const arrayTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{.SourceFile}}
+
+package {{.Package}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends io.v.v23.vdl.VdlArray<{{.ElemType}}> {
+    public static final int LENGTH = {{.Length}};
+
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    public {{.Name}}({{.ElemType}}[] arr) {
+        super(VDL_TYPE, arr);
+    }
+
+    public {{.Name}}() {
+        this({{.ZeroValue}});
+    }
+
+    {{ if .ElemIsPrimitive }}
+    public {{.Name}}({{ .ElemPrimitiveType }}[] arr) {
+        super(VDL_TYPE, convert(arr));
+    }
+
+    private static {{ .ElemType }}[] convert({{ .ElemPrimitiveType }}[] arr) {
+        final {{ .ElemType }}[] ret = new {{ .ElemType }}[arr.length];
+        for (int i = 0; i < arr.length; ++i) {
+            ret[i] = arr[i];
+        }
+        return ret;
+    }
+    {{ end }}
+
+    @Override
+    public void writeToParcel(android.os.Parcel out, int flags) {
+        java.lang.reflect.Type elemType =
+                new com.google.common.reflect.TypeToken<{{.ElemType}}>(){}.getType();
+        io.v.v23.vdl.ParcelUtil.writeList(out, this, elemType);
+    }
+
+    @SuppressWarnings("hiding")
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR =
+            new android.os.Parcelable.Creator<{{.Name}}>() {
+        @SuppressWarnings("unchecked")
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            java.lang.reflect.Type elemType =
+                    new com.google.common.reflect.TypeToken<{{.ElemType}}>(){}.getType();
+            final {{.ElemType}}[] array = io.v.v23.vdl.ParcelUtil.readList(
+                    in, {{.Name}}.class.getClassLoader(), elemType).toArray(new {{.ElemType}}[0]);
+            return new {{.Name}}(array);
+        }
+
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+}
+`
+
+// genJavaArrayFile generates the Java class file for the provided named array
+// type, returning the file name and rendered contents.
+func genJavaArrayFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	javaTypeName := toUpperCamelCase(tdef.Name)
+	elemType := javaType(tdef.Type.Elem(), true, env)
+	// Build the default-constructor initializer: one zero value per element,
+	// e.g. "new T[] {z, z, z}" for a length-3 array.
+	elems := strings.TrimSuffix(strings.Repeat(javaZeroValue(tdef.Type.Elem(), env)+", ", tdef.Type.Len()), ", ")
+	zeroValue := fmt.Sprintf("new %s[] {%s}", elemType, elems)
+	data := struct {
+		AccessModifier    string
+		Doc               string
+		ElemType          string
+		ElemIsPrimitive   bool
+		ElemPrimitiveType string
+		Length            int
+		Name              string
+		Package           string
+		SourceFile        string
+		VdlTypeName       string
+		VdlTypeString     string
+		ZeroValue         string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		Doc:            javaDocInComment(tdef.Doc),
+		ElemType:       elemType,
+		// Primitive elements get an extra convenience constructor that boxes
+		// the primitive array (see arrayTmpl).
+		ElemIsPrimitive:   !isClass(tdef.Type.Elem(), env),
+		ElemPrimitiveType: javaType(tdef.Type.Elem(), false, env),
+		Length:            tdef.Type.Len(),
+		Name:              javaTypeName,
+		Package:           javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		SourceFile:        tdef.File.BaseName,
+		VdlTypeName:       tdef.Type.Name(),
+		VdlTypeString:     tdef.Type.String(),
+		ZeroValue:         zeroValue,
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("array", arrayTmpl).Execute(&buf, data)
+	if err != nil {
+		log.Fatalf("vdl: couldn't execute array template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaTypeName + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_client_factory.go b/lib/vdl/codegen/java/file_client_factory.go
new file mode 100644
index 0000000..9423280
--- /dev/null
+++ b/lib/vdl/codegen/java/file_client_factory.go
@@ -0,0 +1,56 @@
+package java
+
+import (
+	"bytes"
+	"log"
+	"path"
+
+	"v.io/v23/vdl/compile"
+)
+
+// clientFactoryTmpl is the Go template used to render the generated Java
+// <Service>ClientFactory class; see genJavaClientFactoryFile for the data it
+// is executed with.
+const clientFactoryTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source(s):  {{ .Sources }}
+package {{ .PackagePath }};
+
+/* Factory for binding to {{ .ServiceName }}Client interfaces. */
+{{.AccessModifier}} final class {{ .ServiceName }}ClientFactory {
+    public static {{ .ServiceName }}Client bind(final java.lang.String name) {
+        return bind(name, null);
+    }
+    public static {{ .ServiceName }}Client bind(final java.lang.String name, final io.v.v23.Options veyronOpts) {
+        io.v.v23.ipc.Client client = null;
+        if (veyronOpts != null && veyronOpts.get(io.v.v23.OptionDefs.CLIENT) != null) {
+            client = veyronOpts.get(io.v.v23.OptionDefs.CLIENT, io.v.v23.ipc.Client.class);
+        }
+        return new {{ .StubName }}(client, name);
+    }
+}
+`
+
+// genJavaClientFactoryFile generates the Java file containing the client
+// factory for the provided interface.  The factory exposes static bind()
+// helpers that construct a <Service>ClientStub for a given name.
+// Note: env is currently unused but kept for signature consistency with the
+// other genJava*File functions.
+func genJavaClientFactoryFile(iface *compile.Interface, env *compile.Env) JavaFileInfo {
+	javaServiceName := toUpperCamelCase(iface.Name)
+	// Data fed to clientFactoryTmpl; field names must match the template.
+	data := struct {
+		AccessModifier string
+		Sources        string
+		ServiceName    string
+		PackagePath    string
+		StubName       string
+	}{
+		AccessModifier: accessModifierForName(iface.Name),
+		Sources:        iface.File.BaseName,
+		ServiceName:    javaServiceName,
+		PackagePath:    javaPath(javaGenPkgPath(iface.File.Package.GenPath)),
+		StubName:       javaPath(javaGenPkgPath(path.Join(iface.File.Package.GenPath, iface.Name+"ClientStub"))),
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("client factory", clientFactoryTmpl).Execute(&buf, data)
+	if err != nil {
+		// A template failure is a bug in the generator itself; abort the run.
+		log.Fatalf("vdl: couldn't execute client factory template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaServiceName + "ClientFactory.java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_client_interface.go b/lib/vdl/codegen/java/file_client_interface.go
new file mode 100644
index 0000000..9e0eb0f
--- /dev/null
+++ b/lib/vdl/codegen/java/file_client_interface.go
@@ -0,0 +1,124 @@
+package java
+
+import (
+	"bytes"
+	"fmt"
+	"log"
+	"path"
+
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// clientInterfaceTmpl is the Go template used to render the generated Java
+// <Service>Client interface; see genJavaClientInterfaceFile for the data it
+// is executed with.
+const clientInterfaceTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{ .Source }}
+package {{ .PackagePath }};
+
+{{ .ServiceDoc }}
+{{ .AccessModifier }} interface {{ .ServiceName }}Client {{ .Extends }} {
+{{ range $method := .Methods }}
+    {{/* If this method has multiple return arguments, generate the class. */}}
+    {{ if $method.IsMultipleRet }}
+    public static class {{ $method.UppercaseMethodName }}Out {
+        {{ range $retArg := $method.RetArgs }}
+        public {{ $retArg.Type }} {{ $retArg.Name }};
+        {{ end }}
+    }
+    {{ end }}
+
+    {{/* Generate the method signature. */}}
+    {{ $method.Doc }}
+    {{ $method.AccessModifier }} {{ $method.RetType }} {{ $method.Name }}(final io.v.v23.context.VContext context{{ $method.Args }}) throws io.v.v23.verror.VException;
+    {{ $method.AccessModifier }} {{ $method.RetType }} {{ $method.Name }}(final io.v.v23.context.VContext context{{ $method.Args }}, final io.v.v23.Options veyronOpts) throws io.v.v23.verror.VException;
+{{ end }}
+}
+`
+
+// clientInterfaceArg describes a single (already Java-formatted) return
+// argument rendered into a generated multi-return container class.
+type clientInterfaceArg struct {
+	Type string
+	Name string
+}
+
+// clientInterfaceMethod is the per-method data handed to clientInterfaceTmpl.
+// All string fields hold Java source fragments, pre-rendered by the helpers
+// in this package.
+type clientInterfaceMethod struct {
+	AccessModifier      string
+	Args                string
+	Doc                 string
+	IsMultipleRet       bool
+	Name                string
+	RetArgs             []clientInterfaceArg
+	RetType             string
+	UppercaseMethodName string
+}
+
+// clientInterfaceNonStreamingOutArg returns the Java type used for the
+// non-streaming part of a method's return value: void for no out-args, the
+// single out-arg's Java type, or the generated <Method>Out container class
+// for multiple out-args.  useClass selects the boxed (class) form.
+func clientInterfaceNonStreamingOutArg(iface *compile.Interface, method *compile.Method, useClass bool, env *compile.Env) string {
+	switch len(method.OutArgs) {
+	case 0:
+		// "void" or "Void"
+		return javaType(nil, useClass, env)
+	case 1:
+		return javaType(method.OutArgs[0].Type, useClass, env)
+	default:
+		// Multiple results are wrapped in the generated <Method>Out class.
+		return javaPath(path.Join(interfaceFullyQualifiedName(iface)+"Client", method.Name+"Out"))
+	}
+}
+
+// clientInterfaceOutArg returns the full Java return type for a method.  For
+// streaming client methods this is a ClientStream parameterized over the
+// in/out stream types and the non-streaming result; otherwise it delegates to
+// clientInterfaceNonStreamingOutArg.
+func clientInterfaceOutArg(iface *compile.Interface, method *compile.Method, isService bool, env *compile.Env) string {
+	if isStreamingMethod(method) && !isService {
+		return fmt.Sprintf("io.v.v23.vdl.ClientStream<%s, %s, %s>", javaType(method.InStream, true, env), javaType(method.OutStream, true, env), clientInterfaceNonStreamingOutArg(iface, method, true, env))
+	}
+	return clientInterfaceNonStreamingOutArg(iface, method, false, env)
+}
+
+// processClientInterfaceMethod converts a compiled VDL method into the
+// template data consumed by clientInterfaceTmpl.
+func processClientInterfaceMethod(iface *compile.Interface, method *compile.Method, env *compile.Env) clientInterfaceMethod {
+	retArgs := make([]clientInterfaceArg, len(method.OutArgs))
+	for i := 0; i < len(method.OutArgs); i++ {
+		retArgs[i].Name = vdlutil.ToCamelCase(method.OutArgs[i].Name)
+		retArgs[i].Type = javaType(method.OutArgs[i].Type, false, env)
+	}
+	return clientInterfaceMethod{
+		AccessModifier:      accessModifierForName(method.Name),
+		Args:                javaDeclarationArgStr(method.InArgs, env, true),
+		Doc:                 method.Doc,
+		IsMultipleRet:       len(retArgs) > 1,
+		Name:                vdlutil.ToCamelCase(method.Name),
+		RetArgs:             retArgs,
+		RetType:             clientInterfaceOutArg(iface, method, false, env),
+		// Original (non-camelCased) name, used for the <Method>Out class.
+		UppercaseMethodName: method.Name,
+	}
+}
+
+// genJavaClientInterfaceFile generates the Java interface file for the provided
+// interface.
+func genJavaClientInterfaceFile(iface *compile.Interface, env *compile.Env) JavaFileInfo {
+	javaServiceName := toUpperCamelCase(iface.Name)
+	methods := make([]clientInterfaceMethod, len(iface.Methods))
+	for i, method := range iface.Methods {
+		methods[i] = processClientInterfaceMethod(iface, method, env)
+	}
+	// Data fed to clientInterfaceTmpl; field names must match the template.
+	data := struct {
+		AccessModifier string
+		Extends        string
+		Methods        []clientInterfaceMethod
+		PackagePath    string
+		ServiceDoc     string
+		ServiceName    string
+		Source         string
+	}{
+		AccessModifier: accessModifierForName(iface.Name),
+		Extends:        javaClientExtendsStr(iface.Embeds),
+		Methods:        methods,
+		PackagePath:    javaPath(javaGenPkgPath(iface.File.Package.GenPath)),
+		ServiceDoc:     javaDoc(iface.Doc),
+		ServiceName:    javaServiceName,
+		Source:         iface.File.BaseName,
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("client interface", clientInterfaceTmpl).Execute(&buf, data)
+	if err != nil {
+		// Name the template actually being executed in the failure message.
+		log.Fatalf("vdl: couldn't execute client interface template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaServiceName + "Client.java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_client_stub.go b/lib/vdl/codegen/java/file_client_stub.go
new file mode 100644
index 0000000..394228b
--- /dev/null
+++ b/lib/vdl/codegen/java/file_client_stub.go
@@ -0,0 +1,280 @@
+package java
+
+import (
+	"bytes"
+	"log"
+	"path"
+
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// clientStubTmpl is the Go template used to render the generated Java
+// <Service>ClientStub class; see genJavaClientStubFile for the data it is
+// executed with.  {{/* ... */}} actions are template comments and do not
+// appear in the generated Java source.
+const clientStubTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source(s):  {{ .Source }}
+package {{ .PackagePath }};
+
+/* Client stub for interface: {{ .ServiceName }}Client. */
+{{ .AccessModifier }} final class {{ .ServiceName }}ClientStub implements {{ .FullServiceName }}Client {
+    private final io.v.v23.ipc.Client client;
+    private final java.lang.String veyronName;
+
+    {{/* Define fields to hold each of the embedded object stubs*/}}
+    {{ range $embed := .Embeds }}
+    {{/* e.g. private final com.somepackage.gen_impl.ArithStub stubArith; */}}
+    private final {{ $embed.StubClassName }} {{ $embed.LocalStubVarName }};
+    {{ end }}
+
+    public {{ .ServiceName }}ClientStub(final io.v.v23.ipc.Client client, final java.lang.String veyronName) {
+        this.client = client;
+        this.veyronName = veyronName;
+        {{/* Initialize the embeded stubs */}}
+        {{ range $embed := .Embeds }}
+        this.{{ $embed.LocalStubVarName }} = new {{ $embed.StubClassName }}(client, veyronName);
+         {{ end }}
+    }
+
+    private io.v.v23.ipc.Client getClient(io.v.v23.context.VContext context) {
+        return this.client != null ? client : io.v.v23.V.getClient(context);
+
+    }
+
+    // Methods from interface UniversalServiceMethods.
+    // TODO(spetrovic): Re-enable once we can import the new Signature classes.
+    //@Override
+    //public io.v.v23.ipc.ServiceSignature getSignature(io.v.v23.context.VContext context) throws io.v.v23.verror.VException {
+    //    return getSignature(context, null);
+    //}
+    //@Override
+    //public io.v.v23.ipc.ServiceSignature getSignature(io.v.v23.context.VContext context, io.v.v23.Options veyronOpts) throws io.v.v23.verror.VException {
+    //    // Start the call.
+    //    final io.v.v23.ipc.Client.Call _call = getClient(context).startCall(context, this.veyronName, "signature", new java.lang.Object[0], new java.lang.reflect.Type[0], veyronOpts);
+
+    //    // Finish the call.
+    //    final java.lang.reflect.Type[] _resultTypes = new java.lang.reflect.Type[]{
+    //        new com.google.common.reflect.TypeToken<io.v.v23.ipc.ServiceSignature>() {}.getType(),
+    //    };
+    //    final java.lang.Object[] _results = _call.finish(_resultTypes);
+    //    return (io.v.v23.ipc.ServiceSignature)_results[0];
+    //}
+
+    // Methods from interface {{ .ServiceName }}Client.
+{{/* Iterate over methods defined directly in the body of this service */}}
+{{ range $method := .Methods }}
+    {{/* The optionless overload simply calls the overload with options */}}
+    @Override
+    {{ $method.AccessModifier }} {{ $method.RetType }} {{ $method.Name }}(final io.v.v23.context.VContext context{{ $method.DeclarationArgs }}) throws io.v.v23.verror.VException {
+        {{if $method.Returns }}return{{ end }} {{ $method.Name }}(context{{ $method.CallingArgsLeadingComma }}, null);
+    }
+    {{/* The main client stub method body */}}
+    @Override
+    {{ $method.AccessModifier }} {{ $method.RetType }} {{ $method.Name }}(final io.v.v23.context.VContext context{{ $method.DeclarationArgs }}, io.v.v23.Options veyronOpts) throws io.v.v23.verror.VException {
+        {{/* Start the veyron call */}}
+        // Start the call.
+        final java.lang.Object[] _args = new java.lang.Object[]{ {{ $method.CallingArgs }} };
+        final java.lang.reflect.Type[] _argTypes = new java.lang.reflect.Type[]{ {{ $method.CallingArgTypes }} };
+        final io.v.v23.ipc.Client.Call _call = getClient(context).startCall(context, this.veyronName, "{{ $method.Name }}", _args, _argTypes, veyronOpts);
+
+        // Finish the call.
+        {{/* Now handle returning from the function. */}}
+        {{ if $method.NotStreaming }}
+
+        {{ if $method.IsVoid }}
+        final java.lang.reflect.Type[] _resultTypes = new java.lang.reflect.Type[]{};
+        _call.finish(_resultTypes);
+        {{ else }} {{/* else $method.IsVoid */}}
+        final java.lang.reflect.Type[] _resultTypes = new java.lang.reflect.Type[]{
+            {{ range $outArg := $method.OutArgs }}
+            new com.google.common.reflect.TypeToken<{{ $outArg.Type }}>() {}.getType(),
+            {{ end }}
+        };
+        final java.lang.Object[] _results = _call.finish(_resultTypes);
+        {{ if $method.MultipleReturn }}
+        final {{ $method.DeclaredObjectRetType }} _ret = new {{ $method.DeclaredObjectRetType }}();
+            {{ range $i, $outArg := $method.OutArgs }}
+        _ret.{{ $outArg.FieldName }} = ({{ $outArg.Type }})_results[{{ $i }}];
+            {{ end }} {{/* end range over outargs */}}
+        return _ret;
+        {{ else }} {{/* end if $method.MultipleReturn */}}
+        return ({{ $method.DeclaredObjectRetType }})_results[0];
+        {{ end }} {{/* end if $method.MultipleReturn */}}
+
+        {{ end }} {{/* end if $method.IsVoid */}}
+
+        {{else }} {{/* else $method.NotStreaming */}}
+        return new io.v.v23.vdl.ClientStream<{{ $method.SendType }}, {{ $method.RecvType }}, {{ $method.DeclaredObjectRetType }}>() {
+            @Override
+            public void send(final {{ $method.SendType }} item) throws io.v.v23.verror.VException {
+                final java.lang.reflect.Type type = new com.google.common.reflect.TypeToken<{{ $method.SendType }}>() {}.getType();
+                _call.send(item, type);
+            }
+            @Override
+            public {{ $method.RecvType }} recv() throws java.io.EOFException, io.v.v23.verror.VException {
+                final java.lang.reflect.Type type = new com.google.common.reflect.TypeToken<{{ $method.RecvType }}>() {}.getType();
+                final java.lang.Object result = _call.recv(type);
+                try {
+                    return ({{ $method.RecvType }})result;
+                } catch (java.lang.ClassCastException e) {
+                    throw new io.v.v23.verror.VException("Unexpected result type: " + result.getClass().getCanonicalName());
+                }
+            }
+            @Override
+            public {{ $method.DeclaredObjectRetType }} finish() throws io.v.v23.verror.VException {
+                {{ if $method.IsVoid }}
+                final java.lang.reflect.Type[] resultTypes = new java.lang.reflect.Type[]{};
+                _call.finish(resultTypes);
+                return null;
+                {{ else }} {{/* else $method.IsVoid */}}
+                final java.lang.reflect.Type[] resultTypes = new java.lang.reflect.Type[]{
+                    new com.google.common.reflect.TypeToken<{{ $method.DeclaredObjectRetType }}>() {}.getType()
+                };
+                return ({{ $method.DeclaredObjectRetType }})_call.finish(resultTypes)[0];
+                {{ end }} {{/* end if $method.IsVoid */}}
+            }
+        };
+        {{ end }}{{/* end if $method.NotStreaming */}}
+    }
+{{ end }}{{/* end range over methods */}}
+
+{{/* Iterate over methods from embeded services and generate code to delegate the work */}}
+{{ range $eMethod := .EmbedMethods }}
+    @Override
+    {{ $eMethod.AccessModifier }} {{ $eMethod.RetType }} {{ $eMethod.Name }}(final io.v.v23.context.VContext context{{ $eMethod.DeclarationArgs }}) throws io.v.v23.verror.VException {
+        {{/* e.g. return this.stubArith.cosine(context, [args]) */}}
+        {{ if $eMethod.Returns }}return{{ end }} this.{{ $eMethod.LocalStubVarName }}.{{ $eMethod.Name }}(context{{ $eMethod.CallingArgsLeadingComma }});
+    }
+    @Override
+    {{ $eMethod.AccessModifier }} {{ $eMethod.RetType }} {{ $eMethod.Name }}(final io.v.v23.context.VContext context{{ $eMethod.DeclarationArgs }}, io.v.v23.Options veyronOpts) throws io.v.v23.verror.VException {
+        {{/* e.g. return this.stubArith.cosine(context, [args], options) */}}
+        {{ if $eMethod.Returns }}return{{ end }}  this.{{ $eMethod.LocalStubVarName }}.{{ $eMethod.Name }}(context{{ $eMethod.CallingArgsLeadingComma }}, veyronOpts);
+    }
+{{ end }}
+
+}
+`
+
+// clientStubMethodOutArg names one field of a generated multi-return
+// container class and its Java type.
+type clientStubMethodOutArg struct {
+	FieldName string
+	Type      string
+}
+
+// clientStubMethod is the per-method data handed to clientStubTmpl.  The
+// string fields hold pre-rendered Java source fragments.
+type clientStubMethod struct {
+	AccessModifier          string
+	CallingArgs             string
+	CallingArgTypes         string
+	CallingArgsLeadingComma string
+	DeclarationArgs         string
+	DeclaredObjectRetType   string
+	IsVoid                  bool
+	MultipleReturn          bool
+	Name                    string
+	NotStreaming            bool
+	OutArgs                 []clientStubMethodOutArg
+	RecvType                string
+	RetType                 string
+	Returns                 bool
+	SendType                string
+	ServiceName             string
+}
+
+// clientStubEmbedMethod describes a method inherited from an embedded
+// interface; the stub delegates it to the embedded interface's stub.
+type clientStubEmbedMethod struct {
+	AccessModifier          string
+	CallingArgsLeadingComma string
+	DeclarationArgs         string
+	LocalStubVarName        string
+	Name                    string
+	RetType                 string
+	Returns                 bool
+}
+
+// clientStubEmbed describes one embedded interface: the fully qualified stub
+// class and the field name that holds its instance in the generated stub.
+type clientStubEmbed struct {
+	StubClassName    string
+	LocalStubVarName string
+}
+
+// processClientStubMethod converts a compiled VDL method into the template
+// data consumed by clientStubTmpl.
+func processClientStubMethod(iface *compile.Interface, method *compile.Method, env *compile.Env) clientStubMethod {
+	outArgs := make([]clientStubMethodOutArg, len(method.OutArgs))
+	for i := 0; i < len(method.OutArgs); i++ {
+		outArgs[i].FieldName = vdlutil.ToCamelCase(method.OutArgs[i].Name)
+		outArgs[i].Type = javaType(method.OutArgs[i].Type, true, env)
+	}
+	return clientStubMethod{
+		AccessModifier:          accessModifierForName(method.Name),
+		CallingArgs:             javaCallingArgStr(method.InArgs, false),
+		CallingArgTypes:         javaCallingArgTypeStr(method.InArgs, env),
+		CallingArgsLeadingComma: javaCallingArgStr(method.InArgs, true),
+		DeclarationArgs:         javaDeclarationArgStr(method.InArgs, env, true),
+		DeclaredObjectRetType:   clientInterfaceNonStreamingOutArg(iface, method, true, env),
+		IsVoid:                  len(method.OutArgs) < 1,
+		MultipleReturn:          len(method.OutArgs) > 1,
+		Name:                    vdlutil.ToCamelCase(method.Name),
+		NotStreaming:            !isStreamingMethod(method),
+		OutArgs:                 outArgs,
+		RecvType:                javaType(method.OutStream, true, env),
+		RetType:                 clientInterfaceOutArg(iface, method, false, env),
+		// Streaming methods always "return" (the ClientStream object).
+		Returns:                 len(method.OutArgs) >= 1 || isStreamingMethod(method),
+		SendType:                javaType(method.InStream, true, env),
+		ServiceName:             toUpperCamelCase(iface.Name),
+	}
+}
+
+// processClientStubEmbedMethod converts a method inherited from embedded
+// interface iface into the delegation data consumed by clientStubTmpl.  The
+// LocalStubVarName must match the field name produced for the same embed in
+// genJavaClientStubFile.
+func processClientStubEmbedMethod(iface *compile.Interface, embedMethod *compile.Method, env *compile.Env) clientStubEmbedMethod {
+	return clientStubEmbedMethod{
+		AccessModifier:          accessModifierForName(embedMethod.Name),
+		CallingArgsLeadingComma: javaCallingArgStr(embedMethod.InArgs, true),
+		DeclarationArgs:         javaDeclarationArgStr(embedMethod.InArgs, env, true),
+		LocalStubVarName:        vdlutil.ToCamelCase(iface.Name) + "ClientStub",
+		Name:                    vdlutil.ToCamelCase(embedMethod.Name),
+		RetType:                 clientInterfaceOutArg(iface, embedMethod, false, env),
+		Returns:                 len(embedMethod.OutArgs) >= 1 || isStreamingMethod(embedMethod),
+	}
+}
+
+// genJavaClientStubFile generates a client stub for the specified interface.
+func genJavaClientStubFile(iface *compile.Interface, env *compile.Env) JavaFileInfo {
+	// Collect one stub field per (transitively) embedded interface.
+	embeds := []clientStubEmbed{}
+	for _, embed := range allEmbeddedIfaces(iface) {
+		embeds = append(embeds, clientStubEmbed{
+			LocalStubVarName: vdlutil.ToCamelCase(embed.Name) + "ClientStub",
+			StubClassName:    javaPath(javaGenPkgPath(path.Join(embed.File.Package.GenPath, toUpperCamelCase(embed.Name)+"ClientStub"))),
+		})
+	}
+	// Delegating methods for everything inherited through embedding.
+	embedMethods := []clientStubEmbedMethod{}
+	for _, embedMao := range dedupedEmbeddedMethodAndOrigins(iface) {
+		embedMethods = append(embedMethods, processClientStubEmbedMethod(embedMao.Origin, embedMao.Method, env))
+	}
+	methods := make([]clientStubMethod, len(iface.Methods))
+	for i, method := range iface.Methods {
+		methods[i] = processClientStubMethod(iface, method, env)
+	}
+	javaServiceName := toUpperCamelCase(iface.Name)
+	// Data fed to clientStubTmpl; field names must match the template.
+	data := struct {
+		AccessModifier   string
+		EmbedMethods     []clientStubEmbedMethod
+		Embeds           []clientStubEmbed
+		FullServiceName  string
+		Methods          []clientStubMethod
+		PackagePath      string
+		ServiceName      string
+		Source           string
+		VDLIfacePathName string
+	}{
+		AccessModifier:   accessModifierForName(iface.Name),
+		EmbedMethods:     embedMethods,
+		Embeds:           embeds,
+		FullServiceName:  javaPath(interfaceFullyQualifiedName(iface)),
+		Methods:          methods,
+		PackagePath:      javaPath(javaGenPkgPath(iface.File.Package.GenPath)),
+		ServiceName:      javaServiceName,
+		Source:           iface.File.BaseName,
+		VDLIfacePathName: path.Join(iface.File.Package.GenPath, iface.Name+"ClientMethods"),
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("client stub", clientStubTmpl).Execute(&buf, data)
+	if err != nil {
+		// A template failure is a bug in the generator itself; abort the run.
+		log.Fatalf("vdl: couldn't execute client stub template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaServiceName + "ClientStub.java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_complex.go b/lib/vdl/codegen/java/file_complex.go
new file mode 100644
index 0000000..6b0aca2
--- /dev/null
+++ b/lib/vdl/codegen/java/file_complex.go
@@ -0,0 +1,96 @@
+package java
+
+import (
+	"bytes"
+	"fmt"
+	"log"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// complexTmpl is the Go template used to render the Java class for a named
+// VDL complex type; see genJavaComplexFile for the data it is executed with.
+const complexTmpl = `
+// This file was auto-generated by the veyron vdl tool.
+// Source: {{.Source}}
+package {{.PackagePath}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends {{.VdlComplex}} {
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    public {{.Name}}({{.ValueType}} real, {{.ValueType}} imag) {
+        super(VDL_TYPE, real, imag);
+    }
+
+    public {{.Name}}({{.ValueType}} real) {
+        this(real, 0);
+    }
+
+    public {{.Name}}() {
+        this(0, 0);
+    }
+
+    @SuppressWarnings("hiding")
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR
+        = new android.os.Parcelable.Creator<{{.Name}}>() {
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            {{.VdlComplex}} value = {{.VdlComplex}}.CREATOR.createFromParcel(in);
+            return new {{.Name}}(value.getReal(), value.getImag());
+        }
+
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+}
+`
+
+// genJavaComplexFile generates the Java class file for the provided user-defined VDL complex type.
+func genJavaComplexFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	// Pick the Java primitive used for the real/imaginary parts.  Locals use
+	// lowerCamelCase per Go convention (was ValueType).
+	var valueType string
+	switch kind := tdef.Type.Kind(); kind {
+	case vdl.Complex64:
+		valueType = "float"
+	case vdl.Complex128:
+		valueType = "double"
+	default:
+		// Only complex kinds are valid here; anything else is a generator bug.
+		panic(fmt.Errorf("val: unhandled kind: %v", kind))
+	}
+	javaTypeName := toUpperCamelCase(tdef.Name)
+	// Data fed to complexTmpl; field names must match the template.
+	data := struct {
+		AccessModifier string
+		Doc            string
+		Name           string
+		PackagePath    string
+		Source         string
+		ValueType      string
+		VdlComplex     string
+		VdlTypeName    string
+		VdlTypeString  string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		Doc:            javaDocInComment(tdef.Doc),
+		Name:           javaTypeName,
+		PackagePath:    javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		Source:         tdef.File.BaseName,
+		ValueType:      valueType,
+		VdlComplex:     javaVdlPrimitiveType(tdef.Type.Kind()),
+		VdlTypeName:    tdef.Type.Name(),
+		VdlTypeString:  tdef.Type.String(),
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("complex", complexTmpl).Execute(&buf, data)
+	if err != nil {
+		log.Fatalf("vdl: couldn't execute VDL complex template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaTypeName + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_constants.go b/lib/vdl/codegen/java/file_constants.go
new file mode 100644
index 0000000..03c97ae
--- /dev/null
+++ b/lib/vdl/codegen/java/file_constants.go
@@ -0,0 +1,94 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// constTmpl is the Go template used to render the single Java Constants class
+// holding every constant of the package; see genJavaConstFile.
+const constTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source(s): {{ .Source }}
+package {{ .PackagePath }};
+
+
+public final class {{ .ClassName }} {
+    {{ range $file := .Files }}
+
+    /* The following constants originate in file: {{ $file.Name }} */
+    {{/*Constants*/}}
+    {{ range $const := $file.Consts }}
+    {{ $const.Doc }}
+    {{ $const.AccessModifier }} static final {{ $const.Type }} {{ $const.Name }} = {{ $const.Value }};
+    {{ end }} {{/* end range $file.Consts */}}
+    {{ end }} {{/* range .Files */}}
+}
+`
+
+// constConst is the template data for one generated Java constant; all
+// fields hold pre-rendered Java source fragments.
+type constConst struct {
+	AccessModifier string
+	Doc            string
+	Type           string
+	Name           string
+	Value          string
+}
+
+// constFile groups the constants originating from one VDL source file.
+type constFile struct {
+	Name   string
+	Consts []constConst
+}
+
+// shouldGenerateConstFile reports whether any file in pkg defines at least
+// one constant, i.e. whether a Constants class is worth emitting.
+func shouldGenerateConstFile(pkg *compile.Package) bool {
+	for _, file := range pkg.Files {
+		if len(file.ConstDefs) > 0 {
+			return true
+		}
+	}
+	return false
+}
+
+// genJavaConstFile generates the (single) Java file that contains constant
+// definitions from all the VDL files.  Returns nil when the package defines
+// no constants.
+func genJavaConstFile(pkg *compile.Package, env *compile.Env) *JavaFileInfo {
+	if !shouldGenerateConstFile(pkg) {
+		return nil
+	}
+
+	className := "Constants"
+
+	files := make([]constFile, len(pkg.Files))
+	for i, file := range pkg.Files {
+		consts := make([]constConst, len(file.ConstDefs))
+		for j, cnst := range file.ConstDefs {
+			consts[j].AccessModifier = accessModifierForName(cnst.Name)
+			consts[j].Doc = javaDoc(cnst.Doc)
+			consts[j].Type = javaType(cnst.Value.Type(), false, env)
+			// Java constants conventionally use CONST_CASE names.
+			consts[j].Name = vdlutil.ToConstCase(cnst.Name)
+			consts[j].Value = javaConstVal(cnst.Value, env)
+		}
+		files[i].Name = file.BaseName
+		files[i].Consts = consts
+	}
+
+	// Data fed to constTmpl; field names must match the template.
+	data := struct {
+		ClassName   string
+		Source      string
+		PackagePath string
+		Files       []constFile
+	}{
+		ClassName:   className,
+		Source:      javaFileNames(pkg.Files),
+		PackagePath: javaPath(javaGenPkgPath(pkg.GenPath)),
+		Files:       files,
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("const", constTmpl).Execute(&buf, data)
+	if err != nil {
+		// A template failure is a bug in the generator itself; abort the run.
+		log.Fatalf("vdl: couldn't execute const template: %v", err)
+	}
+	return &JavaFileInfo{
+		Name: className + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_enum.go b/lib/vdl/codegen/java/file_enum.go
new file mode 100644
index 0000000..5a5e841
--- /dev/null
+++ b/lib/vdl/codegen/java/file_enum.go
@@ -0,0 +1,104 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+)
+
+// enumTmpl is the Go template used to render the Java class for a named VDL
+// enum type; see genJavaEnumFile for the data it is executed with.
+const enumTmpl = `
+// This file was auto-generated by the veyron vdl tool.
+// Source: {{.Source}}
+package {{.PackagePath}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends io.v.v23.vdl.VdlEnum {
+    {{ range $index, $label := .EnumLabels }}
+        @io.v.v23.vdl.GeneratedFromVdl(name = "{{$label}}", index = {{$index}})
+        public static final {{$.Name}} {{$label}};
+    {{ end }}
+
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    static {
+        {{ range $label := .EnumLabels }}
+            {{$label}} = new {{$.Name}}("{{$label}}");
+        {{ end }}
+    }
+
+    private {{.Name}}(String name) {
+        super(VDL_TYPE, name);
+    }
+
+    public static {{.Name}} valueOf(String name) {
+        {{ range $label := .EnumLabels }}
+            if ("{{$label}}".equals(name)) {
+                return {{$label}};
+            }
+        {{ end }}
+        throw new java.lang.IllegalArgumentException();
+    }
+
+    @Override
+    public void writeToParcel(android.os.Parcel out, int flags) {
+        out.writeString(name());
+    }
+
+    @SuppressWarnings("hiding")
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR =
+            new android.os.Parcelable.Creator<{{.Name}}>() {
+        @SuppressWarnings("unchecked")
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            return {{.Name}}.valueOf(in.readString());
+        }
+
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+}
+`
+
+// genJavaEnumFile generates the Java class file for the provided user-defined enum type.
+func genJavaEnumFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	// Collect the enum labels in declaration order.
+	labels := make([]string, tdef.Type.NumEnumLabel())
+	for i := 0; i < tdef.Type.NumEnumLabel(); i++ {
+		labels[i] = tdef.Type.EnumLabel(i)
+	}
+	javaTypeName := toUpperCamelCase(tdef.Name)
+	// Data fed to enumTmpl; field names must match the template.
+	data := struct {
+		AccessModifier string
+		EnumLabels     []string
+		Doc            string
+		Name           string
+		PackagePath    string
+		Source         string
+		VdlTypeName    string
+		VdlTypeString  string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		EnumLabels:     labels,
+		Doc:            javaDocInComment(tdef.Doc),
+		Name:           javaTypeName,
+		PackagePath:    javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		Source:         tdef.File.BaseName,
+		VdlTypeName:    tdef.Type.Name(),
+		VdlTypeString:  tdef.Type.String(),
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("enum", enumTmpl).Execute(&buf, data)
+	if err != nil {
+		// A template failure is a bug in the generator itself; abort the run.
+		log.Fatalf("vdl: couldn't execute enum template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaTypeName + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_errors.go b/lib/vdl/codegen/java/file_errors.go
new file mode 100644
index 0000000..3b2e0d2
--- /dev/null
+++ b/lib/vdl/codegen/java/file_errors.go
@@ -0,0 +1,142 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// errorTmpl is the Go template used to render the single Java Errors class
+// holding the package's error definitions, i18n format registrations, and
+// per-error creator methods; see genJavaErrorFile.
+const errorTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source(s): {{ .Source }}
+package {{ .PackagePath }};
+
+
+public final class {{ .ClassName }} {
+    {{ range $file := .Files }}
+
+    /* The following errors originate in file: {{ $file.Name }} */
+    {{/*Error Defs*/}}
+    {{ range $error := $file.Errors }}
+    {{ $error.Doc }}
+    {{ $error.AccessModifier }} static final io.v.v23.verror.VException.IDAction {{ $error.Name }} = io.v.v23.verror.VException.register("{{ $error.ID }}", io.v.v23.verror.VException.ActionCode.{{ $error.ActionName }}, "{{ $error.EnglishFmt }}");
+    {{ end }} {{/* range $file.Errors */}}
+
+    {{ end }} {{/* range .Files */}}
+
+    static {
+        {{ range $file := .Files }}
+        /* The following errors originate in file: {{ $file.Name }} */
+        {{ range $error := $file.Errors }}
+        {{ range $format := $error.Formats}}
+        io.v.v23.i18n.Language.getDefaultCatalog().setWithBase("{{ $format.Lang }}", {{ $error.Name }}.getID(), "{{ $format.Fmt }}");
+        {{ end }} {{/* range $error.Formats */}}
+        {{ end }} {{/* range $file.Errors */}}
+        {{ end }} {{/* range .Files */}}
+    }
+
+    {{ range $file := .Files }}
+    /* The following error creator methods originate in file: {{ $file.Name }} */
+    {{ range $error := $file.Errors }}
+    /**
+     * Creates an error with {@code {{ $error.Name }}} identifier.
+     */
+    public static io.v.v23.verror.VException {{ $error.MethodName }}(io.v.v23.context.VContext _ctx{{ $error.MethodArgs}}) {
+        final java.lang.Object[] _params = new java.lang.Object[] { {{ $error.Params }} };
+        final java.lang.reflect.Type[] _paramTypes = new java.lang.reflect.Type[]{ {{ $error.ParamTypes }} };
+        return io.v.v23.verror.VException.make({{ $error.Name }}, _ctx, _paramTypes, _params);
+    }
+    {{ end }} {{/* range $file.Errors */}}
+    {{ end }} {{/* range .Files */}}
+}
+`
+
+// errorDef is the template data for one generated error definition; the
+// string fields hold pre-rendered Java source fragments.
+type errorDef struct {
+	AccessModifier string
+	Doc            string
+	Name           string
+	ID             string
+	ActionName     string
+	EnglishFmt     string
+	Formats        []errorFormat
+	MethodName     string
+	MethodArgs     string
+	Params         string
+	ParamTypes     string
+}
+
+// errorFormat pairs a language tag with its localized error format string.
+type errorFormat struct {
+	Lang string
+	Fmt  string
+}
+
+// errorFile groups the error definitions originating from one VDL source file.
+type errorFile struct {
+	Name   string
+	Errors []errorDef
+}
+
+// shouldGenerateErrorFile reports whether any file in pkg defines at least
+// one error, i.e. whether an Errors class is worth emitting.
+func shouldGenerateErrorFile(pkg *compile.Package) bool {
+	for _, file := range pkg.Files {
+		if len(file.ErrorDefs) > 0 {
+			return true
+		}
+	}
+	return false
+}
+
+// genJavaErrorFile generates the (single) Java file that contains error
+// definitions from all the VDL files.  Returns nil when the package defines
+// no errors.
+func genJavaErrorFile(pkg *compile.Package, env *compile.Env) *JavaFileInfo {
+	if !shouldGenerateErrorFile(pkg) {
+		return nil
+	}
+
+	className := "Errors"
+
+	files := make([]errorFile, len(pkg.Files))
+	for i, file := range pkg.Files {
+		errors := make([]errorDef, len(file.ErrorDefs))
+		// Loop variable renamed from err to def: it is an error *definition*,
+		// not an error value, and the old name shadowed the conventional
+		// error identifier used below for template execution.
+		for j, def := range file.ErrorDefs {
+			formats := make([]errorFormat, len(def.Formats))
+			for k, format := range def.Formats {
+				formats[k].Lang = string(format.Lang)
+				formats[k].Fmt = format.Fmt
+			}
+			errors[j].AccessModifier = accessModifierForName(def.Name)
+			errors[j].Doc = javaDoc(def.Doc)
+			// Java constants conventionally use CONST_CASE names.
+			errors[j].Name = vdlutil.ToConstCase(def.Name)
+			errors[j].ID = def.ID
+			errors[j].ActionName = vdlutil.ToConstCase(def.RetryCode.String())
+			errors[j].EnglishFmt = def.English
+			errors[j].Formats = formats
+			errors[j].MethodName = "make" + toUpperCamelCase(def.Name)
+			errors[j].MethodArgs = javaDeclarationArgStr(def.Params, env, true)
+			errors[j].Params = javaCallingArgStr(def.Params, false)
+			errors[j].ParamTypes = javaCallingArgTypeStr(def.Params, env)
+		}
+		files[i].Name = file.BaseName
+		files[i].Errors = errors
+	}
+
+	// Data fed to errorTmpl; field names must match the template.
+	data := struct {
+		ClassName   string
+		Source      string
+		PackagePath string
+		Files       []errorFile
+	}{
+		ClassName:   className,
+		Source:      javaFileNames(pkg.Files),
+		PackagePath: javaPath(javaGenPkgPath(pkg.GenPath)),
+		Files:       files,
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("error", errorTmpl).Execute(&buf, data)
+	if err != nil {
+		// A template failure is a bug in the generator itself; abort the run.
+		log.Fatalf("vdl: couldn't execute error template: %v", err)
+	}
+	return &JavaFileInfo{
+		Name: className + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_list.go b/lib/vdl/codegen/java/file_list.go
new file mode 100644
index 0000000..dbe0e11
--- /dev/null
+++ b/lib/vdl/codegen/java/file_list.go
@@ -0,0 +1,89 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+)
+
+// listTmpl is the template for the Java class generated for a named VDL list
+// type.
+const listTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{.SourceFile}}
+package {{.Package}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends io.v.v23.vdl.VdlList<{{.ElemType}}> {
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    public {{.Name}}(java.util.List<{{.ElemType}}> impl) {
+        super(VDL_TYPE, impl);
+    }
+
+    public {{.Name}}() {
+        this(new java.util.ArrayList<{{.ElemType}}>());
+    }
+
+    @Override
+    public void writeToParcel(android.os.Parcel out, int flags) {
+        java.lang.reflect.Type elemType =
+                new com.google.common.reflect.TypeToken<{{.ElemType}}>(){}.getType();
+        io.v.v23.vdl.ParcelUtil.writeList(out, this, elemType);
+    }
+
+    @SuppressWarnings("hiding")
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR =
+            new android.os.Parcelable.Creator<{{.Name}}>() {
+        @SuppressWarnings("unchecked")
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            java.lang.reflect.Type elemType =
+                    new com.google.common.reflect.TypeToken<{{.ElemType}}>(){}.getType();
+            java.util.List<?> list = io.v.v23.vdl.ParcelUtil.readList(
+                    in, {{.Name}}.class.getClassLoader(), elemType);
+            return new {{.Name}}((java.util.List<{{.ElemType}}>) list);
+        }
+
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+}
+`
+
+// genJavaListFile generates the Java class file for the provided named list type.
+func genJavaListFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	name := toUpperCamelCase(tdef.Name)
+	// Assemble the values consumed by listTmpl.
+	tmplData := struct {
+		AccessModifier string
+		Doc            string
+		ElemType       string
+		Name           string
+		Package        string
+		SourceFile     string
+		VdlTypeName    string
+		VdlTypeString  string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		Doc:            javaDocInComment(tdef.Doc),
+		ElemType:       javaType(tdef.Type.Elem(), true, env),
+		Name:           name,
+		Package:        javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		SourceFile:     tdef.File.BaseName,
+		VdlTypeName:    tdef.Type.Name(),
+		VdlTypeString:  tdef.Type.String(),
+	}
+	var out bytes.Buffer
+	if err := parseTmpl("list", listTmpl).Execute(&out, tmplData); err != nil {
+		log.Fatalf("vdl: couldn't execute list template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: name + ".java",
+		Data: out.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_map.go b/lib/vdl/codegen/java/file_map.go
new file mode 100644
index 0000000..48f6820
--- /dev/null
+++ b/lib/vdl/codegen/java/file_map.go
@@ -0,0 +1,96 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+)
+
+// mapTmpl is the template for the Java class generated for a named VDL map
+// type.
+const mapTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{.SourceFile}}
+
+package {{.Package}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends io.v.v23.vdl.VdlMap<{{.KeyType}}, {{.ElemType}}> {
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    public {{.Name}}(java.util.Map<{{.KeyType}}, {{.ElemType}}> impl) {
+        super(VDL_TYPE, impl);
+    }
+
+    public {{.Name}}() {
+        this(new java.util.HashMap<{{.KeyType}}, {{.ElemType}}>());
+    }
+
+    @Override
+    public void writeToParcel(android.os.Parcel out, int flags) {
+        java.lang.reflect.Type keyType =
+                new com.google.common.reflect.TypeToken<{{.KeyType}}>(){}.getType();
+        java.lang.reflect.Type elemType =
+                new com.google.common.reflect.TypeToken<{{.ElemType}}>(){}.getType();
+        io.v.v23.vdl.ParcelUtil.writeMap(out, this, keyType, elemType);
+    }
+
+    @SuppressWarnings("hiding")
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR =
+            new android.os.Parcelable.Creator<{{.Name}}>() {
+        @SuppressWarnings("unchecked")
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            java.lang.reflect.Type keyType =
+                    new com.google.common.reflect.TypeToken<{{.KeyType}}>(){}.getType();
+            java.lang.reflect.Type elemType =
+                    new com.google.common.reflect.TypeToken<{{.ElemType}}>(){}.getType();
+            java.util.Map<?, ?> map = io.v.v23.vdl.ParcelUtil.readMap(
+                    in, {{.Name}}.class.getClassLoader(), keyType, elemType);
+            return new {{.Name}}((java.util.Map<{{.KeyType}}, {{.ElemType}}>) map);
+        }
+
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+}
+`
+
+// genJavaMapFile generates the Java class file for the provided named map
+// type.  VdlTypeString uses tdef.Type.String() so the generated type comment
+// is consistent with the list, set, struct and primitive generators (it
+// previously used tdef.BaseType.String()).
+func genJavaMapFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	javaTypeName := toUpperCamelCase(tdef.Name)
+	data := struct {
+		AccessModifier string
+		Doc            string
+		ElemType       string
+		KeyType        string
+		Name           string
+		Package        string
+		SourceFile     string
+		VdlTypeName    string
+		VdlTypeString  string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		Doc:            javaDocInComment(tdef.Doc),
+		ElemType:       javaType(tdef.Type.Elem(), true, env),
+		KeyType:        javaType(tdef.Type.Key(), true, env),
+		Name:           javaTypeName,
+		Package:        javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		SourceFile:     tdef.File.BaseName,
+		VdlTypeName:    tdef.Type.Name(),
+		VdlTypeString:  tdef.Type.String(),
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("map", mapTmpl).Execute(&buf, data)
+	if err != nil {
+		log.Fatalf("vdl: couldn't execute map template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaTypeName + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_package_info.go b/lib/vdl/codegen/java/file_package_info.go
new file mode 100644
index 0000000..da21530
--- /dev/null
+++ b/lib/vdl/codegen/java/file_package_info.go
@@ -0,0 +1,50 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+)
+
+// packageTmpl is the template for the generated package-info.java file.
+const packageTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{ .Source }}
+
+{{ .Doc }}
+package {{ .PackagePath }};
+`
+
+// genJavaPackageFile generates the Java package info file, iff exactly one of
+// the package's VDL files carries package documentation.  If multiple files
+// have package docs a warning is logged and no file is generated, since there
+// can be only one package-info.java per package.
+func genJavaPackageFile(pkg *compile.Package, env *compile.Env) *JavaFileInfo {
+	var docFile *compile.File // the single file with package documentation
+	for _, file := range pkg.Files {
+		if file.PackageDef.Doc == "" {
+			continue
+		}
+		if docFile != nil {
+			log.Printf("WARNING: Multiple vdl files with package documentation. One will be overwritten.")
+			return nil
+		}
+		docFile = file
+	}
+	if docFile == nil {
+		return nil
+	}
+	data := struct {
+		Source      string
+		PackagePath string
+		Doc         string
+	}{
+		Source:      javaFileNames(pkg.Files),
+		PackagePath: javaPath(javaGenPkgPath(pkg.GenPath)),
+		Doc:         javaDoc(docFile.PackageDef.Doc),
+	}
+	var buf bytes.Buffer
+	if err := parseTmpl("package", packageTmpl).Execute(&buf, data); err != nil {
+		log.Fatalf("vdl: couldn't execute package template: %v", err)
+	}
+	return &JavaFileInfo{
+		Name: "package-info.java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_primitive.go b/lib/vdl/codegen/java/file_primitive.go
new file mode 100644
index 0000000..5aa7b41
--- /dev/null
+++ b/lib/vdl/codegen/java/file_primitive.go
@@ -0,0 +1,114 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// primitiveTmpl is the template for the Java class generated for a named VDL
+// primitive type.  It deliberately starts at the header comment (no leading
+// newline) so the generated file does not begin with a blank line, matching
+// the other templates in this package.
+const primitiveTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{.Source}}
+package {{.PackagePath}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends {{.VdlType}} {
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    public {{.Name}}({{.ConstructorType}} value) {
+        super(VDL_TYPE, value);
+    }
+
+    @SuppressWarnings("hiding")
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR
+        = new android.os.Parcelable.Creator<{{.Name}}>() {
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            return new {{.Name}}({{.VdlType}}.CREATOR.createFromParcel(in).getValue());
+        }
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+
+    public {{.Name}}() {
+        super(VDL_TYPE);
+    }
+}
+`
+
+// javaConstructorType returns the java type that is used as a constructor
+// argument type for a VDL primitive.  Unsigned VDL integers map onto the
+// signed Java primitive of the same width; everything else defers to the
+// built-in type mapping.
+func javaConstructorType(t *vdl.Type) string {
+	// Table of the cases that differ from the built-in mapping.
+	unsigned := map[vdl.Kind]string{
+		vdl.Uint16: "short",
+		vdl.Uint32: "int",
+		vdl.Uint64: "long",
+	}
+	if jt, ok := unsigned[t.Kind()]; ok {
+		return jt
+	}
+	constructorType, _ := javaBuiltInType(t, false)
+	return constructorType
+}
+
+// javaTypeAdapterDelegateClass returns the java class that is used as a type
+// adapter delegate argument for a VDL primitive.  Unsigned VDL integers map
+// onto the boxed signed Java class of the same width.
+func javaTypeAdapterDelegateClass(t *vdl.Type) string {
+	switch t.Kind() {
+	case vdl.Uint16:
+		return "java.lang.Short"
+	case vdl.Uint32:
+		return "java.lang.Integer"
+	case vdl.Uint64:
+		return "java.lang.Long"
+	default:
+		typeAdapterDelegateClass, _ := javaBuiltInType(t, true)
+		return typeAdapterDelegateClass
+	}
+}
+
+// genJavaPrimitiveFile generates the Java class file for the provided
+// user-defined primitive type.
+// NOTE(review): TypeAdapterDelegateClass is computed below but is not
+// referenced by primitiveTmpl — confirm whether the template should use it or
+// whether the field can be dropped.
+func genJavaPrimitiveFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	javaTypeName := toUpperCamelCase(tdef.Name)
+	data := struct {
+		AccessModifier           string
+		Doc                      string
+		Name                     string
+		PackagePath              string
+		Source                   string
+		ConstructorType          string
+		TypeAdapterDelegateClass string
+		VdlType                  string
+		VdlTypeName              string
+		VdlTypeString            string
+	}{
+		AccessModifier:           accessModifierForName(tdef.Name),
+		Doc:                      javaDocInComment(tdef.Doc),
+		Name:                     javaTypeName,
+		PackagePath:              javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		Source:                   tdef.File.BaseName,
+		ConstructorType:          javaConstructorType(tdef.Type),
+		TypeAdapterDelegateClass: javaTypeAdapterDelegateClass(tdef.Type),
+		VdlType:                  javaVdlPrimitiveType(tdef.Type.Kind()),
+		VdlTypeName:              tdef.Type.Name(),
+		VdlTypeString:            tdef.Type.String(),
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("primitive", primitiveTmpl).Execute(&buf, data)
+	if err != nil {
+		log.Fatalf("vdl: couldn't execute primitive template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaTypeName + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_server_interface.go b/lib/vdl/codegen/java/file_server_interface.go
new file mode 100644
index 0000000..f143b1b
--- /dev/null
+++ b/lib/vdl/codegen/java/file_server_interface.go
@@ -0,0 +1,101 @@
+package java
+
+import (
+	"bytes"
+	"fmt"
+	"log"
+	"path"
+
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// serverInterfaceTmpl is the template for the generated Java server interface.
+const serverInterfaceTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{ .Source }}
+package {{ .PackagePath }};
+
+{{ .ServerDoc }}
+@io.v.v23.vdl.VeyronServer(
+    serverWrapper = {{ .ServerWrapperPath }}.class
+)
+{{ .AccessModifier }} interface {{ .ServiceName }}Server {{ .Extends }} {
+{{ range $method := .Methods }}
+    {{/* Generate the method signature. */}}
+    {{ $method.Doc }}
+    {{ $method.AccessModifier }} {{ $method.RetType }} {{ $method.Name }}(final io.v.v23.ipc.ServerContext context{{ $method.Args }}) throws io.v.v23.verror.VException;
+{{ end }}
+}
+`
+
+// serverInterfaceOutArg returns the Java return type for the given method:
+// void when there are no out-args, the single out-arg's type when there is
+// one, and the generated multi-return container class otherwise.
+func serverInterfaceOutArg(method *compile.Method, iface *compile.Interface, env *compile.Env) string {
+	if len(method.OutArgs) == 0 {
+		return "void"
+	}
+	if len(method.OutArgs) == 1 {
+		return javaType(method.OutArgs[0].Type, false, env)
+	}
+	return javaPath(path.Join(interfaceFullyQualifiedName(iface)+"Client", method.Name+"Out"))
+}
+
+// serverInterfaceMethod holds the template data for a single method of the
+// generated server interface.
+type serverInterfaceMethod struct {
+	AccessModifier string
+	Args           string
+	Doc            string
+	Name           string
+	RetType        string
+}
+
+// processServerInterfaceMethod converts a compiled VDL method into the
+// template-friendly form consumed by serverInterfaceTmpl.
+func processServerInterfaceMethod(method *compile.Method, iface *compile.Interface, env *compile.Env) serverInterfaceMethod {
+	declArgs := javaDeclarationArgStr(method.InArgs, env, true)
+	if isStreamingMethod(method) {
+		// Streaming methods take an extra typed stream argument.
+		declArgs += fmt.Sprintf(", io.v.v23.vdl.Stream<%s, %s> stream", javaType(method.OutStream, true, env), javaType(method.InStream, true, env))
+	}
+	return serverInterfaceMethod{
+		AccessModifier: accessModifierForName(method.Name),
+		Args:           declArgs,
+		Doc:            method.Doc, // NOTE(review): raw doc; other generators run docs through javaDoc — confirm intended.
+		Name:           vdlutil.ToCamelCase(method.Name),
+		RetType:        serverInterfaceOutArg(method, iface, env),
+	}
+}
+
+// genJavaServerInterfaceFile generates the Java interface file for the
+// provided interface.
+// NOTE(review): ServerVDLPath is populated below but is not referenced by
+// serverInterfaceTmpl — confirm whether it is still needed.
+func genJavaServerInterfaceFile(iface *compile.Interface, env *compile.Env) JavaFileInfo {
+	methods := make([]serverInterfaceMethod, len(iface.Methods))
+	for i, method := range iface.Methods {
+		methods[i] = processServerInterfaceMethod(method, iface, env)
+	}
+	javaServiceName := toUpperCamelCase(iface.Name)
+	data := struct {
+		AccessModifier    string
+		Extends           string
+		Methods           []serverInterfaceMethod
+		PackagePath       string
+		ServerDoc         string
+		ServerVDLPath     string
+		ServiceName       string
+		ServerWrapperPath string
+		Source            string
+	}{
+		AccessModifier:    accessModifierForName(iface.Name),
+		Extends:           javaServerExtendsStr(iface.Embeds),
+		Methods:           methods,
+		PackagePath:       javaPath(javaGenPkgPath(iface.File.Package.GenPath)),
+		ServerDoc:         javaDoc(iface.Doc),
+		ServiceName:       javaServiceName,
+		ServerVDLPath:     path.Join(iface.File.Package.GenPath, iface.Name+"ServerMethods"),
+		ServerWrapperPath: javaPath(javaGenPkgPath(path.Join(iface.File.Package.GenPath, javaServiceName+"ServerWrapper"))),
+		Source:            iface.File.BaseName,
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("server interface", serverInterfaceTmpl).Execute(&buf, data)
+	if err != nil {
+		log.Fatalf("vdl: couldn't execute server interface template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaServiceName + "Server.java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_server_wrapper.go b/lib/vdl/codegen/java/file_server_wrapper.go
new file mode 100644
index 0000000..d825e13
--- /dev/null
+++ b/lib/vdl/codegen/java/file_server_wrapper.go
@@ -0,0 +1,228 @@
+package java
+
+import (
+	"bytes"
+	"log"
+	"path"
+
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// serverWrapperTmpl is the template for the generated Java server wrapper
+// class, which delegates calls to the underlying server and its embedded
+// wrappers.
+const serverWrapperTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source(s):  {{ .Source }}
+package {{ .PackagePath }};
+
+{{ .AccessModifier }} final class {{ .ServiceName }}ServerWrapper {
+
+    private final {{ .FullServiceName }}Server server;
+
+{{/* Define fields to hold each of the embedded server wrappers*/}}
+{{ range $embed := .Embeds }}
+    {{/* e.g. private final com.somepackage.gen_impl.ArithStub stubArith; */}}
+    private final {{ $embed.WrapperClassName }} {{ $embed.LocalWrapperVarName }};
+    {{ end }}
+
+    public {{ .ServiceName }}ServerWrapper(final {{ .FullServiceName }}Server server) {
+        this.server = server;
+        {{/* Initialize the embeded server wrappers */}}
+        {{ range $embed := .Embeds }}
+        this.{{ $embed.LocalWrapperVarName }} = new {{ $embed.WrapperClassName }}(server);
+        {{ end }}
+    }
+
+    /**
+     * Returns a description of this server.
+     */
+    // TODO(spetrovic): Re-enable once we can import the new Signature classes.
+    //@SuppressWarnings("unused")
+    //public io.v.v23.ipc.ServiceSignature signature(io.v.v23.ipc.ServerCall call) throws io.v.v23.verror.VException {
+    //    throw new io.v.v23.verror.VException("Signature method not yet supported for Java servers");
+    //}
+
+    /**
+     * Returns all tags associated with the provided method or null if the method isn't implemented
+     * by this server.
+     */
+    @SuppressWarnings("unused")
+    public io.v.v23.vdl.VdlValue[] getMethodTags(final io.v.v23.ipc.ServerCall call, final java.lang.String method) throws io.v.v23.verror.VException {
+        {{ range $methodName, $tags := .MethodTags }}
+        if ("{{ $methodName }}".equals(method)) {
+            try {
+                return new io.v.v23.vdl.VdlValue[] {
+                    {{ range $tag := $tags }} io.v.v23.vdl.VdlValue.valueOf({{ $tag.Value }}, {{ $tag.Type }}), {{ end }}
+                };
+            } catch (IllegalArgumentException e) {
+                throw new io.v.v23.verror.VException(String.format("Couldn't get tags for method \"{{ $methodName }}\": %s", e.getMessage()));
+            }
+        }
+        {{ end }}
+        {{ range $embed := .Embeds }}
+        {
+            final io.v.v23.vdl.VdlValue[] tags = this.{{ $embed.LocalWrapperVarName }}.getMethodTags(call, method);
+            if (tags != null) {
+                return tags;
+            }
+        }
+        {{ end }}
+        return null;  // method not found
+    }
+
+     {{/* Iterate over methods defined directly in the body of this server */}}
+    {{ range $method := .Methods }}
+    {{ $method.AccessModifier }} {{ $method.RetType }} {{ $method.Name }}(final io.v.v23.ipc.ServerCall call{{ $method.DeclarationArgs }}) throws io.v.v23.verror.VException {
+        {{ if $method.IsStreaming }}
+        final io.v.v23.vdl.Stream<{{ $method.SendType }}, {{ $method.RecvType }}> _stream = new io.v.v23.vdl.Stream<{{ $method.SendType }}, {{ $method.RecvType }}>() {
+            @Override
+            public void send({{ $method.SendType }} item) throws io.v.v23.verror.VException {
+                final java.lang.reflect.Type type = new com.google.common.reflect.TypeToken< {{ $method.SendType }} >() {}.getType();
+                call.send(item, type);
+            }
+            @Override
+            public {{ $method.RecvType }} recv() throws java.io.EOFException, io.v.v23.verror.VException {
+                final java.lang.reflect.Type type = new com.google.common.reflect.TypeToken< {{ $method.RecvType }} >() {}.getType();
+                final java.lang.Object result = call.recv(type);
+                try {
+                    return ({{ $method.RecvType }})result;
+                } catch (java.lang.ClassCastException e) {
+                    throw new io.v.v23.verror.VException("Unexpected result type: " + result.getClass().getCanonicalName());
+                }
+            }
+        };
+        {{ end }} {{/* end if $method.IsStreaming */}}
+        {{ if $method.Returns }} return {{ end }} this.server.{{ $method.Name }}( call {{ $method.CallingArgs }} {{ if $method.IsStreaming }} ,_stream {{ end }} );
+    }
+{{end}}
+
+{{/* Iterate over methods from embeded servers and generate code to delegate the work */}}
+{{ range $eMethod := .EmbedMethods }}
+    {{ $eMethod.AccessModifier }} {{ $eMethod.RetType }} {{ $eMethod.Name }}(final io.v.v23.ipc.ServerCall call{{ $eMethod.DeclarationArgs }}) throws io.v.v23.verror.VException {
+        {{/* e.g. return this.stubArith.cosine(call, [args], options) */}}
+        {{ if $eMethod.Returns }}return{{ end }}  this.{{ $eMethod.LocalWrapperVarName }}.{{ $eMethod.Name }}(call{{ $eMethod.CallingArgs }});
+    }
+{{ end }} {{/* end range .EmbedMethods */}}
+
+}
+`
+
+// serverWrapperMethod holds the template data for a method implemented
+// directly by the wrapped server.
+type serverWrapperMethod struct {
+	AccessModifier  string
+	CallingArgs     string
+	DeclarationArgs string
+	IsStreaming     bool
+	Name            string
+	RecvType        string
+	RetType         string
+	Returns         bool
+	SendType        string
+}
+
+// serverWrapperEmbedMethod holds the template data for a method inherited
+// from an embedded interface and delegated to its wrapper.
+type serverWrapperEmbedMethod struct {
+	AccessModifier      string
+	CallingArgs         string
+	DeclarationArgs     string
+	LocalWrapperVarName string
+	Name                string
+	RetType             string
+	Returns             bool
+}
+
+// serverWrapperEmbed holds the template data for one embedded interface's
+// wrapper field.
+type serverWrapperEmbed struct {
+	LocalWrapperVarName string
+	WrapperClassName    string
+}
+
+// methodTag holds the Java expressions for a single method tag's value and
+// reflect type.
+type methodTag struct {
+	Value string
+	Type  string
+}
+
+// processServerWrapperMethod builds the template data for a method defined
+// directly on the interface being wrapped.
+func processServerWrapperMethod(iface *compile.Interface, method *compile.Method, env *compile.Env) serverWrapperMethod {
+	streaming := isStreamingMethod(method)
+	return serverWrapperMethod{
+		Name:            vdlutil.ToCamelCase(method.Name),
+		AccessModifier:  accessModifierForName(method.Name),
+		CallingArgs:     javaCallingArgStr(method.InArgs, true),
+		DeclarationArgs: javaDeclarationArgStr(method.InArgs, env, true),
+		IsStreaming:     streaming,
+		Returns:         len(method.OutArgs) > 0,
+		RetType:         clientInterfaceOutArg(iface, method, true, env),
+		SendType:        javaType(method.OutStream, true, env),
+		RecvType:        javaType(method.InStream, true, env),
+	}
+}
+
+// processServerWrapperEmbedMethod builds the template data for a method that
+// is delegated to the wrapper of the embedded interface it originates from.
+func processServerWrapperEmbedMethod(iface *compile.Interface, embedMethod *compile.Method, env *compile.Env) serverWrapperEmbedMethod {
+	return serverWrapperEmbedMethod{
+		Name:                vdlutil.ToCamelCase(embedMethod.Name),
+		AccessModifier:      accessModifierForName(embedMethod.Name),
+		CallingArgs:         javaCallingArgStr(embedMethod.InArgs, true),
+		DeclarationArgs:     javaDeclarationArgStr(embedMethod.InArgs, env, true),
+		// Matches the wrapper field name generated for the embedded interface.
+		LocalWrapperVarName: vdlutil.ToCamelCase(iface.Name) + "Wrapper",
+		RetType:             clientInterfaceOutArg(iface, embedMethod, true, env),
+		Returns:             len(embedMethod.OutArgs) > 0,
+	}
+}
+
+// genJavaServerWrapperFile generates a java file containing a server wrapper
+// for the specified interface.  The wrapper delegates method calls to the
+// underlying server implementation and to wrappers of any embedded
+// interfaces, and exposes per-method tag lookup.
+func genJavaServerWrapperFile(iface *compile.Interface, env *compile.Env) JavaFileInfo {
+	// One wrapper field per (transitively) embedded interface.
+	embeds := []serverWrapperEmbed{}
+	for _, embed := range allEmbeddedIfaces(iface) {
+		embeds = append(embeds, serverWrapperEmbed{
+			WrapperClassName:    javaPath(javaGenPkgPath(path.Join(embed.File.Package.GenPath, toUpperCamelCase(embed.Name+"ServerWrapper")))),
+			LocalWrapperVarName: vdlutil.ToCamelCase(embed.Name) + "Wrapper",
+		})
+	}
+	methodTags := make(map[string][]methodTag)
+	// Add generated methods to the tag map:
+	methodTags["signature"] = []methodTag{}
+	methodTags["getMethodTags"] = []methodTag{}
+	// Copy method tags off of the interface.
+	for _, method := range iface.Methods {
+		tags := make([]methodTag, len(method.Tags))
+		for i, tag := range method.Tags {
+			tags[i].Value = javaConstVal(tag, env)
+			tags[i].Type = javaReflectType(tag.Type(), env)
+		}
+		methodTags[vdlutil.ToCamelCase(method.Name)] = tags
+	}
+	// Methods inherited from embedded interfaces are delegated to the
+	// corresponding embedded wrapper.
+	embedMethods := []serverWrapperEmbedMethod{}
+	for _, embedMao := range dedupedEmbeddedMethodAndOrigins(iface) {
+		embedMethods = append(embedMethods, processServerWrapperEmbedMethod(embedMao.Origin, embedMao.Method, env))
+	}
+	methods := make([]serverWrapperMethod, len(iface.Methods))
+	for i, method := range iface.Methods {
+		methods[i] = processServerWrapperMethod(iface, method, env)
+	}
+	javaServiceName := toUpperCamelCase(iface.Name)
+	data := struct {
+		AccessModifier  string
+		EmbedMethods    []serverWrapperEmbedMethod
+		Embeds          []serverWrapperEmbed
+		FullServiceName string
+		Methods         []serverWrapperMethod
+		MethodTags      map[string][]methodTag
+		PackagePath     string
+		ServiceName     string
+		Source          string
+	}{
+		AccessModifier:  accessModifierForName(iface.Name),
+		EmbedMethods:    embedMethods,
+		Embeds:          embeds,
+		FullServiceName: javaPath(interfaceFullyQualifiedName(iface)),
+		Methods:         methods,
+		MethodTags:      methodTags,
+		PackagePath:     javaPath(javaGenPkgPath(iface.File.Package.GenPath)),
+		ServiceName:     javaServiceName,
+		Source:          iface.File.BaseName,
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("server wrapper", serverWrapperTmpl).Execute(&buf, data)
+	if err != nil {
+		log.Fatalf("vdl: couldn't execute server wrapper template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaServiceName + "ServerWrapper.java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_set.go b/lib/vdl/codegen/java/file_set.go
new file mode 100644
index 0000000..929bb9e
--- /dev/null
+++ b/lib/vdl/codegen/java/file_set.go
@@ -0,0 +1,90 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+)
+
+// setTmpl is the template for the Java class generated for a named VDL set
+// type.
+const setTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{.SourceFile}}
+
+package {{.Package}};
+
+/**
+ * {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends io.v.v23.vdl.VdlSet<{{.KeyType}}> {
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    public {{.Name}}(java.util.Set<{{.KeyType}}> impl) {
+        super(VDL_TYPE, impl);
+    }
+
+    public {{.Name}}() {
+        this(new java.util.HashSet<{{.KeyType}}>());
+    }
+
+    @Override
+    public void writeToParcel(android.os.Parcel out, int flags) {
+        java.lang.reflect.Type keyType =
+                new com.google.common.reflect.TypeToken<{{.KeyType}}>(){}.getType();
+        io.v.v23.vdl.ParcelUtil.writeSet(out, this, keyType);
+    }
+
+    @SuppressWarnings("hiding")
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR =
+            new android.os.Parcelable.Creator<{{.Name}}>() {
+        @SuppressWarnings("unchecked")
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            java.lang.reflect.Type keyType =
+                    new com.google.common.reflect.TypeToken<{{.KeyType}}>(){}.getType();
+            java.util.Set<?> set = io.v.v23.vdl.ParcelUtil.readSet(
+                    in, {{.Name}}.class.getClassLoader(), keyType);
+            return new {{.Name}}((java.util.Set<{{.KeyType}}>) set);
+        }
+
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+}
+`
+
+// genJavaSetFile generates the Java class file for the provided named set type.
+func genJavaSetFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	name := toUpperCamelCase(tdef.Name)
+	// Assemble the values consumed by setTmpl.
+	tmplData := struct {
+		AccessModifier string
+		Doc            string
+		KeyType        string
+		Name           string
+		Package        string
+		SourceFile     string
+		VdlTypeName    string
+		VdlTypeString  string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		Doc:            javaDocInComment(tdef.Doc),
+		KeyType:        javaType(tdef.Type.Key(), true, env),
+		Name:           name,
+		Package:        javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		SourceFile:     tdef.File.BaseName,
+		VdlTypeName:    tdef.Type.Name(),
+		VdlTypeString:  tdef.Type.String(),
+	}
+	var out bytes.Buffer
+	if err := parseTmpl("set", setTmpl).Execute(&out, tmplData); err != nil {
+		log.Fatalf("vdl: couldn't execute set template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: name + ".java",
+		Data: out.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_struct.go b/lib/vdl/codegen/java/file_struct.go
new file mode 100644
index 0000000..fc975cf
--- /dev/null
+++ b/lib/vdl/codegen/java/file_struct.go
@@ -0,0 +1,233 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// structTmpl is the template for the Java class generated for a named VDL
+// struct type.  All Java types in the generated code are fully qualified
+// (hence java.lang.String below) so generated classes never collide with
+// same-named classes in the target package.
+const structTmpl = `// This file was auto-generated by the veyron vdl tool.
+// Source: {{.Source}}
+package {{.PackagePath}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} final class {{.Name}} extends io.v.v23.vdl.AbstractVdlStruct
+        implements android.os.Parcelable {
+    {{/* Field declarations */}}
+    {{ range $index, $field := .Fields }}
+      @io.v.v23.vdl.GeneratedFromVdl(name = "{{$field.Name}}", index = {{$index}})
+      private {{$field.Type}} {{$field.LowercaseName}};
+    {{ end }}
+
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    {{/* Constructors */}}
+    public {{.Name}}() {
+        super(VDL_TYPE);
+        {{ range $field := .Fields }}
+            this.{{$field.LowercaseName}} = {{$field.ZeroValue}};
+        {{ end }}
+    }
+
+    {{ if .FieldsAsArgs }}
+    public {{.Name}}({{ .FieldsAsArgs }}) {
+        super(VDL_TYPE);
+        {{ range $field := .Fields }}
+            this.{{$field.LowercaseName}} = {{$field.LowercaseName}};
+        {{ end }}
+    }
+    {{ end }}
+
+    {{/* Getters and setters */}}
+    {{ range $field := .Fields }}
+    {{ $field.AccessModifier }} {{$field.Type}} get{{$field.Name}}() {
+        return this.{{$field.LowercaseName}};
+    }
+
+    {{ $field.AccessModifier }} void set{{$field.Name}}({{$field.Type}} {{$field.LowercaseName}}) {
+        this.{{$field.LowercaseName}} = {{$field.LowercaseName}};
+    }
+    {{ end }}
+
+    @Override
+    public boolean equals(java.lang.Object obj) {
+        if (this == obj) return true;
+        if (obj == null) return false;
+        if (this.getClass() != obj.getClass()) return false;
+        final {{.Name}} other = ({{.Name}})obj;
+
+        {{ range $field := .Fields }}
+        {{ if .IsArray }}
+        if (!java.util.Arrays.equals(this.{{$field.LowercaseName}}, other.{{$field.LowercaseName}})) {
+            return false;
+        }
+        {{ else }}
+        {{ if .IsClass }}
+        if (this.{{$field.LowercaseName}} == null) {
+            if (other.{{$field.LowercaseName}} != null) {
+                return false;
+            }
+        } else if (!this.{{$field.LowercaseName}}.equals(other.{{$field.LowercaseName}})) {
+            return false;
+        }
+        {{ else }}
+        if (this.{{$field.LowercaseName}} != other.{{$field.LowercaseName}}) {
+            return false;
+        }
+        {{ end }} {{/* if is class */}}
+        {{ end }} {{/* if is array */}}
+        {{ end }} {{/* range over fields */}}
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = 1;
+        final int prime = 31;
+        {{ range $field := .Fields }}
+        result = prime * result + {{$field.HashcodeComputation}};
+        {{ end }}
+        return result;
+    }
+
+    @Override
+    public java.lang.String toString() {
+        java.lang.String result = "{";
+        {{ range $index, $field := .Fields }}
+            {{ if gt $index 0 }}
+                result += ", ";
+            {{ end }}
+            {{ if .IsArray }}
+                result += "{{$field.LowercaseName}}:" + java.util.Arrays.toString({{$field.LowercaseName}});
+            {{ else }}
+            result += "{{$field.LowercaseName}}:" + {{$field.LowercaseName}};
+            {{ end}} {{/* if is array */}}
+        {{ end }} {{/* range over fields */}}
+        return result + "}";
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    @Override
+    public void writeToParcel(android.os.Parcel out, int flags) {
+        {{ range $field := .Fields }}
+        try {
+            io.v.v23.vdl.ParcelUtil.writeValue(out, {{$field.LowercaseName}},
+                getClass().getDeclaredField("{{$field.LowercaseName}}").getGenericType());
+        } catch (NoSuchFieldException e) {
+            // do nothing
+        }
+        {{ end }}
+    }
+
+    public static final android.os.Parcelable.Creator<{{.Name}}> CREATOR
+        = new android.os.Parcelable.Creator<{{.Name}}>() {
+        @SuppressWarnings("unchecked")
+        @Override
+        public {{.Name}} createFromParcel(android.os.Parcel in) {
+            {{.Name}} value = new {{.Name}}();
+            {{ range $field := .Fields }}
+            try {
+                value.set{{$field.Name}}(({{$field.Class}}) io.v.v23.vdl.ParcelUtil.readValue(
+                in, value.getClass().getClassLoader(), value.getClass().getDeclaredField("{{$field.LowercaseName}}").getGenericType()));
+            } catch (NoSuchFieldException e) {
+                // do nothing
+            }
+            {{ end }}
+            return value;
+        }
+
+        @Override
+        public {{.Name}}[] newArray(int size) {
+            return new {{.Name}}[size];
+        }
+    };
+}`
+
+// structDefinitionField holds the template data for a single field of a
+// generated struct class (names, Java types, and the pieces needed by the
+// generated equals/hashCode/toString methods).
+type structDefinitionField struct {
+	AccessModifier      string
+	Class               string
+	HashcodeComputation string
+	IsClass             bool
+	IsArray             bool
+	LowercaseName       string
+	Name                string
+	Type                string
+	ZeroValue           string
+}
+
+// javaFieldArgStr returns a comma-separated list of "final <type> <name>"
+// constructor parameters, one per field of the given struct type.
+func javaFieldArgStr(structType *vdl.Type, env *compile.Env) string {
+	var b bytes.Buffer
+	for i := 0; i < structType.NumField(); i++ {
+		if b.Len() > 0 {
+			b.WriteString(", ")
+		}
+		fld := structType.Field(i)
+		b.WriteString("final " + javaType(fld.Type, false, env) + " " + vdlutil.ToCamelCase(fld.Name))
+	}
+	return b.String()
+}
+
+// genJavaStructFile generates the Java class file for the provided
+// user-defined struct type.
+func genJavaStructFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	// Build per-field template data: names, Java types, zero values, and the
+	// pieces needed by the generated equals/hashCode/toString methods.
+	fields := make([]structDefinitionField, tdef.Type.NumField())
+	for i := 0; i < tdef.Type.NumField(); i++ {
+		fld := tdef.Type.Field(i)
+		fields[i] = structDefinitionField{
+			AccessModifier:      accessModifierForName(fld.Name),
+			Class:               javaType(fld.Type, true, env),
+			HashcodeComputation: javaHashCode(vdlutil.ToCamelCase(fld.Name), fld.Type, env),
+			IsClass:             isClass(fld.Type, env),
+			IsArray:             isJavaNativeArray(fld.Type, env),
+			LowercaseName:       vdlutil.ToCamelCase(fld.Name),
+			Name:                fld.Name,
+			Type:                javaType(fld.Type, false, env),
+			ZeroValue:           javaZeroValue(fld.Type, env),
+		}
+	}
+
+	javaTypeName := toUpperCamelCase(tdef.Name)
+	data := struct {
+		AccessModifier string
+		Doc            string
+		Fields         []structDefinitionField
+		FieldsAsArgs   string
+		Name           string
+		PackagePath    string
+		Source         string
+		VdlTypeName    string
+		VdlTypeString  string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		Doc:            javaDocInComment(tdef.Doc),
+		Fields:         fields,
+		FieldsAsArgs:   javaFieldArgStr(tdef.Type, env),
+		Name:           javaTypeName,
+		PackagePath:    javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		Source:         tdef.File.BaseName,
+		VdlTypeName:    tdef.Type.Name(),
+		VdlTypeString:  tdef.Type.String(),
+	}
+	var buf bytes.Buffer
+	err := parseTmpl("struct", structTmpl).Execute(&buf, data)
+	if err != nil {
+		log.Fatalf("vdl: couldn't execute struct template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: javaTypeName + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/file_union.go b/lib/vdl/codegen/java/file_union.go
new file mode 100644
index 0000000..2d86b62
--- /dev/null
+++ b/lib/vdl/codegen/java/file_union.go
@@ -0,0 +1,105 @@
+package java
+
+import (
+	"bytes"
+	"log"
+
+	"v.io/v23/vdl/compile"
+)
+
+// unionTmpl is the Go template producing the Java class for a VDL union type.
+// Each union field becomes a static inner class extending the generated union
+// class; the template is executed with the anonymous data struct built in
+// genJavaUnionFile.
+const unionTmpl = `
+// This file was auto-generated by the veyron vdl tool.
+// Source: {{.Source}}
+package {{.PackagePath}};
+
+/**
+ * type {{.Name}} {{.VdlTypeString}} {{.Doc}}
+ **/
+@io.v.v23.vdl.GeneratedFromVdl(name = "{{.VdlTypeName}}")
+{{ .AccessModifier }} class {{.Name}} extends io.v.v23.vdl.VdlUnion {
+    {{ range $index, $field := .Fields }}
+    @io.v.v23.vdl.GeneratedFromVdl(name = "{{$field.Name}}", index = {{$index}})
+    public static class {{$field.Name}} extends {{$.Name}} {
+        private {{$field.Type}} elem;
+
+        public {{$field.Name}}({{$field.Type}} elem) {
+            super({{$index}}, elem);
+            this.elem = elem;
+        }
+
+        public {{$field.Name}}() {
+            this({{$field.ZeroValue}});
+        }
+
+        @Override
+        public {{$field.Class}} getElem() {
+            return elem;
+        }
+
+        @Override
+        public int hashCode() {
+            return {{$field.HashcodeComputation}};
+        }
+    }
+    {{ end }}
+
+    public static final io.v.v23.vdl.VdlType VDL_TYPE =
+            io.v.v23.vdl.Types.getVdlTypeFromReflect({{.Name}}.class);
+
+    public {{.Name}}(int index, Object value) {
+        super(VDL_TYPE, index, value);
+    }
+}
+`
+
+// unionDefinitionField holds the template parameters for one field of a union.
+type unionDefinitionField struct {
+	Class               string // Java class type of the field (boxed form)
+	HashcodeComputation string // Java expression computing the field's hash code
+	Name                string // VDL name of the field
+	Type                string // Java type of the field, possibly primitive
+	ZeroValue           string // Java expression for the field's zero value
+}
+
+// genJavaUnionFile generates the Java class file for the provided user-defined union type.
+func genJavaUnionFile(tdef *compile.TypeDef, env *compile.Env) JavaFileInfo {
+	// Collect the per-field template parameters.
+	numFields := tdef.Type.NumField()
+	fields := make([]unionDefinitionField, 0, numFields)
+	for i := 0; i < numFields; i++ {
+		f := tdef.Type.Field(i)
+		fields = append(fields, unionDefinitionField{
+			Class:               javaType(f.Type, true, env),
+			HashcodeComputation: javaHashCode("elem", f.Type, env),
+			Name:                f.Name,
+			Type:                javaType(f.Type, false, env),
+			ZeroValue:           javaZeroValue(f.Type, env),
+		})
+	}
+	className := toUpperCamelCase(tdef.Name)
+	data := struct {
+		AccessModifier string
+		Doc            string
+		Fields         []unionDefinitionField
+		Name           string
+		PackagePath    string
+		Source         string
+		VdlTypeName    string
+		VdlTypeString  string
+	}{
+		AccessModifier: accessModifierForName(tdef.Name),
+		Doc:            javaDocInComment(tdef.Doc),
+		Fields:         fields,
+		Name:           className,
+		PackagePath:    javaPath(javaGenPkgPath(tdef.File.Package.GenPath)),
+		Source:         tdef.File.BaseName,
+		VdlTypeName:    tdef.Type.Name(),
+		VdlTypeString:  tdef.Type.String(),
+	}
+	var buf bytes.Buffer
+	if err := parseTmpl("union", unionTmpl).Execute(&buf, data); err != nil {
+		log.Fatalf("vdl: couldn't execute union template: %v", err)
+	}
+	return JavaFileInfo{
+		Name: className + ".java",
+		Data: buf.Bytes(),
+	}
+}
diff --git a/lib/vdl/codegen/java/generate.go b/lib/vdl/codegen/java/generate.go
new file mode 100644
index 0000000..58a8cd2
--- /dev/null
+++ b/lib/vdl/codegen/java/generate.go
@@ -0,0 +1,121 @@
+// Package java implements Java code generation from compiled VDL packages.
+package java
+
+import (
+	"path"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// pkgPathXlator is the function used to translate a VDL package path
+// into a Java package path.  If nil, no translation takes place.
+// NOTE(review): this is unsynchronized package-level state; callers should
+// set it before generation starts — confirm there is no concurrent use.
+var pkgPathXlator func(path string) string
+
+// SetPkgPathXlator sets the function used to translate a VDL package
+// path into a Java package path.
+func SetPkgPathXlator(xlator func(path string) string) {
+	pkgPathXlator = xlator
+}
+
+// javaGenPkgPath returns the Java package path given the VDL package path.
+// When no translator has been registered via SetPkgPathXlator, the VDL path
+// is returned unchanged.
+func javaGenPkgPath(vdlPkgPath string) string {
+	if xlate := pkgPathXlator; xlate != nil {
+		return xlate(vdlPkgPath)
+	}
+	return vdlPkgPath
+}
+
+// JavaFileInfo stores the name and contents of the generated Java file.
+type JavaFileInfo struct {
+	Dir  string // output subdirectory — not populated by the generators visible here; TODO confirm intended use
+	Name string // file name, e.g. "MyType.java"
+	Data []byte // generated file contents
+}
+
+// Generate generates Java files for all VDL files in the provided package,
+// returning the list of generated Java files as a slice.  Since Java requires
+// that each public class/interface gets defined in a separate file, this method
+// will return one generated file per type definition.  (Interfaces actually
+// generate multiple files, since separate client/server interfaces, stubs,
+// wrappers and factories are created.)  In addition, since Java doesn't support
+// global variables (i.e., variables defined outside of a class), all constants
+// are moved into a special "Consts" class and stored in a separate file.  All
+// client bindings are stored in a separate Client.java file. Finally, package
+// documentation (if any) is stored in a "package-info.java" file.
+//
+// TODO(spetrovic): Run Java formatters on the generated files.
+func Generate(pkg *compile.Package, env *compile.Env) (ret []JavaFileInfo) {
+	validateJavaConfig(pkg, env)
+	// One file for package documentation (if any).
+	if g := genJavaPackageFile(pkg, env); g != nil {
+		ret = append(ret, *g)
+	}
+	// Single file for all constants' definitions.
+	if g := genJavaConstFile(pkg, env); g != nil {
+		ret = append(ret, *g)
+	}
+	// Single file for all errors' definitions.
+	if g := genJavaErrorFile(pkg, env); g != nil {
+		ret = append(ret, *g)
+	}
+	for _, file := range pkg.Files {
+		// Separate file for all typedefs.
+		for _, tdef := range file.TypeDefs {
+			switch tdef.Type.Kind() {
+			case vdl.Array:
+				ret = append(ret, genJavaArrayFile(tdef, env))
+			case vdl.Complex64, vdl.Complex128:
+				ret = append(ret, genJavaComplexFile(tdef, env))
+			case vdl.Enum:
+				ret = append(ret, genJavaEnumFile(tdef, env))
+			case vdl.List:
+				ret = append(ret, genJavaListFile(tdef, env))
+			case vdl.Map:
+				ret = append(ret, genJavaMapFile(tdef, env))
+			case vdl.Union:
+				ret = append(ret, genJavaUnionFile(tdef, env))
+			case vdl.Set:
+				ret = append(ret, genJavaSetFile(tdef, env))
+			case vdl.Struct:
+				ret = append(ret, genJavaStructFile(tdef, env))
+			default:
+				// All remaining kinds get a primitive wrapper class.
+				ret = append(ret, genJavaPrimitiveFile(tdef, env))
+			}
+		}
+		// Separate file for all interface definitions.
+		for _, iface := range file.Interfaces {
+			ret = append(ret, genJavaClientFactoryFile(iface, env))
+			ret = append(ret, genJavaClientInterfaceFile(iface, env)) // client interface
+			ret = append(ret, genJavaClientStubFile(iface, env))
+			ret = append(ret, genJavaServerInterfaceFile(iface, env)) // server interface
+			ret = append(ret, genJavaServerWrapperFile(iface, env))
+		}
+	}
+	return
+}
+
+// The native types feature is hard to use correctly.  E.g. the wire type
+// must be statically registered in Java vdl package in order for the
+// wire<->native conversion to work, which is hard to ensure.
+//
+// Restrict the feature to these whitelisted VDL packages for now.
+// Consulted by validateJavaConfig when checking Java.WireToNativeTypes.
+var nativeTypePackageWhitelist = map[string]bool{
+	"time": true,
+	"v.io/v23/vdl/testdata/nativetest": true,
+}
+
+// validateJavaConfig checks the package's vdl.config settings that affect Java
+// code generation, reporting any problems through env.Errors.
+func validateJavaConfig(pkg *compile.Package, env *compile.Env) {
+	vdlconfig := path.Join(pkg.GenPath, "vdl.config")
+	// Validate native type configuration.  Since native types are hard to use, we
+	// restrict them to a built-in whitelist of packages for now.
+	if len(pkg.Config.Java.WireToNativeTypes) > 0 && !nativeTypePackageWhitelist[pkg.Path] {
+		env.Errors.Errorf("%s: Java.WireToNativeTypes is restricted to whitelisted VDL packages", vdlconfig)
+	}
+	// Make sure each wire type is actually defined in the package.
+	// (Idiomatic single-variable range; the map value is unused.)
+	for wire := range pkg.Config.Java.WireToNativeTypes {
+		if def := pkg.ResolveType(wire); def == nil {
+			env.Errors.Errorf("%s: type %s specified in Java.WireToNativeTypes undefined", vdlconfig, wire)
+		}
+	}
+}
diff --git a/lib/vdl/codegen/java/util.go b/lib/vdl/codegen/java/util.go
new file mode 100644
index 0000000..d681314
--- /dev/null
+++ b/lib/vdl/codegen/java/util.go
@@ -0,0 +1,17 @@
+package java
+
+import (
+	"unicode"
+	"unicode/utf8"
+)
+
+// accessModifierForName returns the Java access modifier given the name.
+// It follows VDL naming conventions, indicating that an uppercase name
+// denotes a public type and a lowercase name a package-protected type.
+func accessModifierForName(name string) string {
+	first, _ := utf8.DecodeRuneInString(name)
+	if !unicode.IsUpper(first) {
+		return ""
+	}
+	return "public"
+}
diff --git a/lib/vdl/codegen/java/util_args.go b/lib/vdl/codegen/java/util_args.go
new file mode 100644
index 0000000..340d91b
--- /dev/null
+++ b/lib/vdl/codegen/java/util_args.go
@@ -0,0 +1,59 @@
+package java
+
+import (
+	"bytes"
+	"fmt"
+
+	"v.io/v23/vdl/compile"
+)
+
+// javaDeclarationArgStr creates a comma separated string of args to be used in
+// a function declaration, e.g. "final int x, final Object o".  Unnamed args
+// are given positional names arg1, arg2, ...
+func javaDeclarationArgStr(args []*compile.Field, env *compile.Env, leadingComma bool) string {
+	var out bytes.Buffer
+	for i, arg := range args {
+		if i > 0 || leadingComma {
+			out.WriteString(", ")
+		}
+		name := arg.Name
+		if name == "" {
+			name = fmt.Sprintf("arg%d", i+1)
+		}
+		out.WriteString("final ")
+		out.WriteString(javaType(arg.Type, false, env))
+		out.WriteString(" ")
+		out.WriteString(name)
+	}
+	return out.String()
+}
+
+// javaCallingArgStr creates a comma separated string of args to be used in
+// calling a function, e.g. "x, o".  Unnamed args are given positional names
+// arg1, arg2, ... matching javaDeclarationArgStr.
+func javaCallingArgStr(args []*compile.Field, leadingComma bool) string {
+	var out bytes.Buffer
+	for i, arg := range args {
+		if i > 0 || leadingComma {
+			out.WriteString(", ")
+		}
+		name := arg.Name
+		if name == "" {
+			name = fmt.Sprintf("arg%d", i+1)
+		}
+		out.WriteString(name)
+	}
+	return out.String()
+}
+
+// javaCallingArgTypeStr creates a comma separated string of arg types,
+// each expressed as a Guava TypeToken reflection expression.
+func javaCallingArgTypeStr(args []*compile.Field, env *compile.Env) string {
+	var out bytes.Buffer
+	for i, arg := range args {
+		if i != 0 {
+			out.WriteString(", ")
+		}
+		out.WriteString(fmt.Sprintf("new com.google.common.reflect.TypeToken<%s>(){}.getType()", javaType(arg.Type, true, env)))
+	}
+	return out.String()
+}
diff --git a/lib/vdl/codegen/java/util_case.go b/lib/vdl/codegen/java/util_case.go
new file mode 100644
index 0000000..76ded03
--- /dev/null
+++ b/lib/vdl/codegen/java/util_case.go
@@ -0,0 +1,15 @@
+package java
+
+import (
+	"unicode"
+	"unicode/utf8"
+)
+
+// toUpperCamelCase converts thisString to ThisString.
+// Only the first rune is upcased; the rest of the string is untouched.
+func toUpperCamelCase(s string) string {
+	first, size := utf8.DecodeRuneInString(s)
+	if size == 0 {
+		// Empty input decodes to a zero-width rune; nothing to upcase.
+		return ""
+	}
+	return string(unicode.ToUpper(first)) + s[size:]
+}
diff --git a/lib/vdl/codegen/java/util_doc.go b/lib/vdl/codegen/java/util_doc.go
new file mode 100644
index 0000000..0b2abf8
--- /dev/null
+++ b/lib/vdl/codegen/java/util_doc.go
@@ -0,0 +1,40 @@
+package java
+
+import (
+	"strings"
+)
+
+// javaRawComment extracts a raw language-independent comment from a VDL
+// comment: it strips comment markers ("/**", "/*", "*/", "//" and leading
+// " *" continuation markers), trims each line, and trims the whole result.
+func javaRawComment(vdlComment string) string {
+	if vdlComment == "" {
+		return ""
+	}
+	comment := strings.Replace(vdlComment, "/**", "", -1)
+	comment = strings.Replace(comment, "/*", "", -1)
+	comment = strings.Replace(comment, "*/", "", -1)
+	comment = strings.Replace(comment, "//", "", -1)
+	comment = strings.Replace(comment, "\n *", "\n", -1)
+	splitComment := strings.Split(comment, "\n")
+	// Idiomatic single-variable range (the element value is unused here).
+	for i := range splitComment {
+		splitComment[i] = strings.TrimSpace(splitComment[i])
+	}
+	return strings.TrimSpace(strings.Join(splitComment, "\n"))
+}
+
+// javaDocInComment transforms a VDL comment to javadoc style, but without
+// starting a new comment (i.e. this assumes that the output will be placed
+// within an existing /**  */ comment).
+func javaDocInComment(vdlComment string) string {
+	if vdlComment == "" {
+		return ""
+	}
+	body := strings.Replace(javaRawComment(vdlComment), "\n", "\n * ", -1)
+	return "\n * " + body
+}
+
+// javaDoc transforms the provided VDL comment into the JavaDoc format,
+// starting a new javadoc comment block.  Empty input produces empty output.
+func javaDoc(vdlComment string) string {
+	if vdlComment != "" {
+		return "/**" + javaDocInComment(vdlComment) + "\n */\n"
+	}
+	return ""
+}
diff --git a/lib/vdl/codegen/java/util_file.go b/lib/vdl/codegen/java/util_file.go
new file mode 100644
index 0000000..75f18dd
--- /dev/null
+++ b/lib/vdl/codegen/java/util_file.go
@@ -0,0 +1,19 @@
+package java
+
+import (
+	"bytes"
+
+	"v.io/v23/vdl/compile"
+)
+
+// javaFileNames constructs a comma separated string with the short (basename)
+// of the input files.
+func javaFileNames(files []*compile.File) string {
+	if len(files) == 0 {
+		return ""
+	}
+	var buf bytes.Buffer
+	buf.WriteString(files[0].BaseName)
+	for _, file := range files[1:] {
+		buf.WriteString(", ")
+		buf.WriteString(file.BaseName)
+	}
+	return buf.String()
+}
diff --git a/lib/vdl/codegen/java/util_interface.go b/lib/vdl/codegen/java/util_interface.go
new file mode 100644
index 0000000..dc06ba6
--- /dev/null
+++ b/lib/vdl/codegen/java/util_interface.go
@@ -0,0 +1,63 @@
+package java
+
+import (
+	"bytes"
+	"path"
+
+	"v.io/v23/vdl/compile"
+)
+
+// allEmbeddedIfaces returns all unique interfaces in the embed tree
+// starting at the provided interface (not including that interface).
+// Uniqueness is keyed on the generated package path joined with the Java
+// class name, so an interface reachable via several embed paths is listed
+// only once.
+func allEmbeddedIfaces(iface *compile.Interface) (ret []*compile.Interface) {
+	added := make(map[string]bool)
+	for _, eIface := range iface.Embeds {
+		// Visit eIface's transitive embeds first, then eIface itself.  Note
+		// the '=' (not ':='): the inner loop deliberately reuses the outer
+		// loop variable.
+		for _, eIface = range append(allEmbeddedIfaces(eIface), eIface) {
+			// The local 'path' shadows the imported "path" package below.
+			path := path.Join(eIface.File.Package.GenPath, toUpperCamelCase(eIface.Name))
+			if _, ok := added[path]; ok { // already added iface
+				continue
+			}
+			ret = append(ret, eIface)
+			added[path] = true
+		}
+	}
+	return
+}
+
+// interfaceFullyQualifiedName outputs the fully qualified name of an interface,
+// e.g. "com.a.B".
+func interfaceFullyQualifiedName(iface *compile.Interface) string {
+	pkg := javaGenPkgPath(iface.File.Package.GenPath)
+	return path.Join(pkg, toUpperCamelCase(iface.Name))
+}
+
+// javaClientExtendsStr creates an extends clause for a client interface,
+// e.g. "extends com.a.BClient, com.d.EClient, io.v.v23.ipc.UniversalServiceMethods".
+// UniversalServiceMethods is always the final entry.
+func javaClientExtendsStr(embeds []*compile.Interface) string {
+	var buf bytes.Buffer
+	buf.WriteString("extends ")
+	for _, embed := range embeds {
+		buf.WriteString(javaPath(interfaceFullyQualifiedName(embed)))
+		buf.WriteString("Client, ")
+	}
+	buf.WriteString("io.v.v23.ipc.UniversalServiceMethods")
+	return buf.String()
+}
+
+// javaServerExtendsStr creates an extends clause for a server interface,
+// e.g. "extends com.a.BServer, com.d.EServer".  Returns the empty string when
+// there are no embeds.
+func javaServerExtendsStr(embeds []*compile.Interface) string {
+	if len(embeds) == 0 {
+		return ""
+	}
+	var buf bytes.Buffer
+	buf.WriteString("extends ")
+	buf.WriteString(javaPath(interfaceFullyQualifiedName(embeds[0])))
+	buf.WriteString("Server")
+	for _, embed := range embeds[1:] {
+		buf.WriteString(", ")
+		buf.WriteString(javaPath(interfaceFullyQualifiedName(embed)))
+		buf.WriteString("Server")
+	}
+	return buf.String()
+}
diff --git a/lib/vdl/codegen/java/util_method.go b/lib/vdl/codegen/java/util_method.go
new file mode 100644
index 0000000..cdf60a7
--- /dev/null
+++ b/lib/vdl/codegen/java/util_method.go
@@ -0,0 +1,92 @@
+package java
+
+import (
+	"bytes"
+	"fmt"
+	"sort"
+
+	"v.io/v23/vdl/compile"
+)
+
+// isStreamingMethod returns true iff the method streams values in either
+// direction (it has an input stream or an output stream).
+func isStreamingMethod(method *compile.Method) bool {
+	return !(method.InStream == nil && method.OutStream == nil)
+}
+
+// methodAndOrigin is a pair of a method and its origin (the interface it came
+// from).  Methods can be defined on multiple interfaces, and embeds can clash
+// for the same method name; keeping the originating interface lets callers
+// disambiguate.
+type methodAndOrigin struct {
+	Method *compile.Method    // the method itself
+	Origin *compile.Interface // the interface on which Method is defined
+}
+
+// allMethodsAndOrigin constructs a list of all methods in an interface
+// (including embedded interfaces) along with their corresponding origin
+// interface.  Directly-declared methods come first, followed by the methods
+// of each embed in declaration order.
+func allMethodsAndOrigin(iface *compile.Interface) []methodAndOrigin {
+	result := make([]methodAndOrigin, 0, len(iface.Methods))
+	for _, method := range iface.Methods {
+		result = append(result, methodAndOrigin{Method: method, Origin: iface})
+	}
+	for _, embed := range iface.Embeds {
+		result = append(result, allMethodsAndOrigin(embed)...)
+	}
+	return result
+}
+
+// dedupedEmbeddedMethodAndOrigins returns the set of methods only defined in
+// embedded interfaces, deduping methods with the same name.  This is used to
+// generate the methods of a service that are not (re)defined in the interface
+// body (and instead appear only in embeddings).  The result is sorted by
+// signature for deterministic output.
+func dedupedEmbeddedMethodAndOrigins(iface *compile.Interface) []methodAndOrigin {
+	// Seed the seen-set with methods declared directly on the interface, so
+	// they are excluded from the result.
+	seen := map[string]bool{}
+	for _, method := range iface.Methods {
+		seen[method.Name] = true
+	}
+
+	// Keep the first occurrence of each embedded-only method name.
+	ret := []methodAndOrigin{}
+	for _, mao := range allMethodsAndOrigin(iface) {
+		if seen[mao.Method.Name] {
+			continue
+		}
+		seen[mao.Method.Name] = true
+		ret = append(ret, mao)
+	}
+	sort.Sort(bySignature(ret))
+	return ret
+}
+
+// bySignature implements sort.Interface, ordering elements by origin package
+// path, origin interface name, method name, and finally input-argument type
+// names.
+type bySignature []methodAndOrigin
+
+// Len implements sort.Interface.
+func (b bySignature) Len() int {
+	return len(b)
+}
+
+// Less implements sort.Interface.
+func (b bySignature) Less(i, j int) bool {
+	return b.signature(i) < b.signature(j)
+}
+
+// signature builds a '|'-separated sort key for element i.
+func (b bySignature) signature(i int) string {
+	var buf bytes.Buffer
+	buf.WriteString(fmt.Sprintf("%s|%s|%s",
+		b[i].Origin.File.Package.GenPath,
+		b[i].Origin.Name,
+		b[i].Method.Name,
+	))
+	for _, arg := range b[i].Method.InArgs {
+		buf.WriteString(fmt.Sprintf("|%s", arg.Type.Name()))
+	}
+	return buf.String()
+}
+
+// Swap implements sort.Interface.
+func (b bySignature) Swap(i, j int) {
+	b[i], b[j] = b[j], b[i]
+}
diff --git a/lib/vdl/codegen/java/util_path.go b/lib/vdl/codegen/java/util_path.go
new file mode 100644
index 0000000..9798eb9
--- /dev/null
+++ b/lib/vdl/codegen/java/util_path.go
@@ -0,0 +1,11 @@
+package java
+
+import (
+	"strings"
+)
+
+// javaPath converts the provided Go path into the Java path.  It replaces all
+// "/" with "." in the path.
+func javaPath(goPath string) string {
+	return strings.Join(strings.Split(goPath, "/"), ".")
+}
diff --git a/lib/vdl/codegen/java/util_template.go b/lib/vdl/codegen/java/util_template.go
new file mode 100644
index 0000000..74289b4
--- /dev/null
+++ b/lib/vdl/codegen/java/util_template.go
@@ -0,0 +1,20 @@
+package java
+
+import (
+	"text/template"
+)
+
+// tmplCache caches parsed templates, keyed by template name.
+var tmplCache = map[string]*template.Template{}
+
+// parseTmpl parses a template and caches the parsed value.
+// Each template body must be associated with a unique name; subsequent calls
+// with the same name return the cached template regardless of body.
+func parseTmpl(name string, body string) *template.Template {
+	tmpl, ok := tmplCache[name]
+	if !ok {
+		tmpl = template.Must(template.New(name).Parse(body))
+		tmplCache[name] = tmpl
+	}
+	return tmpl
+}
diff --git a/lib/vdl/codegen/java/util_type.go b/lib/vdl/codegen/java/util_type.go
new file mode 100644
index 0000000..7543fa1
--- /dev/null
+++ b/lib/vdl/codegen/java/util_type.go
@@ -0,0 +1,211 @@
+package java
+
+import (
+	"fmt"
+	"log"
+	"path"
+	"strings"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// javaFullyQualifiedNamedType returns the fully qualified Java name for the
+// provided named type definition (e.g. "com.pkg.MyType").  Built-in types are
+// mapped through javaBuiltInType instead of a package-qualified name.
+func javaFullyQualifiedNamedType(def *compile.TypeDef, forceClass bool, env *compile.Env) string {
+	if def.File == compile.BuiltInFile {
+		name, _ := javaBuiltInType(def.Type, forceClass)
+		return name
+	}
+	return javaPath(path.Join(javaGenPkgPath(def.File.Package.GenPath), toUpperCamelCase(def.Name)))
+}
+
+// javaReflectType returns a java.lang.reflect.Type expression (via a Guava
+// TypeToken) for the provided VDL type.
+func javaReflectType(t *vdl.Type, env *compile.Env) string {
+	boxed := javaType(t, true, env)
+	return fmt.Sprintf("new com.google.common.reflect.TypeToken<%s>(){}.getType()", boxed)
+}
+
+// javaBuiltInType returns the type name for the provided built in type
+// definition, forcing the use of a java class (e.g., java.lang.Integer) if so
+// desired.  This method also returns a boolean value indicating whether the
+// returned type is a class.  A nil type maps to void.  Kinds with no built-in
+// Java representation yield ("", false).
+func javaBuiltInType(typ *vdl.Type, forceClass bool) (string, bool) {
+	// pick chooses between the primitive spelling and its boxed class form,
+	// reporting whether the chosen name is a class.  This replaces eight
+	// identical if/else blocks in the switch below.
+	pick := func(primitive, class string) (string, bool) {
+		if forceClass {
+			return class, true
+		}
+		return primitive, false
+	}
+	if typ == nil {
+		return pick("void", "java.lang.Void")
+	}
+	switch typ.Kind() {
+	case vdl.Bool:
+		return pick("boolean", "java.lang.Boolean")
+	case vdl.Byte:
+		return pick("byte", "java.lang.Byte")
+	case vdl.Uint16:
+		return "io.v.v23.vdl.VdlUint16", true
+	case vdl.Int16:
+		return pick("short", "java.lang.Short")
+	case vdl.Uint32:
+		return "io.v.v23.vdl.VdlUint32", true
+	case vdl.Int32:
+		return pick("int", "java.lang.Integer")
+	case vdl.Uint64:
+		return "io.v.v23.vdl.VdlUint64", true
+	case vdl.Int64:
+		return pick("long", "java.lang.Long")
+	case vdl.Float32:
+		return pick("float", "java.lang.Float")
+	case vdl.Float64:
+		return pick("double", "java.lang.Double")
+	case vdl.Complex64:
+		return "io.v.v23.vdl.VdlComplex64", true
+	case vdl.Complex128:
+		return "io.v.v23.vdl.VdlComplex128", true
+	case vdl.String:
+		return "java.lang.String", true
+	case vdl.TypeObject:
+		return "io.v.v23.vdl.VdlTypeObject", true
+	case vdl.Any:
+		return "io.v.v23.vdl.VdlAny", true
+	default:
+		// Composite kinds (array, list, map, ...) are handled by javaType.
+		return "", false
+	}
+}
+
+// javaNativeType returns the Java native type configured for the provided VDL
+// type (via the package's Java.WireToNativeTypes config), and whether one
+// exists.  The built-in error type always maps to VException.
+func javaNativeType(t *vdl.Type, env *compile.Env) (string, bool) {
+	if t == vdl.ErrorType {
+		return "io.v.v23.verror.VException", true
+	}
+	def := env.FindTypeDef(t)
+	if def == nil {
+		return "", false
+	}
+	native, ok := def.File.Package.Config.Java.WireToNativeTypes[def.Name]
+	return native, ok
+}
+
+// javaType returns the Java type string for the provided VDL type, using the
+// boxed class representation when forceClass is set (needed e.g. for generic
+// type arguments).  Calls log.Fatalf on kinds with no Java representation.
+func javaType(t *vdl.Type, forceClass bool, env *compile.Env) string {
+	if t == nil {
+		name, _ := javaBuiltInType(nil, forceClass)
+		return name
+	}
+	if native, ok := javaNativeType(t, env); ok {
+		// A Java native type is configured for this type; use it directly.
+		return native
+	}
+	if def := env.FindTypeDef(t); def != nil {
+		return javaFullyQualifiedNamedType(def, forceClass, env)
+	}
+	switch t.Kind() {
+	case vdl.Array:
+		return fmt.Sprintf("%s[]", javaType(t.Elem(), false, env))
+	case vdl.List:
+		// NOTE(spetrovic): We represent byte lists as Java byte arrays, as it's doubtful anybody
+		// would want to use them as Java lists.
+		if javaType(t.Elem(), false, env) == "byte" {
+			// Plain constant (was a pointless fmt.Sprintf with no verbs).
+			return "byte[]"
+		}
+		return fmt.Sprintf("java.util.List<%s>", javaType(t.Elem(), true, env))
+	case vdl.Set:
+		return fmt.Sprintf("java.util.Set<%s>", javaType(t.Key(), true, env))
+	case vdl.Map:
+		return fmt.Sprintf("java.util.Map<%s, %s>", javaType(t.Key(), true, env), javaType(t.Elem(), true, env))
+	case vdl.Optional:
+		return fmt.Sprintf("io.v.v23.vdl.VdlOptional<%s>", javaType(t.Elem(), true, env))
+	default:
+		log.Fatalf("vdl: javaType unhandled type %v %v", t.Kind(), t)
+		return ""
+	}
+}
+
+// javaVdlPrimitiveType returns the io.v.v23.vdl.Vdl* wrapper class name for
+// the provided primitive kind.  Calls log.Fatalf for non-primitive kinds.
+func javaVdlPrimitiveType(kind vdl.Kind) string {
+	switch kind {
+	case vdl.Bool, vdl.Byte, vdl.Uint16, vdl.Uint32, vdl.Uint64, vdl.Int16, vdl.Int32, vdl.Int64, vdl.Float32, vdl.Float64, vdl.Complex128, vdl.Complex64, vdl.String:
+		return "io.v.v23.vdl.Vdl" + vdlutil.FirstRuneToUpper(kind.String())
+	}
+	// Log prefix fixed from "val:" to "vdl:" for consistency with every other
+	// fatal message in this package.
+	log.Fatalf("vdl: javaVdlPrimitiveType unhandled kind: %v", kind)
+	return ""
+}
+
+// javaHashCode returns the java code for the hashCode() computation for a
+// given type.  name is the Java expression denoting the value to be hashed.
+func javaHashCode(name string, ty *vdl.Type, env *compile.Env) string {
+	if isJavaNativeArray(ty, env) {
+		// Java arrays have identity-based hashCode; use Arrays.hashCode for
+		// content-based hashing.
+		return fmt.Sprintf("java.util.Arrays.hashCode(%s)", name)
+	}
+	if def := env.FindTypeDef(ty); def != nil && def.File == compile.BuiltInFile {
+		// Unboxed Java primitives need explicit hash expressions.
+		switch ty.Kind() {
+		case vdl.Bool:
+			return fmt.Sprintf("java.lang.Boolean.valueOf(%s).hashCode()", name)
+		case vdl.Byte, vdl.Int16:
+			// Widen small integral types to int.
+			return "(int)" + name
+		case vdl.Int32:
+			return name
+		case vdl.Int64:
+			return fmt.Sprintf("java.lang.Long.valueOf(%s).hashCode()", name)
+		case vdl.Float32:
+			return fmt.Sprintf("java.lang.Float.valueOf(%s).hashCode()", name)
+		case vdl.Float64:
+			return fmt.Sprintf("java.lang.Double.valueOf(%s).hashCode()", name)
+		}
+	}
+	// Everything else is represented by a Java class; guard against null.
+	return fmt.Sprintf("(%s == null ? 0 : %s.hashCode())", name, name)
+}
+
+// isClass returns true iff the provided type is represented by a Java class
+// (as opposed to an unboxed primitive or void).
+func isClass(t *vdl.Type, env *compile.Env) bool {
+	if t == nil { // void type
+		return false
+	}
+	def := env.FindTypeDef(t)
+	if def == nil || def.File != compile.BuiltInFile {
+		// Non-built-in types are always generated as Java classes.
+		return true
+	}
+	// Built-in type: it is a class unless javaBuiltInType yields an unboxed
+	// primitive spelling for it.
+	name, class := javaBuiltInType(t, false)
+	return name == "" || class
+}
+
+// isJavaNativeArray returns true iff the provided type is represented by a
+// Java array (its Java type string ends in "[]").
+func isJavaNativeArray(t *vdl.Type, env *compile.Env) bool {
+	return strings.HasSuffix(javaType(t, false, env), "[]")
+}
+
+// bitlen returns the bit size of the floating-point representation for the
+// given kind (per component for complex kinds).  Panics on any other kind.
+func bitlen(kind vdl.Kind) int {
+	switch kind {
+	case vdl.Float32, vdl.Complex64:
+		return 32
+	case vdl.Float64, vdl.Complex128:
+		return 64
+	default:
+		panic(fmt.Errorf("vdl: bitLen unhandled kind %v", kind))
+	}
+}
diff --git a/lib/vdl/codegen/java/util_val.go b/lib/vdl/codegen/java/util_val.go
new file mode 100644
index 0000000..02fa853
--- /dev/null
+++ b/lib/vdl/codegen/java/util_val.go
@@ -0,0 +1,200 @@
+package java
+
+import (
+	"fmt"
+	"strconv"
+	"strings"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// javaConstVal returns the Java expression string for the provided constant
+// value, wrapping user-defined named types in their constructors.
+func javaConstVal(v *vdl.Value, env *compile.Env) (ret string) {
+	if v == nil {
+		return "null"
+	}
+	if v.IsZero() {
+		// Zero values have dedicated (often simpler) Java expressions.
+		return javaZeroValue(v.Type(), env)
+	}
+
+	ret = javaVal(v, env)
+	switch v.Type().Kind() {
+	case vdl.Complex64, vdl.Complex128, vdl.Enum, vdl.Union, vdl.Uint16, vdl.Uint32, vdl.Uint64:
+		// javaVal already emits a complete construction expression for these
+		// kinds; no extra constructor wrapping is needed.
+		return
+	}
+	if def := env.FindTypeDef(v.Type()); def != nil && def.File != compile.BuiltInFile { // User-defined type.
+		// Wrap the raw value expression in the named type's constructor.
+		ret = fmt.Sprintf("new %s(%s)", javaType(v.Type(), false, env), ret)
+	}
+	return
+}
+
+// javaVal returns the Java expression string for the provided Value.  For
+// composite kinds the result recursively embeds the expressions of contained
+// values; javaConstVal wraps the result in a constructor for named types.
+func javaVal(v *vdl.Value, env *compile.Env) string {
+	const longSuffix = "L"
+	const floatSuffix = "f"
+
+	// Arrays and unnamed byte lists are represented as Java native arrays.
+	if v.Kind() == vdl.Array || (v.Kind() == vdl.List && v.Type().Elem().Kind() == vdl.Byte && v.Type().Name() == "") {
+		ret := fmt.Sprintf("new %s[] {", javaType(v.Type().Elem(), false, env))
+		for i := 0; i < v.Len(); i++ {
+			if i > 0 {
+				ret = ret + ", "
+			}
+			ret = ret + javaConstVal(v.Index(i), env)
+		}
+		return ret + "}"
+	}
+
+	switch v.Kind() {
+	case vdl.Bool:
+		if v.Bool() {
+			return "true"
+		}
+		return "false"
+	case vdl.Byte:
+		return "(byte)" + strconv.FormatUint(uint64(v.Byte()), 10)
+	case vdl.Uint16:
+		return fmt.Sprintf("new %s((short) %s)", javaType(v.Type(), true, env), strconv.FormatUint(v.Uint(), 10))
+	case vdl.Int16:
+		return "(short)" + strconv.FormatInt(v.Int(), 10)
+	case vdl.Uint32:
+		return fmt.Sprintf("new %s((int) %s)", javaType(v.Type(), true, env), strconv.FormatUint(v.Uint(), 10)+longSuffix)
+	case vdl.Int32:
+		return strconv.FormatInt(v.Int(), 10)
+	case vdl.Uint64:
+		// NOTE(review): values above MaxInt64 print as negative two's-complement
+		// Java long literals — presumably intentional for VdlUint64; confirm.
+		return fmt.Sprintf("new %s(%s)", javaType(v.Type(), true, env), strconv.FormatInt(int64(v.Uint()), 10)+longSuffix)
+	case vdl.Int64:
+		return strconv.FormatInt(v.Int(), 10) + longSuffix
+	case vdl.Float32, vdl.Float64:
+		c := strconv.FormatFloat(v.Float(), 'g', -1, bitlen(v.Kind()))
+		// Force a floating-point literal: FormatFloat emits plain integers
+		// (e.g. "3") for whole values, which Java would parse as an int.
+		// Skip values already marked floating by a '.' or an exponent — 'g'
+		// may produce e.g. "1e+20", and appending ".0" to it would yield an
+		// invalid Java literal.
+		if !strings.ContainsAny(c, ".eE") {
+			c += ".0"
+		}
+		if v.Kind() == vdl.Float32 {
+			return c + floatSuffix
+		}
+		return c
+	case vdl.Complex64, vdl.Complex128:
+		r := strconv.FormatFloat(real(v.Complex()), 'g', -1, bitlen(v.Kind()))
+		i := strconv.FormatFloat(imag(v.Complex()), 'g', -1, bitlen(v.Kind()))
+		if v.Kind() == vdl.Complex64 {
+			r = r + "f"
+			i = i + "f"
+		}
+		return fmt.Sprintf("new %s(%s, %s)", javaType(v.Type(), true, env), r, i)
+	case vdl.String:
+		return strconv.Quote(v.RawString())
+	case vdl.Any:
+		if v.Elem() == nil {
+			return fmt.Sprintf("new %s()", javaType(v.Type(), false, env))
+		}
+		elemReflectTypeStr := javaReflectType(v.Elem().Type(), env)
+		elemStr := javaConstVal(v.Elem(), env)
+		return fmt.Sprintf("new %s(%s, %s)", javaType(v.Type(), false, env), elemReflectTypeStr, elemStr)
+	case vdl.Enum:
+		return fmt.Sprintf("%s.%s", javaType(v.Type(), false, env), v.EnumLabel())
+	case vdl.List:
+		elemTypeStr := javaType(v.Type().Elem(), true, env)
+		ret := fmt.Sprintf("new com.google.common.collect.ImmutableList.Builder<%s>()", elemTypeStr)
+		for i := 0; i < v.Len(); i++ {
+			ret = fmt.Sprintf("%s.add(%s)", ret, javaConstVal(v.Index(i), env))
+		}
+		return ret + ".build()"
+	case vdl.Map:
+		keyTypeStr := javaType(v.Type().Key(), true, env)
+		elemTypeStr := javaType(v.Type().Elem(), true, env)
+		ret := fmt.Sprintf("new com.google.common.collect.ImmutableMap.Builder<%s, %s>()", keyTypeStr, elemTypeStr)
+		// Sort keys for deterministic output.
+		for _, key := range vdl.SortValuesAsString(v.Keys()) {
+			keyStr := javaConstVal(key, env)
+			elemStr := javaConstVal(v.MapIndex(key), env)
+			ret = fmt.Sprintf("%s.put(%s, %s)", ret, keyStr, elemStr)
+		}
+		return ret + ".build()"
+	case vdl.Union:
+		index, value := v.UnionField()
+		name := v.Type().Field(index).Name
+		elemStr := javaConstVal(value, env)
+		return fmt.Sprintf("new %s.%s(%s)", javaType(v.Type(), false, env), name, elemStr)
+	case vdl.Set:
+		keyTypeStr := javaType(v.Type().Key(), true, env)
+		ret := fmt.Sprintf("new com.google.common.collect.ImmutableSet.Builder<%s>()", keyTypeStr)
+		// Sort keys for deterministic output.
+		for _, key := range vdl.SortValuesAsString(v.Keys()) {
+			ret = fmt.Sprintf("%s.add(%s)", ret, javaConstVal(key, env))
+		}
+		return ret + ".build()"
+	case vdl.Struct:
+		// Emits only the comma-separated constructor arguments; javaConstVal
+		// supplies the surrounding "new Type(...)".
+		var ret string
+		for i := 0; i < v.Type().NumField(); i++ {
+			if i > 0 {
+				ret = ret + ", "
+			}
+			ret = ret + javaConstVal(v.StructField(i), env)
+		}
+		return ret
+	case vdl.TypeObject:
+		return fmt.Sprintf("new %s(%s)", javaType(v.Type(), false, env), javaReflectType(v.TypeObject(), env))
+	case vdl.Optional:
+		if v.Elem() != nil {
+			return fmt.Sprintf("new %s(%s)", javaType(v.Type(), false, env), javaConstVal(v.Elem(), env))
+		}
+		return fmt.Sprintf("new %s(%s)", javaType(v.Type(), false, env), javaReflectType(v.Type(), env))
+	}
+	panic(fmt.Errorf("vdl: javaVal unhandled type %v %v", v.Kind(), v.Type()))
+}
+
+// javaZeroValue returns the zero value string for the provided VDL value.
+// We assume that default constructor of user-defined types returns a zero value.
+func javaZeroValue(t *vdl.Type, env *compile.Env) string {
+	// Native types are arbitrary Java classes; null is the only zero we can
+	// emit without knowing their constructors.
+	if _, ok := javaNativeType(t, env); ok {
+		return "null"
+	}
+
+	// First process user-defined types.
+	switch t.Kind() {
+	case vdl.Enum:
+		// The zero value of an enum is its first label.
+		return fmt.Sprintf("%s.%s", javaType(t, false, env), t.EnumLabel(0))
+	case vdl.Union:
+		// The zero value of a union is the default of its first field's class.
+		return fmt.Sprintf("new %s.%s()", javaType(t, false, env), t.Field(0).Name)
+	}
+	if def := env.FindTypeDef(t); def != nil && def.File != compile.BuiltInFile {
+		return fmt.Sprintf("new %s()", javaType(t, false, env))
+	}
+
+	// Arrays, enums, structs and unions can be user-defined only, so only
+	// built-in and unnamed kinds remain past this point.
+	if t.Kind() == vdl.List && t.Elem().Kind() == vdl.Byte {
+		// Byte lists are represented as Java byte arrays.
+		return fmt.Sprintf("new %s[]{}", javaType(t.Elem(), false, env))
+	}
+	switch t.Kind() {
+	case vdl.Bool:
+		return "false"
+	case vdl.Byte:
+		return "(byte) 0"
+	case vdl.Int16:
+		return "(short) 0"
+	case vdl.Int32:
+		return "0"
+	case vdl.Int64:
+		return "0L"
+	case vdl.Float32:
+		return "0.0f"
+	case vdl.Float64:
+		return "0.0"
+	case vdl.Any, vdl.Complex64, vdl.Complex128, vdl.TypeObject, vdl.Uint16, vdl.Uint32, vdl.Uint64:
+		// These kinds map to Vdl* wrapper classes with default constructors.
+		return fmt.Sprintf("new %s()", javaType(t, false, env))
+	case vdl.String:
+		return "\"\""
+	case vdl.List:
+		return fmt.Sprintf("new java.util.ArrayList<%s>()", javaType(t.Elem(), true, env))
+	case vdl.Map:
+		keyTypeStr := javaType(t.Key(), true, env)
+		elemTypeStr := javaType(t.Elem(), true, env)
+		return fmt.Sprintf("new java.util.HashMap<%s, %s>()", keyTypeStr, elemTypeStr)
+	case vdl.Set:
+		return fmt.Sprintf("new java.util.HashSet<%s>()", javaType(t.Key(), true, env))
+	case vdl.Optional:
+		return fmt.Sprintf("new %s(%s)", javaType(t, false, env), javaReflectType(t, env))
+	}
+	panic(fmt.Errorf("vdl: javaZeroValue unhandled type %v", t))
+}
diff --git a/lib/vdl/codegen/javascript/const_test.go b/lib/vdl/codegen/javascript/const_test.go
new file mode 100644
index 0000000..3098a7f
--- /dev/null
+++ b/lib/vdl/codegen/javascript/const_test.go
@@ -0,0 +1,55 @@
+package javascript
+
+import (
+	"testing"
+
+	"v.io/v23/vdl"
+)
+
// TestTypedConst checks that typedConst renders struct values and byte lists
// as the expected constructor-wrapped Javascript expressions.
func TestTypedConst(t *testing.T) {
	names, structType, _, _, err := getTestTypes()
	if err != nil {
		t.Fatalf("Error in getTestTypes(): %v", err)
	}
	// Build a struct whose unnamed-type field holds a single string element.
	structValue := vdl.ZeroValue(structType)
	_, index := structValue.Type().FieldByName(unnamedTypeFieldName)
	structValue.StructField(index).AssignLen(1)
	structValue.StructField(index).Index(0).AssignString("AStringVal")

	tests := []struct {
		name       string
		inputValue *vdl.Value
		expected   string
	}{
		{
			name:       "struct test",
			inputValue: structValue,
			expected: `new (vdl.Registry.lookupOrCreateConstructor(_typeNamedStruct))({
  'list': [
],
  'bool': false,
  'unnamedTypeField': [
"AStringVal",
],
})`,
		},
		{
			name:       "bytes test",
			inputValue: vdl.BytesValue([]byte{1, 2, 3, 4}),
			expected: `new (vdl.Registry.lookupOrCreateConstructor(_type2))(new Uint8Array([
1,
2,
3,
4,
]))`,
		},
	}
	for _, test := range tests {
		strVal := typedConst(names, test.inputValue)
		if strVal != test.expected {
			t.Errorf("In %q, expected %q, but got %q", test.name, test.expected, strVal)
		}
	}
}
+
+// TODO(bjornick) Add more thorough tests.
diff --git a/lib/vdl/codegen/javascript/error_test.go b/lib/vdl/codegen/javascript/error_test.go
new file mode 100644
index 0000000..6ab0baf
--- /dev/null
+++ b/lib/vdl/codegen/javascript/error_test.go
@@ -0,0 +1,56 @@
+package javascript
+
+import (
+	"testing"
+
+	"v.io/v23/i18n"
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
// TestError checks that generateErrorConstructor emits the expected
// makeError() call for an error with parameters and two language formats.
func TestError(t *testing.T) {
	e := &compile.ErrorDef{
		NamePos: compile.NamePos{
			Name: "Test",
		},
		ID:        "v.io/v23/vdl/codegen/javascript.Test",
		RetryCode: vdl.WireRetryCodeNoRetry,
		Params: []*compile.Field{
			&compile.Field{
				NamePos: compile.NamePos{
					Name: "x",
				},
				Type: vdl.BoolType,
			},
			&compile.Field{
				NamePos: compile.NamePos{
					Name: "y",
				},
				Type: vdl.Int32Type,
			},
		},
		Formats: []compile.LangFmt{
			compile.LangFmt{
				Lang: i18n.LangID("en-US"),
				Fmt:  "english string",
			},
			compile.LangFmt{
				Lang: i18n.LangID("fr"),
				Fmt:  "french string",
			},
		},
	}
	// An empty typeNames is enough: bool and int32 are builtin JS types.
	var names typeNames
	result := generateErrorConstructor(names, e)
	expected := `module.exports.TestError = makeError('v.io/v23/vdl/codegen/javascript.Test', actions.NO_RETRY, {
  'en-US': 'english string',
  'fr': 'french string',
}, [
  vdl.Types.BOOL,
  vdl.Types.INT32,
]);
`
	if result != expected {
		t.Errorf("got %s, expected %s", result, expected)
	}
}
diff --git a/lib/vdl/codegen/javascript/errors.go b/lib/vdl/codegen/javascript/errors.go
new file mode 100644
index 0000000..b216bdd
--- /dev/null
+++ b/lib/vdl/codegen/javascript/errors.go
@@ -0,0 +1,22 @@
+package javascript
+
+import (
+	"fmt"
+
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
+func generateErrorConstructor(names typeNames, e *compile.ErrorDef) string {
+	name := e.Name + "Error"
+	result := fmt.Sprintf("module.exports.%s = makeError('%s', actions.%s, ", name, e.ID, vdlutil.ToConstCase(e.RetryCode.String()))
+	result += "{\n"
+	for _, f := range e.Formats {
+		result += fmt.Sprintf("  '%s': '%s',\n", f.Lang, f.Fmt)
+	}
+	result += "}, [\n"
+	for _, param := range e.Params {
+		result += "  " + names.LookupType(param.Type) + ",\n"
+	}
+	return result + "]);\n"
+}
diff --git a/lib/vdl/codegen/javascript/gen.go b/lib/vdl/codegen/javascript/gen.go
new file mode 100644
index 0000000..1a5af0b
--- /dev/null
+++ b/lib/vdl/codegen/javascript/gen.go
@@ -0,0 +1,431 @@
+// Package javascript implements Javascript code generation from compiled VDL packages.
+package javascript
+
+// Generates the javascript source code for vdl files.  The generated output in javascript
+// differs from most other languages, since we don't generate stubs. Instead generate an
+// object that contains the parsed VDL structures that will be used by the Javascript code
+// to valid servers.
+
+import (
+	"bytes"
+	"encoding/binary"
+	"fmt"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"text/template"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdlutil"
+)
+
// data is the root object passed to the Javascript code-generation template.
type data struct {
	Pkg            *compile.Package
	Env            *compile.Env
	GenerateImport func(string) string // maps a VDL package path to its generated JS module path
	UserImports    jsImports
	PathToCoreJS   string // when non-empty, require core JS modules via this relative root instead of the 'veyron' package
	TypeNames      typeNames
}
+
+// Generate takes a populated compile.Package and produces a byte slice
+// containing the generated Javascript code.
+func Generate(pkg *compile.Package, env *compile.Env, genImport func(string) string, pathToCoreJS string) []byte {
+	data := data{
+		Pkg:            pkg,
+		Env:            env,
+		GenerateImport: genImport,
+		UserImports:    jsImportsForFiles(pkg.Files...),
+		TypeNames:      newTypeNames(pkg),
+		PathToCoreJS:   pathToCoreJS,
+	}
+	var buf bytes.Buffer
+	if err := javascriptTemplate.Execute(&buf, data); err != nil {
+		// We shouldn't see an error; it means our template is buggy.
+		panic(fmt.Errorf("vdl: couldn't execute template: %v", err))
+	}
+	return buf.Bytes()
+}
+
+var javascriptTemplate *template.Template
+
+func bitlen(kind vdl.Kind) int {
+	switch kind {
+	case vdl.Float32, vdl.Complex64:
+		return 32
+	case vdl.Float64, vdl.Complex128:
+		return 64
+	}
+	panic(fmt.Errorf("vdl: bitLen unhandled kind %v", kind))
+}
+
+func genMethodTags(names typeNames, method *compile.Method) string {
+	tags := method.Tags
+	result := "["
+	for _, tag := range tags {
+		result += typedConst(names, tag) + ", "
+	}
+	result += "]"
+	return result
+}
+
+// Format the given int64 into a JS BigInt.
+func formatUint64BigInt(v uint64) string {
+	buffer := make([]byte, 8)
+	binary.BigEndian.PutUint64(buffer, v)
+	sign := "0"
+	if v > 0 {
+		sign = "1"
+	}
+	return fmt.Sprintf("new vdl.BigInt(%s, %s)", sign, formatByteBuffer(buffer))
+}
+
+// Format the given int64 into a JS BigInt.
+func formatInt64BigInt(v int64) string {
+	buffer := make([]byte, 8)
+	var sign int64 = 0
+	if v > 0 {
+		sign = 1
+	} else if v < 0 {
+		sign = -1
+	}
+	binary.BigEndian.PutUint64(buffer, uint64(v*sign)) // Adjust value by sign.
+
+	return fmt.Sprintf("new vdl.BigInt(%d, %s)", sign, formatByteBuffer(buffer))
+}
+
// formatByteBuffer returns the JS "new Uint8Array([...])" expression holding
// the given bytes with leading zero bytes stripped (the magnitude form
// expected by vdl.BigInt).
func formatByteBuffer(buffer []byte) string {
	var b strings.Builder
	b.WriteString("new Uint8Array([")
	for i, octet := range bytes.TrimLeft(buffer, "\x00") {
		if i > 0 {
			b.WriteString(", ")
		}
		fmt.Fprintf(&b, "%#x", octet)
	}
	b.WriteString("])")
	return b.String()
}
+
// untypedConst generates a javascript string representing a constant that is
// not wrapped with type information.  Composite values recurse; a non-nil Any
// re-enters typedConst since its element carries its own type.
func untypedConst(names typeNames, v *vdl.Value) string {
	switch v.Kind() {
	case vdl.Bool:
		if v.Bool() {
			return "true"
		} else {
			return "false"
		}
	case vdl.Byte:
		return strconv.FormatUint(uint64(v.Byte()), 10)
	case vdl.Uint16, vdl.Uint32:
		return strconv.FormatUint(v.Uint(), 10)
	case vdl.Int16, vdl.Int32:
		return strconv.FormatInt(v.Int(), 10)
	case vdl.Uint64:
		// 64-bit integers exceed JS number precision; emit a vdl.BigInt.
		return formatUint64BigInt(v.Uint())
	case vdl.Int64:
		return formatInt64BigInt(v.Int())
	case vdl.Float32, vdl.Float64:
		return strconv.FormatFloat(v.Float(), 'g', -1, bitlen(v.Kind()))
	case vdl.String:
		return strconv.Quote(v.RawString())
	case vdl.Any:
		if v.Elem() != nil {
			return typedConst(names, v.Elem())
		}
		return "null"
	case vdl.Optional:
		if v.Elem() != nil {
			return untypedConst(names, v.Elem())
		}
		return "null"
	case vdl.Complex64, vdl.Complex128:
		return fmt.Sprintf("new vdl.Complex(%f, %f)", real(v.Complex()), imag(v.Complex()))
	case vdl.Enum:
		return fmt.Sprintf("'%s'", v.EnumLabel())
	case vdl.Array, vdl.List:
		result := "["
		for ix := 0; ix < v.Len(); ix++ {
			val := untypedConst(names, v.Index(ix))
			result += "\n" + val + ","
		}
		result += "\n]"
		// Byte lists become a Uint8Array rather than a plain JS array.
		if v.Type().Elem().Kind() == vdl.Byte {
			return "new Uint8Array(" + result + ")"
		}
		return result
	case vdl.Set:
		// Keys are sorted to keep generated output deterministic.
		result := "new Set(["
		for _, key := range vdl.SortValuesAsString(v.Keys()) {
			result += "\n  " + untypedConst(names, key) + ", "
		}
		result += "])"
		return result
	case vdl.Map:
		// Keys are sorted to keep generated output deterministic.
		result := "new Map(["
		for i, key := range vdl.SortValuesAsString(v.Keys()) {
			if i > 0 {
				result += ","
			}
			result += fmt.Sprintf("\n  [%s, %s]",
				untypedConst(names, key),
				untypedConst(names, v.MapIndex(key)))

		}
		result += "])"
		return result
	case vdl.Struct:
		result := "{"
		t := v.Type()
		for ix := 0; ix < t.NumField(); ix++ {
			// JS field names are lowerCamelCase versions of the VDL names.
			result += "\n  '" +
				vdlutil.FirstRuneToLower(t.Field(ix).Name) +
				"': " +
				untypedConst(names, v.StructField(ix)) +
				","
		}
		return result + "\n}"
	case vdl.Union:
		ix, innerVal := v.UnionField()
		return fmt.Sprintf("{ %q: %v }", vdlutil.FirstRuneToLower(v.Type().Field(ix).Name), untypedConst(names, innerVal))
	case vdl.TypeObject:
		return names.LookupType(v.TypeObject())
	default:
		panic(fmt.Errorf("vdl: untypedConst unhandled type %v %v", v.Kind(), v.Type()))
	}
}
+
// primitiveWithOptionalName returns the JS expression for a primitive type:
// the canonical Types.<KIND> singleton for unnamed primitives, or a freshly
// constructed vdl.Type carrying the name otherwise.
func primitiveWithOptionalName(primitive, name string) string {
	if name == "" {
		return "Types." + primitive
	}
	return fmt.Sprintf("new vdl.Type({kind: Kind.%s, name: '%s'})", primitive, name)
}
+
+// typedConst returns a javascript string representing a const that is always
+// wrapped with type information
+func typedConst(names typeNames, v *vdl.Value) string {
+	switch v.Kind() {
+	case vdl.Any, vdl.TypeObject:
+		return untypedConst(names, v)
+	default:
+		return fmt.Sprintf("new %s(%s)",
+			names.LookupConstructor(v.Type()),
+			untypedConst(names, v))
+	}
+}
+
+// Returns a Not Implemented stub for the method
+func generateMethodStub(method *compile.Method) string {
+	args := "ctx"
+	for _, arg := range method.InArgs {
+		args += fmt.Sprintf(", %s", arg.Name)
+	}
+
+	return fmt.Sprintf(`function(%s) {
+  throw new Error('Method %s not implemented');
+}`, args, method.Name)
+}
+
// Returns the JS version of the method signature.  The emitted object mirrors
// the fields of ipc.MethodSig: name, doc, in/out arguments, stream types, and
// tags.
func generateMethodSignature(method *compile.Method, names typeNames) string {
	return fmt.Sprintf(`{
    name: '%s',
    doc: %s,
    inArgs: %s,
    outArgs: %s,
    inStream: %s,
    outStream: %s,
    tags: %s
  }`,
		method.Name,
		quoteStripDoc(method.Doc),
		generateMethodArguments(method.InArgs, names),
		generateMethodArguments(method.OutArgs, names),
		generateMethodStreaming(method.InStream, names),
		generateMethodStreaming(method.OutStream, names),
		genMethodTags(names, method))
}
+
// Returns a slice describing the method's arguments.  Each entry carries the
// argument's name, doc string, and a reference to its JS type expression.
func generateMethodArguments(args []*compile.Field, names typeNames) string {
	ret := "["
	for _, arg := range args {
		ret += fmt.Sprintf(
			`{
      name: '%s',
      doc: %s,
      type: %s
    },
    `, arg.Name, quoteStripDoc(arg.Doc), names.LookupType(arg.Type))
	}
	ret += "]"
	return ret
}
+
// Returns the VOM type of the stream, or the JS literal "null" when the
// method has no stream in that direction.
func generateMethodStreaming(streaming *vdl.Type, names typeNames) string {
	if streaming == nil {
		return "null"
	}
	return fmt.Sprintf(
		`{
      name: '',
      doc: '',
      type: %s
    }`,
		names.LookupType(streaming))
}
+
// Returns a slice of embeddings with the proper qualified identifiers.  Each
// entry records the embedded interface's name, defining package path, and doc.
func generateEmbeds(embeds []*compile.Interface) string {
	result := "["
	for _, embed := range embeds {
		result += fmt.Sprintf(`{
      name: '%s',
      pkgPath: '%s',
      doc: %s
    },
    `, embed.Name, embed.File.Package.Path, quoteStripDoc(embed.Doc))
	}
	result += "]"
	return result
}
+
+func importPath(data data, path string) string {
+	// We need to prefix all of these paths with a ./ to tell node that the path is relative to
+	// the current directory.  Sadly filepath.Join(".", foo) == foo, so we have to do it
+	// explicitly.
+	return "." + string(filepath.Separator) + data.GenerateImport(path)
+}
+
// quoteStripDoc trims surrounding newlines from a doc string and returns it
// as a Go-quoted (double-quoted, escaped) literal suitable for embedding in
// the generated JS.
// TODO(alexfandrianto): We need to handle '// ' and '\n' in the docstring.
// It would also be nice to single-quote the whole string.
func quoteStripDoc(doc string) string {
	return strconv.Quote(strings.Trim(doc, "\n"))
}
+
+func hasErrors(pkg *compile.Package) bool {
+	for _, file := range pkg.Files {
+		if len(file.ErrorDefs) > 0 {
+			return true
+		}
+	}
+	return false
+}
+
+func generateSystemImports(data data) string {
+	res := "var vdl = require('"
+	packagePrefix := ""
+	if data.PathToCoreJS != "" {
+		packagePrefix = strings.Repeat("../", strings.Count(data.Pkg.Path, "/")+1) + data.PathToCoreJS
+	}
+	if data.PathToCoreJS != "" {
+		res += packagePrefix + "/vdl');"
+	} else {
+		res += "veyron').vdl;"
+	}
+	res += "\n"
+	if hasErrors(data.Pkg) {
+		if data.PathToCoreJS != "" {
+			res += "var makeError = require('" + packagePrefix + "/errors/make-errors');\n"
+			res += "var actions = require('" + packagePrefix + "/errors/actions');\n"
+		} else {
+			res += "var makeError = require('veyron').makeError;\n"
+			res += "var actions = require('veyron').errorActions;\n"
+		}
+	}
+	return res
+}
+
// init registers the helper functions used by the genJS template and parses
// the template itself.  A parse failure is a programmer error, so
// template.Must panics.
func init() {
	funcMap := template.FuncMap{
		"firstRuneToLower":          vdlutil.FirstRuneToLower,
		"genMethodTags":             genMethodTags,
		"makeTypeDefinitionsString": makeTypeDefinitionsString,
		"typedConst":                typedConst,
		"generateEmbeds":            generateEmbeds,
		"generateMethodStub":        generateMethodStub,
		"generateMethodSignature":   generateMethodSignature,
		"importPath":                importPath,
		"quoteStripDoc":             quoteStripDoc,
		"generateErrorConstructor":  generateErrorConstructor,
		"generateSystemImports":     generateSystemImports,
	}
	javascriptTemplate = template.Must(template.New("genJS").Funcs(funcMap).Parse(genJS))
}
+
// The template that we execute against a compile.Package instance to generate our
// code.  Most of this is fairly straightforward substitution and ranges; more
// complicated logic is delegated to the helper functions above.
//
// The emitted module defines, in order: system and user requires, type
// definitions, typed constants, error constructors, and service interfaces
// with method stubs plus a _serviceDescription.
//
// We try to generate code that has somewhat reasonable formatting.
const genJS = `{{with $data := .}}// This file was auto-generated by the veyron vdl tool.
{{generateSystemImports $data}}

{{/* Define additional imported modules. */}}
{{$pkg := $data.Pkg}}
{{if $data.UserImports}}{{range $imp := $data.UserImports}}
var {{$imp.Local}} = require('{{importPath $data $imp.Path}}');{{end}}{{end}}

module.exports = {};


{{/* Define any types introduced by the VDL file. */}}
// Types:
{{makeTypeDefinitionsString $data.TypeNames }}


{{/* Define all constants as typed constants. */}}
// Consts:
{{range $file := $pkg.Files}}{{range $const := $file.ConstDefs}}
  module.exports.{{$const.Name}} = {{typedConst $data.TypeNames $const.Value}};
{{end}}{{end}}

{{/* Define all errors. */}}
// Errors:
{{range $file := $pkg.Files}}{{range $error := $file.ErrorDefs}}
{{generateErrorConstructor $data.TypeNames $error}}
{{end}}{{end}}

{{/* Define each of those service interfaces here, including method stubs and
     service signature. */}}
// Services:
{{range $file := $pkg.Files}}
  {{range $iface := $file.Interfaces}}
    {{/* Define the service interface. */}}
function {{$iface.Name}}(){}
module.exports.{{$iface.Name}} = {{$iface.Name}}

    {{range $method := $iface.AllMethods}}
      {{/* Add each method to the service prototype. */}}
{{$iface.Name}}.prototype.{{firstRuneToLower $method.Name}} = {{generateMethodStub $method}};
    {{end}} {{/* end range $iface.AllMethods */}}

    {{/* The service signature encodes the same info as signature.Interface.
         TODO(alexfandrianto): We want to associate the signature type here, but
         it's complicated. https://github.com/veyron/release-issues/issues/432
         For now, we need to pass the type in manually into encode. */}}
{{$iface.Name}}.prototype._serviceDescription = {
  name: '{{$iface.Name}}',
  pkgPath: '{{$pkg.Path}}',
  doc: {{quoteStripDoc $iface.Doc}},
  embeds: {{generateEmbeds $iface.Embeds}},
  methods: [
    {{range $method := $iface.AllMethods}}
      {{/* Each method signature contains the information in ipc.MethodSig. */}}
    {{generateMethodSignature $method $data.TypeNames}},
    {{end}} {{/*end range $iface.AllMethods*/}}
  ]
};

  {{end}} {{/* end range $files.Interfaces */}}
{{end}} {{/* end range $pkg.Files */}}


{{end}}`
diff --git a/lib/vdl/codegen/javascript/gen_type_def.go b/lib/vdl/codegen/javascript/gen_type_def.go
new file mode 100644
index 0000000..d1a399d
--- /dev/null
+++ b/lib/vdl/codegen/javascript/gen_type_def.go
@@ -0,0 +1,217 @@
+package javascript
+
+import (
+	"fmt"
+
+	"v.io/v23/vdl"
+)
+
+// makeTypeDefinitionsString generates a string that defines the specified types.
+// It consists of the following sections:
+// - Definitions. e.g. "var _typeNamedBool = new Type();"
+// - Field assignments. e.g. "_typeNamedBool.name = \"NamedBool\";"
+// - Type Freezes, e.g. "_typedNamedBool.freeze();"
+// - Constructor definitions. e.g. "types.NamedBool = Registry.lookupOrCreateConstructor(_typeNamedBool)"
+func makeTypeDefinitionsString(jsnames typeNames) string {
+	str := ""
+	sortedDefs := jsnames.SortedList()
+
+	for _, def := range sortedDefs {
+		str += makeDefString(def.Name)
+	}
+
+	for _, def := range sortedDefs {
+		str += makeTypeFieldAssignmentString(def.Name, def.Type, jsnames)
+	}
+
+	for _, def := range sortedDefs {
+		str += makeTypeFreezeString(def.Name)
+	}
+
+	for _, def := range sortedDefs {
+		if def.Type.Name() != "" {
+			str += makeConstructorDefinitionString(def.Type, jsnames)
+		}
+	}
+
+	return str
+}
+
// makeDefString generates a type definition for the specified type name.
// e.g. "var _typeNamedBool = new vdl.Type();"
func makeDefString(jsname string) string {
	return "var " + jsname + " = new vdl.Type();\n"
}
+
// makeTypeFreezeString calls the type's freeze function to finalize it.
// e.g. "_typeNamedBool.freeze();"
func makeTypeFreezeString(jsname string) string {
	return jsname + ".freeze();\n"
}
+
// makeTypeFieldAssignmentString generates assignments for type fields.
// e.g. "_typeNamedBool.name = \"NamedBool\";"
// Only the fields meaningful for the type's kind are emitted (labels for
// enums, len for arrays, elem/key for containers, fields for struct/union).
func makeTypeFieldAssignmentString(jsname string, t *vdl.Type, jsnames typeNames) string {
	// kind
	str := fmt.Sprintf("%s.kind = %s;\n", jsname, jsKind(t.Kind()))

	// name (empty for unnamed types)
	str += fmt.Sprintf("%s.name = %q;\n", jsname, t.Name())

	// labels
	if t.Kind() == vdl.Enum {
		str += fmt.Sprintf("%s.labels = [", jsname)
		for i := 0; i < t.NumEnumLabel(); i++ {
			if i > 0 {
				str += ", "
			}
			str += fmt.Sprintf("%q", t.EnumLabel(i))
		}
		str += "];\n"
	}

	// len
	if t.Kind() == vdl.Array { // Array is the only type where len is valid.
		str += fmt.Sprintf("%s.len = %d;\n", jsname, t.Len())
	}

	// elem
	switch t.Kind() {
	case vdl.Optional, vdl.Array, vdl.List, vdl.Map:
		str += fmt.Sprintf("%s.elem = %s;\n", jsname, jsnames.LookupType(t.Elem()))
	}

	// key
	switch t.Kind() {
	case vdl.Set, vdl.Map:
		str += fmt.Sprintf("%s.key = %s;\n", jsname, jsnames.LookupType(t.Key()))
	}

	// fields
	switch t.Kind() {
	case vdl.Struct, vdl.Union:
		str += fmt.Sprintf("%s.fields = [", jsname)
		for i := 0; i < t.NumField(); i++ {
			if i > 0 {
				str += ", "
			}
			field := t.Field(i)
			str += fmt.Sprintf("{name: %q, type: %s}", field.Name, jsnames.LookupType(field.Type))
		}
		str += "];\n"
	}

	return str
}
+
+// makeConstructorDefinitionString creates a string that defines the constructor for the type.
+// e.g. "module.exports.NamedBool = Registry.lookupOrCreateConstructor(_typeNamedBool)"
+func makeConstructorDefinitionString(t *vdl.Type, jsnames typeNames) string {
+	_, name := vdl.SplitIdent(t.Name())
+	ctorName := jsnames.LookupConstructor(t)
+	return fmt.Sprintf("module.exports.%s = %s;\n", name, ctorName)
+}
+
+func jsKind(k vdl.Kind) string {
+	switch k {
+	case vdl.Any:
+		return "vdl.Kind.ANY"
+	case vdl.Union:
+		return "vdl.Kind.UNION"
+	case vdl.Optional:
+		return "vdl.Kind.OPTIONAL"
+	case vdl.Bool:
+		return "vdl.Kind.BOOL"
+	case vdl.Byte:
+		return "vdl.Kind.BYTE"
+	case vdl.Uint16:
+		return "vdl.Kind.UINT16"
+	case vdl.Uint32:
+		return "vdl.Kind.UINT32"
+	case vdl.Uint64:
+		return "vdl.Kind.UINT64"
+	case vdl.Int16:
+		return "vdl.Kind.INT16"
+	case vdl.Int32:
+		return "vdl.Kind.INT32"
+	case vdl.Int64:
+		return "vdl.Kind.INT64"
+	case vdl.Float32:
+		return "vdl.Kind.FLOAT32"
+	case vdl.Float64:
+		return "vdl.Kind.FLOAT64"
+	case vdl.Complex64:
+		return "vdl.Kind.COMPLEX64"
+	case vdl.Complex128:
+		return "vdl.Kind.COMPLEX128"
+	case vdl.String:
+		return "vdl.Kind.STRING"
+	case vdl.Enum:
+		return "vdl.Kind.ENUM"
+	case vdl.TypeObject:
+		return "vdl.Kind.TYPEOBJECT"
+	case vdl.Array:
+		return "vdl.Kind.ARRAY"
+	case vdl.List:
+		return "vdl.Kind.LIST"
+	case vdl.Set:
+		return "vdl.Kind.SET"
+	case vdl.Map:
+		return "vdl.Kind.MAP"
+	case vdl.Struct:
+		return "vdl.Kind.STRUCT"
+	}
+	panic(fmt.Errorf("val: unhandled kind: %d", k))
+}
+
// builtinJSType indicates whether a vdl.Type has a built-in type definition in
// vdl.js.  If true, it returns the JS expression referencing that definition
// in javascript/types.js.  It assumes a variable named "vdl.Types" is already
// pointing to vom.Types.  Named types (other than error) are never built-in.
func builtinJSType(t *vdl.Type) (string, bool) {
	_, n := vdl.SplitIdent(t.Name())

	// Error is special-cased: it is named, but has a canonical JS definition.
	if t == vdl.ErrorType {
		return "vdl.Types.ERROR", true
	}

	// named types are not built-in.
	if n != "" {
		return "", false
	}

	// switch on supported types in vdl.js
	switch t.Kind() {
	case vdl.Any:
		return "vdl.Types.ANY", true
	case vdl.Bool:
		return "vdl.Types.BOOL", true
	case vdl.Byte:
		return "vdl.Types.BYTE", true
	case vdl.Uint16:
		return "vdl.Types.UINT16", true
	case vdl.Uint32:
		return "vdl.Types.UINT32", true
	case vdl.Uint64:
		return "vdl.Types.UINT64", true
	case vdl.Int16:
		return "vdl.Types.INT16", true
	case vdl.Int32:
		return "vdl.Types.INT32", true
	case vdl.Int64:
		return "vdl.Types.INT64", true
	case vdl.Float32:
		return "vdl.Types.FLOAT32", true
	case vdl.Float64:
		return "vdl.Types.FLOAT64", true
	case vdl.Complex64:
		return "vdl.Types.COMPLEX64", true
	case vdl.Complex128:
		return "vdl.Types.COMPLEX128", true
	case vdl.String:
		return "vdl.Types.STRING", true
	case vdl.TypeObject:
		return "vdl.Types.TYPEOBJECT", true
	}

	// Unnamed composite types (lists, maps, structs, ...) need generated defs.
	return "", false
}
diff --git a/lib/vdl/codegen/javascript/import.go b/lib/vdl/codegen/javascript/import.go
new file mode 100644
index 0000000..0c5b9fe
--- /dev/null
+++ b/lib/vdl/codegen/javascript/import.go
@@ -0,0 +1,103 @@
+package javascript
+
+import (
+	"sort"
+	"strconv"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// TODO(bjornick): Merge with pkg_types.go
+
// jsImport represents a single package import.
type jsImport struct {
	Path string // Path of the imported package; e.g. "veyron/vdl"

	// Local name that refers to the imported package; either the non-empty import
	// name, or the name of the imported package.
	Local string
}

// jsImports is a collection of package imports.
// REQUIRED: The imports must be sorted by path.
type jsImports []jsImport

// LookupLocal returns the local name that identifies the given pkgPath, or
// the empty string when pkgPath is not imported.
func (x jsImports) LookupLocal(pkgPath string) string {
	// The slice is sorted by path, so binary search for the entry.
	i := sort.Search(len(x), func(j int) bool { return x[j].Path >= pkgPath })
	if i == len(x) || x[i].Path != pkgPath {
		return ""
	}
	return x[i].Local
}
+
+// Each import must end up with a unique local name - when we see a collision we
+// simply add a "_N" suffix where N starts at 2 and increments.
+func uniqueImport(pkgName, pkgPath string, seen map[string]bool) jsImport {
+	iter := 1
+	for {
+		local := pkgName
+		if iter > 1 {
+			local += "_" + strconv.Itoa(iter)
+		}
+		if !seen[local] {
+			// Found a unique local name - return the import.
+			seen[local] = true
+			return jsImport{pkgPath, local}
+		}
+		iter++
+	}
+}
+
// pkgSorter orders packages by import path; it implements sort.Interface.
type pkgSorter []*compile.Package

func (s pkgSorter) Len() int { return len(s) }

func (s pkgSorter) Less(i, j int) bool { return s[i].Path < s[j].Path }

func (s pkgSorter) Swap(i, j int) { s[j], s[i] = s[i], s[j] }
+
+// jsImportsForFiles returns the imports required for the given files.
+func jsImportsForFiles(files ...*compile.File) jsImports {
+	seenPath := make(map[string]bool)
+	pkgs := pkgSorter{}
+
+	for _, f := range files {
+		// TODO(toddw,bjornick): Remove File.PackageDeps and replace by walking through each type.
+		for _, dep := range f.PackageDeps {
+			if seenPath[dep.Path] {
+				continue
+			}
+			seenPath[dep.Path] = true
+			pkgs = append(pkgs, dep)
+		}
+	}
+	sort.Sort(pkgs)
+
+	var ret jsImports
+	seenName := make(map[string]bool)
+	for _, dep := range pkgs {
+		ret = append(ret, uniqueImport(dep.Name, dep.GenPath, seenName))
+	}
+	return ret
+}
+
// pkgDeps maintains a set of package path dependencies.
type pkgDeps map[string]bool

// insertIdent records the package a qualified identifier belongs to;
// identifiers with no package component are ignored.
func (deps pkgDeps) insertIdent(ident string) {
	if pkgPath, _ := vdl.SplitIdent(ident); pkgPath != "" {
		deps[pkgPath] = true
	}
}
+
+// SortedPkgPaths deps as a sorted slice.
+func (deps pkgDeps) SortedPkgPaths() []string {
+	var ret []string
+	for pkgPath, _ := range deps {
+		ret = append(ret, pkgPath)
+	}
+	sort.Strings(ret)
+	return ret
+}
diff --git a/lib/vdl/codegen/javascript/pkg_types.go b/lib/vdl/codegen/javascript/pkg_types.go
new file mode 100644
index 0000000..125d761
--- /dev/null
+++ b/lib/vdl/codegen/javascript/pkg_types.go
@@ -0,0 +1,232 @@
+package javascript
+
+import (
+	"fmt"
+	"sort"
+	"strings"
+
+	"v.io/v23/vdl/compile"
+
+	"v.io/v23/vdl"
+)
+
+// typeNames holds a mapping between VDL type and generated type name.
+type typeNames map[*vdl.Type]string
+
+// LookupConstructor returns a string representing the constructor of the type.
+// Several cases:
+// - Local package type (and has been added to tn), return
+// Registry.lookupOrCreateConstructor(_typeNameHere)
+// - Builtin
+// This is not supported. Fail.
+// - Type in other package
+// Return pkgName.ConstructorName
+func (tn typeNames) LookupConstructor(t *vdl.Type) string {
+	if builtInName, ok := builtinJSType(t); ok {
+		return tn.constructorFromTypeName(builtInName)
+	}
+
+	if name, ok := tn[t]; ok {
+		return tn.constructorFromTypeName(name)
+	}
+
+	pkgPath, name := vdl.SplitIdent(t.Name())
+	pkgParts := strings.Split(pkgPath, "/")
+	pkgName := pkgParts[len(pkgParts)-1]
+	return fmt.Sprintf("%s.%s", pkgName, name)
+}
+
+func (tn typeNames) constructorFromTypeName(name string) string {
+	return "(vdl.Registry.lookupOrCreateConstructor(" + name + "))"
+}
+
+// LookupType returns a string representing the type.
+// - If it is a built in type, return the name.
+// - Otherwise get type type from the constructor.
+func (tn typeNames) LookupType(t *vdl.Type) string {
+	if builtInName, ok := builtinJSType(t); ok {
+		return builtInName
+	}
+
+	if name, ok := tn[t]; ok {
+		return name
+	}
+
+	return "new " + tn.LookupConstructor(t) + "()._type"
+}
+
+// SortedList returns a list of type and name pairs, sorted by name.
+// This is needed to make the output stable.
+func (tn typeNames) SortedList() typeNamePairList {
+	pairs := typeNamePairList{}
+	for t, name := range tn {
+		pairs = append(pairs, typeNamePair{t, name})
+	}
+	sort.Sort(pairs)
+	return pairs
+}
+
// typeNamePairList sorts typeNamePairs by generated name (sort.Interface).
type typeNamePairList []typeNamePair

// typeNamePair couples a VDL type with its generated JS variable name.
type typeNamePair struct {
	Type *vdl.Type
	Name string
}

func (l typeNamePairList) Len() int           { return len(l) }
func (l typeNamePairList) Less(i, j int) bool { return l[i].Name < l[j].Name }
func (l typeNamePairList) Swap(i, j int)      { l[i], l[j] = l[j], l[i] }
+
+// newTypeNames generates typeNames for all new types in a package.
+func newTypeNames(pkg *compile.Package) typeNames {
+	ptn := pkgTypeNames{
+		nextIndex: 1,
+		names:     typeNames{},
+		pkg:       pkg,
+	}
+	return ptn.getNames()
+}
+
// pkgTypeNames tracks information necessary to define JS types from VDL.
type pkgTypeNames struct {
	// nextIndex is the index used for the next auto-generated name of an
	// unnamed type (e.g. "_type1", "_type2", ...).
	nextIndex int
	// names holds the previously generated type names.
	names typeNames
	// pkg is the package being generated.
	pkg *compile.Package
}
+
+// getNames generates typeNames for all new types in a package.
+func (p pkgTypeNames) getNames() typeNames {
+	for _, file := range p.pkg.Files {
+		for _, def := range file.TypeDefs {
+			p.addInnerTypes(def.Type)
+		}
+		for _, constdef := range file.ConstDefs {
+			p.addInnerTypes(constdef.Value.Type())
+			p.addTypesInConst(constdef.Value)
+		}
+		for _, errordef := range file.ErrorDefs {
+			for _, field := range errordef.Params {
+				p.addInnerTypes(field.Type)
+			}
+		}
+		for _, interfacedef := range file.Interfaces {
+			for _, method := range interfacedef.AllMethods() {
+				for _, inarg := range method.InArgs {
+					p.addInnerTypes(inarg.Type)
+				}
+				for _, outarg := range method.OutArgs {
+					p.addInnerTypes(outarg.Type)
+				}
+				if method.InStream != nil {
+					p.addInnerTypes(method.InStream)
+				}
+				if method.OutStream != nil {
+					p.addInnerTypes(method.OutStream)
+				}
+			}
+		}
+	}
+
+	return p.names
+}
+
// addNameIfNeeded produces a new typeName if:
// -it is not already generated
// -it is not from another package
// -it is not already a built-in type in vdl.js (primitives, any, etc..)
// Named local types are named "_type<Name>"; unnamed types get a sequential
// "_type<N>" taken from nextIndex.
func (p *pkgTypeNames) addNameIfNeeded(t *vdl.Type) {
	if _, ok := p.names[t]; ok {
		return
	}

	// Do not create name for built-in JS types (primitives, any, etc..)
	if _, ok := builtinJSType(t); ok {
		return
	}

	var name string
	if t.Name() != "" {
		pp, n := vdl.SplitIdent(t.Name())
		// Do not create name for types from other packages.
		// TODO(aghassemi) TODO(bprosnitz): error is special right now, it is not a VDL type
		// ideally we can handle error similar to other builtin JS types but VDL error is still in flux.
		// Issue tracked in https://github.com/veyron/release-issues/issues/654

		if pp != p.pkg.Path && n != "error" {
			return
		}
		name = "_type" + n
	} else {
		name = fmt.Sprintf("_type%d", p.nextIndex)
		p.nextIndex++
	}

	p.names[t] = name
}
+
+func (p *pkgTypeNames) addInnerTypes(t *vdl.Type) {
+	if _, ok := p.names[t]; ok {
+		return
+	}
+
+	p.addNameIfNeeded(t)
+
+	switch t.Kind() {
+	case vdl.Optional, vdl.Array, vdl.List, vdl.Map:
+		p.addInnerTypes(t.Elem())
+	}
+
+	switch t.Kind() {
+	case vdl.Set, vdl.Map:
+		p.addInnerTypes(t.Key())
+	}
+
+	switch t.Kind() {
+	case vdl.Struct, vdl.Union:
+		for i := 0; i < t.NumField(); i++ {
+			p.addInnerTypes(t.Field(i).Type)
+		}
+	}
+}
+
// addTypesInConst walks a constant VALUE and names any types that appear only
// dynamically: typeobject constants and the element types of non-nil any
// values (which addInnerTypes cannot see from the const's static type alone).
func (p *pkgTypeNames) addTypesInConst(v *vdl.Value) {
	// Generate the type if it is a typeobject or any.
	switch v.Kind() {
	case vdl.TypeObject:
		p.addInnerTypes(v.TypeObject())
	case vdl.Any:
		if !v.IsNil() {
			p.addInnerTypes(v.Elem().Type())
		}
	}

	// Recurse into contained values.
	switch v.Kind() {
	case vdl.List, vdl.Array:
		for i := 0; i < v.Len(); i++ {
			p.addTypesInConst(v.Index(i))
		}
	case vdl.Set:
		for _, key := range vdl.SortValuesAsString(v.Keys()) {
			p.addTypesInConst(key)
		}
	case vdl.Map:
		for _, key := range vdl.SortValuesAsString(v.Keys()) {
			p.addTypesInConst(key)
			p.addTypesInConst(v.MapIndex(key))

		}
	case vdl.Struct:
		for i := 0; i < v.Type().NumField(); i++ {
			p.addTypesInConst(v.StructField(i))
		}
	case vdl.Union:
		_, innerVal := v.UnionField()
		p.addTypesInConst(innerVal)
	case vdl.Any, vdl.Optional:
		if !v.IsNil() {
			p.addTypesInConst(v.Elem())
		}
	}
}
diff --git a/lib/vdl/codegen/javascript/type_test.go b/lib/vdl/codegen/javascript/type_test.go
new file mode 100644
index 0000000..a3aa81d
--- /dev/null
+++ b/lib/vdl/codegen/javascript/type_test.go
@@ -0,0 +1,103 @@
+package javascript
+
+import (
+	"fmt"
+	"testing"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/compile"
+)
+
+// unnamedTypeFieldName is the struct field whose type is an unnamed []string.
+const unnamedTypeFieldName = "UnnamedTypeField"
+
+// getTestTypes builds the fixture types used by the tests in this file: a
+// named bool belonging to another package, plus a named list and named struct
+// that reference each other, and an unnamed []string field.  It returns the
+// typeNames computed over a fake compiled package containing them, along with
+// the built struct, list and bool types.
+func getTestTypes() (names typeNames, tyStruct, tyList, tyBool *vdl.Type, outErr error) {
+	var builder vdl.TypeBuilder
+	namedBool := builder.Named("otherPkg.NamedBool").AssignBase(vdl.BoolType)
+	listType := builder.List()
+	namedList := builder.Named("NamedList").AssignBase(listType)
+	structType := builder.Struct()
+	namedStruct := builder.Named("NamedStruct").AssignBase(structType)
+	structType.AppendField("List", namedList)
+	structType.AppendField("Bool", namedBool)
+	structType.AppendField(unnamedTypeFieldName, builder.List().AssignElem(vdl.StringType))
+	// NamedList and NamedStruct are mutually recursive: the list's elem is the
+	// struct, and the struct's first field is the list.
+	listType.AssignElem(namedStruct)
+	if !builder.Build() {
+		outErr = fmt.Errorf("Failed to build test types")
+		return
+	}
+
+	builtBool, err := namedBool.Built()
+	if err != nil {
+		outErr = fmt.Errorf("Error creating NamedBool: %v", err)
+		return
+	}
+
+	builtList, err := namedList.Built()
+	if err != nil {
+		outErr = fmt.Errorf("Error creating NamedList %v", err)
+		return
+	}
+
+	builtStruct, err := namedStruct.Built()
+	if err != nil {
+		outErr = fmt.Errorf("Error creating NamedStruct: %v", err)
+		return
+	}
+
+	// Fake compiled package defining the named list and struct, plus an
+	// unnamed []byte type; NamedBool deliberately lives in another package.
+	pkg := &compile.Package{
+		Files: []*compile.File{
+			&compile.File{
+				TypeDefs: []*compile.TypeDef{
+					{
+						Type: builtList,
+					},
+					{
+						Type: builtStruct,
+					},
+					{
+						Type: vdl.ListType(vdl.ByteType),
+					},
+				},
+			},
+		},
+	}
+
+	return newTypeNames(pkg), builtStruct, builtList, builtBool, nil
+}
+
+// TestType tests that the output string of generated types is what we expect.
+// Unnamed types get sequential _typeN names; named types defined in the
+// package get _type<Name> and are exported via the constructor registry, while
+// otherPkg.NamedBool is referenced through its own constructor.
+func TestType(t *testing.T) {
+	jsnames, _, _, _, err := getTestTypes()
+	if err != nil {
+		t.Fatalf("Error in getTestTypes(): %v", err)
+	}
+	result := makeTypeDefinitionsString(jsnames)
+
+	// Golden output: declarations first, then field assignments, then freeze
+	// calls, then module exports.
+	expectedResult := `var _type1 = new vdl.Type();
+var _type2 = new vdl.Type();
+var _typeNamedList = new vdl.Type();
+var _typeNamedStruct = new vdl.Type();
+_type1.kind = vdl.Kind.LIST;
+_type1.name = "";
+_type1.elem = vdl.Types.STRING;
+_type2.kind = vdl.Kind.LIST;
+_type2.name = "";
+_type2.elem = vdl.Types.BYTE;
+_typeNamedList.kind = vdl.Kind.LIST;
+_typeNamedList.name = "NamedList";
+_typeNamedList.elem = _typeNamedStruct;
+_typeNamedStruct.kind = vdl.Kind.STRUCT;
+_typeNamedStruct.name = "NamedStruct";
+_typeNamedStruct.fields = [{name: "List", type: _typeNamedList}, {name: "Bool", type: new otherPkg.NamedBool()._type}, {name: "UnnamedTypeField", type: _type1}];
+_type1.freeze();
+_type2.freeze();
+_typeNamedList.freeze();
+_typeNamedStruct.freeze();
+module.exports.NamedList = (vdl.Registry.lookupOrCreateConstructor(_typeNamedList));
+module.exports.NamedStruct = (vdl.Registry.lookupOrCreateConstructor(_typeNamedStruct));
+`
+
+	if result != expectedResult {
+		t.Errorf("Expected %q, but got %q", expectedResult, result)
+	}
+}
diff --git a/lib/vdl/codegen/vdlgen/const.go b/lib/vdl/codegen/vdlgen/const.go
new file mode 100644
index 0000000..d70bd46
--- /dev/null
+++ b/lib/vdl/codegen/vdlgen/const.go
@@ -0,0 +1,148 @@
+package vdlgen
+
+// TODO(toddw): Add tests
+
+import (
+	"fmt"
+	"strconv"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/codegen"
+)
+
+// TypedConst returns the explicitly-typed vdl const corresponding to v, in the
+// given pkgPath, with the given imports.  The result is either a composite
+// literal Type{...}, a conversion Type(value), or a bare value for types whose
+// literal already implies the type.
+func TypedConst(v *vdl.Value, pkgPath string, imports codegen.Imports) string {
+	if v == nil {
+		return "nil"
+	}
+	k, t := v.Kind(), v.Type()
+	typestr := Type(t, pkgPath, imports)
+	if k == vdl.Optional {
+		// TODO(toddw): This only works if the optional elem is a composite literal.
+		if elem := v.Elem(); elem != nil {
+			return typestr + UntypedConst(elem, pkgPath, imports)
+		}
+		return typestr + "(nil)"
+	}
+	valstr := UntypedConst(v, pkgPath, imports)
+	if k == vdl.Enum || k == vdl.TypeObject || t == vdl.BoolType || t == vdl.StringType {
+		// Enum and TypeObject already include the type in their value.
+		// Built-in bool and string are implicitly convertible from literals.
+		return valstr
+	}
+	switch k {
+	case vdl.Array, vdl.List, vdl.Set, vdl.Map, vdl.Struct, vdl.Union:
+		// { } are used instead of ( ) for composites, except for []byte and [N]byte,
+		// which UntypedConst renders as quoted strings and thus need the
+		// conversion form below.
+		if !t.IsBytes() {
+			return typestr + valstr
+		}
+	}
+	// Everything else uses the explicit conversion form Type(value).
+	return typestr + "(" + valstr + ")"
+}
+
+// UntypedConst returns the untyped vdl const corresponding to v, in the given
+// pkgPath, with the given imports.  Enum and typeobject values always carry
+// their own type; every other value is rendered as a bare literal.  Panics on
+// kinds it does not handle.
+func UntypedConst(v *vdl.Value, pkgPath string, imports codegen.Imports) string {
+	k, t := v.Kind(), v.Type()
+	if t.IsBytes() {
+		// []byte and [N]byte are rendered as quoted strings.
+		return strconv.Quote(string(v.Bytes()))
+	}
+	switch k {
+	case vdl.Any:
+		// The value held inside any must carry its type explicitly.
+		if elem := v.Elem(); elem != nil {
+			return TypedConst(elem, pkgPath, imports)
+		}
+		return "nil"
+	case vdl.Optional:
+		if elem := v.Elem(); elem != nil {
+			return UntypedConst(elem, pkgPath, imports)
+		}
+		return "nil"
+	case vdl.Bool:
+		return strconv.FormatBool(v.Bool())
+	case vdl.Byte:
+		return strconv.FormatUint(uint64(v.Byte()), 10)
+	case vdl.Uint16, vdl.Uint32, vdl.Uint64:
+		return strconv.FormatUint(v.Uint(), 10)
+	case vdl.Int16, vdl.Int32, vdl.Int64:
+		return strconv.FormatInt(v.Int(), 10)
+	case vdl.Float32, vdl.Float64:
+		return formatFloat(v.Float(), k)
+	case vdl.Complex64, vdl.Complex128:
+		// Render as re+imi (or re-imi); the imaginary part is dropped when zero.
+		switch re, im := real(v.Complex()), imag(v.Complex()); {
+		case im > 0:
+			return formatFloat(re, k) + "+" + formatFloat(im, k) + "i"
+		case im < 0:
+			return formatFloat(re, k) + formatFloat(im, k) + "i"
+		default:
+			return formatFloat(re, k)
+		}
+	case vdl.String:
+		return strconv.Quote(v.RawString())
+	case vdl.Array, vdl.List:
+		if v.IsZero() {
+			return "{}"
+		}
+		s := "{"
+		for ix := 0; ix < v.Len(); ix++ {
+			if ix > 0 {
+				s += ", "
+			}
+			s += UntypedConst(v.Index(ix), pkgPath, imports)
+		}
+		return s + "}"
+	case vdl.Set, vdl.Map:
+		// Keys are sorted by string representation for deterministic output.
+		s := "{"
+		for ix, key := range vdl.SortValuesAsString(v.Keys()) {
+			if ix > 0 {
+				s += ", "
+			}
+			s += UntypedConst(key, pkgPath, imports)
+			if k == vdl.Map {
+				s += ": " + UntypedConst(v.MapIndex(key), pkgPath, imports)
+			}
+		}
+		return s + "}"
+	case vdl.Struct:
+		// Zero-valued fields are elided from the literal.
+		s := "{"
+		hasFields := false
+		for ix := 0; ix < t.NumField(); ix++ {
+			vf := v.StructField(ix)
+			if vf.IsZero() {
+				continue
+			}
+			if hasFields {
+				s += ", "
+			}
+			s += t.Field(ix).Name + ": " + UntypedConst(vf, pkgPath, imports)
+			hasFields = true
+		}
+		return s + "}"
+	case vdl.Union:
+		index, value := v.UnionField()
+		return "{" + t.Field(index).Name + ": " + UntypedConst(value, pkgPath, imports) + "}"
+	}
+	// Enum and TypeObject always require the typestr.
+	switch k {
+	case vdl.Enum:
+		return Type(t, pkgPath, imports) + "." + v.EnumLabel()
+	case vdl.TypeObject:
+		return "typeobject(" + Type(v.TypeObject(), pkgPath, imports) + ")"
+	default:
+		// The message names this function (it previously said "vdlgen.Const",
+		// which doesn't exist and muddled debugging).
+		panic(fmt.Errorf("vdlgen.UntypedConst unhandled type: %v %v", k, t))
+	}
+}
+
+func formatFloat(x float64, kind vdl.Kind) string {
+	var bitSize int
+	switch kind {
+	case vdl.Float32, vdl.Complex64:
+		bitSize = 32
+	case vdl.Float64, vdl.Complex128:
+		bitSize = 64
+	default:
+		panic(fmt.Errorf("formatFloat unhandled kind: %v", kind))
+	}
+	return strconv.FormatFloat(x, 'g', -1, bitSize)
+}
diff --git a/lib/vdl/codegen/vdlgen/import.go b/lib/vdl/codegen/vdlgen/import.go
new file mode 100644
index 0000000..e55a18a
--- /dev/null
+++ b/lib/vdl/codegen/vdlgen/import.go
@@ -0,0 +1,26 @@
+// Package vdlgen implements VDL code generation from compiled VDL packages.
+package vdlgen
+
+// TODO(toddw): Add tests
+
+import (
+	"v.io/v23/vdl/codegen"
+)
+
+// Imports returns the vdl imports clause corresponding to imports; empty if
+// there are no imports.
+func Imports(imports codegen.Imports) string {
+	var s string
+	if len(imports) > 0 {
+		s += "import ("
+		for _, imp := range imports {
+			s += "\n\t"
+			if imp.Name != "" {
+				s += imp.Name + " "
+			}
+			s += imp.Path
+		}
+		s += "\n)"
+	}
+	return s
+}
diff --git a/lib/vdl/codegen/vdlgen/type.go b/lib/vdl/codegen/vdlgen/type.go
new file mode 100644
index 0000000..7628c4f
--- /dev/null
+++ b/lib/vdl/codegen/vdlgen/type.go
@@ -0,0 +1,75 @@
+package vdlgen
+
+// TODO(toddw): Add tests
+
+import (
+	"fmt"
+	"strings"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/codegen"
+)
+
+// Type returns t using VDL syntax, returning the qualified name if t is named,
+// otherwise returning the base type of t.  The pkgPath and imports are used to
+// determine the local package qualifier to add to named types; if a local
+// package qualifier cannot be found, a full package path qualifier is added.
+func Type(t *vdl.Type, pkgPath string, imports codegen.Imports) string {
+	if t.Name() == "" {
+		return BaseType(t, pkgPath, imports)
+	}
+	path, name := vdl.SplitIdent(t.Name())
+	if path == "" && name == "" {
+		return "<empty>"
+	}
+	// Built-in types (empty path) and types in the target package itself need
+	// no qualifier.
+	if path == "" || path == pkgPath {
+		return name
+	}
+	if local := imports.LookupLocal(path); local != "" {
+		return local + "." + name
+	}
+	// No local import name found; fall back to the fully-qualified quoted form.
+	return `"` + path + `".` + name
+}
+
+// BaseType returns the base type of t using VDL syntax, where the base type is
+// the type of t disregarding its name.  Subtypes contained in t are output via
+// calls to Type.  Panics on kinds it does not handle.
+func BaseType(t *vdl.Type, pkgPath string, imports codegen.Imports) string {
+	switch k := t.Kind(); k {
+	case vdl.Any, vdl.Bool, vdl.Byte, vdl.Uint16, vdl.Uint32, vdl.Uint64, vdl.Int16, vdl.Int32, vdl.Int64, vdl.Float32, vdl.Float64, vdl.Complex64, vdl.Complex128, vdl.String, vdl.TypeObject:
+		// Built-in types are named the same as their kind.
+		return k.String()
+	case vdl.Optional:
+		return "?" + Type(t.Elem(), pkgPath, imports)
+	case vdl.Enum:
+		// enum{A;B;C} with labels separated by semicolons.
+		ret := "enum{"
+		for i := 0; i < t.NumEnumLabel(); i++ {
+			if i > 0 {
+				ret += ";"
+			}
+			ret += t.EnumLabel(i)
+		}
+		return ret + "}"
+	case vdl.Array:
+		return fmt.Sprintf("[%d]%s", t.Len(), Type(t.Elem(), pkgPath, imports))
+	case vdl.List:
+		return fmt.Sprintf("[]%s", Type(t.Elem(), pkgPath, imports))
+	case vdl.Set:
+		return fmt.Sprintf("set[%s]", Type(t.Key(), pkgPath, imports))
+	case vdl.Map:
+		key := Type(t.Key(), pkgPath, imports)
+		elem := Type(t.Elem(), pkgPath, imports)
+		return fmt.Sprintf("map[%s]%s", key, elem)
+	case vdl.Struct, vdl.Union:
+		ret := k.String() + " {\n"
+		for i := 0; i < t.NumField(); i++ {
+			f := t.Field(i)
+			ftype := Type(f.Type, pkgPath, imports)
+			// Indent nested multi-line field types by one extra tab level.
+			ftype = strings.Replace(ftype, "\n", "\n\t", -1)
+			ret += fmt.Sprintf("\t%s %s\n", f.Name, ftype)
+		}
+		return ret + "}"
+	default:
+		panic(fmt.Errorf("vdlgen.BaseType: unhandled type: %v %v", k, t))
+	}
+}
diff --git a/lib/vdl/compile/builtin.go b/lib/vdl/compile/builtin.go
new file mode 100644
index 0000000..3a77603
--- /dev/null
+++ b/lib/vdl/compile/builtin.go
@@ -0,0 +1,59 @@
+package compile
+
+import (
+	"v.io/v23/vdl"
+	"v.io/v23/vdlroot/vdltool"
+)
+
+var (
+	// The BuiltInPackage and BuiltInFile are used to hold the built-ins.
+	BuiltInPackage = newPackage("", "_builtin", "_builtin", vdltool.Config{})
+	BuiltInFile    = &File{BaseName: "_builtin.vdl"}
+)
+
+// init wires BuiltInPackage and BuiltInFile together, then registers every
+// built-in type and const into the built-in file via the helpers below.
+func init() {
+	// Link the BuiltIn{Package,File} to each other before defining built-ins.
+	BuiltInPackage.Files = []*File{BuiltInFile}
+	BuiltInFile.Package = BuiltInPackage
+	// Built-in types
+	builtInType("any", vdl.AnyType)
+	builtInType("bool", vdl.BoolType)
+	builtInType("byte", vdl.ByteType)
+	builtInType("uint16", vdl.Uint16Type)
+	builtInType("uint32", vdl.Uint32Type)
+	builtInType("uint64", vdl.Uint64Type)
+	builtInType("int16", vdl.Int16Type)
+	builtInType("int32", vdl.Int32Type)
+	builtInType("int64", vdl.Int64Type)
+	builtInType("float32", vdl.Float32Type)
+	builtInType("float64", vdl.Float64Type)
+	builtInType("complex64", vdl.Complex64Type)
+	builtInType("complex128", vdl.Complex128Type)
+	builtInType("string", vdl.StringType)
+	builtInType("typeobject", vdl.TypeObjectType)
+	builtInType("error", vdl.ErrorType)
+	// Built-in consts
+	builtInConst("nil", NilConst)
+	builtInConst("true", TrueConst)
+	builtInConst("false", FalseConst)
+}
+
+// builtInType registers the built-in type t under name in BuiltInFile.
+func builtInType(name string, t *vdl.Type) {
+	addTypeDef(&TypeDef{
+		NamePos:  NamePos{Name: name},
+		Exported: true,
+		Type:     t,
+		File:     BuiltInFile,
+	}, nil)
+}
+
+// builtInConst registers the built-in const value v under name in BuiltInFile.
+func builtInConst(name string, v *vdl.Value) {
+	addConstDef(&ConstDef{
+		NamePos:  NamePos{Name: name},
+		Exported: true,
+		Value:    v,
+		File:     BuiltInFile,
+	}, nil)
+}
diff --git a/lib/vdl/compile/compile.go b/lib/vdl/compile/compile.go
new file mode 100644
index 0000000..546b437
--- /dev/null
+++ b/lib/vdl/compile/compile.go
@@ -0,0 +1,314 @@
+// Package compile provides utilities to compile vdl files.  The Compile
+// function is the main entry point.
+package compile
+
+// The job of the compiler is to take parse results as input, and output
+// compiled results.  The concepts between the parser and compiler are very
+// similar, thus the naming of parse/compile results is also similar.
+// E.g. parse.File represents a parsed file, while compile.File represents a
+// compiled file.
+//
+// The flow of the compiler is contained in the Compile function below, and
+// basically defines one concept across all files in the package before moving
+// onto the next concept.  E.g. we define all types in the package before
+// defining all consts in the package.
+//
+// The logic for simple concepts (e.g. imports) is contained directly in this
+// file, while more complicated concepts (types, consts and interfaces) each get
+// their own file.
+
+import (
+	"path/filepath"
+	"sort"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/parse"
+	"v.io/v23/vdlroot/vdltool"
+)
+
+// CompilePackage compiles a list of parse.Files into a Package.  Updates env
+// with the compiled package and returns it on success, or returns nil and
+// guarantees !env.Errors.IsEmpty().  All imports that the parsed package depend
+// on must already have been compiled and populated into env.
+func CompilePackage(pkgpath, genpath string, pfiles []*parse.File, config vdltool.Config, env *Env) *Package {
+	if pkgpath == "" {
+		env.Errors.Errorf("Compile called with empty pkgpath")
+		return nil
+	}
+	// A package may only be compiled once into a given env.
+	if env.pkgs[pkgpath] != nil {
+		env.Errors.Errorf("%q invalid recompile (already exists in env)", pkgpath)
+		return nil
+	}
+	pkg := compile(pkgpath, genpath, pfiles, config, env)
+	if pkg == nil {
+		return nil
+	}
+	if computeDeps(pkg, env); !env.Errors.IsEmpty() {
+		return nil
+	}
+	// Register the package so later compiles can resolve imports of it.
+	env.pkgs[pkg.Path] = pkg
+	return pkg
+}
+
+// CompileConfig compiles a parse.Config into a value.  Returns the compiled
+// value on success, or returns nil and guarantees !env.Errors.IsEmpty().  All
+// imports that the parsed config depend on must already have been compiled and
+// populated into env.  If t is non-nil, the returned value will be of that
+// type.  Panics if env is nil.
+func CompileConfig(t *vdl.Type, pconfig *parse.Config, env *Env) *vdl.Value {
+	if env == nil {
+		// With a nil env there is nowhere to report the error; the previous code
+		// nil-dereferenced env.Errors here, so panic with a clear message instead.
+		panic("CompileConfig called with nil env")
+	}
+	if pconfig == nil {
+		env.Errors.Errorf("CompileConfig called with nil config")
+		return nil
+	}
+	// Since the concepts are so similar between config files and vdl files, we
+	// just compile it as a single-file vdl package, and compile the exported
+	// config const to retrieve the final exported config value.
+	pfile := &parse.File{
+		BaseName:   filepath.Base(pconfig.FileName),
+		PackageDef: pconfig.ConfigDef,
+		Imports:    pconfig.Imports,
+		ConstDefs:  pconfig.ConstDefs,
+	}
+	pkgpath := filepath.ToSlash(filepath.Dir(pconfig.FileName))
+	pkg := compile(pkgpath, pkgpath, []*parse.File{pfile}, vdltool.Config{}, env)
+	if pkg == nil {
+		return nil
+	}
+	config := compileConst("config", t, pconfig.Config, pkg.Files[0], env)
+	// Wait to compute deps after we've compiled the config const expression,
+	// since it might include the only usage of some of the imports.
+	if computeDeps(pkg, env); !env.Errors.IsEmpty() {
+		return nil
+	}
+	return config
+}
+
+// CompileExpr compiles expr into a value.  Returns the compiled value on
+// success, or returns nil and guarantees !env.Errors.IsEmpty().  All imports
+// that expr depends on must already have been compiled and populated into env.
+// If t is non-nil, the returned value will be of that type.
+func CompileExpr(t *vdl.Type, expr parse.ConstExpr, env *Env) *vdl.Value {
+	// Set up a dummy file and compile expr into a value.  The dummy package is
+	// never registered in env here, so it cannot collide with real packages.
+	file := &File{
+		BaseName: "_expr.vdl",
+		Package:  newPackage("_expr", "_expr", "_expr", vdltool.Config{}),
+	}
+	return compileConst("expression", t, expr, file, env)
+}
+
+// compile creates the Package and compiles each concept in order: imports,
+// types, errors, consts, then interfaces.  The first concept to produce errors
+// aborts the pipeline and compile returns nil.
+func compile(pkgpath, genpath string, pfiles []*parse.File, config vdltool.Config, env *Env) *Package {
+	if len(pfiles) == 0 {
+		env.Errors.Errorf("%q compile called with no files", pkgpath)
+		return nil
+	}
+	// Initialize each file and put it in pkg.
+	pkgName := parse.InferPackageName(pfiles, env.Errors)
+	if _, err := ValidIdent(pkgName, ReservedNormal); err != nil {
+		env.Errors.Errorf("package %s is invalid: %s", pkgName, err.Error())
+		return nil
+	}
+	pkg := newPackage(pkgName, pkgpath, genpath, config)
+	for _, pfile := range pfiles {
+		pkg.Files = append(pkg.Files, &File{
+			BaseName:   pfile.BaseName,
+			PackageDef: NamePos(pfile.PackageDef),
+			Package:    pkg,
+			imports:    make(map[string]*importPath),
+		})
+	}
+	// Compile our various structures.  The order of these operations matters;
+	// e.g. we must compile types before consts, since consts may use a type
+	// defined in this package.
+	if compileImports(pkg, pfiles, env); !env.Errors.IsEmpty() {
+		return nil
+	}
+	if compileTypeDefs(pkg, pfiles, env); !env.Errors.IsEmpty() {
+		return nil
+	}
+	if compileErrorDefs(pkg, pfiles, env); !env.Errors.IsEmpty() {
+		return nil
+	}
+	if compileConstDefs(pkg, pfiles, env); !env.Errors.IsEmpty() {
+		return nil
+	}
+	if compileInterfaces(pkg, pfiles, env); !env.Errors.IsEmpty() {
+		return nil
+	}
+	return pkg
+}
+
+// compileImports resolves and registers the imports of each file in pkg.
+// Reusing the same local import name within a file is an error.
+func compileImports(pkg *Package, pfiles []*parse.File, env *Env) {
+	for index := range pfiles {
+		file, pfile := pkg.Files[index], pfiles[index]
+		for _, pimp := range pfile.Imports {
+			if dep := env.ResolvePackage(pimp.Path); dep == nil {
+				env.Errorf(file, pimp.Pos, "import path %q not found", pimp.Path)
+			}
+			// NOTE(review): an unresolved import is still recorded below,
+			// presumably to avoid cascading errors on later uses — confirm.
+			local := pimp.LocalName()
+			if dup := file.imports[local]; dup != nil {
+				env.Errorf(file, pimp.Pos, "import %s reused (previous at %s)", local, dup.pos)
+				continue
+			}
+			file.imports[local] = &importPath{pimp.Path, pimp.Pos, false}
+		}
+	}
+}
+
+// TODO(toddw): Remove this function and all helpers, after all code generators
+// have been updated to compute their own dependencies.  The only code that will
+// remain below this point is the loop checking for unused imports.
+
+// computeDeps reports unused imports as errors, then fills in each file's
+// TypeDeps and PackageDeps based on the types and interfaces actually used.
+func computeDeps(pkg *Package, env *Env) {
+	// Check for unused user-supplied imports.
+	for _, file := range pkg.Files {
+		for _, imp := range file.imports {
+			if !imp.used {
+				env.Errorf(file, imp.pos, "import path %q unused", imp.path)
+			}
+		}
+	}
+	// Compute type and package dependencies per-file, based on the types and
+	// interfaces that are actually used.  We ignore const dependencies, since
+	// we've already evaluated the const expressions.
+	for _, file := range pkg.Files {
+		tdeps := make(map[*vdl.Type]bool)
+		pdeps := make(map[*Package]bool)
+		// TypeDef.Type is always defined in our package; start with sub types.
+		for _, def := range file.TypeDefs {
+			addSubTypeDeps(def.Type, pkg, env, tdeps, pdeps)
+		}
+		// Consts contribute their value types.
+		for _, def := range file.ConstDefs {
+			addValueTypeDeps(def.Value, pkg, env, tdeps, pdeps)
+		}
+		// Interfaces contribute their arg types and tag values, as well as embedded
+		// interfaces.
+		for _, iface := range file.Interfaces {
+			for _, embed := range iface.TransitiveEmbeds() {
+				pdeps[embed.File.Package] = true
+			}
+			for _, method := range iface.Methods {
+				for _, arg := range method.InArgs {
+					addTypeDeps(arg.Type, pkg, env, tdeps, pdeps)
+				}
+				for _, arg := range method.OutArgs {
+					addTypeDeps(arg.Type, pkg, env, tdeps, pdeps)
+				}
+				if stream := method.InStream; stream != nil {
+					addTypeDeps(stream, pkg, env, tdeps, pdeps)
+				}
+				if stream := method.OutStream; stream != nil {
+					addTypeDeps(stream, pkg, env, tdeps, pdeps)
+				}
+				for _, tag := range method.Tags {
+					addValueTypeDeps(tag, pkg, env, tdeps, pdeps)
+				}
+			}
+		}
+		// Errors contribute their param types.
+		for _, def := range file.ErrorDefs {
+			for _, param := range def.Params {
+				addTypeDeps(param.Type, pkg, env, tdeps, pdeps)
+			}
+		}
+		file.TypeDeps = tdeps
+		// Now remove self and built-in package dependencies.  Every package can use
+		// itself and the built-in package, so we don't need to record this.
+		delete(pdeps, pkg)
+		delete(pdeps, BuiltInPackage)
+		// Finally populate PackageDeps and sort by package path.
+		file.PackageDeps = make([]*Package, 0, len(pdeps))
+		for pdep := range pdeps {
+			file.PackageDeps = append(file.PackageDeps, pdep)
+		}
+		sort.Sort(pkgSorter(file.PackageDeps))
+	}
+}
+
+// Add immediate package deps for t and subtypes of t.  Results are accumulated
+// into the tdeps and pdeps out-param maps.
+func addTypeDeps(t *vdl.Type, pkg *Package, env *Env, tdeps map[*vdl.Type]bool, pdeps map[*Package]bool) {
+	if def := env.typeDefs[t]; def != nil {
+		// We don't track transitive dependencies, only immediate dependencies.
+		tdeps[t] = true
+		pdeps[def.File.Package] = true
+		if t == vdl.TypeObjectType {
+			// Special-case: usage of typeobject implies usage of any, since the zero
+			// value for typeobject is any.
+			addTypeDeps(vdl.AnyType, pkg, env, tdeps, pdeps)
+		}
+		return
+	}
+	// Not all types have TypeDefs; e.g. unnamed lists have no corresponding
+	// TypeDef, so we need to traverse those recursively.
+	addSubTypeDeps(t, pkg, env, tdeps, pdeps)
+}
+
+// addSubTypeDeps records the immediate package deps contributed by the
+// component types of t: element, key and field types.
+func addSubTypeDeps(t *vdl.Type, pkg *Package, env *Env, tdeps map[*vdl.Type]bool, pdeps map[*Package]bool) {
+	switch t.Kind() {
+	case vdl.Array, vdl.List:
+		addTypeDeps(t.Elem(), pkg, env, tdeps, pdeps)
+	case vdl.Map:
+		addTypeDeps(t.Key(), pkg, env, tdeps, pdeps)
+		addTypeDeps(t.Elem(), pkg, env, tdeps, pdeps)
+	case vdl.Set:
+		addTypeDeps(t.Key(), pkg, env, tdeps, pdeps)
+	case vdl.Struct, vdl.Union:
+		for i := 0; i < t.NumField(); i++ {
+			addTypeDeps(t.Field(i).Type, pkg, env, tdeps, pdeps)
+		}
+	}
+}
+
+// Add immediate package deps for v.Type(), and subvalues.  We must traverse the
+// value to know which types are actually used; e.g. an empty struct doesn't
+// have a dependency on its field types.
+//
+// The purpose of this method is to identify the package and type dependencies
+// for const or tag values.
+func addValueTypeDeps(v *vdl.Value, pkg *Package, env *Env, tdeps map[*vdl.Type]bool, pdeps map[*Package]bool) {
+	t := v.Type()
+	if def := env.typeDefs[t]; def != nil {
+		tdeps[t] = true
+		pdeps[def.File.Package] = true
+		// Fall through to track transitive dependencies, based on the subvalues.
+	}
+	// Traverse subvalues recursively.
+	switch t.Kind() {
+	case vdl.Array, vdl.List:
+		for ix := 0; ix < v.Len(); ix++ {
+			addValueTypeDeps(v.Index(ix), pkg, env, tdeps, pdeps)
+		}
+	case vdl.Set, vdl.Map:
+		// Keys always contribute; map values contribute too.
+		for _, key := range v.Keys() {
+			addValueTypeDeps(key, pkg, env, tdeps, pdeps)
+			if t.Kind() == vdl.Map {
+				addValueTypeDeps(v.MapIndex(key), pkg, env, tdeps, pdeps)
+			}
+		}
+	case vdl.Struct:
+		// There are no subvalues to track if the value is 0.
+		if v.IsZero() {
+			return
+		}
+		for ix := 0; ix < t.NumField(); ix++ {
+			addValueTypeDeps(v.StructField(ix), pkg, env, tdeps, pdeps)
+		}
+	case vdl.Union:
+		// Only the currently-selected field contributes.
+		_, field := v.UnionField()
+		addValueTypeDeps(field, pkg, env, tdeps, pdeps)
+	case vdl.Any, vdl.Optional:
+		if elem := v.Elem(); elem != nil {
+			addValueTypeDeps(elem, pkg, env, tdeps, pdeps)
+		}
+	case vdl.TypeObject:
+		// TypeObject has dependencies on everything its zero value depends on.
+		addValueTypeDeps(vdl.ZeroValue(v.TypeObject()), pkg, env, tdeps, pdeps)
+	}
+}
+
+// pkgSorter implements sort.Interface over []*Package, ordering
+// lexicographically by import path so PackageDeps output is deterministic.
+type pkgSorter []*Package
+
+func (s pkgSorter) Len() int           { return len(s) }
+func (s pkgSorter) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
+func (s pkgSorter) Less(i, j int) bool { return s[i].Path < s[j].Path }
diff --git a/lib/vdl/compile/compile_test.go b/lib/vdl/compile/compile_test.go
new file mode 100644
index 0000000..55a967e
--- /dev/null
+++ b/lib/vdl/compile/compile_test.go
@@ -0,0 +1,203 @@
+package compile_test
+
+import (
+	"fmt"
+	"path"
+	"strings"
+	"testing"
+
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdltest"
+)
+
+// TestValidExportedIdent runs ValidExportedIdent over a table of invalid and
+// valid identifiers; an empty errstr means the ident must be accepted.
+func TestValidExportedIdent(t *testing.T) {
+	tests := []struct {
+		ident  string
+		errstr string
+	}{
+		{"", `"" invalid`},
+		{"xFirstLetterLower", `"xFirstLetterLower" must be exported`},
+		{"0FirstLetterDigit", `"0FirstLetterDigit" invalid`},
+		{"_FirstLetterPunct", `"_FirstLetterPunct" invalid`},
+		{" FirstLetterSpace", `" FirstLetterSpace" invalid`},
+		{"X.InvalidPunct", `"X.InvalidPunct" invalid`},
+		{"X InvalidSpace", `"X InvalidSpace" invalid`},
+		{"X\nNonAlphaNum", `"X\nNonAlphaNum" invalid`},
+		{"X", ""},
+		{"XYZ", ""},
+		{"Xyz", ""},
+		{"Xyz123", ""},
+		{"Xyz_123", ""},
+	}
+	for _, test := range tests {
+		err := compile.ValidExportedIdent(test.ident, compile.ReservedNormal)
+		errstr := fmt.Sprint(err)
+		if test.errstr != "" && !strings.Contains(errstr, test.errstr) {
+			t.Errorf(`ValidExportedIdent(%s) got error %q, want substr %q`, test.ident, errstr, test.errstr)
+		}
+		if test.errstr == "" && err != nil {
+			t.Errorf(`ValidExportedIdent(%s) got error %q, want nil`, test.ident, errstr)
+		}
+	}
+}
+
+// TestValidIdent runs ValidIdent over a table of identifiers, checking both
+// the error (empty errstr means accept) and the reported exported-ness.
+func TestValidIdent(t *testing.T) {
+	tests := []struct {
+		name     string
+		exported bool
+		errstr   string
+	}{
+		{"", false, `"" invalid`},
+		{"0FirstLetterDigit", false, `"0FirstLetterDigit" invalid`},
+		{"_FirstLetterPunct", false, `"_FirstLetterPunct" invalid`},
+		{" FirstLetterSpace", false, `" FirstLetterSpace" invalid`},
+		{"x.InvalidPunct", false, `"x.InvalidPunct" invalid`},
+		{"x InvalidSpace", false, `"x InvalidSpace" invalid`},
+		{"x\nNonAlphaNum", false, `"x\nNonAlphaNum" invalid`},
+		{"X", true, ""},
+		{"XYZ", true, ""},
+		{"Xyz", true, ""},
+		{"Xyz123", true, ""},
+		{"Xyz_123", true, ""},
+		{"x", false, ""},
+		{"xYZ", false, ""},
+		{"xyz", false, ""},
+		{"xyz123", false, ""},
+		{"xyz_123", false, ""},
+	}
+	for _, test := range tests {
+		exported, err := compile.ValidIdent(test.name, compile.ReservedNormal)
+		errstr := fmt.Sprint(err)
+		if test.errstr != "" && !strings.Contains(errstr, test.errstr) {
+			t.Errorf(`ValidIdent(%s) got error %q, want substr %q`, test.name, errstr, test.errstr)
+		}
+		if test.errstr == "" && err != nil {
+			t.Errorf(`ValidIdent(%s) got error %q, want nil`, test.name, errstr)
+		}
+		if got, want := exported, test.exported; got != want {
+			t.Errorf(`ValidIdent(%s) got exported %v, want %v`, test.name, got, want)
+		}
+	}
+}
+
+// f is shorthand for the fake filename-to-contents maps in the test table.
+type f map[string]string
+
+// TestParseAndCompile runs the full parse+compile pipeline over fake packages
+// and checks the resulting package name, path and per-test expectations.
+func TestParseAndCompile(t *testing.T) {
+	tests := []struct {
+		name   string
+		files  map[string]string
+		errRE  string
+		expect func(t *testing.T, name string, pkg *compile.Package)
+	}{
+		{"test1", f{"1.vdl": pkg1file1, "2.vdl": pkg1file2}, "", expectPkg1},
+		{"test2", f{"1.vdl": "package native"}, `"native" invalid identifier`, nil},
+	}
+	for _, test := range tests {
+		path := path.Join("a/b", test.name)
+		buildPkg := vdltest.FakeBuildPackage(test.name, path, test.files)
+		env := compile.NewEnv(-1)
+		pkg := build.BuildPackage(buildPkg, env)
+		vdltest.ExpectResult(t, env.Errors, test.name, test.errRE)
+		if pkg == nil {
+			continue
+		}
+		// The format strings have three %s verbs; the original calls dropped the
+		// leading test.name argument (a go vet printf error).
+		if got, want := pkg.Name, test.name; got != want {
+			t.Errorf("%s got package name %s, want %s", test.name, got, want)
+		}
+		if got, want := pkg.Path, path; got != want {
+			t.Errorf("%s got package path %s, want %s", test.name, got, want)
+		}
+		// Guard against table entries without an expectation func.
+		if test.expect != nil {
+			test.expect(t, test.name, pkg)
+		}
+	}
+}
+
+// pkg1file1 is the first file of the two-file test1 package: scalar and
+// composite struct types, typed consts, and ServiceA with streaming methods
+// and tags.  CompComp and SixSquared reference names defined in pkg1file2,
+// exercising cross-file resolution.
+const pkg1file1 = `package test1
+
+type Scalars struct {
+	A bool
+	B byte
+	C int32
+	D int64
+	E uint32
+	F uint64
+	G float32
+	H float64
+	I complex64
+	J complex128
+	K string
+	L error
+	M any
+}
+
+type KeyScalars struct {
+	A bool
+	B byte
+	C int32
+	D int64
+	E uint32
+	F uint64
+	G float32
+	H float64
+	I complex64
+	J complex128
+	K string
+}
+
+type CompComp struct {
+	A Composites
+	B []Composites
+	C map[string]Composites
+}
+
+const (
+	Cbool = true
+	Cbyte = byte(1)
+	Cint32 = int32(2)
+	Cint64 = int64(3)
+	Cuint32 = uint32(4)
+	Cuint64 = uint64(5)
+	Cfloat32 = float32(6)
+	Cfloat64 = float64(7)
+	Ccomplex64 = complex64(8+9i)
+	Ccomplex128 = complex128(10+11i)
+	Cstring = "foo"
+	Cany = Cbool
+
+	True = true
+	Foo = "foo"
+	Five = int32(5)
+	SixSquared = Six*Six
+)
+
+type ServiceA interface {
+	MethodA1() error
+	MethodA2(a int32, b string) (s string | error)
+	MethodA3(a int32) stream<_, Scalars> (s string | error) {"tag", Six}
+	MethodA4(a int32) stream<int32, string> error
+}
+`
+
+// pkg1file2 is the second file of the test1 package: Composites (referenced
+// by CompComp in pkg1file1), consts FiveSquared/Six (FiveSquared uses Five
+// from pkg1file1), and ServiceB embedding ServiceA.
+const pkg1file2 = `package test1
+type Composites struct {
+	A Scalars
+	B []Scalars
+	C map[string]Scalars
+	D map[KeyScalars][]map[string]complex128
+}
+
+const (
+	FiveSquared = Five*Five
+	Six = uint64(6)
+)
+
+type ServiceB interface {
+	ServiceA
+	MethodB1(a Scalars, b Composites) (c CompComp | error)
+}
+`
+
+// expectPkg1 is the per-test expectation hook for the test1 package; it
+// currently only dumps the compiled package for manual inspection.
+func expectPkg1(t *testing.T, name string, pkg *compile.Package) {
+	// TODO(toddw): verify real expectations, and add more tests.
+	fmt.Println(pkg)
+}
diff --git a/lib/vdl/compile/const.go b/lib/vdl/compile/const.go
new file mode 100644
index 0000000..17cfdf9
--- /dev/null
+++ b/lib/vdl/compile/const.go
@@ -0,0 +1,628 @@
+package compile
+
+import (
+	"fmt"
+	"math/big"
+
+	"v.io/lib/toposort"
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/opconst"
+	"v.io/v23/vdl/parse"
+)
+
+// ConstDef represents a user-defined named const definition in the compiled
+// results.
+type ConstDef struct {
+	NamePos             // name, parse position and docs
+	Exported bool       // is this const definition exported?
+	Value    *vdl.Value // const value
+	File     *File      // parent file that this const is defined in
+}
+
+// String returns a human-readable description of the const definition.  The
+// File field is cleared before formatting to avoid recursing back through the
+// parent file and package.
+func (x *ConstDef) String() string {
+	c := *x
+	c.File = nil // avoid infinite loop
+	return fmt.Sprintf("%+v", c)
+}
+
+// compileConstDefs is the "entry point" to the rest of this file.  It takes the
+// consts defined in pfiles and compiles them into ConstDefs in pkg.
+func compileConstDefs(pkg *Package, pfiles []*parse.File, env *Env) {
+	cd := constDefiner{pkg, pfiles, env, make(map[string]*constBuilder)}
+	// Stop after the Declare phase if it produced any errors; SortAndDefine
+	// assumes all local const names were declared successfully.
+	if cd.Declare(); !env.Errors.IsEmpty() {
+		return
+	}
+	cd.SortAndDefine()
+}
+
+// constDefiner defines consts in a package.  This is split into two phases:
+// 1) Declare ensures local const references can be resolved.
+// 2) SortAndDefine sorts in dependency order, and evaluates and defines each
+//    const.
+//
+// It holds a builders map from const name to constBuilder, where the
+// constBuilder is responsible for compiling and defining a single const.
+type constDefiner struct {
+	pkg      *Package                 // package being compiled
+	pfiles   []*parse.File            // parsed files, parallel to pkg.Files
+	env      *Env                     // environment used for error reporting
+	builders map[string]*constBuilder // const name -> its builder
+}
+
+// constBuilder compiles and defines a single named const.
+type constBuilder struct {
+	def   *ConstDef       // definition under construction; Value filled in later
+	pexpr parse.ConstExpr // parsed expression producing the const value
+}
+
+// printConstBuilderName renders a cycle participant by its const name, for
+// use with toposort.DumpCycles.
+func printConstBuilderName(ibuilder interface{}) string {
+	return ibuilder.(*constBuilder).def.Name
+}
+
+// Declare creates builders for each const defined in the package.  Invalid or
+// conflicting names are reported to env, but processing continues so that as
+// many errors as possible are caught in one pass.
+func (cd constDefiner) Declare() {
+	for ix := range cd.pkg.Files {
+		file, pfile := cd.pkg.Files[ix], cd.pfiles[ix]
+		for _, pdef := range pfile.ConstDefs {
+			// Validate the const name and determine whether it is exported.
+			export, err := ValidIdent(pdef.Name, ReservedNormal)
+			if err != nil {
+				cd.env.prefixErrorf(file, pdef.Pos, err, "const %s invalid name", pdef.Name)
+				continue // keep going to catch more errors
+			}
+			// Reserve the name in the file scope, so later references resolve.
+			detail := identDetail("const", file, pdef.Pos)
+			if err := file.DeclareIdent(pdef.Name, detail); err != nil {
+				cd.env.prefixErrorf(file, pdef.Pos, err, "const %s name conflict", pdef.Name)
+				continue
+			}
+			def := &ConstDef{NamePos: NamePos(pdef.NamePos), Exported: export, File: file}
+			cd.builders[pdef.Name] = &constBuilder{def, pdef.Expr}
+		}
+	}
+}
+
+// Sort and define consts.  We sort by dependencies on other named consts in
+// this package.  We don't allow cycles.  The ordering is necessary to perform
+// simple single-pass evaluation.
+//
+// The dependency analysis is performed on consts, not the files they occur in;
+// consts in the same package may be defined in any file, even if they cause
+// cyclic file dependencies.
+func (cd constDefiner) SortAndDefine() {
+	// Populate sorter with dependency information.  The sorting ensures that the
+	// list of const defs within each file is topologically sorted, and also
+	// deterministic; other than dependencies, const defs are listed in the same
+	// order they were defined in the parsed files.
+	var sorter toposort.Sorter
+	for _, pfile := range cd.pfiles {
+		for _, pdef := range pfile.ConstDefs {
+			b := cd.builders[pdef.Name]
+			sorter.AddNode(b)
+			// Each same-package const referenced by b's expression is an edge.
+			for dep, _ := range cd.getLocalDeps(b.pexpr) {
+				sorter.AddEdge(b, dep)
+			}
+		}
+	}
+	// Sort and check for cycles.
+	sorted, cycles := sorter.Sort()
+	if len(cycles) > 0 {
+		cycleStr := toposort.DumpCycles(cycles, printConstBuilderName)
+		first := cycles[0][0].(*constBuilder)
+		cd.env.Errorf(first.def.File, first.def.Pos, "package %v has cyclic consts: %v", cd.pkg.Name, cycleStr)
+		return
+	}
+	// Define all consts.  Since we add the const defs as we go and evaluate in
+	// topological order, dependencies are guaranteed to be resolvable when we get
+	// around to evaluating the consts that depend on them.
+	for _, ibuilder := range sorted {
+		b := ibuilder.(*constBuilder)
+		def, file := b.def, b.def.File
+		if value := compileConst("const", nil, b.pexpr, file, cd.env); value != nil {
+			def.Value = value
+			addConstDef(def, cd.env)
+		}
+	}
+}
+
+// addConstDef updates our various structures to add a new const def.  The def
+// is appended to its parent file's ConstDefs and registered in the package's
+// name map; when env is non-nil, a value->def mapping is recorded as well.
+func addConstDef(def *ConstDef, env *Env) {
+	def.File.ConstDefs = append(def.File.ConstDefs, def)
+	def.File.Package.constDefs[def.Name] = def
+	if env != nil {
+		// env should only be nil during initialization of the built-in package;
+		// NewEnv ensures new environments have the built-in consts.
+		env.constDefs[def.Value] = def
+	}
+}
+
+// getLocalDeps returns the set of named const dependencies for pexpr that are
+// in this package.  It recurses over the expression tree, collecting every
+// ConstNamed leaf that matches a const declared by this package.
+func (cd constDefiner) getLocalDeps(pexpr parse.ConstExpr) constBuilderSet {
+	switch pe := pexpr.(type) {
+	case nil, *parse.ConstLit, *parse.ConstTypeObject:
+		// Leaves with no const references.
+		return nil
+	case *parse.ConstCompositeLit:
+		// Union of deps from every key and value in the literal.
+		var deps constBuilderSet
+		for _, kv := range pe.KVList {
+			deps = mergeConstBuilderSets(deps, cd.getLocalDeps(kv.Key))
+			deps = mergeConstBuilderSets(deps, cd.getLocalDeps(kv.Value))
+		}
+		return deps
+	case *parse.ConstNamed:
+		// Named references to other consts in this package are all we care about.
+		if b := cd.builders[pe.Name]; b != nil {
+			return constBuilderSet{b: true}
+		}
+		return nil
+	case *parse.ConstIndexed:
+		e, i := cd.getLocalDeps(pe.Expr), cd.getLocalDeps(pe.IndexExpr)
+		return mergeConstBuilderSets(e, i)
+	case *parse.ConstTypeConv:
+		return cd.getLocalDeps(pe.Expr)
+	case *parse.ConstUnaryOp:
+		return cd.getLocalDeps(pe.Expr)
+	case *parse.ConstBinaryOp:
+		l, r := cd.getLocalDeps(pe.Lexpr), cd.getLocalDeps(pe.Rexpr)
+		return mergeConstBuilderSets(l, r)
+	}
+	panic(fmt.Errorf("vdl: unhandled parse.ConstExpr %T %#v", pexpr, pexpr))
+}
+
+// constBuilderSet is a set of const builders, used for dependency tracking.
+type constBuilderSet map[*constBuilder]bool
+
+// mergeConstBuilderSets returns the union of a and b.  It may mutate either a
+// or b and return the mutated set as a result.
+func mergeConstBuilderSets(a, b constBuilderSet) constBuilderSet {
+	if a != nil {
+		for builder, _ := range b {
+			a[builder] = true
+		}
+		return a
+	}
+	return b
+}
+
+// compileConst compiles pexpr into a *vdl.Value.  All named types and consts
+// referenced by pexpr must already be defined.
+//
+// The implicit type is applied to pexpr; untyped consts and composite literals
+// with no explicit type assume the implicit type.  Errors are reported if the
+// implicit type isn't assignable from the final value.  If the implicit type is
+// nil, the exported config const must be explicitly typed.
+//
+// The what argument names the construct being compiled, for error messages.
+// Returns nil on failure; the error has already been reported to env.
+func compileConst(what string, implicit *vdl.Type, pexpr parse.ConstExpr, file *File, env *Env) *vdl.Value {
+	c := evalConstExpr(implicit, pexpr, file, env)
+	if !c.IsValid() {
+		return nil
+	}
+	if implicit != nil && c.Type() == nil {
+		// Convert untyped const into the implicit type.
+		conv, err := c.Convert(implicit)
+		if err != nil {
+			env.prefixErrorf(file, pexpr.Pos(), err, "invalid %v", what)
+			return nil
+		}
+		c = conv
+	}
+	// Materialize the const as a concrete value; consts that never received a
+	// type are rejected here.
+	v, err := c.ToValue()
+	if err != nil {
+		env.prefixErrorf(file, pexpr.Pos(), err, "invalid %s", what)
+		return nil
+	}
+	if implicit != nil && !implicit.AssignableFrom(v) {
+		env.Errorf(file, pexpr.Pos(), "invalid %v (%v not assignable from %v)", what, implicit, v)
+		return nil
+	}
+	return v
+}
+
+// compileConstExplicit is similar to compileConst, but instead of an optional
+// implicit type, requires a non-nil explicit type.  The compiled const is
+// explicitly converted to the explicit type.  Returns nil on failure; the
+// error has already been reported to env.
+func compileConstExplicit(what string, explicit *vdl.Type, pexpr parse.ConstExpr, file *File, env *Env) *vdl.Value {
+	c := evalConstExpr(explicit, pexpr, file, env)
+	if !c.IsValid() {
+		return nil
+	}
+	conv, err := c.Convert(explicit)
+	if err != nil {
+		env.prefixErrorf(file, pexpr.Pos(), err, "invalid %v", what)
+		return nil
+	}
+	v, err := conv.ToValue()
+	if err != nil {
+		env.prefixErrorf(file, pexpr.Pos(), err, "invalid %s", what)
+		return nil
+	}
+	return v
+}
+
+// bigRatZero is the zero rational, used as the real part of purely imaginary
+// literals.
+var bigRatZero = new(big.Rat)
+
+// evalConstExpr returns the result of evaluating pexpr into a opconst.Const.
+// If implicit is non-nil, we apply it to pexpr if it doesn't have an explicit
+// type specified.  E.g. composite literals and enum labels with no explicit
+// type assume the implicit type.
+//
+// On failure an invalid (zero) opconst.Const is returned; all error cases
+// break out of the switch after reporting to env.
+func evalConstExpr(implicit *vdl.Type, pexpr parse.ConstExpr, file *File, env *Env) opconst.Const {
+	switch pe := pexpr.(type) {
+	case *parse.ConstLit:
+		// All literal constants start out untyped.
+		switch tlit := pe.Lit.(type) {
+		case string:
+			return opconst.String(tlit)
+		case *big.Int:
+			return opconst.Integer(tlit)
+		case *big.Rat:
+			return opconst.Rational(tlit)
+		case *parse.BigImag:
+			// Purely imaginary literal; the real part is zero.
+			return opconst.Complex(bigRatZero, (*big.Rat)(tlit))
+		default:
+			panic(fmt.Errorf("vdl: unhandled parse.ConstLit %T %#v", tlit, tlit))
+		}
+	case *parse.ConstCompositeLit:
+		t := implicit
+		if pe.Type != nil {
+			// If an explicit type is specified for the composite literal, it
+			// overrides the implicit type.
+			t = compileType(pe.Type, file, env)
+			if t == nil {
+				break
+			}
+		}
+		v := evalCompLit(t, pe, file, env)
+		if v == nil {
+			break
+		}
+		return opconst.FromValue(v)
+	case *parse.ConstNamed:
+		c, err := env.EvalConst(pe.Name, file)
+		if err != nil {
+			if implicit != nil {
+				// Try applying the name as a selector against the implicit type.  This
+				// allows a shortened form for enum labels, without redundantly
+				// specifying the enum type.
+				if c, err2 := env.evalSelectorOnType(implicit, pe.Name); err2 == nil {
+					return c
+				}
+			}
+			env.prefixErrorf(file, pe.Pos(), err, "const %s invalid", pe.Name)
+			break
+		}
+		return c
+	case *parse.ConstIndexed:
+		// Evaluate the base expression being indexed.
+		value := compileConst("const", nil, pe.Expr, file, env)
+		if value == nil {
+			break
+		}
+		// TODO(bprosnitz) Should indexing on set also be supported?
+		switch value.Kind() {
+		case vdl.Array, vdl.List:
+			v := evalListIndex(value, pe.IndexExpr, file, env)
+			if v != nil {
+				return opconst.FromValue(v)
+			}
+		case vdl.Map:
+			v := evalMapIndex(value, pe.IndexExpr, file, env)
+			if v != nil {
+				return opconst.FromValue(v)
+			}
+		default:
+			env.Errorf(file, pe.Pos(), "illegal use of index operator with unsupported type")
+		}
+	case *parse.ConstTypeConv:
+		// Explicit conversion, e.g. int32(x).
+		t := compileType(pe.Type, file, env)
+		x := evalConstExpr(nil, pe.Expr, file, env)
+		if t == nil || !x.IsValid() {
+			break
+		}
+		res, err := x.Convert(t)
+		if err != nil {
+			env.prefixErrorf(file, pe.Pos(), err, "invalid type conversion")
+			break
+		}
+		return res
+	case *parse.ConstTypeObject:
+		// A type used as a value, e.g. typeobject(int32).
+		t := compileType(pe.Type, file, env)
+		if t == nil {
+			break
+		}
+		return opconst.FromValue(vdl.TypeObjectValue(t))
+	case *parse.ConstUnaryOp:
+		x := evalConstExpr(nil, pe.Expr, file, env)
+		op := opconst.ToUnaryOp(pe.Op)
+		if op == opconst.InvalidUnaryOp {
+			env.Errorf(file, pe.Pos(), "unary %s undefined", pe.Op)
+			break
+		}
+		if !x.IsValid() {
+			break
+		}
+		res, err := opconst.EvalUnary(op, x)
+		if err != nil {
+			env.prefixErrorf(file, pe.Pos(), err, "unary %s invalid", pe.Op)
+			break
+		}
+		return res
+	case *parse.ConstBinaryOp:
+		x := evalConstExpr(nil, pe.Lexpr, file, env)
+		y := evalConstExpr(nil, pe.Rexpr, file, env)
+		op := opconst.ToBinaryOp(pe.Op)
+		if op == opconst.InvalidBinaryOp {
+			env.Errorf(file, pe.Pos(), "binary %s undefined", pe.Op)
+			break
+		}
+		if !x.IsValid() || !y.IsValid() {
+			break
+		}
+		res, err := opconst.EvalBinary(op, x, y)
+		if err != nil {
+			env.prefixErrorf(file, pe.Pos(), err, "binary %s invalid", pe.Op)
+			break
+		}
+		return res
+	default:
+		panic(fmt.Errorf("vdl: unhandled parse.ConstExpr %T %#v", pexpr, pexpr))
+	}
+	return opconst.Const{}
+}
+
+// evalListIndex evaluates base[index], where base is a list or array.  The
+// index expression is compiled with an explicit uint64 type, so negative
+// indices are rejected during conversion; only the upper bound is checked
+// here.  Returns nil on failure, after reporting the error to env.
+func evalListIndex(base *vdl.Value, indexExpr parse.ConstExpr, file *File, env *Env) *vdl.Value {
+	index := compileConstExplicit(base.Kind().String()+" index", vdl.Uint64Type, indexExpr, file, env)
+	if index == nil {
+		return nil
+	}
+	ix := int(index.Uint())
+	if ix >= base.Len() {
+		env.Errorf(file, indexExpr.Pos(), "index %d out of range", ix)
+		return nil
+	}
+	return base.Index(ix)
+}
+
+// evalMapIndex evaluates base[index], where base is a map.  The index is
+// compiled with the map's key type as the implicit type.  Returns nil on
+// failure, after reporting the error to env.
+func evalMapIndex(base *vdl.Value, indexExpr parse.ConstExpr, file *File, env *Env) *vdl.Value {
+	key := compileConst("map key", base.Type().Key(), indexExpr, file, env)
+	if key == nil {
+		return nil
+	}
+	item := base.MapIndex(key)
+	if item == nil {
+		// Unlike normal go code, it is probably undesirable to return the zero
+		// value here.  It is very likely this is an error.
+		env.Errorf(file, indexExpr.Pos(), "map key %v not found in map", key)
+		return nil
+	}
+	return item
+}
+
+// evalCompLit evaluates a composite literal, returning it as a vdl.Value.  The
+// type t is required, but note that subtypes enclosed in a composite type can
+// always use the implicit type from the parent composite type.
+//
+// Optional types are handled by evaluating the literal against the element
+// type, then re-wrapping the result as an optional value.
+func evalCompLit(t *vdl.Type, lit *parse.ConstCompositeLit, file *File, env *Env) *vdl.Value {
+	if t == nil {
+		env.Errorf(file, lit.Pos(), "missing type for composite literal")
+		return nil
+	}
+	// Unwrap optional; remember to re-wrap the final value below.
+	isOptional := false
+	if t.Kind() == vdl.Optional {
+		isOptional = true
+		t = t.Elem()
+	}
+	// Dispatch on the kind of composite being built.
+	var v *vdl.Value
+	switch t.Kind() {
+	case vdl.Array, vdl.List:
+		v = evalListLit(t, lit, file, env)
+	case vdl.Set:
+		v = evalSetLit(t, lit, file, env)
+	case vdl.Map:
+		v = evalMapLit(t, lit, file, env)
+	case vdl.Struct:
+		v = evalStructLit(t, lit, file, env)
+	case vdl.Union:
+		v = evalUnionLit(t, lit, file, env)
+	default:
+		env.Errorf(file, lit.Pos(), "%v invalid type for composite literal", t)
+		return nil
+	}
+	if v != nil && isOptional {
+		v = vdl.OptionalValue(v)
+	}
+	return v
+}
+
+// evalListLit evaluates a list or array composite literal against type t.
+// Entries may optionally carry an explicit index key (e.g. {1:1, 0:0});
+// unkeyed entries continue from the previous index.  Lists grow as needed,
+// while out-of-range array indices are errors.  Returns nil on failure.
+func evalListLit(t *vdl.Type, lit *parse.ConstCompositeLit, file *File, env *Env) *vdl.Value {
+	listv := vdl.ZeroValue(t)
+	desc := fmt.Sprintf("%v %s literal", t, t.Kind())
+	var index int
+	assigned := make(map[int]bool)
+	for _, kv := range lit.KVList {
+		if kv.Value == nil {
+			env.Errorf(file, lit.Pos(), "missing value in %s", desc)
+			return nil
+		}
+		// Set the index to the key, if it exists.  Semantics are looser than
+		// values; we allow any key that's convertible to uint64, even if the key is
+		// already typed.
+		if kv.Key != nil {
+			key := compileConstExplicit("list index", vdl.Uint64Type, kv.Key, file, env)
+			if key == nil {
+				return nil
+			}
+			index = int(key.Uint())
+		}
+		// Make sure the index hasn't been assigned already, and adjust the list
+		// length as necessary.
+		if assigned[index] {
+			env.Errorf(file, kv.Value.Pos(), "duplicate index %d in %s", index, desc)
+			return nil
+		}
+		assigned[index] = true
+		if index >= listv.Len() {
+			if t.Kind() == vdl.Array {
+				env.Errorf(file, kv.Value.Pos(), "index %d out of range in %s", index, desc)
+				return nil
+			}
+			listv.AssignLen(index + 1)
+		}
+		// Evaluate the value and perform the assignment.
+		value := compileConst(t.Kind().String()+" value", t.Elem(), kv.Value, file, env)
+		if value == nil {
+			return nil
+		}
+		listv.Index(index).Assign(value)
+		index++
+	}
+	return listv
+}
+
+// evalSetLit evaluates a set composite literal against type t.  Entries are
+// bare keys carried in kv.Value; an explicit kv.Key is an error, as are
+// duplicate keys.  Returns nil on failure.
+func evalSetLit(t *vdl.Type, lit *parse.ConstCompositeLit, file *File, env *Env) *vdl.Value {
+	setv := vdl.ZeroValue(t)
+	desc := fmt.Sprintf("%v set literal", t)
+	for _, kv := range lit.KVList {
+		if kv.Key != nil {
+			env.Errorf(file, kv.Key.Pos(), "invalid index in %s", desc)
+			return nil
+		}
+		if kv.Value == nil {
+			env.Errorf(file, lit.Pos(), "missing key in %s", desc)
+			return nil
+		}
+		// Evaluate the key and make sure it hasn't been assigned already.
+		key := compileConst("set key", t.Key(), kv.Value, file, env)
+		if key == nil {
+			return nil
+		}
+		if setv.ContainsKey(key) {
+			env.Errorf(file, kv.Value.Pos(), "duplicate key %v in %s", key, desc)
+			return nil
+		}
+		setv.AssignSetKey(key)
+	}
+	return setv
+}
+
+// evalMapLit evaluates a map composite literal against type t.  Every entry
+// must carry both a key and a value; duplicate keys are errors.  Returns nil
+// on failure.
+func evalMapLit(t *vdl.Type, lit *parse.ConstCompositeLit, file *File, env *Env) *vdl.Value {
+	mapv := vdl.ZeroValue(t)
+	desc := fmt.Sprintf("%v map literal", t)
+	for _, kv := range lit.KVList {
+		if kv.Key == nil {
+			env.Errorf(file, lit.Pos(), "missing key in %s", desc)
+			return nil
+		}
+		if kv.Value == nil {
+			env.Errorf(file, lit.Pos(), "missing elem in %s", desc)
+			return nil
+		}
+		// Evaluate the key and make sure it hasn't been assigned already.
+		key := compileConst("map key", t.Key(), kv.Key, file, env)
+		if key == nil {
+			return nil
+		}
+		if mapv.ContainsKey(key) {
+			env.Errorf(file, kv.Key.Pos(), "duplicate key %v in %s", key, desc)
+			return nil
+		}
+		// Evaluate the value and perform the assignment.
+		value := compileConst("map value", t.Elem(), kv.Value, file, env)
+		if value == nil {
+			return nil
+		}
+		mapv.AssignMapIndex(key, value)
+	}
+	return mapv
+}
+
+// evalStructLit evaluates a struct composite literal against type t.  Either
+// all entries carry field-name keys, or none do; positional (unkeyed)
+// literals must assign every field, while keyed literals may leave fields at
+// their zero value.  Returns nil on failure.
+func evalStructLit(t *vdl.Type, lit *parse.ConstCompositeLit, file *File, env *Env) *vdl.Value {
+	// We require that either all items have keys, or none of them do.
+	structv := vdl.ZeroValue(t)
+	desc := fmt.Sprintf("%v struct literal", t)
+	haskeys := len(lit.KVList) > 0 && lit.KVList[0].Key != nil
+	assigned := make(map[int]bool)
+	for index, kv := range lit.KVList {
+		if kv.Value == nil {
+			env.Errorf(file, lit.Pos(), "missing field value in %s", desc)
+			return nil
+		}
+		if haskeys != (kv.Key != nil) {
+			env.Errorf(file, kv.Value.Pos(), "mixed key:value and value in %s", desc)
+			return nil
+		}
+		// Get the field description, either from the key or the index.
+		var field vdl.Field
+		if kv.Key != nil {
+			// There is an explicit field name specified.
+			fname, ok := kv.Key.(*parse.ConstNamed)
+			if !ok {
+				env.Errorf(file, kv.Key.Pos(), "invalid field name %q in %s", kv.Key.String(), desc)
+				return nil
+			}
+			field, index = t.FieldByName(fname.Name)
+			if index < 0 {
+				env.Errorf(file, kv.Key.Pos(), "unknown field %q in %s", fname.Name, desc)
+				return nil
+			}
+		} else {
+			// No field names, just use the index position.
+			if index >= t.NumField() {
+				env.Errorf(file, kv.Value.Pos(), "too many fields in %s", desc)
+				return nil
+			}
+			field = t.Field(index)
+		}
+		// Make sure the field hasn't been assigned already.
+		if assigned[index] {
+			env.Errorf(file, kv.Value.Pos(), "duplicate field %q in %s", field.Name, desc)
+			return nil
+		}
+		assigned[index] = true
+		// Evaluate the value and perform the assignment.
+		value := compileConst("struct field", field.Type, kv.Value, file, env)
+		if value == nil {
+			return nil
+		}
+		structv.StructField(index).Assign(value)
+	}
+	// Positional literals must be exhaustive (an empty literal is allowed).
+	if !haskeys && 0 < len(assigned) && len(assigned) < t.NumField() {
+		env.Errorf(file, lit.Pos(), "too few fields in %s", desc)
+		return nil
+	}
+	return structv
+}
+
+// evalUnionLit evaluates a union composite literal against type t, which must
+// contain exactly one key:value entry naming the union field.  Returns nil on
+// failure.
+func evalUnionLit(t *vdl.Type, lit *parse.ConstCompositeLit, file *File, env *Env) *vdl.Value {
+	// We require exactly one kv with an explicit key.
+	unionv := vdl.ZeroValue(t)
+	desc := fmt.Sprintf("%v union literal", t)
+	if len(lit.KVList) != 1 {
+		env.Errorf(file, lit.Pos(), "invalid %s (must have exactly one entry)", desc)
+		return nil
+	}
+	kv := lit.KVList[0]
+	if kv.Key == nil || kv.Value == nil {
+		env.Errorf(file, lit.Pos(), "invalid %s (must have explicit key and value)", desc)
+		return nil
+	}
+	// Get the field description.
+	fname, ok := kv.Key.(*parse.ConstNamed)
+	if !ok {
+		env.Errorf(file, kv.Key.Pos(), "invalid field name %q in %s", kv.Key.String(), desc)
+		return nil
+	}
+	field, index := t.FieldByName(fname.Name)
+	if index < 0 {
+		env.Errorf(file, kv.Key.Pos(), "unknown field %q in %s", fname.Name, desc)
+		return nil
+	}
+	// Evaluate the value and perform the assignment.
+	value := compileConst("union field", field.Type, kv.Value, file, env)
+	if value == nil {
+		return nil
+	}
+	unionv.AssignUnionField(index, value)
+	return unionv
+}
+
+var (
+	// Built-in consts defined by the compiler.
+	NilConst   = vdl.ZeroValue(vdl.AnyType) // nil == any(nil)
+	TrueConst  = vdl.BoolValue(true)        // bool true
+	FalseConst = vdl.BoolValue(false)       // bool false
+)
diff --git a/lib/vdl/compile/const_test.go b/lib/vdl/compile/const_test.go
new file mode 100644
index 0000000..96232d1
--- /dev/null
+++ b/lib/vdl/compile/const_test.go
@@ -0,0 +1,1044 @@
+package compile_test
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdltest"
+)
+
+// testConstPackage builds tpkg as a regular VDL package under env, checks the
+// result (or error) against tpkg's expectations, and returns the compiled
+// package.  Returns nil when compilation failed or an error was expected.
+func testConstPackage(t *testing.T, name string, tpkg constPkg, env *compile.Env) *compile.Package {
+	// Compile the package with a single file, and adding the "package foo"
+	// prefix to the source data automatically.
+	files := map[string]string{
+		tpkg.Name + ".vdl": "package " + tpkg.Name + "\n" + tpkg.Data,
+	}
+	pkgPath := "p.kg/" + tpkg.Name // use dots in pkgpath to test tricky cases
+	buildPkg := vdltest.FakeBuildPackage(tpkg.Name, pkgPath, files)
+	pkg := build.BuildPackage(buildPkg, env)
+	vdltest.ExpectResult(t, env.Errors, name, tpkg.ErrRE)
+	if pkg == nil || tpkg.ErrRE != "" {
+		return nil
+	}
+	matchConstRes(t, name, tpkg, pkg.Files[0].ConstDefs)
+	return pkg
+}
+
+// matchConstRes compares the compiled value of the const named "Res" in cdefs
+// against tpkg.ExpectRes.  A nil ExpectRes skips the check entirely; a
+// missing Res const is reported as a test error.
+func matchConstRes(t *testing.T, tname string, tpkg constPkg, cdefs []*compile.ConstDef) {
+	if tpkg.ExpectRes == nil {
+		return
+	}
+	// Look for a ConstDef called "Res" to compare our expected results.
+	for _, cdef := range cdefs {
+		if cdef.Name == "Res" {
+			if got, want := cdef.Value, tpkg.ExpectRes; !vdl.EqualValue(got, want) {
+				t.Errorf("%s value got %s, want %s", tname, got, want)
+			}
+			return
+		}
+	}
+	t.Errorf("%s couldn't find Res in package %s", tname, tpkg.Name)
+}
+
+// testConfigFile recompiles tpkg's source as a config file and checks the
+// resulting value (or error) against tpkg's expectations.
+func testConfigFile(t *testing.T, name string, tpkg constPkg, env *compile.Env) {
+	// Take advantage of the fact that vdl files and config files have very
+	// similar syntax.  Just prefix the data with "config = Res\n" rather than
+	// "package a\n" and we have a valid config file.
+	fname := tpkg.Name + ".config"
+	data := "config = Res\n" + tpkg.Data
+	config := build.BuildConfig(fname, strings.NewReader(data), nil, nil, env)
+	vdltest.ExpectResult(t, env.Errors, name, tpkg.ErrRE)
+	if config == nil || tpkg.ErrRE != "" {
+		return
+	}
+	if got, want := config, tpkg.ExpectRes; !vdl.EqualValue(got, want) {
+		t.Errorf("%s value got %s, want %s", name, got, want)
+	}
+}
+
+// TestConst compiles each test case's packages in order under a fresh env and
+// checks the resulting "Res" consts and expected errors.
+func TestConst(t *testing.T) {
+	for _, test := range constTests {
+		env := compile.NewEnv(-1)
+		for _, tpkg := range test.Pkgs {
+			testConstPackage(t, test.Name, tpkg, env)
+		}
+	}
+}
+
+// TestConfig reuses the const test cases to exercise config files: all but
+// the last package are compiled normally, and the last one — when it defines
+// nothing but consts — is recompiled as a config file.
+func TestConfig(t *testing.T) {
+	for _, test := range constTests {
+		env := compile.NewEnv(-1)
+		// Compile all but the last tpkg as regular packages.
+		for _, tpkg := range test.Pkgs[:len(test.Pkgs)-1] {
+			testConstPackage(t, test.Name, tpkg, env)
+		}
+		// Compile the last tpkg as a regular package to see if it defines anything
+		// other than consts.
+		last := test.Pkgs[len(test.Pkgs)-1]
+		pkg := testConstPackage(t, test.Name, last, env)
+		if pkg == nil ||
+			len(pkg.Files[0].ErrorDefs) > 0 ||
+			len(pkg.Files[0].TypeDefs) > 0 ||
+			len(pkg.Files[0].Interfaces) > 0 {
+			continue // has non-const stuff, can't be a valid config file
+		}
+		// Finally compile the config file.
+		testConfigFile(t, test.Name, last, env)
+	}
+}
+
+// namedZero returns the zero value of a new named type with the given base.
+func namedZero(name string, base *vdl.Type) *vdl.Value {
+	return vdl.ZeroValue(vdl.NamedType(name, base))
+}
+
+// makeIntList builds a []int64 value holding vals, in order.
+func makeIntList(vals ...int64) *vdl.Value {
+	listv := vdl.ZeroValue(vdl.ListType(vdl.Int64Type)).AssignLen(len(vals))
+	for index, v := range vals {
+		listv.Index(index).AssignInt(v)
+	}
+	return listv
+}
+
+// makeIntArray builds a named [N]int64 array value holding vals, where N is
+// len(vals).
+func makeIntArray(name string, vals ...int64) *vdl.Value {
+	arrayv := vdl.ZeroValue(vdl.NamedType(name, vdl.ArrayType(len(vals), vdl.Int64Type)))
+	for index, v := range vals {
+		arrayv.Index(index).AssignInt(v)
+	}
+	return arrayv
+}
+
+// makeByteList builds a []byte value holding vals, in order.
+func makeByteList(vals ...byte) *vdl.Value {
+	arrayv := vdl.ZeroValue(vdl.ListType(vdl.ByteType)).AssignLen(len(vals))
+	for index, v := range vals {
+		arrayv.Index(index).AssignByte(v)
+	}
+	return arrayv
+}
+
+// makeByteArray builds a named [N]byte array value holding vals, where N is
+// len(vals).
+func makeByteArray(name string, vals ...byte) *vdl.Value {
+	arrayv := vdl.ZeroValue(vdl.NamedType(name, vdl.ArrayType(len(vals), vdl.ByteType)))
+	for index, v := range vals {
+		arrayv.Index(index).AssignByte(v)
+	}
+	return arrayv
+}
+
+// makeStringSet builds a set[string] value containing keys.
+func makeStringSet(keys ...string) *vdl.Value {
+	setv := vdl.ZeroValue(vdl.SetType(vdl.StringType))
+	for _, k := range keys {
+		setv.AssignSetKey(vdl.StringValue(k))
+	}
+	return setv
+}
+
+// makeStringIntMap builds a map[string]int64 value with the entries of m.
+func makeStringIntMap(m map[string]int64) *vdl.Value {
+	mapv := vdl.ZeroValue(vdl.MapType(vdl.StringType, vdl.Int64Type))
+	for k, v := range m {
+		mapv.AssignMapIndex(vdl.StringValue(k), vdl.Int64Value(v))
+	}
+	return mapv
+}
+
+// makeStructType returns the named struct type {X int64; Y string; Z bool}.
+func makeStructType(name string) *vdl.Type {
+	return vdl.NamedType(name, vdl.StructType([]vdl.Field{
+		{"X", vdl.Int64Type}, {"Y", vdl.StringType}, {"Z", vdl.BoolType},
+	}...))
+}
+
+// makeStruct builds a value of makeStructType(name) with the given fields.
+func makeStruct(name string, x int64, y string, z bool) *vdl.Value {
+	structv := vdl.ZeroValue(makeStructType(name))
+	structv.StructField(0).AssignInt(x)
+	structv.StructField(1).AssignString(y)
+	structv.StructField(2).AssignBool(z)
+	return structv
+}
+
+// makeUnionType returns the named union type {X int64; Y string; Z bool}.
+func makeUnionType(name string) *vdl.Type {
+	return vdl.NamedType(name, vdl.UnionType([]vdl.Field{
+		{"X", vdl.Int64Type}, {"Y", vdl.StringType}, {"Z", vdl.BoolType},
+	}...))
+}
+
+// makeUnion builds a value of makeUnionType(name); the union field is chosen
+// by the dynamic type of val (int64 -> X, string -> Y, bool -> Z).
+func makeUnion(name string, val interface{}) *vdl.Value {
+	unionv := vdl.ZeroValue(makeUnionType(name))
+	switch tval := val.(type) {
+	case int64:
+		unionv.AssignUnionField(0, vdl.Int64Value(tval))
+	case string:
+		unionv.AssignUnionField(1, vdl.StringValue(tval))
+	case bool:
+		unionv.AssignUnionField(2, vdl.BoolValue(tval))
+	default:
+		panic(fmt.Errorf("makeUnion unhandled %T %v", val, val))
+	}
+	return unionv
+}
+
+// makeStructTypeObjectType returns the named struct type {T typeobject}.
+func makeStructTypeObjectType(name string) *vdl.Type {
+	return vdl.NamedType(name, vdl.StructType(vdl.Field{"T", vdl.TypeObjectType}))
+}
+
+// makeStructTypeObject builds a value of makeStructTypeObjectType(name) with
+// field T set to t.
+func makeStructTypeObject(name string, t *vdl.Type) *vdl.Value {
+	structv := vdl.ZeroValue(makeStructTypeObjectType(name))
+	structv.StructField(0).AssignTypeObject(t)
+	return structv
+}
+
+// makeABStruct builds the expected value for struct a.B, whose Z field is a
+// list of two a.A structs: {(1,"a"), (2,"b")}.
+func makeABStruct() *vdl.Value {
+	tA := vdl.NamedType("p.kg/a.A", vdl.StructType([]vdl.Field{
+		{"X", vdl.Int64Type}, {"Y", vdl.StringType},
+	}...))
+	tB := vdl.NamedType("p.kg/a.B", vdl.StructType(vdl.Field{"Z", vdl.ListType(tA)}))
+	res := vdl.ZeroValue(tB)
+	listv := res.StructField(0).AssignLen(2)
+	listv.Index(0).StructField(0).AssignInt(1)
+	listv.Index(0).StructField(1).AssignString("a")
+	listv.Index(1).StructField(0).AssignInt(2)
+	listv.Index(1).StructField(1).AssignString("b")
+	return res
+}
+
+// makeEnumXYZ builds a value of the named enum {X;Y;Z} set to label.
+func makeEnumXYZ(name, label string) *vdl.Value {
+	t := vdl.NamedType(name, vdl.EnumType("X", "Y", "Z"))
+	return vdl.ZeroValue(t).AssignEnumLabel(label)
+}
+
+// makeInnerEnum builds a value of struct a.B{A a.A}, where a.A is the enum
+// {X;Y;Z}, with the inner enum set to label.
+func makeInnerEnum(label string) *vdl.Value {
+	tA := vdl.NamedType("p.kg/a.A", vdl.EnumType("X", "Y", "Z"))
+	tB := vdl.NamedType("p.kg/a.B", vdl.StructType(vdl.Field{"A", tA}))
+	res := vdl.ZeroValue(tB)
+	res.StructField(0).AssignEnumLabel(label)
+	return res
+}
+
+// makeCyclicStructType returns the self-referential struct type
+// a.A{X string; Z ?a.A}, constructed via the TypeBuilder since the type
+// refers to itself.
+func makeCyclicStructType() *vdl.Type {
+	// type A struct {X string;Z ?A}
+	var builder vdl.TypeBuilder
+	a := builder.Struct().AppendField("X", vdl.StringType)
+	n := builder.Named("p.kg/a.A").AssignBase(a)
+	a.AppendField("Z", builder.Optional().AssignElem(n))
+	builder.Build()
+	ty, err := n.Built()
+	if err != nil {
+		panic(fmt.Errorf("Builder failed: %v", err))
+	}
+	return ty
+}
+
+// makeCyclicStruct builds a value of makeCyclicStructType with X set to x and
+// Z set to optional z (Z is left nil when z is nil).
+func makeCyclicStruct(x string, z *vdl.Value) *vdl.Value {
+	ty := makeCyclicStructType()
+	ret := vdl.ZeroValue(ty)
+	ret.StructField(0).AssignString(x)
+	if z != nil {
+		ret.StructField(1).Assign(vdl.OptionalValue(z))
+	}
+	return ret
+}
+
+// constPkg describes a single package in a const test case.
+type constPkg struct {
+	Name      string     // package name
+	Data      string     // VDL source, without the leading package clause
+	ExpectRes *vdl.Value // expected value of const Res; nil skips the check
+	ErrRE     string     // regexp matching the expected error; "" means success
+}
+
+// cp abbreviates a list of constPkg in the test tables below.
+type cp []constPkg
+
+var constTests = []struct {
+	Name string
+	Pkgs cp
+}{
+	// Test literals.
+	{
+		"UntypedBool",
+		cp{{"a", `const Res = true`, vdl.BoolValue(true), ""}}},
+	{
+		"UntypedString",
+		cp{{"a", `const Res = "abc"`, vdl.StringValue("abc"), ""}}},
+	{
+		"UntypedInteger",
+		cp{{"a", `const Res = 123`, nil,
+			`invalid const \(123 must be assigned a type\)`}}},
+	{
+		"UntypedFloat",
+		cp{{"a", `const Res = 1.5`, nil,
+			`invalid const \(1\.5 must be assigned a type\)`}}},
+	{
+		"UntypedComplex",
+		cp{{"a", `const Res = 3.4+9.8i`, nil,
+			`invalid const \(3\.4\+9\.8i must be assigned a type\)`}}},
+
+	// Test list literals.
+	{
+		"IntList",
+		cp{{"a", `const Res = []int64{0,1,2}`, makeIntList(0, 1, 2), ""}}},
+	{
+		"IntListKeys",
+		cp{{"a", `const Res = []int64{1:1, 2:2, 0:0}`, makeIntList(0, 1, 2), ""}}},
+	{
+		"IntListMixedKey",
+		cp{{"a", `const Res = []int64{1:1, 2, 0:0}`, makeIntList(0, 1, 2), ""}}},
+	{
+		"IntListDupKey",
+		cp{{"a", `const Res = []int64{2:2, 1:1, 0}`, nil, "duplicate index 2"}}},
+	{
+		"IntListInvalidIndex",
+		cp{{"a", `const Res = []int64{"a":2, 1:1, 2:2}`, nil, `can't convert "a" to uint64`}}},
+	{
+		"IntListInvalidValue",
+		cp{{"a", `const Res = []int64{0,1,"c"}`, nil, "invalid list value"}}},
+	{
+		"IndexingNamedList",
+		cp{{"a", `const A = []int64{3,4,2}; const Res=A[1]`, vdl.Int64Value(4), ""}}},
+	{
+		"IndexingUnnamedList",
+		cp{{"a", `const Res = []int64{3,4,2}[1]`, nil, "cannot apply index operator to unnamed constant"}}},
+	{
+		"TypedListIndexing",
+		cp{{"a", `const A = []int64{3,4,2};  const Res = A[int16(1)]`, vdl.Int64Value(4), ""}}},
+	{
+		"NegativeListIndexing",
+		cp{{"a", `const A = []int64{3,4,2}; const Res = A[-1]`, nil, `\(const -1 overflows uint64\)`}}},
+	{
+		"OutOfRangeListIndexing",
+		cp{{"a", `const A = []int64{3,4,2}; const Res = A[10]`, nil, "index 10 out of range"}}},
+	{
+		"InvalidIndexType",
+		cp{{"a", `const A = []int64{3,4,2}; const Res = A["ok"]`, nil, "invalid list index"}}},
+	{
+		"InvalidIndexBaseType",
+		cp{{"a", `type A struct{}; const B = A{}; const Res = B["ok"]`, nil, "illegal use of index operator with unsupported type"}}},
+
+	// Test array literals.
+	{
+		"IntArray",
+		cp{{"a", `type T [3]int64; const Res = T{0,1,2}`, makeIntArray("p.kg/a.T", 0, 1, 2), ""}}},
+	{
+		"IntArrayShorterInit",
+		cp{{"a", `type T [3]int64; const Res = T{0,1}`, makeIntArray("p.kg/a.T", 0, 1, 0), ""}}},
+	{
+		"IntArrayLongerInit",
+		cp{{"a", `type T [3]int64; const Res = T{0,1,2,3}`, nil, "index 3 out of range"}}},
+	{
+		"IntArrayKeys",
+		cp{{"a", `type T [3]int64; const Res = T{1:1, 2:2, 0:0}`, makeIntArray("p.kg/a.T", 0, 1, 2), ""}}},
+	{
+		"IntArrayMixedKey",
+		cp{{"a", `type T [3]int64; const Res = T{1:1, 2, 0:0}`, makeIntArray("p.kg/a.T", 0, 1, 2), ""}}},
+	{
+		"IntArrayDupKey",
+		cp{{"a", `type T [3]int64; const Res = T{2:2, 1:1, 0}`, nil, "duplicate index 2"}}},
+	{
+		"IntArrayInvalidIndex",
+		cp{{"a", `type T [3]int64; const Res = T{"a":2, 1:1, 2:2}`, nil, `can't convert "a" to uint64`}}},
+	{
+		"IntArrayInvalidValue",
+		cp{{"a", `type T [3]int64; const Res = T{0,1,"c"}`, nil, "invalid array value"}}},
+	{
+		"IndexingNamedList",
+		cp{{"a", `type T [3]int64; const A = T{3,4,2}; const Res=A[1]`, vdl.Int64Value(4), ""}}},
+	{
+		"IndexingUnnamedArray",
+		cp{{"a", `type T [3]int64; const Res = T{3,4,2}[1]`, nil, "cannot apply index operator to unnamed constant"}}},
+	{
+		"TypedArrayIndexing",
+		cp{{"a", `type T [3]int64; const A = T{3,4,2};  const Res = A[int16(1)]`, vdl.Int64Value(4), ""}}},
+	{
+		"NegativeArrayIndexing",
+		cp{{"a", `type T [3]int64; const A = T{3,4,2}; const Res = A[-1]`, nil, `\(const -1 overflows uint64\)`}}},
+	{
+		"OutOfRangeArrayIndexing",
+		cp{{"a", `type T [3]int64; const A = T{3,4,2}; const Res = A[10]`, nil, "index 10 out of range"}}},
+	{
+		"InvalidIndexType",
+		cp{{"a", `type T [3]int64; const A = T{3,4,2}; const Res = A["ok"]`, nil, "invalid array index"}}},
+
+	// Test byte list literals.
+	{
+		"ByteList",
+		cp{{"a", `const Res = []byte{0,1,2}`, makeByteList(0, 1, 2), ""}}},
+
+	// Test byte array literals.
+	{
+		"ByteArray",
+		cp{{"a", `type T [3]byte; const Res = T{0,1,2}`, makeByteArray("p.kg/a.T", 0, 1, 2), ""}}},
+	{
+		"ByteArrayShorterInit",
+		cp{{"a", `type T [3]byte; const Res = T{0,1}`, makeByteArray("p.kg/a.T", 0, 1, 0), ""}}},
+	{
+		"ByteArrayLongerInit",
+		cp{{"a", `type T [3]byte; const Res = T{0,1,2,3}`, nil, "index 3 out of range"}}},
+
+	// Test set literals.
+	{
+		"StringSet",
+		cp{{"a", `const Res = set[string]{"a","b","c"}`, makeStringSet("a", "b", "c"), ""}}},
+	{
+		"StringSetInvalidIndex",
+		cp{{"a", `const Res = set[string]{"a","b","c":3}`, nil, "invalid index"}}},
+	{
+		"StringSetDupKey",
+		cp{{"a", `const Res = set[string]{"a","b","b"}`, nil, "duplicate key"}}},
+	{
+		"StringSetInvalidKey",
+		cp{{"a", `const Res = set[string]{"a","b",3}`, nil, "invalid set key"}}},
+
+	// Test map literals.
+	{
+		"StringIntMap",
+		cp{{"a", `const Res = map[string]int64{"a":1, "b":2, "c":3}`, makeStringIntMap(map[string]int64{"a": 1, "b": 2, "c": 3}), ""}}},
+	{
+		"StringIntMapNoKey",
+		cp{{"a", `const Res = map[string]int64{"a":1, "b":2, 3}`, nil, "missing key"}}},
+	{
+		"StringIntMapDupKey",
+		cp{{"a", `const Res = map[string]int64{"a":1, "b":2, "a":3}`, nil, "duplicate key"}}},
+	{
+		"StringIntMapInvalidKey",
+		cp{{"a", `const Res = map[string]int64{"a":1, "b":2, 3:3}`, nil, "invalid map key"}}},
+	{
+		"StringIntMapInvalidValue",
+		cp{{"a", `const Res = map[string]int64{"a":1, "b":2, "c":"c"}`, nil, "invalid map value"}}},
+	{
+		"MapIndexing",
+		cp{{"a", `const A = map[int64]int64{1:4}; const Res=A[1]`, vdl.Int64Value(4), ""}}},
+	{
+		"MapUnnamedIndexing",
+		cp{{"a", `const Res = map[int64]int64{1:4}[1]`, nil, "cannot apply index operator to unnamed constant"}}},
+	{
+		"MapTypedIndexing",
+		cp{{"a", `const A = map[int64]int64{1:4}; const Res = A[int64(1)]`, vdl.Int64Value(4), ""}}},
+	{
+		"MapIncorrectlyTypedIndexing",
+		cp{{"a", `const A = map[int64]int64{1:4};const Res = A[int16(1)]`, nil, `invalid map key \(int64 not assignable from int16\(1\)\)`}}},
+	{
+		"MapIndexingMissingValue",
+		cp{{"a", `const A = map[int64]int64{1:4}; const Res = A[0]`, nil, `map key int64\(0\) not found in map`}}},
+
+	// Test struct literals.
+	{
+		"StructNoKeys",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{1,"b",true}`, makeStruct("p.kg/a.A", 1, "b", true), ""}}},
+	{
+		"StructKeys",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{X:1,Y:"b",Z:true}`, makeStruct("p.kg/a.A", 1, "b", true), ""}}},
+	{
+		"StructKeysShort",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{Y:"b"}`, makeStruct("p.kg/a.A", 0, "b", false), ""}}},
+	{
+		"StructMixedKeys",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{X:1,"b",Z:true}`, nil, "mixed key:value and value"}}},
+	{
+		"StructInvalidFieldName",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{1+1:1}`, nil, `invalid field name`}}},
+	{
+		"StructUnknownFieldName",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{ZZZ:1}`, nil, `unknown field "ZZZ"`}}},
+	{
+		"StructDupFieldName",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{X:1,X:2}`, nil, `duplicate field "X"`}}},
+	{
+		"StructTooManyFields",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{1,"b",true,4}`, nil, `too many fields`}}},
+	{
+		"StructTooFewFields",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{1,"b"}`, nil, `too few fields`}}},
+	{
+		"StructInvalidField",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = A{Y:1}`, nil, "invalid struct field"}}},
+	{
+		"ImplicitSubTypes",
+		cp{{"a", `type A struct{X int64;Y string}; type B struct{Z []A}; const Res = B{{{1, "a"}, A{X:2,Y:"b"}}}`, makeABStruct(), ""}}},
+	{
+		"StructSelector",
+		cp{{"a", `type A struct{X int64;Y string}; const x = A{2,"b"}; const Res = x.Y`, vdl.StringValue("b"), ""}}},
+	{
+		"StructMultipleSelector",
+		cp{{"a", `type A struct{X int64;Y B}; type B struct{Z bool}; const x = A{2,B{true}}; const Res = x.Y.Z`, vdl.BoolValue(true), ""}}},
+
+	{
+		"InvalidStructSelectorName",
+		cp{{"a", `type A struct{X int64;Y string}; const x = A{2,"b"}; const Res = x.Z`, nil, "invalid field name"}}},
+	{
+		"StructSelectorOnNonStructType",
+		cp{{"a", `type A []int32; const x = A{2}; const Res = x.Z`, nil, "invalid selector on const of kind: list"}}},
+	{
+		"SelectorOnUnnamedStruct",
+		cp{{"a", `type A struct{X int64;Y string}; const Res = A{2,"b"}.Y`, nil, "cannot apply selector operator to unnamed constant"}}},
+
+	// Test union literals.
+	{
+		"UnionX",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{X: 123}`, makeUnion("p.kg/a.A", int64(123)), ""}}},
+	{
+		"UnionY",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{Y: "abc"}`, makeUnion("p.kg/a.A", "abc"), ""}}},
+	{
+		"UnionZ",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{Z: true}`, makeUnion("p.kg/a.A", true), ""}}},
+	{
+		"UnionInvalidFieldName",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{1+1: true}`, nil, `invalid field name`}}},
+	{
+		"UnionUnknownFieldName",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{ZZZ: true}`, nil, `unknown field "ZZZ"`}}},
+	{
+		"UnionTooManyFields",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{X: 123, Y: "abc"}`, nil, `must have exactly one entry`}}},
+	{
+		"UnionTooFewFields",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{}`, nil, `must have exactly one entry`}}},
+	{
+		"UnionInvalidField",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{Y: 1}`, nil, `invalid union field`}}},
+	{
+		"UnionNoValue",
+		cp{{"a", `type A union{X int64;Y string;Z bool}; const Res = A{Y}`, nil, `must have explicit key and value`}}},
+
+	// Test optional and nil.
+	{
+		"OptionalNil",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = ?A(nil)`, vdl.ZeroValue(vdl.OptionalType(makeStructType("p.kg/a.A"))), ""}}},
+	{
+		"Optional",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = ?A{1,"b",true}`, vdl.OptionalValue(makeStruct("p.kg/a.A", 1, "b", true)), ""}}},
+	{
+		"OptionalCyclicNil",
+		cp{{"a", `type A struct{X string;Z ?A}; const Res = A{"a",nil}`, makeCyclicStruct("a", nil), ""}}},
+	{
+		"OptionalCyclic",
+		cp{{"a", `type A struct{X string;Z ?A}; const Res = A{"a",{"b",{"c",nil}}}`, makeCyclicStruct("a", makeCyclicStruct("b", makeCyclicStruct("c", nil))), ""}}},
+	{
+		"OptionalCyclicExplicitType",
+		cp{{"a", `type A struct{X string;Z ?A}; const Res = A{"a",?A{"b",?A{"c",nil}}}`, makeCyclicStruct("a", makeCyclicStruct("b", makeCyclicStruct("c", nil))), ""}}},
+	{
+		"OptionalCyclicTypeMismatch",
+		cp{{"a", `type A struct{X string;Z ?A}; const Res = A{"a","b"}`, nil, `can't convert "b" to \?p.kg/a.A`}}},
+	{
+		"OptionalCyclicExplicitTypeMismatch",
+		cp{{"a", `type A struct{X string;Z ?A}; const Res = A{"a",A{}}`, nil, `not assignable from p.kg/a.A`}}},
+
+	// Test enums.
+	{
+		"Enum",
+		cp{{"a", `type A enum{X;Y;Z}; const Res = A.X`, makeEnumXYZ("p.kg/a.A", "X"), ""}}},
+	{
+		"EnumNoLabel",
+		cp{{"a", `type A enum{X;Y;Z}; const Res = A`, nil, "A is a type"}}},
+	{
+		"InnerEnumExplicit",
+		cp{{"a", `type A enum{X;Y;Z}; type B struct{A A}; const Res = B{A: A.Y}`, makeInnerEnum("Y"), ""}}},
+	{
+		"InnerEnumImplicit",
+		cp{{"a", `type A enum{X;Y;Z}; type B struct{A A}; const Res = B{A: Z}`, makeInnerEnum("Z"), ""}}},
+
+	// Test explicit primitive type conversions.
+	{
+		"TypedBool",
+		cp{{"a", `const Res = bool(false)`, vdl.BoolValue(false), ""}}},
+	{
+		"TypedString",
+		cp{{"a", `const Res = string("abc")`, vdl.StringValue("abc"), ""}}},
+	{
+		"TypedInt32",
+		cp{{"a", `const Res = int32(123)`, vdl.Int32Value(123), ""}}},
+	{
+		"TypedFloat32",
+		cp{{"a", `const Res = float32(1.5)`, vdl.Float32Value(1.5), ""}}},
+	{
+		"TypedComplex64",
+		cp{{"a", `const Res = complex64(2+1.5i)`, vdl.Complex64Value(2 + 1.5i), ""}}},
+	{
+		"TypedBoolMismatch",
+		cp{{"a", `const Res = bool(1)`, nil,
+			"can't convert 1 to bool"}}},
+	{
+		"TypedStringMismatch",
+		cp{{"a", `const Res = string(1)`, nil,
+			"can't convert 1 to string"}}},
+	{
+		"TypedInt32Mismatch",
+		cp{{"a", `const Res = int32(true)`, nil,
+			`can't convert true to int32`}}},
+	{
+		"TypedFloat32Mismatch",
+		cp{{"a", `const Res = float32(true)`, nil,
+			`can't convert true to float32`}}},
+
+	// Test explicit user type conversions.
+	{
+		"TypedUserBool",
+		cp{{"a", `type TypedBool bool;const Res = TypedBool(true)`, namedZero("p.kg/a.TypedBool", vdl.BoolType).AssignBool(true), ""}}},
+	{
+		"TypedUserString",
+		cp{{"a", `type TypedStr string;const Res = TypedStr("abc")`, namedZero("p.kg/a.TypedStr", vdl.StringType).AssignString("abc"), ""}}},
+	{
+		"TypedUserInt32",
+		cp{{"a", `type TypedInt int32;const Res = TypedInt(123)`, namedZero("p.kg/a.TypedInt", vdl.Int32Type).AssignInt(123), ""}}},
+	{
+		"TypedUserFloat32",
+		cp{{"a", `type TypedFlt float32;const Res = TypedFlt(1.5)`, namedZero("p.kg/a.TypedFlt", vdl.Float32Type).AssignFloat(1.5), ""}}},
+	{
+		"TypedUserComplex64",
+		cp{{"a", `type TypedCpx complex64;const Res = TypedCpx(1.5+2i)`, namedZero("p.kg/a.TypedCpx", vdl.Complex64Type).AssignComplex(1.5 + 2i), ""}}},
+	{
+		"TypedUserBoolMismatch",
+		cp{{"a", `type TypedBool bool;const Res = TypedBool(1)`, nil,
+			`invalid type conversion \(can't convert 1 to p.kg/a.TypedBool bool\)`}}},
+	{
+		"TypedUserStringMismatch",
+		cp{{"a", `type TypedStr string;const Res = TypedStr(1)`, nil,
+			`invalid type conversion \(can't convert 1 to p.kg/a.TypedStr string\)`}}},
+	{
+		"TypedUserInt32Mismatch",
+		cp{{"a", `type TypedInt int32;const Res = TypedInt(true)`, nil,
+			`can't convert true to p.kg/a.TypedInt int32`}}},
+	{
+		"TypedUserFloat32Mismatch",
+		cp{{"a", `type TypedFlt float32;const Res = TypedFlt(true)`, nil,
+			`can't convert true to p.kg/a.TypedFlt float32`}}},
+
+	// Test typeobject consts.
+	{
+		"TypeObjectBool",
+		cp{{"a", `const Res = typeobject(bool)`, vdl.TypeObjectValue(vdl.BoolType), ""}}},
+	{
+		"TypeObjectString",
+		cp{{"a", `const Res = typeobject(string)`, vdl.TypeObjectValue(vdl.StringType), ""}}},
+	{
+		"TypeObjectInt32",
+		cp{{"a", `const Res = typeobject(int32)`, vdl.TypeObjectValue(vdl.Int32Type), ""}}},
+	{
+		"TypeObjectFloat32",
+		cp{{"a", `const Res = typeobject(float32)`, vdl.TypeObjectValue(vdl.Float32Type), ""}}},
+	{
+		"TypeObjectComplex64",
+		cp{{"a", `const Res = typeobject(complex64)`, vdl.TypeObjectValue(vdl.Complex64Type), ""}}},
+	{
+		"TypeObjectTypeObject",
+		cp{{"a", `const Res = typeobject(typeobject)`, vdl.TypeObjectValue(vdl.TypeObjectType), ""}}},
+	{
+		"TypeObjectList",
+		cp{{"a", `const Res = typeobject([]string)`, vdl.TypeObjectValue(vdl.ListType(vdl.StringType)), ""}}},
+	{
+		"TypeObjectArray",
+		cp{{"a", `type T [3]int64; const Res = typeobject(T)`, vdl.TypeObjectValue(vdl.NamedType("p.kg/a.T", vdl.ArrayType(3, vdl.Int64Type))), ""}}},
+	{
+		"TypeObjectSet",
+		cp{{"a", `const Res = typeobject(set[string])`, vdl.TypeObjectValue(vdl.SetType(vdl.StringType)), ""}}},
+	{
+		"TypeObjectMap",
+		cp{{"a", `const Res = typeobject(map[string]int32)`, vdl.TypeObjectValue(vdl.MapType(vdl.StringType, vdl.Int32Type)), ""}}},
+	{
+		"TypeObjectStruct",
+		cp{{"a", `type A struct{X int64;Y string;Z bool}; const Res = typeobject(A)`, vdl.TypeObjectValue(makeStructType("p.kg/a.A")), ""}}},
+	{
+		"TypeObjectStructField",
+		cp{{"a", `type A struct{T typeobject}; const Res = A{typeobject(bool)}`, makeStructTypeObject("p.kg/a.A", vdl.BoolType), ""}}},
+	{
+		"TypeObjectEnum",
+		cp{{"a", `type A enum{X;Y;Z}; const Res = typeobject(A)`, vdl.TypeObjectValue(vdl.NamedType("p.kg/a.A", vdl.EnumType("X", "Y", "Z"))), ""}}},
+
+	// Test named consts.
+	{
+		"NamedBool",
+		cp{{"a", `const foo = true;const Res = foo`, vdl.BoolValue(true), ""}}},
+	{
+		"NamedString",
+		cp{{"a", `const foo = "abc";const Res = foo`, vdl.StringValue("abc"), ""}}},
+	{
+		"NamedInt32",
+		cp{{"a", `const foo = int32(123);const Res = foo`, vdl.Int32Value(123), ""}}},
+	{
+		"NamedFloat32",
+		cp{{"a", `const foo = float32(1.5);const Res = foo`, vdl.Float32Value(1.5), ""}}},
+	{
+		"NamedComplex64",
+		cp{{"a", `const foo = complex64(3+2i);const Res = foo`, vdl.Complex64Value(3 + 2i), ""}}},
+	{
+		"NamedUserBool",
+		cp{{"a", `type TypedBool bool;const foo = TypedBool(true);const Res = foo`,
+			namedZero("p.kg/a.TypedBool", vdl.BoolType).AssignBool(true), ""}}},
+	{
+		"NamedUserString",
+		cp{{"a", `type TypedStr string;const foo = TypedStr("abc");const Res = foo`,
+			namedZero("p.kg/a.TypedStr", vdl.StringType).AssignString("abc"), ""}}},
+	{
+		"NamedUserInt32",
+		cp{{"a", `type TypedInt int32;const foo = TypedInt(123);const Res = foo`,
+			namedZero("p.kg/a.TypedInt", vdl.Int32Type).AssignInt(123), ""}}},
+	{
+		"NamedUserFloat32",
+		cp{{"a", `type TypedFlt float32;const foo = TypedFlt(1.5);const Res = foo`,
+			namedZero("p.kg/a.TypedFlt", vdl.Float32Type).AssignFloat(1.5), ""}}},
+	{
+		"ConstNamedI",
+		cp{{"a", `const I = true;const Res = I`, vdl.BoolValue(true), ""}}},
+
+	// Test unary ops.
+	{
+		"Not",
+		cp{{"a", `const Res = !true`, vdl.BoolValue(false), ""}}},
+	{
+		"Pos",
+		cp{{"a", `const Res = int32(+123)`, vdl.Int32Value(123), ""}}},
+	{
+		"Neg",
+		cp{{"a", `const Res = int32(-123)`, vdl.Int32Value(-123), ""}}},
+	{
+		"Complement",
+		cp{{"a", `const Res = int32(^1)`, vdl.Int32Value(-2), ""}}},
+	{
+		"TypedNot",
+		cp{{"a", `type TypedBool bool;const Res = !TypedBool(true)`, namedZero("p.kg/a.TypedBool", vdl.BoolType), ""}}},
+	{
+		"TypedPos",
+		cp{{"a", `type TypedInt int32;const Res = TypedInt(+123)`, namedZero("p.kg/a.TypedInt", vdl.Int32Type).AssignInt(123), ""}}},
+	{
+		"TypedNeg",
+		cp{{"a", `type TypedInt int32;const Res = TypedInt(-123)`, namedZero("p.kg/a.TypedInt", vdl.Int32Type).AssignInt(-123), ""}}},
+	{
+		"TypedComplement",
+		cp{{"a", `type TypedInt int32;const Res = TypedInt(^1)`, namedZero("p.kg/a.TypedInt", vdl.Int32Type).AssignInt(-2), ""}}},
+	{
+		"NamedNot",
+		cp{{"a", `const foo = bool(true);const Res = !foo`, vdl.BoolValue(false), ""}}},
+	{
+		"NamedPos",
+		cp{{"a", `const foo = int32(123);const Res = +foo`, vdl.Int32Value(123), ""}}},
+	{
+		"NamedNeg",
+		cp{{"a", `const foo = int32(123);const Res = -foo`, vdl.Int32Value(-123), ""}}},
+	{
+		"NamedComplement",
+		cp{{"a", `const foo = int32(1);const Res = ^foo`, vdl.Int32Value(-2), ""}}},
+	{
+		"ErrNot",
+		cp{{"a", `const Res = !1`, nil, `unary \! invalid \(untyped integer not supported\)`}}},
+	{
+		"ErrPos",
+		cp{{"a", `const Res = +"abc"`, nil, `unary \+ invalid \(untyped string not supported\)`}}},
+	{
+		"ErrNeg",
+		cp{{"a", `const Res = -false`, nil, `unary \- invalid \(untyped boolean not supported\)`}}},
+	{
+		"ErrComplement",
+		cp{{"a", `const Res = ^1.5`, nil, `unary \^ invalid \(converting untyped rational 1.5 to integer loses precision\)`}}},
+
+	// Test logical and comparison ops.
+	{
+		"Or",
+		cp{{"a", `const Res = true || false`, vdl.BoolValue(true), ""}}},
+	{
+		"And",
+		cp{{"a", `const Res = true && false`, vdl.BoolValue(false), ""}}},
+	{
+		"Lt11",
+		cp{{"a", `const Res = 1 < 1`, vdl.BoolValue(false), ""}}},
+	{
+		"Lt12",
+		cp{{"a", `const Res = 1 < 2`, vdl.BoolValue(true), ""}}},
+	{
+		"Lt21",
+		cp{{"a", `const Res = 2 < 1`, vdl.BoolValue(false), ""}}},
+	{
+		"Gt11",
+		cp{{"a", `const Res = 1 > 1`, vdl.BoolValue(false), ""}}},
+	{
+		"Gt12",
+		cp{{"a", `const Res = 1 > 2`, vdl.BoolValue(false), ""}}},
+	{
+		"Gt21",
+		cp{{"a", `const Res = 2 > 1`, vdl.BoolValue(true), ""}}},
+	{
+		"Le11",
+		cp{{"a", `const Res = 1 <= 1`, vdl.BoolValue(true), ""}}},
+	{
+		"Le12",
+		cp{{"a", `const Res = 1 <= 2`, vdl.BoolValue(true), ""}}},
+	{
+		"Le21",
+		cp{{"a", `const Res = 2 <= 1`, vdl.BoolValue(false), ""}}},
+	{
+		"Ge11",
+		cp{{"a", `const Res = 1 >= 1`, vdl.BoolValue(true), ""}}},
+	{
+		"Ge12",
+		cp{{"a", `const Res = 1 >= 2`, vdl.BoolValue(false), ""}}},
+	{
+		"Ge21",
+		cp{{"a", `const Res = 2 >= 1`, vdl.BoolValue(true), ""}}},
+	{
+		"Ne11",
+		cp{{"a", `const Res = 1 != 1`, vdl.BoolValue(false), ""}}},
+	{
+		"Ne12",
+		cp{{"a", `const Res = 1 != 2`, vdl.BoolValue(true), ""}}},
+	{
+		"Ne21",
+		cp{{"a", `const Res = 2 != 1`, vdl.BoolValue(true), ""}}},
+	{
+		"Eq11",
+		cp{{"a", `const Res = 1 == 1`, vdl.BoolValue(true), ""}}},
+	{
+		"Eq12",
+		cp{{"a", `const Res = 1 == 2`, vdl.BoolValue(false), ""}}},
+	{
+		"Eq21",
+		cp{{"a", `const Res = 2 == 1`, vdl.BoolValue(false), ""}}},
+
+	// Test arithmetic ops.
+	{
+		"IntPlus",
+		cp{{"a", `const Res = int32(1) + 1`, vdl.Int32Value(2), ""}}},
+	{
+		"IntMinus",
+		cp{{"a", `const Res = int32(2) - 1`, vdl.Int32Value(1), ""}}},
+	{
+		"IntTimes",
+		cp{{"a", `const Res = int32(3) * 2`, vdl.Int32Value(6), ""}}},
+	{
+		"IntDivide",
+		cp{{"a", `const Res = int32(5) / 2`, vdl.Int32Value(2), ""}}},
+	{
+		"FloatPlus",
+		cp{{"a", `const Res = float32(1) + 1`, vdl.Float32Value(2), ""}}},
+	{
+		"FloatMinus",
+		cp{{"a", `const Res = float32(2) - 1`, vdl.Float32Value(1), ""}}},
+	{
+		"FloatTimes",
+		cp{{"a", `const Res = float32(3) * 2`, vdl.Float32Value(6), ""}}},
+	{
+		"FloatDivide",
+		cp{{"a", `const Res = float32(5) / 2`, vdl.Float32Value(2.5), ""}}},
+	{
+		"ComplexPlus",
+		cp{{"a", `const Res = 3i + complex64(1+2i) + 1`, vdl.Complex64Value(2 + 5i), ""}}},
+	{
+		"ComplexMinus",
+		cp{{"a", `const Res = complex64(1+2i) -4 -1i`, vdl.Complex64Value(-3 + 1i), ""}}},
+	{
+		"ComplexTimes",
+		cp{{"a", `const Res = complex64(1+3i) * (5+1i)`, vdl.Complex64Value(2 + 16i), ""}}},
+	{
+		"ComplexDivide",
+		cp{{"a", `const Res = complex64(2+16i) / (5+1i)`, vdl.Complex64Value(1 + 3i), ""}}},
+
+	// Test integer arithmetic ops.
+	{
+		"Mod",
+		cp{{"a", `const Res = int32(8) % 3`, vdl.Int32Value(2), ""}}},
+	{
+		"BitOr",
+		cp{{"a", `const Res = int32(8) | 7`, vdl.Int32Value(15), ""}}},
+	{
+		"BitAnd",
+		cp{{"a", `const Res = int32(8) & 15`, vdl.Int32Value(8), ""}}},
+	{
+		"BitXor",
+		cp{{"a", `const Res = int32(8) ^ 5`, vdl.Int32Value(13), ""}}},
+	{
+		"UntypedFloatMod",
+		cp{{"a", `const Res = int32(8.0 % 3.0)`, vdl.Int32Value(2), ""}}},
+	{
+		"UntypedFloatBitOr",
+		cp{{"a", `const Res = int32(8.0 | 7.0)`, vdl.Int32Value(15), ""}}},
+	{
+		"UntypedFloatBitAnd",
+		cp{{"a", `const Res = int32(8.0 & 15.0)`, vdl.Int32Value(8), ""}}},
+	{
+		"UntypedFloatBitXor",
+		cp{{"a", `const Res = int32(8.0 ^ 5.0)`, vdl.Int32Value(13), ""}}},
+	{
+		"TypedFloatMod",
+		cp{{"a", `const Res = int32(float32(8.0) % 3.0)`, nil,
+			`binary % invalid \(can't convert typed float32 to integer\)`}}},
+	{
+		"TypedFloatBitOr",
+		cp{{"a", `const Res = int32(float32(8.0) | 7.0)`, nil,
+			`binary | invalid \(can't convert typed float32 to integer\)`}}},
+	{
+		"TypedFloatBitAnd",
+		cp{{"a", `const Res = int32(float32(8.0) & 15.0)`, nil,
+			`binary & invalid \(can't convert typed float32 to integer\)`}}},
+	{
+		"TypedFloatBitXor",
+		cp{{"a", `const Res = int32(float32(8.0) ^ 5.0)`, nil,
+			`binary \^ invalid \(can't convert typed float32 to integer\)`}}},
+
+	// Test shift ops.
+	{
+		"Lsh",
+		cp{{"a", `const Res = int32(8) << 2`, vdl.Int32Value(32), ""}}},
+	{
+		"Rsh",
+		cp{{"a", `const Res = int32(8) >> 2`, vdl.Int32Value(2), ""}}},
+	{
+		"UntypedFloatLsh",
+		cp{{"a", `const Res = int32(8.0 << 2.0)`, vdl.Int32Value(32), ""}}},
+	{
+		"UntypedFloatRsh",
+		cp{{"a", `const Res = int32(8.0 >> 2.0)`, vdl.Int32Value(2), ""}}},
+
+	// Test mixed ops.
+	{
+		"Mixed",
+		cp{{"a", `const F = "f";const Res = "f" == F && (1+2) == 3`, vdl.BoolValue(true), ""}}},
+	{
+		"MixedPrecedence",
+		cp{{"a", `const Res = int32(1+2*3-4)`, vdl.Int32Value(3), ""}}},
+
+	// Test uint conversion.
+	{
+		"MaxUint32",
+		cp{{"a", `const Res = uint32(4294967295)`, vdl.Uint32Value(4294967295), ""}}},
+	{
+		"MaxUint64",
+		cp{{"a", `const Res = uint64(18446744073709551615)`,
+			vdl.Uint64Value(18446744073709551615), ""}}},
+	{
+		"OverflowUint32",
+		cp{{"a", `const Res = uint32(4294967296)`, nil,
+			"const 4294967296 overflows uint32"}}},
+	{
+		"OverflowUint64",
+		cp{{"a", `const Res = uint64(18446744073709551616)`, nil,
+			"const 18446744073709551616 overflows uint64"}}},
+	{
+		"NegUint32",
+		cp{{"a", `const Res = uint32(-3)`, nil,
+			"const -3 overflows uint32"}}},
+	{
+		"NegUint64",
+		cp{{"a", `const Res = uint64(-4)`, nil,
+			"const -4 overflows uint64"}}},
+	{
+		"ZeroUint32",
+		cp{{"a", `const Res = uint32(0)`, vdl.Uint32Value(0), ""}}},
+
+	// Test int conversion.
+	{
+		"MinInt32",
+		cp{{"a", `const Res = int32(-2147483648)`, vdl.Int32Value(-2147483648), ""}}},
+	{
+		"MinInt64",
+		cp{{"a", `const Res = int64(-9223372036854775808)`,
+			vdl.Int64Value(-9223372036854775808), ""}}},
+	{
+		"MinOverflowInt32",
+		cp{{"a", `const Res = int32(-2147483649)`, nil,
+			"const -2147483649 overflows int32"}}},
+	{
+		"MinOverflowInt64",
+		cp{{"a", `const Res = int64(-9223372036854775809)`, nil,
+			"const -9223372036854775809 overflows int64"}}},
+	{
+		"MaxInt32",
+		cp{{"a", `const Res = int32(2147483647)`,
+			vdl.Int32Value(2147483647), ""}}},
+	{
+		"MaxInt64",
+		cp{{"a", `const Res = int64(9223372036854775807)`,
+			vdl.Int64Value(9223372036854775807), ""}}},
+	{
+		"MaxOverflowInt32",
+		cp{{"a", `const Res = int32(2147483648)`, nil,
+			"const 2147483648 overflows int32"}}},
+	{
+		"MaxOverflowInt64",
+		cp{{"a", `const Res = int64(9223372036854775808)`, nil,
+			"const 9223372036854775808 overflows int64"}}},
+	{
+		"ZeroInt32",
+		cp{{"a", `const Res = int32(0)`, vdl.Int32Value(0), ""}}},
+
+	// Test float conversion.
+	{
+		"SmallestFloat32",
+		cp{{"a", `const Res = float32(1.401298464324817070923729583289916131281e-45)`,
+			vdl.Float32Value(1.401298464324817070923729583289916131281e-45), ""}}},
+	{
+		"SmallestFloat64",
+		cp{{"a", `const Res = float64(4.940656458412465441765687928682213723651e-324)`,
+			vdl.Float64Value(4.940656458412465441765687928682213723651e-324), ""}}},
+	{
+		"MaxFloat32",
+		cp{{"a", `const Res = float32(3.40282346638528859811704183484516925440e+38)`,
+			vdl.Float32Value(3.40282346638528859811704183484516925440e+38), ""}}},
+	{
+		"MaxFloat64",
+		cp{{"a", `const Res = float64(1.797693134862315708145274237317043567980e+308)`,
+			vdl.Float64Value(1.797693134862315708145274237317043567980e+308), ""}}},
+	{
+		"UnderflowFloat32",
+		cp{{"a", `const Res = float32(1.401298464324817070923729583289916131280e-45)`,
+			nil, "underflows float32"}}},
+	{
+		"UnderflowFloat64",
+		cp{{"a", `const Res = float64(4.940656458412465441765687928682213723650e-324)`,
+			nil, "underflows float64"}}},
+	{
+		"OverflowFloat32",
+		cp{{"a", `const Res = float32(3.40282346638528859811704183484516925441e+38)`,
+			nil, "overflows float32"}}},
+	{
+		"OverflowFloat64",
+		cp{{"a", `const Res = float64(1.797693134862315708145274237317043567981e+308)`,
+			nil, "overflows float64"}}},
+	{
+		"ZeroFloat32",
+		cp{{"a", `const Res = float32(0)`, vdl.Float32Value(0), ""}}},
+
+	// Test complex conversion.
+	{
+		"RealComplexToFloat",
+		cp{{"a", `const Res = float64(1+0i)`, vdl.Float64Value(1), ""}}},
+	{
+		"RealComplexToInt",
+		cp{{"a", `const Res = int32(1+0i)`, vdl.Int32Value(1), ""}}},
+	{
+		"FloatToRealComplex",
+		cp{{"a", `const Res = complex64(1.5)`, vdl.Complex64Value(1.5), ""}}},
+	{
+		"IntToRealComplex",
+		cp{{"a", `const Res = complex64(2)`, vdl.Complex64Value(2), ""}}},
+
+	// Test float rounding - note that 1.1 incurs loss of precision.
+	{
+		"RoundedCompareFloat32",
+		cp{{"a", `const Res = float32(1.1) == 1.1`, vdl.BoolValue(true), ""}}},
+	{
+		"RoundedCompareFloat64",
+		cp{{"a", `const Res = float64(1.1) == 1.1`, vdl.BoolValue(true), ""}}},
+	{
+		"RoundedTruncation",
+		cp{{"a", `const Res = float64(float32(1.1)) != 1.1`, vdl.BoolValue(true), ""}}},
+
+	// Test multi-package consts
+	{"MultiPkgSameConstName", cp{
+		{"a", `const Res = true`, vdl.BoolValue(true), ""},
+		{"b", `const Res = true`, vdl.BoolValue(true), ""}}},
+	{"MultiPkgDep", cp{
+		{"a", `const Res = x;const x = true`, vdl.BoolValue(true), ""},
+		{"b", `import "p.kg/a";const Res = a.Res && false`, vdl.BoolValue(false), ""}}},
+	{"MultiPkgDepQualifiedPath", cp{
+		{"a", `const Res = x;const x = true`, vdl.BoolValue(true), ""},
+		{"b", `import "p.kg/a";const Res = "p.kg/a".Res && false`, vdl.BoolValue(false), ""}}},
+	{"MultiPkgUnexportedConst", cp{
+		{"a", `const Res = x;const x = true`, vdl.BoolValue(true), ""},
+		{"b", `import "p.kg/a";const Res = a.x && false`, nil, "a.x undefined"}}},
+	{"MultiPkgSamePkgName", cp{
+		{"a", `const Res = true`, vdl.BoolValue(true), ""},
+		{"a", `const Res = true`, nil, "invalid recompile"}}},
+	{"MultiPkgUnimportedPkg", cp{
+		{"a", `const Res = true`, vdl.BoolValue(true), ""},
+		{"b", `const Res = a.Res && false`, nil, "a.Res undefined"}}},
+	{"RedefinitionOfImportedName", cp{
+		{"a", `const Res = true`, vdl.BoolValue(true), ""},
+		{"b", `import "p.kg/a"; const a = "test"; const Res = a`, nil, "const a name conflict"}}},
+}
diff --git a/lib/vdl/compile/error.go b/lib/vdl/compile/error.go
new file mode 100644
index 0000000..29d2ca2
--- /dev/null
+++ b/lib/vdl/compile/error.go
@@ -0,0 +1,186 @@
+package compile
+
+import (
+	"fmt"
+	"regexp"
+	"strconv"
+
+	"v.io/v23/i18n"
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/parse"
+)
+
// ErrorDef represents a user-defined error definition in the compiled results.
// It is produced by compileErrorDefs from a parsed error definition, and
// carries the globally-unique ID, the client retry action, the positional
// parameters, and the per-language i18n format strings.
type ErrorDef struct {
	NamePos                     // name, parse position and docs
	Exported  bool              // is this error definition exported?
	ID        string            // error ID; "<package path>.<name>"
	RetryCode vdl.WireRetryCode // retry action to be performed by client
	Params    []*Field          // list of positional parameter names and types
	Formats   []LangFmt         // list of language / format pairs
	English   string            // English format text from Formats
}
+
// LangFmt represents a language / format string pair, associating one i18n
// format string with the language it is written in.
type LangFmt struct {
	Lang i18n.LangID // IETF language tag
	Fmt  string      // i18n format string in the given language.
}
+
+func (x *ErrorDef) String() string {
+	return fmt.Sprintf("%+v", *x)
+}
+
// compileErrorDefs fills in pkg with compiled error definitions.
//
// Each parsed error definition is validated (name, actions, params, formats)
// and, on success, appended to its file's ErrorDefs.  A problem with one
// error stops compilation of that error only; the remaining errors are still
// processed so that as many problems as possible are reported in one pass.
func compileErrorDefs(pkg *Package, pfiles []*parse.File, env *Env) {
	// pkg.Files and pfiles are parallel slices; walk them together.
	for index := range pkg.Files {
		file, pfile := pkg.Files[index], pfiles[index]
		for _, ped := range pfile.ErrorDefs {
			name, detail := ped.Name, identDetail("error", file, ped.Pos)
			// ValidIdent both validates the name and reports whether it is
			// exported.
			export, err := ValidIdent(name, ReservedNormal)
			if err != nil {
				env.prefixErrorf(file, ped.Pos, err, "error %s invalid name", name)
				continue
			}
			// Reserve the name in the file scope; a conflict means another
			// identifier was already declared with this name.
			if err := file.DeclareIdent(name, detail); err != nil {
				env.prefixErrorf(file, ped.Pos, err, "error %s name conflict", name)
				continue
			}
			// The error ID is "<package path>.<name>".
			id := pkg.Path + "." + name
			ed := &ErrorDef{NamePos: NamePos(ped.NamePos), Exported: export, ID: id}
			defineErrorActions(ed, name, ped.Actions, file, env)
			ed.Params = defineErrorParams(name, ped.Params, file, env)
			ed.Formats = defineErrorFormats(name, ped.Formats, ed.Params, file, env)
			// We require the "en" base language for at least one of the Formats, and
			// favor "en-US" if it exists.  This requirement is an attempt to ensure
			// there is at least one common language across all errors.
			for _, lf := range ed.Formats {
				if lf.Lang == i18n.LangID("en-US") {
					ed.English = lf.Fmt
					break
				}
				if ed.English == "" && i18n.BaseLangID(lf.Lang) == i18n.LangID("en") {
					ed.English = lf.Fmt
				}
			}
			if ed.English == "" {
				env.Errorf(file, ed.Pos, "error %s invalid (must define at least one English format)", name)
				continue
			}
			file.ErrorDefs = append(file.ErrorDefs, ed)
		}
	}
}
+
+func defineErrorActions(ed *ErrorDef, name string, pactions []parse.StringPos, file *File, env *Env) {
+	// We allow multiple actions to be specified in the parser, so that it's easy
+	// to add new actions in the future.
+	seenRetry := false
+	for _, pact := range pactions {
+		if retry, err := vdl.WireRetryCodeFromString(pact.String); err == nil {
+			if seenRetry {
+				env.Errorf(file, pact.Pos, "error %s action %s invalid (retry action specified multiple times)", name, pact.String)
+				continue
+			}
+			seenRetry = true
+			ed.RetryCode = retry
+			continue
+		}
+		env.Errorf(file, pact.Pos, "error %s action %s invalid (unknown action)", name, pact.String)
+	}
+}
+
+func defineErrorParams(name string, pparams []*parse.Field, file *File, env *Env) []*Field {
+	var params []*Field
+	seen := make(map[string]*parse.Field)
+	for _, pparam := range pparams {
+		pname, pos := pparam.Name, pparam.Pos
+		if pname == "" {
+			env.Errorf(file, pos, "error %s invalid (parameters must be named)", name)
+			return nil
+		}
+		if dup := seen[pname]; dup != nil {
+			env.Errorf(file, pos, "error %s param %s duplicate name (previous at %s)", name, pname, dup.Pos)
+			continue
+		}
+		seen[pname] = pparam
+		if _, err := ValidIdent(pname, ReservedCamelCase); err != nil {
+			env.prefixErrorf(file, pos, err, "error %s param %s invalid", name, pname)
+			continue
+		}
+		param := &Field{NamePos(pparam.NamePos), compileType(pparam.Type, file, env)}
+		params = append(params, param)
+	}
+	return params
+}
+
+func defineErrorFormats(name string, plfs []parse.LangFmt, params []*Field, file *File, env *Env) []LangFmt {
+	var lfs []LangFmt
+	seen := make(map[i18n.LangID]parse.LangFmt)
+	for _, plf := range plfs {
+		pos, lang, fmt := plf.Pos(), i18n.LangID(plf.Lang.String), plf.Fmt.String
+		if lang == "" {
+			env.Errorf(file, pos, "error %s has empty language identifier", name)
+			continue
+		}
+		if dup, ok := seen[lang]; ok {
+			env.Errorf(file, pos, "error %s duplicate language %s (previous at %s)", name, lang, dup.Pos())
+			continue
+		}
+		seen[lang] = plf
+		xfmt, err := xlateErrorFormat(fmt, params)
+		if err != nil {
+			env.prefixErrorf(file, pos, err, "error %s language %s format invalid", name, lang)
+			continue
+		}
+		lfs = append(lfs, LangFmt{lang, xfmt})
+	}
+	return lfs
+}
+
+// xlateErrorFormat translates the user-supplied format into the format
+// expected by i18n, mainly translating parameter names into numeric indexes.
+func xlateErrorFormat(format string, params []*Field) (string, error) {
+	const prefix = "{1:}{2:}"
+	if format == "" {
+		return prefix, nil
+	}
+	// Create a map from param name to index.  The index numbering starts at 3,
+	// since the first two params are the component and op name, and i18n formats
+	// use 1-based indices.
+	pmap := make(map[string]string)
+	for ix, param := range params {
+		pmap[param.Name] = strconv.Itoa(ix + 3)
+	}
+	tagRE, err := regexp.Compile(`\{\:?([0-9a-zA-Z_]+)\:?\}`)
+	if err != nil {
+		return "", err
+	}
+	result, pos := prefix+" ", 0
+	for _, match := range tagRE.FindAllStringSubmatchIndex(format, -1) {
+		// The tag submatch indices are available as match[2], match[3]
+		if len(match) != 4 || match[2] < pos || match[2] > match[3] {
+			return "", fmt.Errorf("internal error: bad regexp indices %v", match)
+		}
+		beg, end := match[2], match[3]
+		tag := format[beg:end]
+		if tag == "_" {
+			continue // Skip underscore tags.
+		}
+		if _, err := strconv.Atoi(tag); err == nil {
+			continue // Skip number tags.
+		}
+		xtag, ok := pmap[tag]
+		if !ok {
+			return "", fmt.Errorf("unknown param %q", tag)
+		}
+		// Replace tag with xtag in the result.
+		result += format[pos:beg]
+		result += xtag
+		pos = end
+	}
+	if end := len(format); pos < end {
+		result += format[pos:end]
+	}
+	return result, nil
+}
diff --git a/lib/vdl/compile/error_format_test.go b/lib/vdl/compile/error_format_test.go
new file mode 100644
index 0000000..28d4945
--- /dev/null
+++ b/lib/vdl/compile/error_format_test.go
@@ -0,0 +1,58 @@
+package compile
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+)
+
+// TestXlateErrorFormat checks the translation of user formats into i18n
+// formats.  Each case gives a raw format, the expected translation, and a
+// substring that must appear in the returned error (empty for success).
+func TestXlateErrorFormat(t *testing.T) {
+	const pre = "{1:}{2:}"
+	cases := []struct {
+		Format string
+		Want   string
+		Err    string
+	}{
+		{``, pre, ``},
+		{`abc`, pre + ` abc`, ``},
+
+		{`{_}{:_}{_:}{:_:}`, pre + ` {_}{:_}{_:}{:_:}`, ``},
+		{`{1}{:2}{3:}{:4:}`, pre + ` {1}{:2}{3:}{:4:}`, ``},
+		{`{a}{:b}{c:}{:d:}`, pre + ` {3}{:4}{5:}{:6:}`, ``},
+
+		{`A{_}B{:_}C{_:}D{:_:}E`, pre + ` A{_}B{:_}C{_:}D{:_:}E`, ``},
+		{`A{1}B{:2}C{3:}D{:4:}E`, pre + ` A{1}B{:2}C{3:}D{:4:}E`, ``},
+		{`A{a}B{:b}C{c:}D{:d:}E`, pre + ` A{3}B{:4}C{5:}D{:6:}E`, ``},
+
+		{
+			`{_}{1}{a}{:_}{:2}{:b}{_:}{3:}{c:}{:_:}{:4:}{:d:}`,
+			pre + ` {_}{1}{3}{:_}{:2}{:4}{_:}{3:}{5:}{:_:}{:4:}{:6:}`,
+			``,
+		},
+		{
+			`A{_}B{1}C{a}D{:_}E{:2}F{:b}G{_:}H{3:}I{c:}J{:_:}K{:4:}L{:d:}M`,
+			pre + ` A{_}B{1}C{3}D{:_}E{:2}F{:4}G{_:}H{3:}I{5:}J{:_:}K{:4:}L{:6:}M`,
+			``,
+		},
+
+		{`{ {a}{b}{c} }`, pre + ` { {3}{4}{5} }`, ``},
+		{`{x{a}{b}{c}y}`, pre + ` {x{3}{4}{5}y}`, ``},
+
+		{`{foo}`, ``, `unknown param "foo"`},
+	}
+	// Every case shares the same four named params, mapping to indices 3-6.
+	params := []*Field{
+		{NamePos: NamePos{Name: "a"}},
+		{NamePos: NamePos{Name: "b"}},
+		{NamePos: NamePos{Name: "c"}},
+		{NamePos: NamePos{Name: "d"}},
+	}
+	for _, tc := range cases {
+		got, err := xlateErrorFormat(tc.Format, params)
+		if errStr := fmt.Sprint(err); !strings.Contains(errStr, tc.Err) {
+			t.Errorf(`"%s" got error %q, want substr %q`, tc.Format, errStr, tc.Err)
+		}
+		if got != tc.Want {
+			t.Errorf(`"%s" got "%s", want "%s"`, tc.Format, got, tc.Want)
+		}
+	}
+}
diff --git a/lib/vdl/compile/error_test.go b/lib/vdl/compile/error_test.go
new file mode 100644
index 0000000..0ded0c2
--- /dev/null
+++ b/lib/vdl/compile/error_test.go
@@ -0,0 +1,301 @@
+package compile_test
+
+import (
+	"reflect"
+	"testing"
+
+	"v.io/v23/i18n"
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/parse"
+	"v.io/v23/vdl/vdltest"
+)
+
+// TestError runs every entry of errorTests through the compiler.
+func TestError(t *testing.T) {
+	for ix := range errorTests {
+		testError(t, errorTests[ix])
+	}
+}
+
+// testError compiles each package of the test case in order, checking the
+// reported compile errors against ErrRE, and the compiled "Res" error def
+// against the expected results when compilation succeeds.
+func testError(t *testing.T, test errorTest) {
+	env := compile.NewEnv(-1)
+	for _, epkg := range test.Pkgs {
+		// Each package is compiled from a single file; the "package <name>"
+		// clause is prepended to the source data automatically.
+		fname := epkg.Name + ".vdl"
+		src := "package " + epkg.Name + "\n" + epkg.Data
+		buildPkg := vdltest.FakeBuildPackage(epkg.Name, epkg.Name, map[string]string{fname: src})
+		pkg := build.BuildPackage(buildPkg, env)
+		vdltest.ExpectResult(t, env.Errors, test.Name, epkg.ErrRE)
+		if pkg == nil || epkg.ErrRE != "" {
+			continue
+		}
+		matchErrorRes(t, test.Name, epkg, pkg.Files[0].ErrorDefs)
+	}
+}
+
+// matchErrorRes looks for the error def whose ID is "<pkg>.Res" and compares
+// it, after normalization, against the expected def in epkg.
+func matchErrorRes(t *testing.T, tname string, epkg errorPkg, edefs []*compile.ErrorDef) {
+	for _, edef := range edefs {
+		if edef.ID != epkg.Name+".Res" {
+			continue
+		}
+		got, want := cleanErrorDef(*edef), cleanErrorDef(epkg.Want)
+		if !reflect.DeepEqual(got, want) {
+			t.Errorf("%s got %+v, want %+v", tname, got, want)
+		}
+		return
+	}
+	t.Errorf("%s couldn't find Res in package %s", tname, epkg.Name)
+}
+
+// cleanErrorDef resets fields that we don't care about testing.
+//
+// NOTE(review): ed is received by value, but Params holds *Field pointers, so
+// zeroing param.Pos mutates field values shared with the caller.  Both the
+// "got" and "want" defs pass through this function before comparison, so the
+// comparison stays apples-to-apples.
+func cleanErrorDef(ed compile.ErrorDef) compile.ErrorDef {
+	ed.NamePos = compile.NamePos{}
+	ed.Exported = false
+	ed.ID = ""
+	for _, param := range ed.Params {
+		param.Pos = parse.Pos{}
+	}
+	return ed
+}
+
+// errorPkg describes a single package to compile: its name, its source data
+// (without the package clause), the expected compiled "Res" error def, and a
+// regexp matching the expected compile error (empty means success).
+type errorPkg struct {
+	Name  string
+	Data  string
+	Want  compile.ErrorDef
+	ErrRE string
+}
+
+// ep abbreviates a list of packages, compiled in order within one test case.
+type ep []errorPkg
+
+// errorTest names a test case and lists the packages it compiles.
+type errorTest struct {
+	Name string
+	Pkgs ep
+}
+
+// Language IDs used in the expected results below.  Both constants carry an
+// explicit i18n.LangID type; previously zh was an untyped string constant,
+// which relied on implicit conversion at every use site.
+const (
+	en i18n.LangID = "en"
+	zh i18n.LangID = "zh"
+)
+
+// arg builds a compile.Field with the given name and type, for succinct
+// expected results in the tables below.
+func arg(name string, t *vdl.Type) *compile.Field {
+	return &compile.Field{
+		NamePos: compile.NamePos{Name: name},
+		Type:    t,
+	}
+}
+
+const pre = "{1:}{2:} "
+
+// errorTests enumerates the error-definition cases: retry-code actions,
+// parameters, format translation, multi-package interplay, and failure modes.
+var errorTests = []errorTest{
+	{"NoParams1", ep{{"a", `error Res() {"en":"msg1"}`,
+		compile.ErrorDef{
+			Formats: []compile.LangFmt{{en, pre + "msg1"}},
+			English: pre + "msg1",
+		},
+		"",
+	}}},
+	{"NoParams2", ep{{"a", `error Res() {"en":"msg1","zh":"msg2"}`,
+		compile.ErrorDef{
+			Formats: []compile.LangFmt{{en, pre + "msg1"}, {zh, pre + "msg2"}},
+			English: pre + "msg1",
+		},
+		"",
+	}}},
+	{"NoParamsNoRetry", ep{{"a", `error Res() {NoRetry,"en":"msg1"}`,
+		compile.ErrorDef{
+			RetryCode: vdl.WireRetryCodeNoRetry,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"NoParamsRetryConnection", ep{{"a", `error Res() {RetryConnection,"en":"msg1"}`,
+		compile.ErrorDef{
+			RetryCode: vdl.WireRetryCodeRetryConnection,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"NoParamsRetryRefetch", ep{{"a", `error Res() {RetryRefetch,"en":"msg1"}`,
+		compile.ErrorDef{
+			RetryCode: vdl.WireRetryCodeRetryRefetch,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"NoParamsRetryBackoff", ep{{"a", `error Res() {RetryBackoff,"en":"msg1"}`,
+		compile.ErrorDef{
+			RetryCode: vdl.WireRetryCodeRetryBackoff,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"NoParamsMulti", ep{{"a", `error Res() {RetryRefetch,"en":"msg1","zh":"msg2"}`,
+		compile.ErrorDef{
+			RetryCode: vdl.WireRetryCodeRetryRefetch,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}, {zh, pre + "msg2"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+
+	{"WithParams1", ep{{"a", `error Res(x string, y int32) {"en":"msg1"}`,
+		compile.ErrorDef{
+			Params:  []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			Formats: []compile.LangFmt{{en, pre + "msg1"}},
+			English: pre + "msg1",
+		},
+		"",
+	}}},
+	{"WithParams2", ep{{"a", `error Res(x string, y int32) {"en":"msg1","zh":"msg2"}`,
+		compile.ErrorDef{
+			Params:  []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			Formats: []compile.LangFmt{{en, pre + "msg1"}, {zh, pre + "msg2"}},
+			English: pre + "msg1",
+		},
+		"",
+	}}},
+	{"WithParamsNoRetry", ep{{"a", `error Res(x string, y int32) {NoRetry,"en":"msg1"}`,
+		compile.ErrorDef{
+			Params:    []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			RetryCode: vdl.WireRetryCodeNoRetry,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"WithParamsRetryConnection", ep{{"a", `error Res(x string, y int32) {RetryConnection,"en":"msg1"}`,
+		compile.ErrorDef{
+			Params:    []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			RetryCode: vdl.WireRetryCodeRetryConnection,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"WithParamsRetryRefetch", ep{{"a", `error Res(x string, y int32) {RetryRefetch,"en":"msg1"}`,
+		compile.ErrorDef{
+			Params:    []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			RetryCode: vdl.WireRetryCodeRetryRefetch,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"WithParamsRetryBackoff", ep{{"a", `error Res(x string, y int32) {RetryBackoff,"en":"msg1"}`,
+		compile.ErrorDef{
+			Params:    []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			RetryCode: vdl.WireRetryCodeRetryBackoff,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"WithParamsMulti", ep{{"a", `error Res(x string, y int32) {RetryRefetch,"en":"msg1","zh":"msg2"}`,
+		compile.ErrorDef{
+			Params:    []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			RetryCode: vdl.WireRetryCodeRetryRefetch,
+			Formats:   []compile.LangFmt{{en, pre + "msg1"}, {zh, pre + "msg2"}},
+			English:   pre + "msg1",
+		},
+		"",
+	}}},
+	{"WithParamsFormat", ep{{"a", `error Res(x string, y int32) {"en":"en {x} {y}","zh":"zh {y} {x}"}`,
+		compile.ErrorDef{
+			Params:  []*compile.Field{arg("x", vdl.StringType), arg("y", vdl.Int32Type)},
+			Formats: []compile.LangFmt{{en, pre + "en {3} {4}"}, {zh, pre + "zh {4} {3}"}},
+			English: pre + "en {3} {4}",
+		},
+		"",
+	}}},
+	{"WithSamePackageParam", ep{{"a", `error Res(x Bool) {"en":"en {x}"};type Bool bool`,
+		compile.ErrorDef{
+			Params:  []*compile.Field{arg("x", vdl.NamedType("a.Bool", vdl.BoolType))},
+			Formats: []compile.LangFmt{{en, pre + "en {3}"}},
+			English: pre + "en {3}",
+		},
+		"",
+	}}},
+
+	// Test multi-package errors.
+	{"MultiPkgSameErrorName", ep{
+		{
+			"a", `error Res() {"en":"msg1"}`,
+			compile.ErrorDef{
+				Formats: []compile.LangFmt{{en, pre + "msg1"}},
+				English: pre + "msg1",
+			},
+			"",
+		},
+		{
+			"b", `error Res() {"en":"msg2"}`,
+			compile.ErrorDef{
+				Formats: []compile.LangFmt{{en, pre + "msg2"}},
+				English: pre + "msg2",
+			},
+			"",
+		},
+	}},
+	{"MultiPkgTypeDep", ep{
+		{
+			"a", `error Res() {"en":"msg1"};type Bool bool`,
+			compile.ErrorDef{
+				Formats: []compile.LangFmt{{en, pre + "msg1"}},
+				English: pre + "msg1",
+			},
+			"",
+		},
+		{
+			"b", `import "a";error Res(x a.Bool) {"en":"en {x}"}`,
+			compile.ErrorDef{
+				Params:  []*compile.Field{arg("x", vdl.NamedType("a.Bool", vdl.BoolType))},
+				Formats: []compile.LangFmt{{en, pre + "en {3}"}},
+				English: pre + "en {3}",
+			},
+			"",
+		},
+	}},
+	{"RedefinitionOfImportName", ep{
+		{
+			"a", `error Res() {"en":"msg1"}`,
+			compile.ErrorDef{
+				Formats: []compile.LangFmt{{en, pre + "msg1"}},
+				English: pre + "msg1",
+			},
+			"",
+		},
+		{
+			"b", `import "a";error a() {"en":"en {}"}`, compile.ErrorDef{},
+			"error a name conflict",
+		},
+	}},
+
+	// Test errors.
+	{"NoParamsNoLangFmt1", ep{{"a", `error Res()`, compile.ErrorDef{}, englishFormat}}},
+	{"NoParamsNoLangFmt2", ep{{"a", `error Res() {}`, compile.ErrorDef{}, englishFormat}}},
+	{"NoParamsNoLangFmt3", ep{{"a", `error Res() {NoRetry}`, compile.ErrorDef{}, englishFormat}}},
+
+	{"WithParamsNoLangFmt1", ep{{"a", `error Res(x string, y int32)`, compile.ErrorDef{}, englishFormat}}},
+	{"WithParamsNoLangFmt2", ep{{"a", `error Res(x string, y int32) {}`, compile.ErrorDef{}, englishFormat}}},
+	{"WithParamsNoLangFmt3", ep{{"a", `error Res(x string, y int32) {NoRetry}`, compile.ErrorDef{}, englishFormat}}},
+
+	{"MissingParamName1", ep{{"a", `error Res(bool) {"en":"msg1"}`, compile.ErrorDef{}, "parameters must be named"}}},
+	{"MissingParamName2", ep{{"a", `error Res(bool, int32) {"en":"msg1"}`, compile.ErrorDef{}, "parameters must be named"}}},
+
+	{"UnknownType", ep{{"a", `error Res(x foo) {"en":"msg1"}`, compile.ErrorDef{}, "type foo undefined"}}},
+	{"InvalidParam", ep{{"a", `error Res(_x foo) {"en":"msg1"}`, compile.ErrorDef{}, "param _x invalid"}}},
+	{"DupParam", ep{{"a", `error Res(x bool, x int32) {"en":"msg1"}`, compile.ErrorDef{}, "param x duplicate name"}}},
+	{"UnknownAction", ep{{"a", `error Res() {Foo,"en":"msg1"}`, compile.ErrorDef{}, "unknown action"}}},
+	{"EmptyLanguage", ep{{"a", `error Res() {"":"msg"}`, compile.ErrorDef{}, "empty language"}}},
+	{"DupLanguage", ep{{"a", `error Res() {"en":"msg1","en":"msg2"}`, compile.ErrorDef{}, "duplicate language en"}}},
+	{"UnknownParam", ep{{"a", `error Res() {"en":"{foo}"}`, compile.ErrorDef{}, `unknown param "foo"`}}},
+	{"DupError", ep{{"a", `error Res() {"en":"msg1"};error Res() {"en":"msg1"}`, compile.ErrorDef{}, "error Res name conflict"}}},
+}
+
+const englishFormat = "must define at least one English format"
diff --git a/lib/vdl/compile/ident_test.go b/lib/vdl/compile/ident_test.go
new file mode 100644
index 0000000..7f172c0
--- /dev/null
+++ b/lib/vdl/compile/ident_test.go
@@ -0,0 +1,55 @@
+package compile_test
+
+import (
+	"testing"
+
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdltest"
+)
+
+// TestIdentConflict verifies that defining two identifiers in one package
+// whose names clash (exactly, or differing only by case) triggers a
+// "name conflict" compile error, across every combination of type, const,
+// interface and error definitions.
+func TestIdentConflict(t *testing.T) {
+	tests := []struct {
+		Name string
+		Data string
+	}{
+		// Test conflicting identifiers.
+		{"Type", `type foo int64; type foo int64`},
+		{"TypeMixed", `type FoO int64; type foo int64`},
+
+		{"Const", `const foo = true; const foo = true`},
+		{"ConstMixed", `const FoO = true; const foo = true`},
+
+		{"Interface", `type foo interface{}; type foo interface{}`},
+		{"InterfaceMixed", `type FoO interface{}; type foo interface{}`},
+
+		{"Error", `error foo() {"en":"a"}; error foo() {"en":"a"}`},
+		{"ErrorMixed", `error FoO() {"en":"a"}; error foo() {"en":"a"}`},
+
+		{"TypeAndConst", `type foo int64; const foo = true`},
+		{"TypeAndConstMixed", `type FoO int64; const foo = true`},
+		{"TypeAndInterface", `type foo int64; type foo interface{}`},
+		{"TypeAndInterfaceMixed", `type FoO int64; type foo interface{}`},
+		{"TypeAndError", `type foo int64; error foo() {"en":"a"}`},
+		{"TypeAndErrorMixed", `type foo int64; error FoO() {"en":"a"}`},
+
+		{"ConstAndInterface", `const foo = true; type foo interface{}`},
+		{"ConstAndInterfaceMixed", `const FoO = true; type foo interface{}`},
+		{"ConstAndError", `const foo = true; error foo() {"en":"a"}`},
+		{"ConstAndErrorMixed", `const foo = true; error FoO() {"en":"a"}`},
+
+		{"InterfaceAndError", `type foo interface{}; error foo() {"en":"a"}`},
+		{"InterfaceAndErrorMixed", `type foo interface{}; error FoO() {"en":"a"}`},
+	}
+	for _, test := range tests {
+		// Each case compiles a single-file package; the build must fail and
+		// report a name conflict.
+		env := compile.NewEnv(-1)
+		files := map[string]string{
+			test.Name + ".vdl": "package a\n" + test.Data,
+		}
+		buildPkg := vdltest.FakeBuildPackage(test.Name, test.Name, files)
+		if pkg := build.BuildPackage(buildPkg, env); pkg != nil {
+			t.Errorf("%s got package, want nil", test.Name)
+		}
+		vdltest.ExpectResult(t, env.Errors, test.Name, "name conflict")
+	}
+}
diff --git a/lib/vdl/compile/interface.go b/lib/vdl/compile/interface.go
new file mode 100644
index 0000000..89c8da3
--- /dev/null
+++ b/lib/vdl/compile/interface.go
@@ -0,0 +1,217 @@
+package compile
+
+import (
+	"v.io/lib/toposort"
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/parse"
+)
+
+// compileInterfaces is the "entry point" to the rest of this file.  It takes
+// the interfaces defined in pfiles and compiles them into Interfaces in pkg.
+// Definition is skipped if the declaration phase reports any errors.
+func compileInterfaces(pkg *Package, pfiles []*parse.File, env *Env) {
+	id := ifaceDefiner{pkg, pfiles, env, make(map[string]*ifaceBuilder)}
+	id.Declare()
+	if env.Errors.IsEmpty() {
+		id.SortAndDefine()
+	}
+}
+
+// ifaceDefiner defines interfaces in a package.  This is split into two phases:
+// 1) Declare ensures local interface references can be resolved.
+// 2) SortAndDefine sorts in dependency order, and evaluates and defines each
+//    interface.
+//
+// It holds a builders map from interface name to ifaceBuilder, where the
+// ifaceBuilder is responsible for compiling and defining a single interface.
+type ifaceDefiner struct {
+	pkg      *Package
+	pfiles   []*parse.File
+	env      *Env
+	builders map[string]*ifaceBuilder
+}
+
+// ifaceBuilder pairs the compiled interface under construction (def) with the
+// parse tree it is built from (pdef).
+type ifaceBuilder struct {
+	def  *Interface
+	pdef *parse.Interface
+}
+
+// printIfaceBuilderName renders an *ifaceBuilder as its interface name; it is
+// used to format cycle errors from the topological sort.
+func printIfaceBuilderName(ibuilder interface{}) string {
+	b := ibuilder.(*ifaceBuilder)
+	return b.def.Name
+}
+
+// Declare creates builders for each interface defined in the package.  Names
+// must be valid identifiers and unique within the file/package; failures are
+// reported into env while processing continues.
+func (id ifaceDefiner) Declare() {
+	for ix := range id.pkg.Files {
+		file, pfile := id.pkg.Files[ix], id.pfiles[ix]
+		for _, pdef := range pfile.Interfaces {
+			export, err := ValidIdent(pdef.Name, ReservedNormal)
+			if err != nil {
+				id.env.prefixErrorf(file, pdef.Pos, err, "interface %s invalid name", pdef.Name)
+				continue // keep going to catch more errors
+			}
+			// DeclareIdent rejects names that conflict with other identifiers
+			// already declared in this file/package.
+			detail := identDetail("interface", file, pdef.Pos)
+			if err := file.DeclareIdent(pdef.Name, detail); err != nil {
+				id.env.prefixErrorf(file, pdef.Pos, err, "interface %s name conflict", pdef.Name)
+				continue
+			}
+			def := &Interface{NamePos: NamePos(pdef.NamePos), Exported: export, File: file}
+			id.builders[pdef.Name] = &ifaceBuilder{def, pdef}
+		}
+	}
+}
+
+// SortAndDefine sorts interfaces by their dependencies on other interfaces in
+// this package, and defines them in topological order.  The sorting is to
+// ensure there are no cycles.
+func (id ifaceDefiner) SortAndDefine() {
+	// Populate sorter with dependency information.  The sorting ensures that the
+	// list of interfaces within each file is topologically sorted, and also
+	// deterministic; in the absence of interface embeddings, interfaces are
+	// listed in the same order they were defined in the parsed files.
+	var sorter toposort.Sorter
+	for _, pfile := range id.pfiles {
+		for _, pdef := range pfile.Interfaces {
+			b := id.builders[pdef.Name]
+			sorter.AddNode(b)
+			for _, dep := range id.getLocalDeps(b) {
+				sorter.AddEdge(b, dep)
+			}
+		}
+	}
+	// Sort and check for cycles.
+	sorted, cycles := sorter.Sort()
+	if len(cycles) > 0 {
+		cycleStr := toposort.DumpCycles(cycles, printIfaceBuilderName)
+		first := cycles[0][0].(*ifaceBuilder)
+		id.env.Errorf(first.def.File, first.def.Pos, "package %v has cyclic interfaces: %v", id.pkg.Name, cycleStr)
+		return
+	}
+	// Define all interfaces.  Since we add the interfaces as we go and evaluate
+	// in topological order, dependencies are guaranteed to be resolvable when we
+	// get around to defining the interfaces that embed on them.
+	for _, ibuilder := range sorted {
+		b := ibuilder.(*ifaceBuilder)
+		id.define(b)
+		addIfaceDef(b.def)
+	}
+}
+
+// addIfaceDef updates our various structures to add a new interface: the
+// per-file interface list and the package-wide name-to-def map.
+func addIfaceDef(def *Interface) {
+	def.File.Interfaces = append(def.File.Interfaces, def)
+	def.File.Package.ifaceDefs[def.Name] = def
+}
+
+// getLocalDeps returns the list of interface dependencies for b that are in
+// this package.
+func (id ifaceDefiner) getLocalDeps(b *ifaceBuilder) (deps []*ifaceBuilder) {
+	for _, pe := range b.pdef.Embeds {
+		// Embeddings of other interfaces in this package are all we care about.
+		if dep := id.builders[pe.Name]; dep != nil {
+			deps = append(deps, dep)
+		}
+	}
+	return
+}
+
+// define compiles b's embeds and methods into its interface def.
+func (id ifaceDefiner) define(b *ifaceBuilder) {
+	id.defineEmbeds(b)
+	id.defineMethods(b)
+}
+
+// defineEmbeds resolves and attaches the interfaces embedded in b, reporting
+// duplicate embeddings, unresolved names, and qualified names where only a
+// prefix of the dotted path matched.
+func (id ifaceDefiner) defineEmbeds(b *ifaceBuilder) {
+	// TODO(toddw): Check for duplicate methods.
+	def, file := b.def, b.def.File
+	seen := make(map[string]*parse.NamePos)
+	for _, pe := range b.pdef.Embeds {
+		if dup := seen[pe.Name]; dup != nil {
+			id.env.Errorf(file, pe.Pos, "interface %s duplicate embedding (previous at %s)", pe.Name, dup.Pos)
+			continue // keep going to catch more errors
+		}
+		seen[pe.Name] = pe
+		// Resolve the embedded interface.
+		embed, matched := id.env.ResolveInterface(pe.Name, file)
+		if embed == nil {
+			id.env.Errorf(file, pe.Pos, "interface %s undefined", pe.Name)
+			continue // keep going to catch more errors
+		}
+		// ResolveInterface matched only a leading portion of the name; the
+		// trailing remainder is bogus.
+		if len(matched) < len(pe.Name) {
+			id.env.Errorf(file, pe.Pos, "interface %s invalid (%s unmatched)", pe.Name, pe.Name[len(matched):])
+			continue // keep going to catch more errors
+		}
+		def.Embeds = append(def.Embeds, embed)
+	}
+}
+
+// defineMethods compiles b's parsed methods and attaches them to its def.
+// Method names must be unique within the interface and pass the exported
+// identifier check (in lower-camel-case reserved-word mode).
+func (id ifaceDefiner) defineMethods(b *ifaceBuilder) {
+	def, file := b.def, b.def.File
+	seen := make(map[string]*parse.Method)
+	for _, pm := range b.pdef.Methods {
+		if dup := seen[pm.Name]; dup != nil {
+			id.env.Errorf(file, pm.Pos, "method %s redefined (previous at %s)", pm.Name, dup.Pos)
+			continue // keep going to catch more errors
+		}
+		seen[pm.Name] = pm
+		if err := ValidExportedIdent(pm.Name, ReservedCamelCase); err != nil {
+			id.env.Errorf(file, pm.Pos, "method %s name (%s)", pm.Name, err)
+			continue // keep going to catch more errors
+		}
+		m := &Method{NamePos: NamePos(pm.NamePos)}
+		m.InArgs = id.defineArgs(in, m.NamePos, pm.InArgs, file)
+		m.OutArgs = id.defineArgs(out, m.NamePos, pm.OutArgs, file)
+		m.InStream = id.defineStreamType(pm.InStream, file)
+		m.OutStream = id.defineStreamType(pm.OutStream, file)
+		m.Tags = id.defineTags(pm.Tags, file)
+		def.Methods = append(def.Methods, m)
+	}
+}
+
+// inout distinguishes in-args from out-args when compiling method arguments;
+// the string value appears in error messages.
+type inout string
+
+const (
+	in  inout = "in"
+	out inout = "out"
+)
+
+// defineArgs compiles the parsed args pargs into compiled fields.  Duplicate
+// arg names are rejected, and out-args must all be named whenever there are
+// more than 2 of them.
+func (id ifaceDefiner) defineArgs(io inout, method NamePos, pargs []*parse.Field, file *File) (args []*Field) {
+	seen := make(map[string]*parse.Field)
+	for _, parg := range pargs {
+		if dup := seen[parg.Name]; dup != nil && parg.Name != "" {
+			id.env.Errorf(file, parg.Pos, "method %s arg %s duplicate name (previous at %s)", method.Name, parg.Name, dup.Pos)
+			continue // keep going to catch more errors
+		}
+		seen[parg.Name] = parg
+		if io == out && len(pargs) > 2 && parg.Name == "" {
+			id.env.Errorf(file, parg.Pos, "method %s out arg unnamed (must name all out args if there are more than 2)", method.Name)
+			continue // keep going to catch more errors
+		}
+		// Named args must be valid identifiers; unnamed args skip the check.
+		if parg.Name != "" {
+			if _, err := ValidIdent(parg.Name, ReservedCamelCase); err != nil {
+				id.env.prefixErrorf(file, parg.Pos, err, "method %s invalid arg %s", method.Name, parg.Name)
+				continue // keep going to catch more errors
+			}
+		}
+		arg := &Field{NamePos(parg.NamePos), compileType(parg.Type, file, id.env)}
+		args = append(args, arg)
+	}
+	return
+}
+
+func (id ifaceDefiner) defineStreamType(ptype parse.Type, file *File) *vdl.Type {
+	if ptype == nil {
+		return nil
+	}
+	if tn, ok := ptype.(*parse.TypeNamed); ok && tn.Name == "_" {
+		// Special-case the _ placeholder, which means there's no stream type.
+		return nil
+	}
+	return compileType(ptype, file, id.env)
+}
+
+func (id ifaceDefiner) defineTags(ptags []parse.ConstExpr, file *File) (tags []*vdl.Value) {
+	for _, ptag := range ptags {
+		if tag := compileConst("tag", nil, ptag, file, id.env); tag != nil {
+			tags = append(tags, tag)
+		}
+	}
+	return
+}
diff --git a/lib/vdl/compile/interface_test.go b/lib/vdl/compile/interface_test.go
new file mode 100644
index 0000000..af7b166
--- /dev/null
+++ b/lib/vdl/compile/interface_test.go
@@ -0,0 +1,166 @@
+package compile_test
+
+import (
+	"reflect"
+	"testing"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/parse"
+	"v.io/v23/vdl/vdltest"
+)
+
+// TestInterface compiles each test package in order and checks the compiled
+// "Res" interface (or the reported compile error) against expectations.
+func TestInterface(t *testing.T) {
+	for _, test := range ifaceTests {
+		env := compile.NewEnv(-1)
+		for _, tpkg := range test.Pkgs {
+			// Compile the package with a single file, adding the "package a" prefix
+			// to the source data automatically.
+			files := map[string]string{
+				tpkg.Name + ".vdl": "package " + tpkg.Name + "\n" + tpkg.Data,
+			}
+			pkgPath := "p.kg/" + tpkg.Name // use dots in pkgpath to test tricky cases
+			buildPkg := vdltest.FakeBuildPackage(tpkg.Name, pkgPath, files)
+			pkg := build.BuildPackage(buildPkg, env)
+			vdltest.ExpectResult(t, env.Errors, test.Name, tpkg.ErrRE)
+			if pkg == nil || tpkg.ErrRE != "" {
+				continue
+			}
+			matchIfaceRes(t, test.Name, tpkg, pkg.Files[0].Interfaces)
+		}
+	}
+}
+
+// matchIfaceRes looks up the interface named "Res" among ifaces and compares
+// it, after normalization, against tpkg.Iface (nil means nothing to check).
+func matchIfaceRes(t *testing.T, tname string, tpkg ifacePkg, ifaces []*compile.Interface) {
+	if tpkg.Iface == nil {
+		return
+	}
+	for _, iface := range ifaces {
+		if iface.Name != "Res" {
+			continue
+		}
+		got, want := normalizeIface(*iface), normalizeIface(*tpkg.Iface)
+		if !reflect.DeepEqual(got, want) {
+			t.Errorf("%s got %v, want %v", tname, got, want)
+		}
+		return
+	}
+	t.Errorf("%s couldn't find Res in package %s", tname, tpkg.Name)
+}
+
+// normalizeIface returns a copy of x with positions, export flags and file
+// back-pointers cleared, recursively normalizing embedded interfaces and
+// methods into freshly-allocated values.
+func normalizeIface(x compile.Interface) compile.Interface {
+	// Don't compare uninteresting portions, to make tests more succinct.
+	x.Pos = parse.Pos{}
+	x.Exported = false
+	x.File = nil
+	embeds := x.Embeds
+	x.Embeds = nil
+	for _, embed := range embeds {
+		norm := normalizeIface(*embed)
+		x.Embeds = append(x.Embeds, &norm)
+	}
+	methods := x.Methods
+	x.Methods = nil
+	for _, method := range methods {
+		norm := normalizeMethod(*method)
+		x.Methods = append(x.Methods, &norm)
+	}
+	return x
+}
+
+// normalizeMethod clears the method's position and normalizes its args.
+func normalizeMethod(x compile.Method) compile.Method {
+	x.Pos = parse.Pos{}
+	x.InArgs, x.OutArgs = normalizeArgs(x.InArgs), normalizeArgs(x.OutArgs)
+	return x
+}
+
+func normalizeArgs(x []*compile.Field) (ret []*compile.Field) {
+	for _, arg := range x {
+		norm := normalizeArg(*arg)
+		ret = append(ret, &norm)
+	}
+	return
+}
+
+// normalizeArg clears the field's position, which tests don't compare.
+func normalizeArg(x compile.Field) compile.Field {
+	x.NamePos.Pos = parse.Pos{}
+	return x
+}
+
+// np builds a NamePos holding just a name, for succinct expected results.
+func np(name string) compile.NamePos {
+	return compile.NamePos{Name: name}
+}
+
+// ifaceTest names a test case and lists the packages it compiles in order.
+type ifaceTest struct {
+	Name string
+	Pkgs ip
+}
+
+// ip abbreviates a list of test packages.
+type ip []ifacePkg
+
+// ifacePkg describes one package to compile: its name, source data (without
+// the package clause), the expected compiled "Res" interface (nil skips the
+// check), and a regexp matching the expected compile error (empty = success).
+type ifacePkg struct {
+	Name  string
+	Data  string
+	Iface *compile.Interface
+	ErrRE string
+}
+
+// ifaceTests enumerates interface compilation cases: methods and args,
+// embedding within and across packages (including qualified paths), and
+// failure modes.
+var ifaceTests = []ifaceTest{
+	{"Empty", ip{{"a", `type Res interface{}`, &compile.Interface{NamePos: np("Res")}, ""}}},
+	{"NoArgs", ip{{"a", `type Res interface{NoArgs() error}`,
+		&compile.Interface{
+			NamePos: np("Res"),
+			Methods: []*compile.Method{{NamePos: np("NoArgs")}},
+		},
+		"",
+	}}},
+	{"HasArgs", ip{{"a", `type Res interface{HasArgs(x bool) (string | error)}`,
+		&compile.Interface{
+			NamePos: np("Res"),
+			Methods: []*compile.Method{{
+				NamePos: np("HasArgs"),
+				InArgs:  []*compile.Field{{NamePos: np("x"), Type: vdl.BoolType}},
+				OutArgs: []*compile.Field{{Type: vdl.StringType}},
+			}},
+		},
+		"",
+	}}},
+	{"Embed", ip{{"a", `type A interface{};type Res interface{A}`,
+		&compile.Interface{
+			NamePos: np("Res"),
+			Embeds:  []*compile.Interface{{NamePos: np("A")}},
+		},
+		"",
+	}}},
+	{"MultiEmbed", ip{{"a", `type A interface{};type B interface{};type Res interface{A;B}`,
+		&compile.Interface{
+			NamePos: np("Res"),
+			Embeds:  []*compile.Interface{{NamePos: np("A")}, {NamePos: np("B")}},
+		},
+		"",
+	}}},
+	{"MultiPkgEmbed", ip{
+		{"a", `type Res interface{}`, &compile.Interface{NamePos: np("Res")}, ""},
+		{"b", `import "p.kg/a";type Res interface{a.Res}`,
+			&compile.Interface{
+				NamePos: np("Res"),
+				Embeds:  []*compile.Interface{{NamePos: np("Res")}},
+			},
+			"",
+		},
+	}},
+	{"MultiPkgEmbedQualifiedPath", ip{
+		{"a", `type Res interface{}`, &compile.Interface{NamePos: np("Res")}, ""},
+		{"b", `import "p.kg/a";type Res interface{"p.kg/a".Res}`,
+			&compile.Interface{
+				NamePos: np("Res"),
+				Embeds:  []*compile.Interface{{NamePos: np("Res")}},
+			},
+			"",
+		},
+	}},
+	{"UnmatchedEmbed", ip{{"a", `type A interface{};type Res interface{A.foobar}`, nil,
+		`\(\.foobar unmatched\)`,
+	}}},
+}
diff --git a/lib/vdl/compile/reserved_words.go b/lib/vdl/compile/reserved_words.go
new file mode 100644
index 0000000..0a91541
--- /dev/null
+++ b/lib/vdl/compile/reserved_words.go
@@ -0,0 +1,154 @@
+package compile
+
+import (
+	"v.io/v23/vdl/vdlutil"
+)
+
+// ReservedMode indicates which mode to perform reserved-word checking:
+//   ReservedNormal    - Check the given identifier.
+//   ReservedCamelCase - Check the given identifier in lower-camel-case.
+type ReservedMode int
+
+const (
+	// ReservedNormal checks the identifier exactly as given.
+	ReservedNormal ReservedMode = iota
+	// ReservedCamelCase checks the lower-camel-case form of the identifier.
+	ReservedCamelCase
+)
+
+// reservedWord checks if identifiers are reserved after they are converted
+// to the native form for each generated language (Java, JavaScript, Go).
+// TODO(bprosnitz) Other identifiers? (set, assert, raise, with, etc)
+func reservedWord(ident string, mode ReservedMode) bool {
+	if reservedWordJava(ident, mode) {
+		return true
+	}
+	if reservedWordJavascript(ident, mode) {
+		return true
+	}
+	return reservedWordGo(ident)
+}
+
+// reservedWordJava reports whether ident (converted to lower-camel-case when
+// mode requests it) is a Java keyword.
+func reservedWordJava(ident string, mode ReservedMode) bool {
+	if mode == ReservedCamelCase {
+		ident = vdlutil.ToCamelCase(ident)
+	}
+	// All entries in the map are true, so the lookup doubles as a set test.
+	return javaReservedWords[ident]
+}
+
+// javaReservedWords is the set of Java language keywords; every value is
+// true, so membership is what matters.
+var javaReservedWords = map[string]bool{
+	"abstract":     true,
+	"assert":       true,
+	"boolean":      true,
+	"break":        true,
+	"byte":         true,
+	"case":         true,
+	"catch":        true,
+	"char":         true,
+	"class":        true,
+	"const":        true,
+	"continue":     true,
+	"default":      true,
+	"do":           true,
+	"double":       true,
+	"else":         true,
+	"enum":         true,
+	"extends":      true,
+	"final":        true,
+	"finally":      true,
+	"float":        true,
+	"for":          true,
+	"goto":         true,
+	"if":           true,
+	"implements":   true,
+	"import":       true,
+	"instanceof":   true,
+	"int":          true,
+	"interface":    true,
+	"long":         true,
+	"native":       true,
+	"new":          true,
+	"package":      true,
+	"private":      true,
+	"protected":    true,
+	"public":       true,
+	"return":       true,
+	"short":        true,
+	"static":       true,
+	"strictfp":     true,
+	"super":        true,
+	"switch":       true,
+	"synchronized": true,
+	"this":         true,
+	"throw":        true,
+	"throws":       true,
+	"transient":    true,
+	"try":          true,
+	"void":         true,
+	"volatile":     true,
+	"while":        true,
+}
+
+// reservedWordGo reports whether ident is a Go keyword.  Go identifiers are
+// checked as-is (no case conversion), so there is no mode parameter.
+func reservedWordGo(ident string) bool {
+	return goReservedWords[ident]
+}
+
+// goReservedWords is the set of Go language keywords; every value is true,
+// so membership is what matters.
+var goReservedWords = map[string]bool{
+	"break":       true,
+	"case":        true,
+	"chan":        true,
+	"const":       true,
+	"continue":    true,
+	"default":     true,
+	"defer":       true,
+	"else":        true,
+	"fallthrough": true,
+	"for":         true,
+	"func":        true,
+	"go":          true,
+	"goto":        true,
+	"if":          true,
+	"import":      true,
+	"interface":   true,
+	"map":         true,
+	"package":     true,
+	"range":       true,
+	"return":      true,
+	"select":      true,
+	"struct":      true,
+	"switch":      true,
+	"type":        true,
+	"var":         true,
+}
+
+// reservedWordJavascript reports whether ident (converted to lower-camel-case
+// when mode requests it) is a JavaScript keyword.
+func reservedWordJavascript(ident string, mode ReservedMode) bool {
+	if mode == ReservedCamelCase {
+		ident = vdlutil.ToCamelCase(ident)
+	}
+	// All entries in the map are true, so the lookup doubles as a set test.
+	return javascriptReservedWords[ident]
+}
+
+// javascriptReservedWords is the set of JavaScript language keywords; every
+// value is true, so membership is what matters.  Some keywords are commented
+// out because they collide with existing generated identifiers.
+var javascriptReservedWords = map[string]bool{
+	"break":    true,
+	"case":     true,
+	"catch":    true,
+	"continue": true,
+	"debugger": true,
+	"default":  true,
+	//"delete":     true, // TODO(bprosnitz) Look into adding this back. This conflicts with Delete() on Content in repository.vdlutil.
+	"do":       true,
+	"else":     true,
+	"finally":  true,
+	"for":      true,
+	"function": true,
+	"if":       true,
+	//"in":         true, // TODO(bprosnitz) Look into addint this back. It conflicts with In in access/service.vdlutil.
+	"instanceof": true,
+	"new":        true,
+	"return":     true,
+	"switch":     true,
+	"this":       true,
+	"throw":      true,
+	"try":        true,
+	"typeof":     true,
+	"var":        true,
+	"void":       true,
+	"while":      true,
+	"with":       true,
+}
diff --git a/lib/vdl/compile/result.go b/lib/vdl/compile/result.go
new file mode 100644
index 0000000..31727a9
--- /dev/null
+++ b/lib/vdl/compile/result.go
@@ -0,0 +1,508 @@
+package compile
+
+import (
+	"fmt"
+	"path"
+	"regexp"
+	"strings"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/opconst"
+	"v.io/v23/vdl/parse"
+	"v.io/v23/vdl/vdlutil"
+	"v.io/v23/vdlroot/vdltool"
+)
+
+// Env is the environment for compilation.  It contains all errors that were
+// collected during the execution - you can pass Errors to the parse phase to
+// collect all errors together.  As packages are compiled it also collects the
+// output; after a sequence of dependent packages is compiled, all compiled
+// output will be collected.
+//
+// Always create a new Env via NewEnv; the zero Env is invalid.
+type Env struct {
+	Errors    *vdlutil.Errors
+	pkgs      map[string]*Package      // compiled packages, keyed by package path
+	typeDefs  map[*vdl.Type]*TypeDef   // reverse lookup from compiled type to its def
+	constDefs map[*vdl.Value]*ConstDef // reverse lookup from compiled value to its def
+
+	disallowPathQualifiers bool // Disallow syntax like "a/b/c".Type
+}
+
+// NewEnv creates a new Env, allowing up to maxErrors errors before we stop.
+func NewEnv(maxErrors int) *Env {
+	errs := vdlutil.NewErrors(maxErrors)
+	return NewEnvWithErrors(errs)
+}
+
+// NewEnvWithErrors creates a new Env, using the given errs to collect errors.
+func NewEnvWithErrors(errs *vdlutil.Errors) *Env {
+	env := &Env{
+		Errors:    errs,
+		pkgs:      make(map[string]*Package),
+		typeDefs:  make(map[*vdl.Type]*TypeDef),
+		constDefs: make(map[*vdl.Value]*ConstDef),
+	}
+	// Seed the environment with the built-in package, so built-in identifiers
+	// always resolve.
+	env.pkgs[BuiltInPackage.Name] = BuiltInPackage
+	for _, typeDef := range BuiltInFile.TypeDefs {
+		env.typeDefs[typeDef.Type] = typeDef
+	}
+	for _, constDef := range BuiltInFile.ConstDefs {
+		env.constDefs[constDef.Value] = constDef
+	}
+	return env
+}
+
+// FindTypeDef returns the type definition corresponding to t, or nil if t isn't
+// a defined type.  All built-in and user-defined named types are considered
+// defined; e.g. unnamed lists don't have a corresponding type def.
+func (e *Env) FindTypeDef(t *vdl.Type) *TypeDef { return e.typeDefs[t] }
+
+// FindConstDef returns the const definition corresponding to v, or nil if v
+// isn't a defined const.  All user-defined named consts are considered defined;
+// e.g. method tags don't have a corresponding const def.
+func (e *Env) FindConstDef(v *vdl.Value) *ConstDef { return e.constDefs[v] }
+
+// ResolvePackage resolves a package path to its previous compiled results.
+// Returns nil if the package hasn't been compiled in this environment.
+func (e *Env) ResolvePackage(path string) *Package {
+	return e.pkgs[path]
+}
+
+// resolve resolves a name against the current package and imported package
+// namespace.  It returns the resolved definition (a *TypeDef, *ConstDef or
+// *Interface) and the portion of name that was matched; both results are zero
+// if no match was found.
+func (e *Env) resolve(name string, file *File) (val interface{}, matched string) {
+	// First handle package-path qualified identifiers, which look like this:
+	//   "a/b/c".Ident   (qualified with package path "a/b/c")
+	// These must be handled first, since the package-path may include dots.
+	if strings.HasPrefix(name, `"`) {
+		if parts := strings.SplitN(name[1:], `".`, 2); len(parts) == 2 {
+			path, remain := parts[0], parts[1]
+			if e.disallowPathQualifiers {
+				// TODO(toddw): Add real position.
+				e.Errorf(file, parse.Pos{}, "package path qualified identifier %s not allowed", name)
+			}
+			// The path must appear in the file's imports, and the package must
+			// have been compiled already.
+			if file.ValidateImportPackagePath(path) {
+				if pkg := e.ResolvePackage(path); pkg != nil {
+					if dotParts := strings.Split(remain, "."); len(dotParts) > 0 {
+						if val := pkg.resolve(dotParts[0], false); val != nil {
+							return val, `"` + path + `".` + dotParts[0]
+						}
+					}
+				}
+			}
+		}
+	}
+	// Now handle built-in and package-local identifiers.  Examples:
+	//   string
+	//   TypeName
+	//   EnumType.Label
+	//   ConstName
+	//   StructConst.Field
+	//   InterfaceName
+	nameParts := strings.Split(name, ".")
+	if len(nameParts) == 0 {
+		return nil, ""
+	}
+	// Built-ins take precedence over package-local identifiers.
+	if builtin := BuiltInPackage.resolve(nameParts[0], false); builtin != nil {
+		return builtin, nameParts[0]
+	}
+	if local := file.Package.resolve(nameParts[0], true); local != nil {
+		return local, nameParts[0]
+	}
+	// Now handle package qualified identifiers, which look like this:
+	//   pkg.Ident   (qualified with local package identifier pkg)
+	if len(nameParts) > 1 {
+		if path := file.LookupImportPath(nameParts[0]); path != "" {
+			if pkg := e.ResolvePackage(path); pkg != nil {
+				if val := pkg.resolve(nameParts[1], false); val != nil {
+					return val, nameParts[0] + "." + nameParts[1]
+				}
+			}
+		}
+	}
+	// No match found.
+	return nil, ""
+}
+
+// ResolveType resolves a name to a type definition.
+// Returns the type def and the portion of name that was matched.
+func (e *Env) ResolveType(name string, file *File) (td *TypeDef, matched string) {
+	v, matched := e.resolve(name, file)
+	if def, ok := v.(*TypeDef); ok {
+		return def, matched
+	}
+	return nil, ""
+}
+
+// ResolveConst resolves a name to a const definition.
+// Returns the const def and the portion of name that was matched.
+func (e *Env) ResolveConst(name string, file *File) (cd *ConstDef, matched string) {
+	v, matched := e.resolve(name, file)
+	if def, ok := v.(*ConstDef); ok {
+		return def, matched
+	}
+	return nil, ""
+}
+
+// ResolveInterface resolves a name to an interface definition.
+// Returns the interface and the portion of name that was matched.
+func (e *Env) ResolveInterface(name string, file *File) (i *Interface, matched string) {
+	v, matched := e.resolve(name, file)
+	if iface, ok := v.(*Interface); ok {
+		return iface, matched
+	}
+	return nil, ""
+}
+
+// evalSelectorOnValue evaluates the selector on v, walking a dot-separated
+// chain of struct field names and returning the final value as a const.
+func (e *Env) evalSelectorOnValue(v *vdl.Value, selector string) (opconst.Const, error) {
+	fields := strings.Split(selector, ".")
+	for _, field := range fields {
+		if v.Kind() != vdl.Struct {
+			return opconst.Const{}, fmt.Errorf("invalid selector on const of kind: %v", v.Type().Kind())
+		}
+		fieldVal := v.StructFieldByName(field)
+		if fieldVal == nil {
+			return opconst.Const{}, fmt.Errorf("invalid field name on struct %s: %s", v, field)
+		}
+		v = fieldVal
+	}
+	return opconst.FromValue(v), nil
+}
+
+// evalSelectorOnType evaluates the selector on t; the only valid form is an
+// enum label selected on an enum type, e.g. "Color.Red".
+func (e *Env) evalSelectorOnType(t *vdl.Type, selector string) (opconst.Const, error) {
+	if t.Kind() != vdl.Enum {
+		return opconst.Const{}, fmt.Errorf("invalid selector on type of kind: %v", t.Kind())
+	}
+	index := t.EnumIndex(selector)
+	if index < 0 {
+		return opconst.Const{}, fmt.Errorf("invalid label on enum %s: %s", t.Name(), selector)
+	}
+	enum := vdl.ZeroValue(t).AssignEnumIndex(index)
+	return opconst.FromValue(enum), nil
+}
+
+// EvalConst resolves and evaluates a name to a const.  The name may be a
+// plain const, a struct-field selector on a const (e.g. "Conf.Field"), or an
+// enum-label selector on a type (e.g. "Color.Red").
+func (e *Env) EvalConst(name string, file *File) (opconst.Const, error) {
+	if cd, matched := e.ResolveConst(name, file); cd != nil {
+		if matched == name {
+			return opconst.FromValue(cd.Value), nil
+		}
+		// The +1 skips the "." separating the matched prefix from the selector.
+		remainder := name[len(matched)+1:]
+		c, err := e.evalSelectorOnValue(cd.Value, remainder)
+		if err != nil {
+			return opconst.Const{}, err
+		}
+		return c, nil
+	}
+	if td, matched := e.ResolveType(name, file); td != nil {
+		if matched == name {
+			return opconst.Const{}, fmt.Errorf("%s is a type", name)
+		}
+		remainder := name[len(matched)+1:]
+		c, err := e.evalSelectorOnType(td.Type, remainder)
+		if err != nil {
+			return opconst.Const{}, err
+		}
+		return c, nil
+	}
+	return opconst.Const{}, fmt.Errorf("%s undefined", name)
+}
+
+// Errorf is a helper for error reporting, to consistently contain the file and
+// position of the error when possible.  The error is recorded in e.Errors
+// rather than returned.
+func (e *Env) Errorf(file *File, pos parse.Pos, format string, v ...interface{}) {
+	e.Errors.Error(fpStringf(file, pos, format, v...))
+}
+
+// prefixErrorf is like Errorf, but appends the underlying err in parentheses,
+// for wrapping lower-level errors with file/position context.
+func (e *Env) prefixErrorf(file *File, pos parse.Pos, err error, format string, v ...interface{}) {
+	e.Errors.Error(fpStringf(file, pos, format, v...) + " (" + err.Error() + ")")
+}
+
+// fpString returns the "pkgpath/file:pos" prefix used in error messages.
+func fpString(file *File, pos parse.Pos) string {
+	fullPath := path.Join(file.Package.Path, file.BaseName)
+	return fullPath + ":" + pos.String()
+}
+
+// fpStringf formats a message prefixed with the file and position, in the
+// style of fpString.
+func fpStringf(file *File, pos parse.Pos, format string, v ...interface{}) string {
+	return fmt.Sprintf(fpString(file, pos)+" "+format, v...)
+}
+
+// DisallowPathQualifiers disables syntax like "a/b/c".Type.
+// It returns e, to allow chaining with NewEnv.
+func (e *Env) DisallowPathQualifiers() *Env {
+	e.disallowPathQualifiers = true
+	return e
+}
+
+// Representation of the components of an vdl file.  These data types represent
+// the results of the compilation, used by generators for different languages.
+
+// Package represents a vdl package, containing a set of files.
+type Package struct {
+	// Name is the name of the package, specified in the vdl files.
+	// E.g. "bar"
+	Name string
+	// Path is the package path; the path used in VDL import clauses.
+	// E.g. "foo/bar".
+	Path string
+	// GenPath is the package path to use for code generation.  It is typically
+	// the same as Path, except for vdlroot standard packages.
+	// E.g. "v.io/v23/vdlroot/time"
+	GenPath string
+	// Files holds the files contained in the package.
+	Files []*File
+	// Config holds the configuration for this package, specifying options used
+	// during compilation and code generation.
+	Config vdltool.Config
+
+	// We hold some internal maps to make local name resolution cheap and easy.
+	// Each map is keyed by the unqualified identifier name.
+	typeDefs  map[string]*TypeDef
+	constDefs map[string]*ConstDef
+	ifaceDefs map[string]*Interface
+
+	// lowercaseIdents maps from lowercased identifier to a detail string; it's
+	// used to detect and report identifier conflicts.
+	lowercaseIdents map[string]string
+}
+
+// newPackage returns an initialized Package with empty definition maps.
+func newPackage(name, pkgPath, genPath string, config vdltool.Config) *Package {
+	pkg := &Package{
+		Name:    name,
+		Path:    pkgPath,
+		GenPath: genPath,
+		Config:  config,
+	}
+	pkg.typeDefs = make(map[string]*TypeDef)
+	pkg.constDefs = make(map[string]*ConstDef)
+	pkg.ifaceDefs = make(map[string]*Interface)
+	pkg.lowercaseIdents = make(map[string]string)
+	return pkg
+}
+
+// QualifiedName returns the fully-qualified name of an identifier, by
+// prepending the identifier with the package path.
+func (p *Package) QualifiedName(id string) string {
+	if p.Path != "" {
+		return p.Path + "." + id
+	}
+	return id
+}
+
+// ResolveType resolves the type name to its definition, or nil if undefined.
+func (p *Package) ResolveType(name string) *TypeDef { return p.typeDefs[name] }
+
+// ResolveConst resolves the const name to its definition, or nil if undefined.
+func (p *Package) ResolveConst(name string) *ConstDef { return p.constDefs[name] }
+
+// ResolveInterface resolves the interface name to its definition, or nil if
+// undefined.
+func (p *Package) ResolveInterface(name string) *Interface { return p.ifaceDefs[name] }
+
+// resolve resolves a name against the package's types, consts and interfaces,
+// in that order.  Unexported definitions only match when isLocal is true.
+// Checks for duplicate definitions should be performed before this is called.
+func (p *Package) resolve(name string, isLocal bool) interface{} {
+	if t := p.ResolveType(name); t != nil && (isLocal || t.Exported) {
+		return t
+	}
+	if c := p.ResolveConst(name); c != nil && (isLocal || c.Exported) {
+		return c
+	}
+	if i := p.ResolveInterface(name); i != nil && (isLocal || i.Exported) {
+		return i
+	}
+	return nil
+}
+
+// File represents a compiled vdl file.
+type File struct {
+	BaseName   string       // Base name of the vdl file, e.g. "foo.vdl"
+	PackageDef NamePos      // Name, position and docs of the "package" clause
+	ErrorDefs  []*ErrorDef  // Errors defined in this file
+	TypeDefs   []*TypeDef   // Types defined in this file
+	ConstDefs  []*ConstDef  // Consts defined in this file
+	Interfaces []*Interface // Interfaces defined in this file
+	Package    *Package     // Parent package
+
+	TypeDeps    map[*vdl.Type]bool // Types the file depends on
+	PackageDeps []*Package         // Packages the file depends on, sorted by path
+
+	// Imports maps the user-supplied imports from local package name to package
+	// path.  They may be different from PackageDeps since we evaluate all consts
+	// to their final typed value.  E.g. let's say we have three vdl files:
+	//
+	//   a/a.vdl  type Foo int32; const A1 = Foo(1)
+	//   b/b.vdl  import "a";     const B1 = a.Foo(1); const B2 = a.A1 + 1
+	//   c/c.vdl  import "b";     const C1 = b.B1;     const C2 = b.B1 + 1
+	//
+	// The final type and value of the constants:
+	//   A1 = a.Foo(1); B1 = a.Foo(1); C1 = a.Foo(1)
+	//                  B2 = a.Foo(2); C2 = a.Foo(2)
+	//
+	// Note that C1 and C2 both have final type a.Foo, even though c.vdl doesn't
+	// explicitly import "a", and the generated c.go shouldn't import "b" since
+	// it's not actually used anymore.
+	//
+	// Access via LookupImportPath and ValidateImportPackagePath, which also
+	// track whether each import was used.
+	imports map[string]*importPath
+}
+
+// importPath records a single import clause in a file: the imported package
+// path, the position of the clause, and whether the import was ever used.
+type importPath struct {
+	path string
+	pos  parse.Pos
+	used bool // was this import path ever used?
+}
+
+// LookupImportPath translates local into a package path name, based on the
+// imports associated with the file.  Returns the empty string "" if local
+// couldn't be found; every valid package path is non-empty.  A successful
+// lookup marks the import as used.
+func (f *File) LookupImportPath(local string) string {
+	imp, ok := f.imports[local]
+	if !ok {
+		return ""
+	}
+	imp.used = true
+	return imp.path
+}
+
+// ValidateImportPackagePath returns true iff path is listed in the file's
+// imports, and marks the import as used.
+func (f *File) ValidateImportPackagePath(path string) bool {
+	for _, imp := range f.imports {
+		if imp.path != path {
+			continue
+		}
+		imp.used = true
+		return true
+	}
+	return false
+}
+
+// identDetail formats a detail string for calls to DeclareIdent, recording the
+// kind of identifier and the file/position where it was declared.
+func identDetail(kind string, file *File, pos parse.Pos) string {
+	return fmt.Sprintf("%s at %s:%s", kind, file.BaseName, pos)
+}
+
+// DeclareIdent declares ident with the given detail string.  Returns an error
+// if ident conflicts with an existing identifier in this file or package, where
+// the error includes the previous declaration detail.
+func (f *File) DeclareIdent(ident, detail string) error {
+	// Identifiers must be distinct from the import names used in this file,
+	// but can differ by only their capitalization.  E.g.
+	//   import "foo"
+	//   type foo string // BAD, type "foo" collides with import "foo"
+	//   type Foo string //  OK, type "Foo" distinct from import "foo"
+	//   type FoO string //  OK, type "FoO" distinct from import "foo"
+	if i, ok := f.imports[ident]; ok {
+		return fmt.Errorf("previous import at %s", i.pos)
+	}
+	// Identifiers must be distinct from all other identifiers within this
+	// package, and cannot differ by only their capitalization.  E.g.
+	//   type foo string
+	//   const foo = "a" // BAD, const "foo" collides with type "foo"
+	//   const Foo = "A" // BAD, const "Foo" collides with type "foo"
+	//   const FoO = "A" // BAD, const "FoO" collides with type "foo"
+	lower := strings.ToLower(ident)
+	if prevDetail := f.Package.lowercaseIdents[lower]; prevDetail != "" {
+		return fmt.Errorf("previous %s", prevDetail)
+	}
+	f.Package.lowercaseIdents[lower] = detail
+	return nil
+}
+
+// Interface represents a set of embedded interfaces and methods.  Use
+// AllMethods to retrieve the methods of the embedded interfaces as well.
+type Interface struct {
+	NamePos               // interface name, pos and doc
+	Exported bool         // is this interface exported?
+	Embeds   []*Interface // list of embedded interfaces
+	Methods  []*Method    // list of methods
+	File     *File        // parent file
+}
+
+// Method represents a method in an interface.
+type Method struct {
+	NamePos                // method name, pos and doc
+	InArgs    []*Field     // list of positional in-args
+	OutArgs   []*Field     // list of positional out-args
+	InStream  *vdl.Type    // in-stream type, may be nil
+	OutStream *vdl.Type    // out-stream type, may be nil
+	Tags      []*vdl.Value // list of method tags
+}
+
+// Field represents method arguments and error params.
+type Field struct {
+	NamePos           // arg name, pos and doc
+	Type    *vdl.Type // arg type, never nil
+}
+
+// NamePos represents a name, its associated position and documentation.
+type NamePos parse.NamePos
+
+// The String methods below print the %+v form of each type.  Types holding
+// back-pointers (parent package/file) or internal maps first nil those fields
+// out on a shallow copy, to avoid infinite recursion and noisy output.
+func (x *Method) String() string  { return fmt.Sprintf("%+v", *x) }
+func (x *Field) String() string   { return fmt.Sprintf("%+v", *x) }
+func (x *NamePos) String() string { return fmt.Sprintf("%+v", *x) }
+func (x *Package) String() string {
+	c := *x
+	c.typeDefs = nil
+	c.constDefs = nil
+	c.ifaceDefs = nil
+	return fmt.Sprintf("%+v", c)
+}
+func (x *File) String() string {
+	c := *x
+	c.Package = nil // avoid infinite loop
+	return fmt.Sprintf("%+v", c)
+}
+func (x *Interface) String() string {
+	c := *x
+	c.File = nil // avoid infinite loop
+	return fmt.Sprintf("%+v", c)
+}
+// AllMethods returns this interface's own methods followed by the methods of
+// all embedded interfaces, recursively.
+func (x *Interface) AllMethods() []*Method {
+	result := append([]*Method(nil), x.Methods...)
+	for _, embed := range x.Embeds {
+		result = append(result, embed.AllMethods()...)
+	}
+	return result
+}
+// TransitiveEmbeds returns the transitive closure of x's embedded interfaces,
+// in depth-first order; each interface appears at most once.
+func (x *Interface) TransitiveEmbeds() []*Interface {
+	return x.transitiveEmbeds(make(map[*Interface]bool))
+}
+
+// transitiveEmbeds is the recursive worker for TransitiveEmbeds; seen records
+// the interfaces already collected, to dedup diamond-shaped embeddings.
+func (x *Interface) transitiveEmbeds(seen map[*Interface]bool) []*Interface {
+	var ret []*Interface
+	for _, e := range x.Embeds {
+		if !seen[e] {
+			seen[e] = true
+			ret = append(ret, e)
+			ret = append(ret, e.transitiveEmbeds(seen)...)
+		}
+	}
+	return ret
+}
+
+// We might consider allowing more characters, but we'll need to ensure they're
+// allowed in all our codegen languages.
+var (
+	// regexpIdent matches valid VDL identifiers: a leading letter followed by
+	// letters, digits and underscores.
+	regexpIdent = regexp.MustCompile("^[A-Za-z][A-Za-z0-9_]*$")
+)
+
+// ValidIdent returns (exported, err) where err is non-nil iff the identifier
+// is invalid, and exported is true if the identifier is exported.
+// Valid: "^[A-Za-z][A-Za-z0-9_]*$", and not a reserved word under mode.
+func ValidIdent(ident string, mode ReservedMode) (bool, error) {
+	if re := regexpIdent; !re.MatchString(ident) {
+		return false, fmt.Errorf("%q invalid, allowed regexp: %q", ident, re)
+	}
+	if reservedWord(ident, mode) {
+		return false, fmt.Errorf("%q invalid identifier (keyword in a generated language)", ident)
+	}
+	// Exported identifiers start with an uppercase ASCII letter.
+	return ident[0] >= 'A' && ident[0] <= 'Z', nil
+}
+
+// ValidExportedIdent returns nil iff the identifier is valid and exported;
+// otherwise it returns a non-nil error describing the problem.
+func ValidExportedIdent(ident string, mode ReservedMode) error {
+	exported, err := ValidIdent(ident, mode)
+	if err != nil {
+		return err
+	}
+	if !exported {
+		return fmt.Errorf("%q must be exported", ident)
+	}
+	return nil
+}
diff --git a/lib/vdl/compile/type.go b/lib/vdl/compile/type.go
new file mode 100644
index 0000000..c575881
--- /dev/null
+++ b/lib/vdl/compile/type.go
@@ -0,0 +1,367 @@
+package compile
+
+import (
+	"fmt"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/parse"
+)
+
+// TypeDef represents a user-defined named type definition in the compiled
+// results.
+type TypeDef struct {
+	NamePos            // name, parse position and docs
+	Exported bool      // is this type definition exported?
+	Type     *vdl.Type // type of this type definition; filled in during Build
+
+	// BaseType is the type that Type is based on.  The BaseType may be named or
+	// unnamed.  E.g.
+	//                                 BaseType
+	//   type Bool    bool;            bool
+	//   type Bool2   Bool;            Bool
+	//   type List    []int32;         []int32
+	//   type List2   List;            List
+	//   type Struct  struct{A bool};  struct{A bool}
+	//   type Struct2 Struct;          Struct
+	BaseType *vdl.Type
+
+	LabelDoc       []string // [valid for enum] docs for each label
+	LabelDocSuffix []string // [valid for enum] suffix docs for each label
+	FieldDoc       []string // [valid for struct, union] docs for each field
+	FieldDocSuffix []string // [valid for struct, union] suffix docs for each field
+	File           *File    // parent file that this type is defined in
+}
+
+// String returns a human-readable description of the type def, omitting the
+// parent file to avoid infinite recursion.
+func (x *TypeDef) String() string {
+	copied := *x
+	copied.File = nil // avoid infinite loop
+	return fmt.Sprintf("%+v", copied)
+}
+
+// compileTypeDefs is the "entry point" to the rest of this file.  It takes the
+// types defined in pfiles and compiles them into TypeDefs in pkg.  Compilation
+// aborts after any phase that reported errors into env.
+func compileTypeDefs(pkg *Package, pfiles []*parse.File, env *Env) {
+	td := typeDefiner{
+		pkg:      pkg,
+		pfiles:   pfiles,
+		env:      env,
+		tbuilder: &vdl.TypeBuilder{},
+		builders: make(map[string]*typeDefBuilder),
+	}
+	if td.Declare(); !env.Errors.IsEmpty() {
+		return
+	}
+	if td.Define(); !env.Errors.IsEmpty() {
+		return
+	}
+	td.Build()
+	// TODO(toddw): should we disallow inter-file cyclic type dependencies?  That
+	// might be an issue for generated C++.
+}
+
+// typeDefiner defines types in a package.  This is split into three phases:
+// 1) Declare ensures local type references can be resolved.
+// 2) Define describes each type, resolving named references.
+// 3) Build builds all types.
+//
+// It holds a builders map from unqualified type name to typeDefBuilder, where
+// the typeDefBuilder is responsible for compiling and defining a single type.
+type typeDefiner struct {
+	pkg      *Package
+	pfiles   []*parse.File
+	env      *Env
+	tbuilder *vdl.TypeBuilder
+	builders map[string]*typeDefBuilder
+}
+
+// typeDefBuilder carries the state needed to compile and define a single
+// named type across the Declare/Define/Build phases.
+type typeDefBuilder struct {
+	def     *TypeDef
+	ptype   parse.Type
+	pending vdl.PendingNamed // named type that's being built
+	base    vdl.PendingType  // base type that pending is based on
+}
+
+// Declare creates a builder for each type defined in the package, so that
+// local type references can be resolved before the types themselves exist.
+func (td typeDefiner) Declare() {
+	for ix, file := range td.pkg.Files {
+		pfile := td.pfiles[ix]
+		for _, pdef := range pfile.TypeDefs {
+			detail := identDetail("type", file, pdef.Pos)
+			if err := file.DeclareIdent(pdef.Name, detail); err != nil {
+				td.env.prefixErrorf(file, pdef.Pos, err, "type %s name conflict", pdef.Name)
+				continue
+			}
+			td.builders[pdef.Name] = td.makeTypeDefBuilder(file, pdef)
+		}
+	}
+}
+
+// makeTypeDefBuilder validates the type name and (for enums, structs and
+// unions) the label/field names, and sets up the builder with doc slices.
+// Returns nil after recording an error into td.env; compileTypeDefs aborts
+// before Define in that case, so the nil builder is never dereferenced.
+func (td typeDefiner) makeTypeDefBuilder(file *File, pdef *parse.TypeDef) *typeDefBuilder {
+	export, err := ValidIdent(pdef.Name, ReservedNormal)
+	if err != nil {
+		td.env.prefixErrorf(file, pdef.Pos, err, "type %s invalid name", pdef.Name)
+		return nil
+	}
+	ret := new(typeDefBuilder)
+	ret.def = &TypeDef{NamePos: NamePos(pdef.NamePos), Exported: export, File: file}
+	ret.ptype = pdef.Type
+	// We use the qualified name to actually name the type, to ensure types
+	// defined in separate packages are hash-consed separately.
+	qname := file.Package.QualifiedName(pdef.Name)
+	ret.pending = td.tbuilder.Named(qname)
+	switch pt := pdef.Type.(type) {
+	case *parse.TypeEnum:
+		ret.def.LabelDoc = make([]string, len(pt.Labels))
+		ret.def.LabelDocSuffix = make([]string, len(pt.Labels))
+		for index, plabel := range pt.Labels {
+			if err := ValidExportedIdent(plabel.Name, ReservedCamelCase); err != nil {
+				td.env.prefixErrorf(file, plabel.Pos, err, "invalid enum label name %s", plabel.Name)
+				return nil
+			}
+			ret.def.LabelDoc[index] = plabel.Doc
+			ret.def.LabelDocSuffix[index] = plabel.DocSuffix
+		}
+	case *parse.TypeStruct:
+		ret = attachFieldDoc(ret, pt.Fields, file, td.env)
+	case *parse.TypeUnion:
+		ret = attachFieldDoc(ret, pt.Fields, file, td.env)
+	}
+	return ret
+}
+
+// attachFieldDoc validates struct/union field names and copies their docs into
+// ret.  Returns nil (after recording an error into env) on an invalid name.
+func attachFieldDoc(ret *typeDefBuilder, fields []*parse.Field, file *File, env *Env) *typeDefBuilder {
+	ret.def.FieldDoc = make([]string, len(fields))
+	ret.def.FieldDocSuffix = make([]string, len(fields))
+	for index, pfield := range fields {
+		if err := ValidExportedIdent(pfield.Name, ReservedCamelCase); err != nil {
+			env.prefixErrorf(file, pfield.Pos, err, "invalid field name %s", pfield.Name)
+			return nil
+		}
+		ret.def.FieldDoc[index] = pfield.Doc
+		ret.def.FieldDocSuffix[index] = pfield.DocSuffix
+	}
+	return ret
+}
+
+// Define uses the builders to describe each type.  Named types defined in
+// other packages must have already been compiled, and in env.  Named types
+// defined in this package are represented by the builders.
+func (td typeDefiner) Define() {
+	for _, b := range td.builders {
+		def, file := b.def, b.def.File
+		base := compileDefinedType(b.ptype, file, td.env, td.tbuilder, td.builders)
+		switch tbase := base.(type) {
+		case nil:
+			continue // keep going to catch more errors
+		case *vdl.Type:
+			// The base type is already fully built (defined elsewhere).
+			if tbase == vdl.ErrorType {
+				td.env.Errorf(file, def.Pos, "error cannot be renamed")
+				continue // keep going to catch more errors
+			}
+			def.BaseType = tbase
+		case vdl.PendingType:
+			// The base type is being built in this package; resolved in Build.
+			b.base = tbase
+		default:
+			panic(fmt.Errorf("vdl: typeDefiner.Define unhandled TypeOrPending %T %v", tbase, tbase))
+		}
+		b.pending.AssignBase(base)
+	}
+}
+
+// compileType returns the *vdl.Type corresponding to ptype.  All named types
+// referenced by ptype must already be defined.  Returns nil after recording an
+// error into env on failure.
+func compileType(ptype parse.Type, file *File, env *Env) *vdl.Type {
+	// A private builder is used here, since no cross-definition state is needed.
+	var tbuilder vdl.TypeBuilder
+	typeOrPending := compileLiteralType(ptype, file, env, &tbuilder, nil)
+	tbuilder.Build()
+	switch top := typeOrPending.(type) {
+	case nil:
+		return nil
+	case *vdl.Type:
+		return top
+	case vdl.PendingType:
+		t, err := top.Built()
+		if err != nil {
+			env.prefixErrorf(file, ptype.Pos(), err, "invalid type")
+			return nil
+		}
+		return t
+	default:
+		panic(fmt.Errorf("vdl: compileType unhandled TypeOrPending %T %v", top, top))
+	}
+}
+
+// compileDefinedType compiles ptype.  It can handle definitions based on array,
+// enum, struct and union, as well as definitions based on any literal type.
+// Returns nil after recording an error into env on failure.
+func compileDefinedType(ptype parse.Type, file *File, env *Env, tbuilder *vdl.TypeBuilder, builders map[string]*typeDefBuilder) vdl.TypeOrPending {
+	switch pt := ptype.(type) {
+	case *parse.TypeArray:
+		elem := compileLiteralType(pt.Elem, file, env, tbuilder, builders)
+		if elem == nil {
+			return nil
+		}
+		return tbuilder.Array().AssignLen(pt.Len).AssignElem(elem)
+	case *parse.TypeEnum:
+		enum := tbuilder.Enum()
+		for _, plabel := range pt.Labels {
+			enum.AppendLabel(plabel.Name)
+		}
+		return enum
+	case *parse.TypeStruct:
+		st := tbuilder.Struct()
+		for _, pfield := range pt.Fields {
+			ftype := compileLiteralType(pfield.Type, file, env, tbuilder, builders)
+			if ftype == nil {
+				return nil
+			}
+			st.AppendField(pfield.Name, ftype)
+		}
+		return st
+	case *parse.TypeUnion:
+		union := tbuilder.Union()
+		for _, pfield := range pt.Fields {
+			ftype := compileLiteralType(pfield.Type, file, env, tbuilder, builders)
+			if ftype == nil {
+				return nil
+			}
+			union.AppendField(pfield.Name, ftype)
+		}
+		return union
+	}
+	// Everything else is a literal type.
+	lit := compileLiteralType(ptype, file, env, tbuilder, builders)
+	if _, ok := lit.(vdl.PendingOptional); ok {
+		// Don't allow Optional at the top-level of a type definition.  The purpose
+		// of this rule is twofold:
+		// 1) Reduce confusion; the Optional modifier cannot be hidden in a type
+		//    definition, it must be explicitly mentioned on each use.
+		// 2) The Optional concept is typically translated to pointers in generated
+		//    languages, and many languages don't support named pointer types.
+		//
+		//   type A string            // ok
+		//   type B []?string         // ok
+		//   type C struct{X ?string} // ok
+		//   type D ?string           // bad
+		//   type E ?struct{X string} // bad
+		env.Errorf(file, ptype.Pos(), "can't define type based on top-level optional")
+		return nil
+	}
+	return lit
+}
+
+// compileLiteralType compiles ptype.  It can handle any literal type.  Note
+// that array, enum, struct and union are required to be defined and named,
+// and aren't allowed as regular literal types; they are rejected by the
+// default case below.  Returns nil after recording an error into env.
+func compileLiteralType(ptype parse.Type, file *File, env *Env, tbuilder *vdl.TypeBuilder, builders map[string]*typeDefBuilder) vdl.TypeOrPending {
+	switch pt := ptype.(type) {
+	case *parse.TypeNamed:
+		// Already-compiled types (built-ins, other packages, or earlier in this
+		// package) resolve through env; types still being compiled in this
+		// package resolve through builders.
+		if def, matched := env.ResolveType(pt.Name, file); def != nil {
+			if len(matched) < len(pt.Name) {
+				env.Errorf(file, pt.Pos(), "type %s invalid (%s unmatched)", pt.Name, pt.Name[len(matched):])
+				return nil
+			}
+			return def.Type
+		}
+		if b, ok := builders[pt.Name]; ok {
+			return b.pending
+		}
+		env.Errorf(file, pt.Pos(), "type %s undefined", pt.Name)
+		return nil
+	case *parse.TypeList:
+		elem := compileLiteralType(pt.Elem, file, env, tbuilder, builders)
+		if elem == nil {
+			return nil
+		}
+		return tbuilder.List().AssignElem(elem)
+	case *parse.TypeSet:
+		key := compileLiteralType(pt.Key, file, env, tbuilder, builders)
+		if key == nil {
+			return nil
+		}
+		return tbuilder.Set().AssignKey(key)
+	case *parse.TypeMap:
+		key := compileLiteralType(pt.Key, file, env, tbuilder, builders)
+		elem := compileLiteralType(pt.Elem, file, env, tbuilder, builders)
+		if key == nil || elem == nil {
+			return nil
+		}
+		return tbuilder.Map().AssignKey(key).AssignElem(elem)
+	case *parse.TypeOptional:
+		elem := compileLiteralType(pt.Base, file, env, tbuilder, builders)
+		if elem == nil {
+			return nil
+		}
+		return tbuilder.Optional().AssignElem(elem)
+	default:
+		env.Errorf(file, pt.Pos(), "unnamed %s type invalid (type must be defined)", ptype.Kind())
+		return nil
+	}
+}
+
+// Build actually builds each type and updates the package with the typedefs.
+// The order we call each pending type doesn't matter; the veyron2/vdl package
+// deals with arbitrary orders, and supports recursive types.  However we want
+// the order to be deterministic, otherwise the output will constantly change.
+// So we use the same order as the parsed file.
+func (td typeDefiner) Build() {
+	td.tbuilder.Build()
+	for _, pfile := range td.pfiles {
+		for _, pdef := range pfile.TypeDefs {
+			b := td.builders[pdef.Name]
+			def, file := b.def, b.def.File
+			// Resolve the base type first, if it was pending (defined in this
+			// package); bases defined elsewhere were set directly in Define.
+			if b.base != nil {
+				base, err := b.base.Built()
+				if err != nil {
+					td.env.prefixErrorf(file, b.ptype.Pos(), err, "%s base type invalid", def.Name)
+					continue // keep going to catch more errors
+				}
+				def.BaseType = base
+			}
+			t, err := b.pending.Built()
+			if err != nil {
+				td.env.prefixErrorf(file, def.Pos, err, "%s invalid", def.Name)
+				continue // keep going to catch more errors
+			}
+			def.Type = t
+			addTypeDef(def, td.env)
+		}
+	}
+	// Make another pass to fill in doc and doc suffix slices for enums, structs
+	// and unions.  Typically these are initialized in makeTypeDefBuilder, based
+	// on the underlying parse data.  But type definitions based on other named
+	// types can't be updated until the base type is actually compiled.
+	//
+	// TODO(toddw): This doesn't actually attach comments from the base type, it
+	// just leaves everything empty.  This is fine for now, but we should revamp
+	// the vdl parsing / comment attaching strategy in the future.
+	for _, file := range td.pkg.Files {
+		for _, def := range file.TypeDefs {
+			switch t := def.Type; t.Kind() {
+			case vdl.Enum:
+				if len(def.LabelDoc) == 0 {
+					def.LabelDoc = make([]string, t.NumEnumLabel())
+				}
+				if len(def.LabelDocSuffix) == 0 {
+					def.LabelDocSuffix = make([]string, t.NumEnumLabel())
+				}
+			case vdl.Struct, vdl.Union:
+				if len(def.FieldDoc) == 0 {
+					def.FieldDoc = make([]string, t.NumField())
+				}
+				if len(def.FieldDocSuffix) == 0 {
+					def.FieldDocSuffix = make([]string, t.NumField())
+				}
+			}
+		}
+	}
+}
+
+// addTypeDef updates our various structures to add a new type def.
+func addTypeDef(def *TypeDef, env *Env) {
+	file := def.File
+	file.TypeDefs = append(file.TypeDefs, def)
+	file.Package.typeDefs[def.Name] = def
+	if env == nil {
+		// env is only nil during initialization of the built-in package;
+		// NewEnv ensures new environments have the built-in types.
+		return
+	}
+	env.typeDefs[def.Type] = def
+}
diff --git a/lib/vdl/compile/type_test.go b/lib/vdl/compile/type_test.go
new file mode 100644
index 0000000..332da7a
--- /dev/null
+++ b/lib/vdl/compile/type_test.go
@@ -0,0 +1,192 @@
+package compile_test
+
+import (
+	"testing"
+
+	"v.io/v23/vdl"
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/compile"
+	"v.io/v23/vdl/vdltest"
+)
+
// qual is a sentinel ErrRE value for the test tables below: it marks cases
// that are only expected to fail when path-qualified identifiers are
// disallowed.  testType rewrites it according to the mode it runs in.
const qual = "package path qualified identifier"
+
+func testType(t *testing.T, test typeTest, qualifiedPaths bool) {
+	env := compile.NewEnv(-1)
+	if !qualifiedPaths {
+		env.DisallowPathQualifiers()
+		test.Name = "NoQual" + test.Name
+	}
+	for _, tpkg := range test.Pkgs {
+		// Compile the package with a single file, and adding the "package foo"
+		// prefix to the source data automatically.
+		files := map[string]string{
+			tpkg.Name + ".vdl": "package " + tpkg.Name + "\n" + tpkg.Data,
+		}
+		pkgPath := "p.kg/" + tpkg.Name // use dots in pkgpath to test tricky cases
+		buildPkg := vdltest.FakeBuildPackage(tpkg.Name, pkgPath, files)
+		pkg := build.BuildPackage(buildPkg, env)
+		if tpkg.ErrRE == qual {
+			if qualifiedPaths {
+				tpkg.ErrRE = "" // the test should pass if running with qualified paths.
+			} else {
+				tpkg.ExpectBase = nil // otherwise the test should fail
+			}
+		}
+		vdltest.ExpectResult(t, env.Errors, test.Name, tpkg.ErrRE)
+		if pkg == nil || tpkg.ErrRE != "" {
+			continue
+		}
+		matchTypeRes(t, test.Name, tpkg, pkg.Files[0].TypeDefs)
+	}
+}
+
+func TestType(t *testing.T) {
+	// Run all tests in both regular and qualfiedPaths mode
+	for _, test := range typeTests {
+		testType(t, test, false)
+	}
+	for _, test := range typeTests {
+		testType(t, test, true)
+	}
+}
+
+func matchTypeRes(t *testing.T, tname string, tpkg typePkg, tdefs []*compile.TypeDef) {
+	if tpkg.ExpectBase == nil {
+		return
+	}
+	// Look for a TypeDef called "Res" to compare our expected results.
+	for _, tdef := range tdefs {
+		if tdef.Name == "Res" {
+			base := tpkg.ExpectBase
+			resname := "p.kg/" + tpkg.Name + ".Res"
+			res := vdl.NamedType(resname, base)
+			if got, want := tdef.Type, res; got != want {
+				t.Errorf("%s type got %s, want %s", tname, got, want)
+			}
+			if got, want := tdef.BaseType, base; got != want {
+				t.Errorf("%s base type got %s, want %s", tname, got, want)
+			}
+			return
+		}
+	}
+	t.Errorf("%s couldn't find Res in package %s", tname, tpkg.Name)
+}
+
// namedX and namedRes wrap base in the named types produced by test
// package "a" ("x" and "Res" respectively), for use as expected results.
func namedX(base *vdl.Type) *vdl.Type   { return vdl.NamedType("p.kg/a.x", base) }
func namedRes(base *vdl.Type) *vdl.Type { return vdl.NamedType("p.kg/a.Res", base) }

// Shorthands for the []byte and [4]byte types used by several cases below.
var byteListType = vdl.ListType(vdl.ByteType)
var byteArrayType = vdl.ArrayType(4, vdl.ByteType)
+
// typePkg specifies a single package to compile as part of a type test.
type typePkg struct {
	Name       string    // package name; also determines the file name and package path
	Data       string    // vdl source, compiled after an auto-generated "package Name" clause
	ExpectBase *vdl.Type // expected base type of the "Res" def; nil skips the check
	ErrRE      string    // regexp the compile error must match; "" means success is expected
}

// tp abbreviates a list of packages, keeping the test table compact.
type tp []typePkg

// typeTest names a scenario and lists the packages compiled for it, in order.
type typeTest struct {
	Name string
	Pkgs tp
}
+
// typeTests drives TestType.  Each entry compiles its packages in order;
// a package either expects a compile error matching ErrRE, or a
// successfully compiled "Res" type whose base type must equal ExpectBase.
// The sentinel ErrRE value qual marks cases whose outcome depends on
// whether path-qualified identifiers are allowed (see testType).
var typeTests = []typeTest{
	// Test named built-ins.
	{"Bool", tp{{"a", `type Res bool`, vdl.BoolType, ""}}},
	{"Byte", tp{{"a", `type Res byte`, vdl.ByteType, ""}}},
	{"Uint16", tp{{"a", `type Res uint16`, vdl.Uint16Type, ""}}},
	{"Uint32", tp{{"a", `type Res uint32`, vdl.Uint32Type, ""}}},
	{"Uint64", tp{{"a", `type Res uint64`, vdl.Uint64Type, ""}}},
	{"Int16", tp{{"a", `type Res int16`, vdl.Int16Type, ""}}},
	{"Int32", tp{{"a", `type Res int32`, vdl.Int32Type, ""}}},
	{"Int64", tp{{"a", `type Res int64`, vdl.Int64Type, ""}}},
	{"Float32", tp{{"a", `type Res float32`, vdl.Float32Type, ""}}},
	{"Float64", tp{{"a", `type Res float64`, vdl.Float64Type, ""}}},
	{"Complex64", tp{{"a", `type Res complex64`, vdl.Complex64Type, ""}}},
	{"Complex128", tp{{"a", `type Res complex128`, vdl.Complex128Type, ""}}},
	{"String", tp{{"a", `type Res string`, vdl.StringType, ""}}},
	{"ByteList", tp{{"a", `type Res []byte`, byteListType, ""}}},
	{"ByteArray", tp{{"a", `type Res [4]byte`, byteArrayType, ""}}},
	{"Typeobject", tp{{"a", `type Res typeobject`, nil, "any and typeobject cannot be renamed"}}},
	{"Any", tp{{"a", `type Res any`, nil, "any and typeobject cannot be renamed"}}},
	{"Error", tp{{"a", `type Res error`, nil, "error cannot be renamed"}}},

	// Test composite vdl.
	{"Enum", tp{{"a", `type Res enum{A;B;C}`, vdl.EnumType("A", "B", "C"), ""}}},
	{"Array", tp{{"a", `type Res [2]bool`, vdl.ArrayType(2, vdl.BoolType), ""}}},
	{"List", tp{{"a", `type Res []int32`, vdl.ListType(vdl.Int32Type), ""}}},
	{"Set", tp{{"a", `type Res set[int32]`, vdl.SetType(vdl.Int32Type), ""}}},
	{"Map", tp{{"a", `type Res map[int32]string`, vdl.MapType(vdl.Int32Type, vdl.StringType), ""}}},
	{"Struct", tp{{"a", `type Res struct{A int32;B string}`, vdl.StructType([]vdl.Field{{"A", vdl.Int32Type}, {"B", vdl.StringType}}...), ""}}},
	{"Union", tp{{"a", `type Res union{A bool;B int32;C string}`, vdl.UnionType([]vdl.Field{{"A", vdl.BoolType}, {"B", vdl.Int32Type}, {"C", vdl.StringType}}...), ""}}},
	{"Optional", tp{{"a", `type Res []?x;type x struct{A bool}`, vdl.ListType(vdl.OptionalType(namedX(vdl.StructType(vdl.Field{"A", vdl.BoolType})))), ""}}},

	// Test named types based on named types.
	{"NBool", tp{{"a", `type Res x;type x bool`, namedX(vdl.BoolType), ""}}},
	{"NByte", tp{{"a", `type Res x;type x byte`, namedX(vdl.ByteType), ""}}},
	{"NUint16", tp{{"a", `type Res x;type x uint16`, namedX(vdl.Uint16Type), ""}}},
	{"NUint32", tp{{"a", `type Res x;type x uint32`, namedX(vdl.Uint32Type), ""}}},
	{"NUint64", tp{{"a", `type Res x;type x uint64`, namedX(vdl.Uint64Type), ""}}},
	{"NInt16", tp{{"a", `type Res x;type x int16`, namedX(vdl.Int16Type), ""}}},
	{"NInt32", tp{{"a", `type Res x;type x int32`, namedX(vdl.Int32Type), ""}}},
	{"NInt64", tp{{"a", `type Res x;type x int64`, namedX(vdl.Int64Type), ""}}},
	{"NFloat32", tp{{"a", `type Res x;type x float32`, namedX(vdl.Float32Type), ""}}},
	{"NFloat64", tp{{"a", `type Res x;type x float64`, namedX(vdl.Float64Type), ""}}},
	{"NComplex64", tp{{"a", `type Res x;type x complex64`, namedX(vdl.Complex64Type), ""}}},
	{"NComplex128", tp{{"a", `type Res x;type x complex128`, namedX(vdl.Complex128Type), ""}}},
	{"NString", tp{{"a", `type Res x;type x string`, namedX(vdl.StringType), ""}}},
	{"NByteList", tp{{"a", `type Res x;type x []byte`, namedX(byteListType), ""}}},
	{"NByteArray", tp{{"a", `type Res x;type x [4]byte`, namedX(byteArrayType), ""}}},
	{"NEnum", tp{{"a", `type Res x;type x enum{A;B;C}`, namedX(vdl.EnumType("A", "B", "C")), ""}}},
	{"NArray", tp{{"a", `type Res x;type x [2]bool`, namedX(vdl.ArrayType(2, vdl.BoolType)), ""}}},
	{"NList", tp{{"a", `type Res x;type x []int32`, namedX(vdl.ListType(vdl.Int32Type)), ""}}},
	{"NSet", tp{{"a", `type Res x;type x set[int32]`, namedX(vdl.SetType(vdl.Int32Type)), ""}}},
	{"NMap", tp{{"a", `type Res x;type x map[int32]string`, namedX(vdl.MapType(vdl.Int32Type, vdl.StringType)), ""}}},
	{"NStruct", tp{{"a", `type Res x;type x struct{A int32;B string}`, namedX(vdl.StructType([]vdl.Field{{"A", vdl.Int32Type}, {"B", vdl.StringType}}...)), ""}}},
	{"NUnion", tp{{"a", `type Res x; type x union{A bool;B int32;C string}`, namedX(vdl.UnionType([]vdl.Field{{"A", vdl.BoolType}, {"B", vdl.Int32Type}, {"C", vdl.StringType}}...)), ""}}},

	// Test multi-package types
	{"MultiPkgSameTypeName", tp{
		{"a", `type Res bool`, vdl.BoolType, ""},
		{"b", `type Res bool`, vdl.BoolType, ""}}},
	{"MultiPkgDep", tp{
		{"a", `type Res x;type x bool`, namedX(vdl.BoolType), ""},
		{"b", `import "p.kg/a";type Res []a.Res`, vdl.ListType(namedRes(vdl.BoolType)), ""}}},
	{"MultiPkgDepQualifiedPath", tp{
		{"a", `type Res x;type x bool`, namedX(vdl.BoolType), ""},
		{"b", `import "p.kg/a";type Res []"p.kg/a".Res`, vdl.ListType(namedRes(vdl.BoolType)), qual}}},
	{"MultiPkgUnexportedType", tp{
		{"a", `type Res x;type x bool`, namedX(vdl.BoolType), ""},
		{"b", `import "p.kg/a";type Res []a.x`, nil, "type a.x undefined"}}},
	{"MultiPkgSamePkgName", tp{
		{"a", `type Res bool`, vdl.BoolType, ""},
		{"a", `type Res bool`, nil, "invalid recompile"}}},
	{"MultiPkgUnimportedPkg", tp{
		{"a", `type Res bool`, vdl.BoolType, ""},
		{"b", `type Res []a.Res`, nil, "type a.Res undefined"}}},
	{"RedefinitionOfImportedName", tp{
		{"a", `type Res bool`, vdl.BoolType, ""},
		{"b", `import "p.kg/a"; type a string; type Res a`, nil, "type a name conflict"}}},

	// Test errors.
	{"InvalidName", tp{{"a", `type _Res bool`, nil, "type _Res invalid name"}}},
	{"Undefined", tp{{"a", `type Res foo`, nil, "type foo undefined"}}},
	{"UnnamedArray", tp{{"a", `type Res [][3]int64`, nil, "unnamed array type invalid"}}},
	{"UnnamedEnum", tp{{"a", `type Res []enum{A;B;C}`, nil, "unnamed enum type invalid"}}},
	{"UnnamedStruct", tp{{"a", `type Res []struct{A int32}`, nil, "unnamed struct type invalid"}}},
	{"UnnamedUnion", tp{{"a", `type Res []union{A bool;B int32;C string}`, nil, "unnamed union type invalid"}}},
	{"TopLevelOptional", tp{{"a", `type Res ?bool`, nil, "can't define type based on top-level optional"}}},
	{"MultiPkgUnmatchedType", tp{
		{"a", `type Res bool`, vdl.BoolType, ""},
		{"b", `import "p.kg/a";type Res a.Res.foobar`, nil, `\(\.foobar unmatched\)`}}},
	{"UnterminatedPath1", tp{
		{"a", `type Res bool`, vdl.BoolType, ""},
		{"b", `import "p.kg/a";type Res "a.Res`, nil, "syntax error"}}},
	{"UnterminatedPath2", tp{
		{"a", `type Res bool`, vdl.BoolType, ""},
		{"b", `import "p.kg/a";type Res a".Res`, nil, "syntax error"}}},
	{"ZeroLengthArray", tp{{"a", `type Res [0]int32`, nil, "negative or zero array length"}}},
}
diff --git a/lib/vdl/opconst/big_complex.go b/lib/vdl/opconst/big_complex.go
new file mode 100644
index 0000000..b0863e3
--- /dev/null
+++ b/lib/vdl/opconst/big_complex.go
@@ -0,0 +1,88 @@
+package opconst
+
+import (
+	"math/big"
+)
+
+// bigComplex represents a constant complex number.  The semantics are similar
+// to big.Rat; methods are typically of the form:
+//   func (z *bigComplex) Op(x, y *bigComplex) *bigComplex
+// and implement operations z = x Op y with the result as receiver.
+type bigComplex struct {
+	re, im big.Rat
+}
+
+func newComplex(re, im *big.Rat) *bigComplex {
+	return &bigComplex{*re, *im}
+}
+
+// realComplex returns a bigComplex with real part re, and imaginary part zero.
+func realComplex(re *big.Rat) *bigComplex {
+	return &bigComplex{re: *re}
+}
+
+// imagComplex returns a bigComplex with real part zero, and imaginary part im.
+func imagComplex(im *big.Rat) *bigComplex {
+	return &bigComplex{im: *im}
+}
+
+func (z *bigComplex) SetComplex128(c complex128) *bigComplex {
+	z.re.SetFloat64(real(c))
+	z.im.SetFloat64(imag(c))
+	return z
+}
+
+func (z *bigComplex) Equal(x *bigComplex) bool {
+	return z.re.Cmp(&x.re) == 0 && z.im.Cmp(&x.im) == 0
+}
+
+func (z *bigComplex) Add(x, y *bigComplex) *bigComplex {
+	z.re.Add(&x.re, &y.re)
+	z.im.Add(&x.im, &y.im)
+	return z
+}
+
+func (z *bigComplex) Sub(x, y *bigComplex) *bigComplex {
+	z.re.Sub(&x.re, &y.re)
+	z.im.Sub(&x.im, &y.im)
+	return z
+}
+
+func (z *bigComplex) Neg(x *bigComplex) *bigComplex {
+	z.re.Neg(&x.re)
+	z.im.Neg(&x.im)
+	return z
+}
+
+func (z *bigComplex) Mul(x, y *bigComplex) *bigComplex {
+	// (a+bi) * (c+di) = (ac-bd) + (bc+ad)i
+	var ac, ad, bc, bd big.Rat
+	ac.Mul(&x.re, &y.re)
+	ad.Mul(&x.re, &y.im)
+	bc.Mul(&x.im, &y.re)
+	bd.Mul(&x.im, &y.im)
+	z.re.Sub(&ac, &bd)
+	z.im.Add(&bc, &ad)
+	return z
+}
+
+func (z *bigComplex) Div(x, y *bigComplex) (*bigComplex, error) {
+	// (a+bi) / (c+di) = (a+bi)(c-di) / (c+di)(c-di)
+	//                 = ((ac+bd) + (bc-ad)i) / (cc+dd)
+	//                 = (ac+bd)/(cc+dd) + ((bc-ad)/(cc+dd))i
+	a, b, c, d := &x.re, &x.im, &y.re, &y.im
+	var ac, ad, bc, bd, cc, dd, ccdd big.Rat
+	ac.Mul(a, c)
+	ad.Mul(a, d)
+	bc.Mul(b, c)
+	bd.Mul(b, d)
+	cc.Mul(c, c)
+	dd.Mul(d, d)
+	ccdd.Add(&cc, &dd)
+	if ccdd.Cmp(bigRatZero) == 0 {
+		return nil, errDivZero
+	}
+	z.re.Add(&ac, &bd).Quo(&z.re, &ccdd)
+	z.im.Sub(&bc, &ad).Quo(&z.im, &ccdd)
+	return z, nil
+}
diff --git a/lib/vdl/opconst/const.go b/lib/vdl/opconst/const.go
new file mode 100644
index 0000000..b1dda6d
--- /dev/null
+++ b/lib/vdl/opconst/const.go
@@ -0,0 +1,902 @@
+// Package opconst provides the representation and operations for vdl constants.
+package opconst
+
+import (
+	"errors"
+	"fmt"
+	"math"
+	"math/big"
+	"strconv"
+
+	"v.io/v23/vdl"
+)
+
var (
	// Shared big.Int/big.Rat constants, allocated once and treated as
	// read-only.
	bigIntZero     = new(big.Int)
	bigRatZero     = new(big.Rat)
	bigIntOne      = big.NewInt(1)
	// Smallest/largest absolute magnitudes representable by float32 and
	// float64, used by the overflow/underflow checks.
	bigRatAbsMin32 = new(big.Rat).SetFloat64(math.SmallestNonzeroFloat32)
	bigRatAbsMax32 = new(big.Rat).SetFloat64(math.MaxFloat32)
	bigRatAbsMin64 = new(big.Rat).SetFloat64(math.SmallestNonzeroFloat64)
	bigRatAbsMax64 = new(big.Rat).SetFloat64(math.MaxFloat64)
	maxShiftSize   = big.NewInt(463) // use the same max as Go

	errInvalidConst = errors.New("invalid const")
	errConvertNil   = errors.New("invalid conversion to untyped const")
	errDivZero      = errors.New("divide by zero")
)
+
// Const represents a constant value, similar in spirit to Go constants.  Consts
// may be typed or untyped.  Typed consts represent unchanging Values; all
// Values may be converted into valid typed consts, and all typed consts may be
// converted into valid Values.  Untyped consts belong to one of the following
// categories:
//   untyped boolean
//   untyped string
//   untyped integer
//   untyped rational
//   untyped complex
// Literal consts are untyped, as are expressions only containing untyped
// consts.  The result of comparison operations is untyped boolean.
//
// Operations are represented by UnaryOp and BinaryOp, and are supported on
// Consts, but not Values.  We support common logical, bitwise, comparison and
// arithmetic operations.  Not all operations are supported on all consts.
//
// Binary ops where both sides are typed consts return errors on type
// mismatches; e.g. uint32(1) + uint64(1) is an invalid binary add.  Ops on
// typed consts also return errors on loss of precision; e.g. uint32(1.1)
// returns an error.
//
// Binary ops where one or both sides are untyped consts perform implicit type
// conversion.  E.g. uint32(1) + 1 is a valid binary add, where the
// right-hand-side is the untyped integer const 1, which is coerced to the
// uint32 type before the op is performed.  Operations only containing untyped
// consts are performed with "infinite" precision.
//
// The zero Const is invalid; use IsValid to detect it.
type Const struct {
	// rep holds the underlying representation, it may be one of:
	//   bool        - Represents typed and untyped boolean constants.
	//   string      - Represents typed and untyped string constants.
	//   *big.Int    - Represents typed and untyped integer constants.
	//   *big.Rat    - Represents typed and untyped rational constants.
	//   *bigComplex - Represents typed and untyped complex constants.
	//   *Value      - Represents all other typed constants.
	rep interface{}

	// repType holds the type of rep.  If repType is nil the constant is untyped,
	// otherwise the constant is typed, and rep must match the kind of repType.
	// If rep is a *Value, repType is always non-nil.
	repType *vdl.Type
}
+
// Boolean returns an untyped boolean Const.
func Boolean(x bool) Const { return Const{x, nil} }

// String returns an untyped string Const.
func String(x string) Const { return Const{x, nil} }

// Integer returns an untyped integer Const.  The Const aliases x, so the
// caller should not mutate x afterwards.
func Integer(x *big.Int) Const { return Const{x, nil} }

// Rational returns an untyped rational Const.  The Const aliases x, so the
// caller should not mutate x afterwards.
func Rational(x *big.Rat) Const { return Const{x, nil} }

// Complex returns an untyped complex Const, holding copies of re and im.
func Complex(re, im *big.Rat) Const { return Const{newComplex(re, im), nil} }
+
+// FromValue returns a typed Const based on value v.
+func FromValue(v *vdl.Value) Const {
+	if v.Type().IsBytes() {
+		// Represent []byte and [N]byte as a string, so that conversions are easy.
+		return Const{string(v.Bytes()), v.Type()}
+	}
+	switch v.Kind() {
+	case vdl.Bool:
+		if v.Type() == vdl.BoolType { // Treat unnamed bool as untyped bool.
+			return Boolean(v.Bool())
+		}
+		return Const{v.Bool(), v.Type()}
+	case vdl.String:
+		if v.Type() == vdl.StringType { // Treat unnamed string as untyped string.
+			return String(v.RawString())
+		}
+		return Const{v.RawString(), v.Type()}
+	case vdl.Byte:
+		return Const{new(big.Int).SetUint64(uint64(v.Byte())), v.Type()}
+	case vdl.Uint16, vdl.Uint32, vdl.Uint64:
+		return Const{new(big.Int).SetUint64(v.Uint()), v.Type()}
+	case vdl.Int16, vdl.Int32, vdl.Int64:
+		return Const{new(big.Int).SetInt64(v.Int()), v.Type()}
+	case vdl.Float32, vdl.Float64:
+		return Const{new(big.Rat).SetFloat64(v.Float()), v.Type()}
+	case vdl.Complex64, vdl.Complex128:
+		return Const{new(bigComplex).SetComplex128(v.Complex()), v.Type()}
+	default:
+		return Const{v, v.Type()}
+	}
+}
+
// IsValid returns true iff the c represents a const; it returns false for the
// zero Const (whose rep is nil).
func (c Const) IsValid() bool {
	return c.rep != nil
}

// Type returns the type of c.  Nil indicates c is an untyped const.
func (c Const) Type() *vdl.Type {
	return c.repType
}
+
+// Convert converts c to the target type t, and returns the resulting const.
+// Returns an error if t is nil; you're not allowed to convert into an untyped
+// const.
+func (c Const) Convert(t *vdl.Type) (Const, error) {
+	if t == nil {
+		return Const{}, errConvertNil
+	}
+	// If we're trying to convert to Any or Union, or if c is already a vdl.Value,
+	// use vdl.Convert to convert as a vdl.Value.
+	_, isValue := c.rep.(*vdl.Value)
+	if isValue || t.Kind() == vdl.Any || t.Kind() == vdl.Union {
+		src, err := c.ToValue()
+		if err != nil {
+			return Const{}, err
+		}
+		dst := vdl.ZeroValue(t)
+		if err := vdl.Convert(dst, src); err != nil {
+			return Const{}, err
+		}
+		return FromValue(dst), nil
+	}
+	// Otherwise use makeConst to convert as a Const.
+	return makeConst(c.rep, t)
+}
+
+func (c Const) String() string {
+	if !c.IsValid() {
+		return "invalid"
+	}
+	if v, ok := c.rep.(*vdl.Value); ok {
+		return v.String()
+	}
+	if c.repType == nil {
+		// E.g. 12345
+		return cRepString(c.rep)
+	}
+	// E.g. int32(12345)
+	return c.typeString() + "(" + cRepString(c.rep) + ")"
+}
+
+func (c Const) typeString() string {
+	return cRepTypeString(c.rep, c.repType)
+}
+
+// cRepString returns a human-readable string representing the const value.
+func cRepString(rep interface{}) string {
+	switch trep := rep.(type) {
+	case nil:
+		return "" // invalid const
+	case bool:
+		if trep {
+			return "true"
+		}
+		return "false"
+	case string:
+		return strconv.Quote(trep)
+	case *big.Int:
+		return trep.String()
+	case *big.Rat:
+		if trep.IsInt() {
+			return trep.Num().String() + ".0"
+		}
+		frep, _ := trep.Float64()
+		return strconv.FormatFloat(frep, 'g', -1, 64)
+	case *bigComplex:
+		return fmt.Sprintf("%v+%vi", cRepString(&trep.re), cRepString(&trep.im))
+	case *vdl.Value:
+		return trep.String()
+	default:
+		panic(fmt.Errorf("val: unhandled const type %T value %v", rep, rep))
+	}
+}
+
+// cRepTypeString returns a human-readable string representing the type of
+// the const value.
+func cRepTypeString(rep interface{}, t *vdl.Type) string {
+	if t != nil {
+		return t.String()
+	}
+	switch rep.(type) {
+	case nil:
+		return "invalid"
+	case bool:
+		return "untyped boolean"
+	case string:
+		return "untyped string"
+	case *big.Int:
+		return "untyped integer"
+	case *big.Rat:
+		return "untyped rational"
+	case *bigComplex:
+		return "untyped complex"
+	default:
+		panic(fmt.Errorf("val: unhandled const type %T value %v", rep, rep))
+	}
+}
+
// ToValue converts Const c to a Value.  Untyped bool and string consts are
// implicitly typed as vdl.BoolType / vdl.StringType; all other untyped
// consts must be assigned a type first and return an error otherwise.
func (c Const) ToValue() (*vdl.Value, error) {
	if c.rep == nil {
		return nil, errInvalidConst
	}
	// All const defs must have a type.  We implicitly assign bool and string, but
	// the user must explicitly assign a type for numeric consts.  (c is a value
	// receiver, so setting repType here doesn't affect the caller's Const.)
	if c.repType == nil {
		switch c.rep.(type) {
		case bool:
			c.repType = vdl.BoolType
		case string:
			c.repType = vdl.StringType
		default:
			return nil, fmt.Errorf("%s must be assigned a type", c)
		}
	}
	// Create a value of the appropriate type.
	vx := vdl.ZeroValue(c.repType)
	switch trep := c.rep.(type) {
	case bool:
		switch vx.Kind() {
		case vdl.Bool:
			return vx.AssignBool(trep), nil
		}
	case string:
		switch {
		case vx.Kind() == vdl.String:
			return vx.AssignString(trep), nil
		case vx.Type().IsBytes():
			// A string rep also covers []byte and [N]byte; fixed-size arrays
			// require an exact length match.
			if vx.Kind() == vdl.Array {
				if vx.Len() != len(trep) {
					return nil, fmt.Errorf("%s has a different length than %v", c, vx.Type())
				}
			}
			return vx.AssignBytes([]byte(trep)), nil
		}
	case *big.Int:
		switch vx.Kind() {
		case vdl.Byte:
			return vx.AssignByte(byte(trep.Uint64())), nil
		case vdl.Uint16, vdl.Uint32, vdl.Uint64:
			return vx.AssignUint(trep.Uint64()), nil
		case vdl.Int16, vdl.Int32, vdl.Int64:
			return vx.AssignInt(trep.Int64()), nil
		}
	case *big.Rat:
		switch vx.Kind() {
		case vdl.Float32, vdl.Float64:
			// Rounds to the nearest float64; range was checked at makeConst time.
			f64, _ := trep.Float64()
			return vx.AssignFloat(f64), nil
		}
	case *bigComplex:
		switch vx.Kind() {
		case vdl.Complex64, vdl.Complex128:
			re64, _ := trep.re.Float64()
			im64, _ := trep.im.Float64()
			return vx.AssignComplex(complex(re64, im64)), nil
		}
	case *vdl.Value:
		// Already a value; return it unchanged.
		return trep, nil
	}
	// Type mismatches shouldn't occur, since makeConst always ensures the rep and
	// repType are in sync.  If something's wrong we want to know about it.
	panic(fmt.Errorf("val: mismatched const rep type for %v", c))
}
+
// errNotSupported returns the error reported when an op isn't defined for
// the given const rep / type combination.
func errNotSupported(rep interface{}, t *vdl.Type) error {
	return fmt.Errorf("%s not supported", cRepTypeString(rep, t))
}
+
+// EvalUnary returns the result of evaluating (op x).
+func EvalUnary(op UnaryOp, x Const) (Const, error) {
+	if x.rep == nil {
+		return Const{}, errInvalidConst
+	}
+	if _, ok := x.rep.(*vdl.Value); ok {
+		// There are no valid unary ops on *Value consts.
+		return Const{}, errNotSupported(x.rep, x.repType)
+	}
+	switch op {
+	case LogicNot:
+		switch tx := x.rep.(type) {
+		case bool:
+			return makeConst(!tx, x.repType)
+		}
+	case Pos:
+		switch x.rep.(type) {
+		case *big.Int, *big.Rat, *bigComplex:
+			return x, nil
+		}
+	case Neg:
+		switch tx := x.rep.(type) {
+		case *big.Int:
+			return makeConst(new(big.Int).Neg(tx), x.repType)
+		case *big.Rat:
+			return makeConst(new(big.Rat).Neg(tx), x.repType)
+		case *bigComplex:
+			return makeConst(new(bigComplex).Neg(tx), x.repType)
+		}
+	case BitNot:
+		ix, err := constToInt(x)
+		if err != nil {
+			return Const{}, err
+		}
+		// big.Int.Not implements bit-not for signed integers, but we need to
+		// special-case unsigned integers.  E.g. ^int8(1)=-2, ^uint8(1)=254
+		not := new(big.Int)
+		switch {
+		case x.repType != nil && x.repType.Kind() == vdl.Byte:
+			not.SetUint64(uint64(^uint8(ix.Uint64())))
+		case x.repType != nil && x.repType.Kind() == vdl.Uint16:
+			not.SetUint64(uint64(^uint16(ix.Uint64())))
+		case x.repType != nil && x.repType.Kind() == vdl.Uint32:
+			not.SetUint64(uint64(^uint32(ix.Uint64())))
+		case x.repType != nil && x.repType.Kind() == vdl.Uint64:
+			not.SetUint64(^ix.Uint64())
+		default:
+			not.Not(ix)
+		}
+		return makeConst(not, x.repType)
+	}
+	return Const{}, errNotSupported(x.rep, x.repType)
+}
+
+// EvalBinary returns the result of evaluating (x op y).
+func EvalBinary(op BinaryOp, x, y Const) (Const, error) {
+	if x.rep == nil || y.rep == nil {
+		return Const{}, errInvalidConst
+	}
+	switch op {
+	case LeftShift, RightShift:
+		// Shift ops are special since they require an integer lhs and unsigned rhs.
+		return evalShift(op, x, y)
+	}
+	// All other binary ops behave similarly.  First we perform implicit
+	// conversion of x and y.  If either side is untyped, we may need to
+	// implicitly convert it to the type of the other side.  If both sides are
+	// typed they need to match.  The resulting tx and ty are guaranteed to have
+	// the same type, and resType tells us which type we need to convert the
+	// result into when we're done.
+	cx, cy, resType, err := coerceConsts(x, y)
+	if err != nil {
+		return Const{}, err
+	}
+	// Now we perform the actual binary op.
+	var res interface{}
+	switch op {
+	case LogicOr, LogicAnd:
+		res, err = opLogic(op, cx, cy, resType)
+	case EQ, NE, LT, LE, GT, GE:
+		res, err = opComp(op, cx, cy, resType)
+		resType = nil // comparisons always result in untyped bool.
+	case Add, Sub, Mul, Div:
+		res, err = opArith(op, cx, cy, resType)
+	case Mod, BitAnd, BitOr, BitXor:
+		res, err = opIntArith(op, cx, cy, resType)
+	default:
+		err = errNotSupported(cx, resType)
+	}
+	if err != nil {
+		return Const{}, err
+	}
+	// As a final step we convert to the result type.
+	return makeConst(res, resType)
+}
+
+func opLogic(op BinaryOp, x, y interface{}, resType *vdl.Type) (interface{}, error) {
+	switch tx := x.(type) {
+	case bool:
+		switch op {
+		case LogicOr:
+			return tx || y.(bool), nil
+		case LogicAnd:
+			return tx && y.(bool), nil
+		}
+	}
+	return nil, errNotSupported(x, resType)
+}
+
+func opComp(op BinaryOp, x, y interface{}, resType *vdl.Type) (interface{}, error) {
+	switch tx := x.(type) {
+	case bool:
+		switch op {
+		case EQ:
+			return tx == y.(bool), nil
+		case NE:
+			return tx != y.(bool), nil
+		}
+	case string:
+		return compString(op, tx, y.(string)), nil
+	case *big.Int:
+		return opCmpToBool(op, tx.Cmp(y.(*big.Int))), nil
+	case *big.Rat:
+		return opCmpToBool(op, tx.Cmp(y.(*big.Rat))), nil
+	case *bigComplex:
+		switch op {
+		case EQ:
+			return tx.Equal(y.(*bigComplex)), nil
+		case NE:
+			return !tx.Equal(y.(*bigComplex)), nil
+		}
+	case *vdl.Value:
+		switch op {
+		case EQ:
+			return vdl.EqualValue(tx, y.(*vdl.Value)), nil
+		case NE:
+			return !vdl.EqualValue(tx, y.(*vdl.Value)), nil
+		}
+	}
+	return nil, errNotSupported(x, resType)
+}
+
+func opArith(op BinaryOp, x, y interface{}, resType *vdl.Type) (interface{}, error) {
+	switch tx := x.(type) {
+	case string:
+		if op == Add {
+			return tx + y.(string), nil
+		}
+	case *big.Int:
+		return arithBigInt(op, tx, y.(*big.Int))
+	case *big.Rat:
+		return arithBigRat(op, tx, y.(*big.Rat))
+	case *bigComplex:
+		return arithBigComplex(op, tx, y.(*bigComplex))
+	}
+	return nil, errNotSupported(x, resType)
+}
+
+func opIntArith(op BinaryOp, x, y interface{}, resType *vdl.Type) (interface{}, error) {
+	ix, err := constToInt(Const{x, resType})
+	if err != nil {
+		return nil, err
+	}
+	iy, err := constToInt(Const{y, resType})
+	if err != nil {
+		return nil, err
+	}
+	return arithBigInt(op, ix, iy)
+}
+
+func evalShift(op BinaryOp, x, y Const) (Const, error) {
+	// lhs must be an integer.
+	ix, err := constToInt(x)
+	if err != nil {
+		return Const{}, err
+	}
+	// rhs must be a small unsigned integer.
+	iy, err := constToInt(y)
+	if err != nil {
+		return Const{}, err
+	}
+	if iy.Sign() < 0 {
+		return Const{}, fmt.Errorf("shift amount %v isn't unsigned", cRepString(iy))
+	}
+	if iy.Cmp(maxShiftSize) > 0 {
+		return Const{}, fmt.Errorf("shift amount %v greater than max allowed %v", cRepString(iy), cRepString(maxShiftSize))
+	}
+	// Perform the shift and convert it back to the lhs type.
+	return makeConst(shiftBigInt(op, ix, uint(iy.Uint64())), x.repType)
+}
+
+// bigRatToInt converts rational to integer values as long as there isn't any
+// loss in precision, checking resType to make sure the conversion is allowed.
+func bigRatToInt(rat *big.Rat, resType *vdl.Type) (*big.Int, error) {
+	// As a special-case we allow untyped rat consts to be converted to integers,
+	// as long as they can do so without loss of precision.  This is safe since
+	// untyped rat consts have "unbounded" precision.  Typed float consts may have
+	// been rounded at some point, so we don't allow this.  This is the same
+	// behavior as Go.
+	if resType != nil {
+		return nil, fmt.Errorf("can't convert typed %s to integer", cRepTypeString(rat, resType))
+	}
+	if !rat.IsInt() {
+		return nil, fmt.Errorf("converting %s %s to integer loses precision", cRepTypeString(rat, resType), cRepString(rat))
+	}
+	return new(big.Int).Set(rat.Num()), nil
+}
+
+// bigComplexToRat converts complex to rational values as long as the complex
+// value has a zero imaginary component.
+func bigComplexToRat(b *bigComplex) (*big.Rat, error) {
+	if b.im.Cmp(bigRatZero) != 0 {
+		return nil, fmt.Errorf("can't convert complex %s to rational: nonzero imaginary", cRepString(b))
+	}
+	return &b.re, nil
+}
+
// constToInt converts x to an integer value as long as there isn't any loss in
// precision.  Integer reps pass through unchanged (aliased); rationals and
// complex values must be exactly integral.
func constToInt(x Const) (*big.Int, error) {
	switch tx := x.rep.(type) {
	case *big.Int:
		return tx, nil
	case *big.Rat:
		return bigRatToInt(tx, x.repType)
	case *bigComplex:
		// Complex goes through rational first; both steps must be exact.
		rat, err := bigComplexToRat(tx)
		if err != nil {
			return nil, err
		}
		return bigRatToInt(rat, x.repType)
	}
	return nil, fmt.Errorf("can't convert %s to integer", x.typeString())
}
+
// makeConst creates a Const with value rep and type totype, performing overflow
// and conversion checks on numeric values.  If totype is nil the resulting
// const is untyped.  It may recurse (e.g. integer→rational→float) so every
// intermediate step is range-checked.
//
// TODO(toddw): Update to handle conversions to optional types.
func makeConst(rep interface{}, totype *vdl.Type) (Const, error) {
	if rep == nil {
		return Const{}, errInvalidConst
	}
	if totype == nil {
		// Only non-Value reps may be untyped; a *vdl.Value always has a type.
		if v, ok := rep.(*vdl.Value); ok {
			return Const{}, fmt.Errorf("can't make typed value %s untyped", v.Type())
		}
		return Const{rep, nil}, nil
	}
	switch trep := rep.(type) {
	case bool:
		if totype.Kind() == vdl.Bool {
			return Const{trep, totype}, nil
		}
	case string:
		// A string rep covers string, []byte and [N]byte target types.
		if totype.Kind() == vdl.String || totype.IsBytes() {
			return Const{trep, totype}, nil
		}
	case *big.Int:
		switch totype.Kind() {
		case vdl.Byte, vdl.Uint16, vdl.Uint32, vdl.Uint64, vdl.Int16, vdl.Int32, vdl.Int64:
			if err := checkOverflowInt(trep, totype.Kind()); err != nil {
				return Const{}, err
			}
			return Const{trep, totype}, nil
		case vdl.Float32, vdl.Float64, vdl.Complex64, vdl.Complex128:
			// Integer to float/complex: recurse with a rational rep.
			return makeConst(new(big.Rat).SetInt(trep), totype)
		}
	case *big.Rat:
		switch totype.Kind() {
		case vdl.Byte, vdl.Uint16, vdl.Uint32, vdl.Uint64, vdl.Int16, vdl.Int32, vdl.Int64:
			// The only way we reach this conversion from big.Rat to a typed integer
			// is for explicit type conversions.  We pass a nil Type to bigRatToInt
			// indicating trep is untyped, to allow all conversions from float to int
			// as long as trep is actually an integer.
			irep, err := bigRatToInt(trep, nil)
			if err != nil {
				return Const{}, err
			}
			return makeConst(irep, totype)
		case vdl.Float32, vdl.Float64:
			frep, err := convertTypedRat(trep, totype.Kind())
			if err != nil {
				return Const{}, err
			}
			return Const{frep, totype}, nil
		case vdl.Complex64, vdl.Complex128:
			// A rational becomes a complex with zero imaginary part.
			frep, err := convertTypedRat(trep, totype.Kind())
			if err != nil {
				return Const{}, err
			}
			return Const{realComplex(frep), totype}, nil
		}
	case *bigComplex:
		switch totype.Kind() {
		case vdl.Byte, vdl.Uint16, vdl.Uint32, vdl.Uint64, vdl.Int16, vdl.Int32, vdl.Int64, vdl.Float32, vdl.Float64:
			// Complex to real: requires a zero imaginary part.
			v, err := bigComplexToRat(trep)
			if err != nil {
				return Const{}, err
			}
			return makeConst(v, totype)
		case vdl.Complex64, vdl.Complex128:
			v, err := convertTypedComplex(trep, totype.Kind())
			if err != nil {
				return Const{}, err
			}
			return Const{v, totype}, nil
		}
	}
	return Const{}, fmt.Errorf("can't convert %s to %v", cRepString(rep), cRepTypeString(rep, totype))
}
+
+func bitLenInt(kind vdl.Kind) int {
+	switch kind {
+	case vdl.Byte:
+		return 8
+	case vdl.Uint16, vdl.Int16:
+		return 16
+	case vdl.Uint32, vdl.Int32:
+		return 32
+	case vdl.Uint64, vdl.Int64:
+		return 64
+	default:
+		panic(fmt.Errorf("val: bitLen unhandled kind %v", kind))
+	}
+}
+
+// checkOverflowInt returns an error iff converting b to the typed integer will
+// cause overflow.
+func checkOverflowInt(b *big.Int, kind vdl.Kind) error {
+	bitlen := bitLenInt(kind)
+	switch kind {
+	case vdl.Byte, vdl.Uint16, vdl.Uint32, vdl.Uint64:
+		// Unsigned: b must be non-negative and fit within bitlen bits.
+		if b.Sign() < 0 || b.BitLen() > bitlen {
+			return fmt.Errorf("const %v overflows uint%d", cRepString(b), bitlen)
+		}
+		return nil
+	case vdl.Int16, vdl.Int32, vdl.Int64:
+		// Signed: one bit is reserved for the sign, and two's complement gives
+		// the negative range one extra value (e.g. an 8 bit signed int spans
+		// -128..127).  So we bound-check b directly for non-negative values, and
+		// b+1 for negative values.
+		check := b
+		if b.Sign() < 0 {
+			check = new(big.Int).Add(b, bigIntOne)
+		}
+		if check.BitLen() >= bitlen {
+			return fmt.Errorf("const %v overflows int%d", cRepString(b), bitlen)
+		}
+		return nil
+	default:
+		panic(fmt.Errorf("val: checkOverflowInt unhandled kind %v", kind))
+	}
+}
+
+// checkOverflowRat returns an error iff converting b to the typed rat will
+// cause overflow or underflow.
+func checkOverflowRat(b *big.Rat, kind vdl.Kind) error {
+	// Exact zero is special cased in ieee754.
+	if b.Cmp(bigRatZero) == 0 {
+		return nil
+	}
+	// TODO(toddw): perhaps allow slightly smaller and larger values, to account
+	// for ieee754 round-to-even rules.
+	switch abs := new(big.Rat).Abs(b); kind {
+	case vdl.Float32, vdl.Complex64:
+		if abs.Cmp(bigRatAbsMin32) < 0 {
+			return fmt.Errorf("const %v underflows float32", cRepString(b))
+		}
+		if abs.Cmp(bigRatAbsMax32) > 0 {
+			return fmt.Errorf("const %v overflows float32", cRepString(b))
+		}
+	case vdl.Float64, vdl.Complex128:
+		if abs.Cmp(bigRatAbsMin64) < 0 {
+			return fmt.Errorf("const %v underflows float64", cRepString(b))
+		}
+		if abs.Cmp(bigRatAbsMax64) > 0 {
+			return fmt.Errorf("const %v overflows float64", cRepString(b))
+		}
+	default:
+		panic(fmt.Errorf("val: checkOverflowRat unhandled kind %v", kind))
+	}
+	return nil
+}
+
+// convertTypedRat converts b to the typed rat, rounding as necessary.
+func convertTypedRat(b *big.Rat, kind vdl.Kind) (*big.Rat, error) {
+	if err := checkOverflowRat(b, kind); err != nil {
+		return nil, err
+	}
+	// Round by passing the value through the target ieee754 representation.
+	f64, _ := b.Float64()
+	switch kind {
+	case vdl.Float32, vdl.Complex64:
+		f64 = float64(float32(f64))
+	case vdl.Float64, vdl.Complex128:
+		// f64 already carries exactly float64 precision.
+	default:
+		panic(fmt.Errorf("val: convertTypedRat unhandled kind %v", kind))
+	}
+	return new(big.Rat).SetFloat64(f64), nil
+}
+
+// convertTypedComplex converts b to the typed complex, rounding as necessary.
+func convertTypedComplex(b *bigComplex, kind vdl.Kind) (*bigComplex, error) {
+	// The real and imaginary parts are converted independently; each is rounded
+	// to the precision implied by kind.
+	realPart, err := convertTypedRat(&b.re, kind)
+	if err != nil {
+		return nil, err
+	}
+	imagPart, err := convertTypedRat(&b.im, kind)
+	if err != nil {
+		return nil, err
+	}
+	return newComplex(realPart, imagPart), nil
+}
+
+// coerceConsts performs implicit conversion of cl and cr based on their
+// respective types.  Returns the converted values vl and vr which are
+// guaranteed to be of the same type represented by the returned Type, which may
+// be nil if both consts are untyped.
+func coerceConsts(cl, cr Const) (interface{}, interface{}, *vdl.Type, error) {
+	var err error
+	if cl.repType != nil && cr.repType != nil {
+		// Both consts are typed - their types must match (no implicit conversion).
+		if cl.repType != cr.repType {
+			return nil, nil, nil, fmt.Errorf("type mismatch %v and %v", cl.typeString(), cr.typeString())
+		}
+		return cl.rep, cr.rep, cl.repType, nil
+	}
+	if cl.repType != nil {
+		// Convert rhs to the type of the lhs.
+		cr, err = makeConst(cr.rep, cl.repType)
+		if err != nil {
+			return nil, nil, nil, err
+		}
+		return cl.rep, cr.rep, cl.repType, nil
+	}
+	if cr.repType != nil {
+		// Convert lhs to the type of the rhs.
+		cl, err = makeConst(cl.rep, cr.repType)
+		if err != nil {
+			return nil, nil, nil, err
+		}
+		return cl.rep, cr.rep, cr.repType, nil
+	}
+	// Both consts are untyped, might need to implicitly promote untyped consts.
+	// Numerics promote along the lattice big.Int -> big.Rat -> bigComplex: the
+	// operand lower in the lattice is promoted to match the other.  Bools and
+	// strings only pair with themselves; any other pairing falls through to the
+	// mismatch error at the bottom.
+	switch vl := cl.rep.(type) {
+	case bool:
+		switch vr := cr.rep.(type) {
+		case bool:
+			return vl, vr, nil, nil
+		}
+	case string:
+		switch vr := cr.rep.(type) {
+		case string:
+			return vl, vr, nil, nil
+		}
+	case *big.Int:
+		switch vr := cr.rep.(type) {
+		case *big.Int:
+			return vl, vr, nil, nil
+		case *big.Rat:
+			// Promote lhs to rat
+			return new(big.Rat).SetInt(vl), vr, nil, nil
+		case *bigComplex:
+			// Promote lhs to complex
+			return realComplex(new(big.Rat).SetInt(vl)), vr, nil, nil
+		}
+	case *big.Rat:
+		switch vr := cr.rep.(type) {
+		case *big.Int:
+			// Promote rhs to rat
+			return vl, new(big.Rat).SetInt(vr), nil, nil
+		case *big.Rat:
+			return vl, vr, nil, nil
+		case *bigComplex:
+			// Promote lhs to complex
+			return realComplex(vl), vr, nil, nil
+		}
+	case *bigComplex:
+		switch vr := cr.rep.(type) {
+		case *big.Int:
+			// Promote rhs to complex
+			return vl, realComplex(new(big.Rat).SetInt(vr)), nil, nil
+		case *big.Rat:
+			// Promote rhs to complex
+			return vl, realComplex(vr), nil, nil
+		case *bigComplex:
+			return vl, vr, nil, nil
+		}
+	}
+	return nil, nil, nil, fmt.Errorf("mismatched %s and %s", cl.typeString(), cr.typeString())
+}
+
+func compString(op BinaryOp, l, r string) bool {
+	switch op {
+	case EQ:
+		return l == r
+	case NE:
+		return l != r
+	case LT:
+		return l < r
+	case LE:
+		return l <= r
+	case GT:
+		return l > r
+	case GE:
+		return l >= r
+	default:
+		panic(fmt.Errorf("val: unhandled op %q", op))
+	}
+}
+
+func opCmpToBool(op BinaryOp, cmp int) bool {
+	switch op {
+	case EQ:
+		return cmp == 0
+	case NE:
+		return cmp != 0
+	case LT:
+		return cmp < 0
+	case LE:
+		return cmp <= 0
+	case GT:
+		return cmp > 0
+	case GE:
+		return cmp >= 0
+	default:
+		panic(fmt.Errorf("val: unhandled op %q", op))
+	}
+}
+
+// arithBigInt evaluates the arithmetic or bitwise op over untyped big.Int
+// operands, returning a freshly allocated result.  Division or modulo by zero
+// yields errDivZero.  It panics on ops outside the arithmetic/bitwise set.
+func arithBigInt(op BinaryOp, l, r *big.Int) (*big.Int, error) {
+	res := new(big.Int)
+	switch op {
+	case Add:
+		return res.Add(l, r), nil
+	case Sub:
+		return res.Sub(l, r), nil
+	case Mul:
+		return res.Mul(l, r), nil
+	case Div, Mod:
+		if r.Cmp(bigIntZero) == 0 {
+			return nil, errDivZero
+		}
+		if op == Div {
+			// Quo truncates toward zero, matching Go's integer division.
+			return res.Quo(l, r), nil
+		}
+		// Rem matches Go's % operator; the sign follows the dividend.
+		return res.Rem(l, r), nil
+	case BitAnd:
+		return res.And(l, r), nil
+	case BitOr:
+		return res.Or(l, r), nil
+	case BitXor:
+		return res.Xor(l, r), nil
+	default:
+		panic(fmt.Errorf("val: unhandled op %q", op))
+	}
+}
+
+// arithBigRat evaluates the arithmetic op over untyped big.Rat operands,
+// returning a freshly allocated result.  Division by zero yields errDivZero.
+// It panics on ops outside {Add, Sub, Mul, Div}; rationals have no Mod or
+// bitwise operations.
+func arithBigRat(op BinaryOp, l, r *big.Rat) (*big.Rat, error) {
+	res := new(big.Rat)
+	switch op {
+	case Add:
+		return res.Add(l, r), nil
+	case Sub:
+		return res.Sub(l, r), nil
+	case Mul:
+		return res.Mul(l, r), nil
+	case Div:
+		if r.Cmp(bigRatZero) == 0 {
+			return nil, errDivZero
+		}
+		// Rat.Quo computes l/r exactly; it would panic on r == 0, but the check
+		// above guarantees r is nonzero.
+		return res.Quo(l, r), nil
+	default:
+		panic(fmt.Errorf("val: unhandled op %q", op))
+	}
+}
+
+// arithBigComplex evaluates the arithmetic op over untyped complex operands,
+// returning a freshly allocated result.  Only Div can fail; the other ops
+// always succeed.  It panics on ops outside {Add, Sub, Mul, Div}.
+func arithBigComplex(op BinaryOp, l, r *bigComplex) (*bigComplex, error) {
+	res := new(bigComplex)
+	switch op {
+	case Add:
+		return res.Add(l, r), nil
+	case Sub:
+		return res.Sub(l, r), nil
+	case Mul:
+		return res.Mul(l, r), nil
+	case Div:
+		// bigComplex.Div reports its own error (e.g. division by zero).
+		return res.Div(l, r)
+	default:
+		panic(fmt.Errorf("val: unhandled op %q", op))
+	}
+}
+
+// shiftBigInt shifts l by n bits in the direction given by op.  Shifts on
+// arbitrary-precision ints always succeed, so no error is returned.  It panics
+// on non-shift ops.
+func shiftBigInt(op BinaryOp, l *big.Int, n uint) *big.Int {
+	res := new(big.Int)
+	switch op {
+	case LeftShift:
+		return res.Lsh(l, n)
+	case RightShift:
+		return res.Rsh(l, n)
+	}
+	panic(fmt.Errorf("val: unhandled op %q", op))
+}
diff --git a/lib/vdl/opconst/const_test.go b/lib/vdl/opconst/const_test.go
new file mode 100644
index 0000000..58329b4
--- /dev/null
+++ b/lib/vdl/opconst/const_test.go
@@ -0,0 +1,612 @@
+package opconst
+
+import (
+	"fmt"
+	"math/big"
+	"testing"
+
+	"v.io/v23/vdl"
+)
+
+// Substrings expected to appear in the error messages returned by the various
+// Const operations; tests match returned errors against these fragments.
+const (
+	noType           = "must be assigned a type"
+	cantConvert      = "can't convert"
+	overflows        = "overflows"
+	underflows       = "underflows"
+	losesPrecision   = "loses precision"
+	nonzeroImaginary = "nonzero imaginary"
+	notSupported     = "not supported"
+	divByZero        = "divide by zero"
+)
+
+// Shared small-valued big.Int (bi*) and big.Rat (br*) constants, keeping the
+// table-driven test literals below concise.
+var (
+	bi0              = new(big.Int)
+	bi1, bi2, bi3    = big.NewInt(1), big.NewInt(2), big.NewInt(3)
+	bi4, bi5, bi6    = big.NewInt(4), big.NewInt(5), big.NewInt(6)
+	bi7, bi8, bi9    = big.NewInt(7), big.NewInt(8), big.NewInt(9)
+	bi_neg1, bi_neg2 = big.NewInt(-1), big.NewInt(-2)
+
+	br0              = new(big.Rat)
+	br1, br2, br3    = big.NewRat(1, 1), big.NewRat(2, 1), big.NewRat(3, 1)
+	br4, br5, br6    = big.NewRat(4, 1), big.NewRat(5, 1), big.NewRat(6, 1)
+	br7, br8, br9    = big.NewRat(7, 1), big.NewRat(8, 1), big.NewRat(9, 1)
+	br_neg1, br_neg2 = big.NewRat(-1, 1), big.NewRat(-2, 1)
+)
+
+// Helpers that build typed Consts by wrapping the *vdl.Value constructors
+// (boolValue, intValue, etc. — presumably defined elsewhere in this test
+// package) with FromValue.
+func boolConst(t *vdl.Type, x bool) Const          { return FromValue(boolValue(t, x)) }
+func stringConst(t *vdl.Type, x string) Const      { return FromValue(stringValue(t, x)) }
+func bytesConst(t *vdl.Type, x string) Const       { return FromValue(bytesValue(t, x)) }
+func bytes3Const(t *vdl.Type, x string) Const      { return FromValue(bytes3Value(t, x)) }
+func intConst(t *vdl.Type, x int64) Const          { return FromValue(intValue(t, x)) }
+func uintConst(t *vdl.Type, x uint64) Const        { return FromValue(uintValue(t, x)) }
+func floatConst(t *vdl.Type, x float64) Const      { return FromValue(floatValue(t, x)) }
+func complexConst(t *vdl.Type, x complex128) Const { return FromValue(complexValue(t, x)) }
+func structNumConst(t *vdl.Type, x float64) Const {
+	return FromValue(structNumValue(t, sn{"A", x}))
+}
+
+// constEqual returns true iff a and b evaluate as equal consts.  Two invalid
+// consts are considered equal; an invalid const never equals a valid one.
+func constEqual(a, b Const) bool {
+	if !a.IsValid() && !b.IsValid() {
+		return true
+	}
+	res, err := EvalBinary(EQ, a, b)
+	if err != nil || !res.IsValid() {
+		return false
+	}
+	val, err := res.ToValue()
+	if err != nil || val == nil {
+		return false
+	}
+	return val.Kind() == vdl.Bool && val.Bool()
+}
+
+// TestConstInvalid verifies the zero (invalid) Const: it reports !IsValid,
+// stringifies as "invalid", and every operation on it — ToValue, Convert, and
+// all unary and binary ops — returns an "invalid const" error with an invalid
+// result.
+func TestConstInvalid(t *testing.T) {
+	x := Const{}
+	if x.IsValid() {
+		t.Errorf("zero Const IsValid")
+	}
+	if got, want := x.String(), "invalid"; got != want {
+		t.Errorf("ToValue got string %v, want %v", got, want)
+	}
+	{
+		value, err := x.ToValue()
+		if value != nil {
+			t.Errorf("ToValue got valid value %v, want nil", value)
+		}
+		if got, want := fmt.Sprint(err), "invalid const"; got != want {
+			t.Errorf("ToValue got error %q, want %q", got, want)
+		}
+	}
+	{
+		result, err := x.Convert(vdl.BoolType)
+		if result.IsValid() {
+			t.Errorf("Convert got valid result %v, want invalid", result)
+		}
+		if got, want := fmt.Sprint(err), "invalid const"; got != want {
+			t.Errorf("Convert got error %q, want %q", got, want)
+		}
+	}
+	unary := []UnaryOp{LogicNot, Pos, Neg, BitNot}
+	for _, op := range unary {
+		result, err := EvalUnary(op, Const{})
+		if result.IsValid() {
+			t.Errorf("EvalUnary got valid result %v, want invalid", result)
+		}
+		if got, want := fmt.Sprint(err), "invalid const"; got != want {
+			t.Errorf("EvalUnary got error %q, want %q", got, want)
+		}
+	}
+	binary := []BinaryOp{LogicAnd, LogicOr, EQ, NE, LT, LE, GT, GE, Add, Sub, Mul, Div, Mod, BitAnd, BitOr, BitXor, LeftShift, RightShift}
+	for _, op := range binary {
+		result, err := EvalBinary(op, Const{}, Const{})
+		if result.IsValid() {
+			t.Errorf("EvalBinary got valid result %v, want invalid", result)
+		}
+		if got, want := fmt.Sprint(err), "invalid const"; got != want {
+			t.Errorf("EvalBinary got error %q, want %q", got, want)
+		}
+	}
+}
+
+// TestConstToValueOK verifies that FromValue followed by ToValue round-trips
+// typed values of every supported kind (named and unnamed) without error.
+func TestConstToValueOK(t *testing.T) {
+	tests := []*vdl.Value{
+		boolValue(vdl.BoolType, true), boolValue(boolTypeN, true),
+		stringValue(vdl.StringType, "abc"), stringValue(stringTypeN, "abc"),
+		bytesValue(bytesType, "abc"), bytesValue(bytesTypeN, "abc"),
+		bytes3Value(bytesType, "abc"), bytes3Value(bytesTypeN, "abc"),
+		intValue(vdl.Int32Type, 123), intValue(int32TypeN, 123),
+		uintValue(vdl.Uint32Type, 123), uintValue(uint32TypeN, 123),
+		floatValue(vdl.Float32Type, 123), floatValue(float32TypeN, 123),
+		complexValue(vdl.Complex64Type, 123), complexValue(complex64TypeN, 123),
+		structNumValue(structAIntType, sn{"A", 123}), structNumValue(structAIntTypeN, sn{"A", 123}),
+	}
+	for _, test := range tests {
+		c := FromValue(test)
+		v, err := c.ToValue()
+		if got, want := v, test; !vdl.EqualValue(got, want) {
+			t.Errorf("%v.ToValue got %v, want %v", c, got, want)
+		}
+		expectErr(t, err, "", "%v.ToValue", c)
+	}
+}
+
+// TestConstToValueImplicit verifies that untyped bool and string consts
+// convert implicitly: FromValue of a typed value equals the untyped const, and
+// ToValue of the untyped const produces the corresponding typed value.
+func TestConstToValueImplicit(t *testing.T) {
+	tests := []struct {
+		C Const
+		V *vdl.Value
+	}{
+		{Boolean(true), vdl.BoolValue(true)},
+		{String("abc"), vdl.StringValue("abc")},
+	}
+	for _, test := range tests {
+		c := FromValue(test.V)
+		if got, want := c, test.C; !constEqual(got, want) {
+			t.Errorf("FromValue(%v) got %v, want %v", test.C, got, want)
+		}
+		v, err := test.C.ToValue()
+		if got, want := v, test.V; !vdl.EqualValue(got, want) {
+			t.Errorf("%v.ToValue got %v, want %v", test.C, got, want)
+		}
+		expectErr(t, err, "", "%v.ToValue", test.C)
+	}
+}
+
+// TestConstToValueError verifies that untyped numeric consts (int, rat,
+// complex) cannot be turned into a value without first being assigned a type.
+func TestConstToValueError(t *testing.T) {
+	tests := []struct {
+		C      Const
+		errstr string
+	}{
+		{Integer(bi1), noType},
+		{Rational(br1), noType},
+		{Complex(br1, br0), noType},
+	}
+	for _, test := range tests {
+		v, err := test.C.ToValue()
+		if v != nil {
+			t.Errorf("%v.ToValue got %v, want nil", test.C, v)
+		}
+		expectErr(t, err, test.errstr, "%v.ToValue", test.C)
+	}
+}
+
+// c and v are shorthand for slices of consts and values respectively, keeping
+// the table-driven test literals below compact.
+type c []Const
+type v []*vdl.Value
+
+// TestConstConvertOK verifies successful conversions: within each test case,
+// every const in C and every value in V is convertible to the type of every
+// value in V, and the converted result compares equal.
+func TestConstConvertOK(t *testing.T) {
+	// Each test has a set of consts C and values V that are all convertible to
+	// each other and equivalent.
+	tests := []struct {
+		C c
+		V v
+	}{
+		{c{Boolean(true)},
+			v{boolValue(vdl.BoolType, true), boolValue(boolTypeN, true)}},
+		{c{String("abc")},
+			v{stringValue(vdl.StringType, "abc"), stringValue(stringTypeN, "abc"),
+				bytesValue(bytesType, "abc"), bytesValue(bytesTypeN, "abc"),
+				bytes3Value(bytes3Type, "abc"), bytes3Value(bytes3TypeN, "abc")}},
+		{c{Integer(bi1), Rational(br1), Complex(br1, br0)},
+			v{intValue(vdl.Int32Type, 1), intValue(int32TypeN, 1),
+				uintValue(vdl.Uint32Type, 1), uintValue(uint32TypeN, 1),
+				floatValue(vdl.Float32Type, 1), floatValue(float32TypeN, 1),
+				complexValue(vdl.Complex64Type, 1), complexValue(complex64TypeN, 1)}},
+		{c{Integer(bi_neg1), Rational(br_neg1), Complex(br_neg1, br0)},
+			v{intValue(vdl.Int32Type, -1), intValue(int32TypeN, -1),
+				floatValue(vdl.Float32Type, -1), floatValue(float32TypeN, -1),
+				complexValue(vdl.Complex64Type, -1), complexValue(complex64TypeN, -1)}},
+		{c{Rational(big.NewRat(1, 2)), Complex(big.NewRat(1, 2), br0)},
+			v{floatValue(vdl.Float32Type, 0.5), floatValue(float32TypeN, 0.5),
+				complexValue(vdl.Complex64Type, 0.5), complexValue(complex64TypeN, 0.5)}},
+		{c{Complex(br1, br1)},
+			v{complexValue(vdl.Complex64Type, 1+1i), complexValue(complex64TypeN, 1+1i)}},
+		// Check implicit conversion of untyped bool and string consts.
+		{c{Boolean(true)},
+			v{boolValue(vdl.BoolType, true), anyValue(boolValue(vdl.BoolType, true))}},
+		{c{String("abc")},
+			v{stringValue(vdl.StringType, "abc"), anyValue(stringValue(vdl.StringType, "abc"))}},
+	}
+	for _, test := range tests {
+		// Create a slice of consts containing everything in C and V.
+		consts := make([]Const, len(test.C))
+		copy(consts, test.C)
+		for _, v := range test.V {
+			consts = append(consts, FromValue(v))
+		}
+		// Loop through the consts, and convert each one to each item in V.
+		for _, c := range consts {
+			for _, v := range test.V {
+				vt := v.Type()
+				got, err := c.Convert(vt)
+				if want := FromValue(v); !constEqual(got, want) {
+					t.Errorf("%v.Convert(%v) got %v, want %v", c, vt, got, want)
+				}
+				expectErr(t, err, "", "%v.Convert(%v)", c, vt)
+			}
+		}
+	}
+}
+
+// ty is shorthand for a slice of types, keeping test literals compact.
+type ty []*vdl.Type
+
+// TestConstConvertError verifies failing conversions: incompatible kinds
+// report "can't convert", and numeric conversions out of range report
+// overflow, underflow, precision loss, or a nonzero imaginary part.
+func TestConstConvertError(t *testing.T) {
+	// Each test has a single const C that returns an error that contains errstr
+	// when converted to any of the types in the set T.
+	tests := []struct {
+		C      Const
+		T      ty
+		errstr string
+	}{
+		{Boolean(true),
+			ty{vdl.StringType, stringTypeN, bytesType, bytesTypeN, bytes3Type, bytes3TypeN,
+				vdl.Int32Type, int32TypeN, vdl.Uint32Type, uint32TypeN,
+				vdl.Float32Type, float32TypeN, vdl.Complex64Type, complex64TypeN,
+				structAIntType, structAIntTypeN},
+			cantConvert},
+		{String("abc"),
+			ty{vdl.BoolType, boolTypeN,
+				vdl.Int32Type, int32TypeN, vdl.Uint32Type, uint32TypeN,
+				vdl.Float32Type, float32TypeN, vdl.Complex64Type, complex64TypeN,
+				structAIntType, structAIntTypeN},
+			cantConvert},
+		{Integer(bi1),
+			ty{vdl.BoolType, boolTypeN,
+				vdl.StringType, stringTypeN, bytesType, bytesTypeN, bytes3Type, bytes3TypeN,
+				structAIntType, structAIntTypeN},
+			cantConvert},
+		{Rational(br1),
+			ty{vdl.BoolType, boolTypeN,
+				vdl.StringType, stringTypeN, bytesType, bytesTypeN, bytes3Type, bytes3TypeN,
+				structAIntType, structAIntTypeN},
+			cantConvert},
+		{Complex(br1, br0),
+			ty{vdl.BoolType, boolTypeN,
+				vdl.StringType, stringTypeN, bytesType, bytesTypeN, bytes3Type, bytes3TypeN,
+				structAIntType, structAIntTypeN},
+			cantConvert},
+		// Bounds tests
+		{Integer(bi_neg1), ty{vdl.Uint32Type, uint32TypeN}, overflows},
+		{Integer(big.NewInt(1 << 32)), ty{vdl.Int32Type, int32TypeN}, overflows},
+		{Integer(big.NewInt(1 << 33)), ty{vdl.Uint32Type, uint32TypeN}, overflows},
+		{Rational(br_neg1), ty{vdl.Uint32Type, uint32TypeN}, overflows},
+		{Rational(big.NewRat(1<<32, 1)), ty{vdl.Int32Type, int32TypeN}, overflows},
+		{Rational(big.NewRat(1<<33, 1)), ty{vdl.Uint32Type, uint32TypeN}, overflows},
+		{Rational(big.NewRat(1, 2)),
+			ty{vdl.Int32Type, int32TypeN, vdl.Uint32Type, uint32TypeN},
+			losesPrecision},
+		{Rational(bigRatAbsMin64), ty{vdl.Float32Type, float32TypeN}, underflows},
+		{Rational(bigRatAbsMax64), ty{vdl.Float32Type, float32TypeN}, overflows},
+		{Complex(br_neg1, br0), ty{vdl.Uint32Type, uint32TypeN}, overflows},
+		{Complex(big.NewRat(1<<32, 1), br0), ty{vdl.Int32Type, int32TypeN}, overflows},
+		{Complex(big.NewRat(1<<33, 1), br0), ty{vdl.Uint32Type, uint32TypeN}, overflows},
+		{Complex(big.NewRat(1, 2), br0),
+			ty{vdl.Int32Type, int32TypeN, vdl.Uint32Type, uint32TypeN},
+			losesPrecision},
+		{Complex(bigRatAbsMin64, br0), ty{vdl.Float32Type, float32TypeN}, underflows},
+		{Complex(bigRatAbsMax64, br0), ty{vdl.Float32Type, float32TypeN}, overflows},
+		{Complex(br0, br1),
+			ty{vdl.Int32Type, int32TypeN, vdl.Uint32Type, uint32TypeN, vdl.Float32Type, float32TypeN},
+			nonzeroImaginary},
+	}
+	for _, test := range tests {
+		for _, ct := range test.T {
+			result, err := test.C.Convert(ct)
+			if result.IsValid() {
+				t.Errorf("%v.Convert(%v) result got %v, want invalid", test.C, ct, result)
+			}
+			expectErr(t, err, test.errstr, "%v.Convert(%v)", test.C, ct)
+		}
+	}
+}
+
+// TestConstUnaryOpOK verifies successful unary evaluations over both untyped
+// and typed consts: logical not, unary plus/minus, and bitwise not.
+func TestConstUnaryOpOK(t *testing.T) {
+	tests := []struct {
+		op        UnaryOp
+		x, expect Const
+	}{
+		{LogicNot, Boolean(true), Boolean(false)},
+		{LogicNot, boolConst(vdl.BoolType, false), boolConst(vdl.BoolType, true)},
+		{LogicNot, boolConst(boolTypeN, true), boolConst(boolTypeN, false)},
+
+		{Pos, Integer(bi1), Integer(bi1)},
+		{Pos, Rational(br1), Rational(br1)},
+		{Pos, Complex(br1, br1), Complex(br1, br1)},
+		{Pos, intConst(vdl.Int32Type, 1), intConst(vdl.Int32Type, 1)},
+		{Pos, floatConst(float32TypeN, 1), floatConst(float32TypeN, 1)},
+		{Pos, complexConst(complex64TypeN, 1), complexConst(complex64TypeN, 1)},
+
+		{Neg, Integer(bi1), Integer(bi_neg1)},
+		{Neg, Rational(br1), Rational(br_neg1)},
+		{Neg, Complex(br1, br1), Complex(br_neg1, br_neg1)},
+		{Neg, intConst(vdl.Int32Type, 1), intConst(vdl.Int32Type, -1)},
+		{Neg, floatConst(float32TypeN, 1), floatConst(float32TypeN, -1)},
+		{Neg, complexConst(complex64TypeN, 1), complexConst(complex64TypeN, -1)},
+
+		// BitNot on untyped rats and complexes works when they hold whole numbers.
+		{BitNot, Integer(bi1), Integer(bi_neg2)},
+		{BitNot, Rational(br1), Integer(bi_neg2)},
+		{BitNot, Complex(br1, br0), Integer(bi_neg2)},
+		{BitNot, intConst(vdl.Int32Type, 1), intConst(vdl.Int32Type, -2)},
+		{BitNot, uintConst(uint32TypeN, 1), uintConst(uint32TypeN, 1<<32-2)},
+	}
+	for _, test := range tests {
+		result, err := EvalUnary(test.op, test.x)
+		if got, want := result, test.expect; !constEqual(got, want) {
+			t.Errorf("EvalUnary(%v, %v) result got %v, want %v", test.op, test.x, got, want)
+		}
+		expectErr(t, err, "", "EvalUnary(%v, %v)", test.op, test.x)
+	}
+}
+
+// TestConstUnaryOpError verifies failing unary evaluations: unsupported
+// operand kinds, overflow on negation, and BitNot on non-integral values.
+func TestConstUnaryOpError(t *testing.T) {
+	tests := []struct {
+		op     UnaryOp
+		x      Const
+		errstr string
+	}{
+		{LogicNot, String("abc"), notSupported},
+		{LogicNot, Integer(bi1), notSupported},
+		{LogicNot, Rational(br1), notSupported},
+		{LogicNot, Complex(br1, br1), notSupported},
+		{LogicNot, structNumConst(structAIntTypeN, 999), notSupported},
+
+		{Pos, Boolean(false), notSupported},
+		{Pos, String("abc"), notSupported},
+		{Pos, structNumConst(structAIntTypeN, 999), notSupported},
+
+		{Neg, Boolean(false), notSupported},
+		{Neg, String("abc"), notSupported},
+		{Neg, structNumConst(structAIntTypeN, 999), notSupported},
+		// NOTE(review): 1<<32-1 exceeds the int32 range; presumably intValue
+		// stores the raw int64 and the overflow surfaces when Neg re-checks the
+		// result against int32 bounds — confirm against intValue's behavior.
+		{Neg, intConst(vdl.Int32Type, 1<<32-1), overflows},
+
+		{BitNot, Boolean(false), cantConvert},
+		{BitNot, String("abc"), cantConvert},
+		{BitNot, Rational(big.NewRat(1, 2)), losesPrecision},
+		{BitNot, Complex(br1, br1), nonzeroImaginary},
+		{BitNot, structNumConst(structAIntTypeN, 999), notSupported},
+		{BitNot, floatConst(float32TypeN, 1), cantConvert},
+		{BitNot, complexConst(complex64TypeN, 1), cantConvert},
+	}
+	for _, test := range tests {
+		result, err := EvalUnary(test.op, test.x)
+		if result.IsValid() {
+			t.Errorf("EvalUnary(%v, %v) result got %v, want invalid", test.op, test.x, result)
+		}
+		expectErr(t, err, test.errstr, "EvalUnary(%v, %v)", test.op, test.x)
+	}
+}
+
+// TestConstBinaryOpOK verifies successful binary evaluations across the full
+// operator set, over both untyped consts and typed (named) consts.  Shift and
+// bitwise ops on untyped rats/complexes require whole-number operands.
+func TestConstBinaryOpOK(t *testing.T) {
+	tests := []struct {
+		op           BinaryOp
+		x, y, expect Const
+	}{
+		{LogicAnd, Boolean(true), Boolean(true), Boolean(true)},
+		{LogicAnd, Boolean(true), Boolean(false), Boolean(false)},
+		{LogicAnd, Boolean(false), Boolean(true), Boolean(false)},
+		{LogicAnd, Boolean(false), Boolean(false), Boolean(false)},
+		{LogicAnd, boolConst(boolTypeN, true), boolConst(boolTypeN, true), boolConst(boolTypeN, true)},
+		{LogicAnd, boolConst(boolTypeN, true), boolConst(boolTypeN, false), boolConst(boolTypeN, false)},
+		{LogicAnd, boolConst(boolTypeN, false), boolConst(boolTypeN, true), boolConst(boolTypeN, false)},
+		{LogicAnd, boolConst(boolTypeN, false), boolConst(boolTypeN, false), boolConst(boolTypeN, false)},
+
+		{LogicOr, Boolean(true), Boolean(true), Boolean(true)},
+		{LogicOr, Boolean(true), Boolean(false), Boolean(true)},
+		{LogicOr, Boolean(false), Boolean(true), Boolean(true)},
+		{LogicOr, Boolean(false), Boolean(false), Boolean(false)},
+		{LogicOr, boolConst(boolTypeN, true), boolConst(boolTypeN, true), boolConst(boolTypeN, true)},
+		{LogicOr, boolConst(boolTypeN, true), boolConst(boolTypeN, false), boolConst(boolTypeN, true)},
+		{LogicOr, boolConst(boolTypeN, false), boolConst(boolTypeN, true), boolConst(boolTypeN, true)},
+		{LogicOr, boolConst(boolTypeN, false), boolConst(boolTypeN, false), boolConst(boolTypeN, false)},
+
+		// Add doubles as string/bytes concatenation.
+		{Add, String("abc"), String("def"), String("abcdef")},
+		{Add, Integer(bi1), Integer(bi1), Integer(bi2)},
+		{Add, Rational(br1), Rational(br1), Rational(br2)},
+		{Add, Complex(br1, br1), Complex(br1, br1), Complex(br2, br2)},
+		{Add, stringConst(stringTypeN, "abc"), stringConst(stringTypeN, "def"), stringConst(stringTypeN, "abcdef")},
+		{Add, bytesConst(bytesTypeN, "abc"), bytesConst(bytesTypeN, "def"), bytesConst(bytesTypeN, "abcdef")},
+		{Add, intConst(int32TypeN, 1), intConst(int32TypeN, 1), intConst(int32TypeN, 2)},
+		{Add, uintConst(uint32TypeN, 1), uintConst(uint32TypeN, 1), uintConst(uint32TypeN, 2)},
+		{Add, floatConst(float32TypeN, 1), floatConst(float32TypeN, 1), floatConst(float32TypeN, 2)},
+		{Add, complexConst(complex64TypeN, 1), complexConst(complex64TypeN, 1), complexConst(complex64TypeN, 2)},
+
+		{Sub, Integer(bi2), Integer(bi1), Integer(bi1)},
+		{Sub, Rational(br2), Rational(br1), Rational(br1)},
+		{Sub, Complex(br2, br2), Complex(br1, br1), Complex(br1, br1)},
+		{Sub, intConst(int32TypeN, 2), intConst(int32TypeN, 1), intConst(int32TypeN, 1)},
+		{Sub, uintConst(uint32TypeN, 2), uintConst(uint32TypeN, 1), uintConst(uint32TypeN, 1)},
+		{Sub, floatConst(float32TypeN, 2), floatConst(float32TypeN, 1), floatConst(float32TypeN, 1)},
+		{Sub, complexConst(complex64TypeN, 2), complexConst(complex64TypeN, 1), complexConst(complex64TypeN, 1)},
+
+		// (2+2i)*(2+2i) = 8i, i.e. real part 0, imaginary part 8.
+		{Mul, Integer(bi2), Integer(bi2), Integer(bi4)},
+		{Mul, Rational(br2), Rational(br2), Rational(br4)},
+		{Mul, Complex(br2, br2), Complex(br2, br2), Complex(br0, br8)},
+		{Mul, intConst(int32TypeN, 2), intConst(int32TypeN, 2), intConst(int32TypeN, 4)},
+		{Mul, uintConst(uint32TypeN, 2), uintConst(uint32TypeN, 2), uintConst(uint32TypeN, 4)},
+		{Mul, floatConst(float32TypeN, 2), floatConst(float32TypeN, 2), floatConst(float32TypeN, 4)},
+		{Mul, complexConst(complex64TypeN, 2+2i), complexConst(complex64TypeN, 2+2i), complexConst(complex64TypeN, 8i)},
+
+		{Div, Integer(bi4), Integer(bi2), Integer(bi2)},
+		{Div, Rational(br4), Rational(br2), Rational(br2)},
+		{Div, Complex(br4, br4), Complex(br2, br2), Complex(br2, br0)},
+		{Div, intConst(int32TypeN, 4), intConst(int32TypeN, 2), intConst(int32TypeN, 2)},
+		{Div, uintConst(uint32TypeN, 4), uintConst(uint32TypeN, 2), uintConst(uint32TypeN, 2)},
+		{Div, floatConst(float32TypeN, 4), floatConst(float32TypeN, 2), floatConst(float32TypeN, 2)},
+		{Div, complexConst(complex64TypeN, 4+4i), complexConst(complex64TypeN, 2+2i), complexConst(complex64TypeN, 2)},
+
+		// Mod and the bitwise/shift ops work on rats/complexes holding whole numbers.
+		{Mod, Integer(bi3), Integer(bi2), Integer(bi1)},
+		{Mod, Rational(br3), Rational(br2), Rational(br1)},
+		{Mod, Complex(br3, br0), Complex(br2, br0), Complex(br1, br0)},
+		{Mod, intConst(int32TypeN, 3), intConst(int32TypeN, 2), intConst(int32TypeN, 1)},
+		{Mod, uintConst(uint32TypeN, 3), uintConst(uint32TypeN, 2), uintConst(uint32TypeN, 1)},
+
+		{BitAnd, Integer(bi3), Integer(bi2), Integer(bi2)},
+		{BitAnd, Rational(br3), Rational(br2), Rational(br2)},
+		{BitAnd, Complex(br3, br0), Complex(br2, br0), Complex(br2, br0)},
+		{BitAnd, intConst(int32TypeN, 3), intConst(int32TypeN, 2), intConst(int32TypeN, 2)},
+		{BitAnd, uintConst(uint32TypeN, 3), uintConst(uint32TypeN, 2), uintConst(uint32TypeN, 2)},
+
+		{BitOr, Integer(bi5), Integer(bi3), Integer(bi7)},
+		{BitOr, Rational(br5), Rational(br3), Rational(br7)},
+		{BitOr, Complex(br5, br0), Complex(br3, br0), Complex(br7, br0)},
+		{BitOr, intConst(int32TypeN, 5), intConst(int32TypeN, 3), intConst(int32TypeN, 7)},
+		{BitOr, uintConst(uint32TypeN, 5), uintConst(uint32TypeN, 3), uintConst(uint32TypeN, 7)},
+
+		{BitXor, Integer(bi5), Integer(bi3), Integer(bi6)},
+		{BitXor, Rational(br5), Rational(br3), Rational(br6)},
+		{BitXor, Complex(br5, br0), Complex(br3, br0), Complex(br6, br0)},
+		{BitXor, intConst(int32TypeN, 5), intConst(int32TypeN, 3), intConst(int32TypeN, 6)},
+		{BitXor, uintConst(uint32TypeN, 5), uintConst(uint32TypeN, 3), uintConst(uint32TypeN, 6)},
+
+		{LeftShift, Integer(bi3), Integer(bi1), Integer(bi6)},
+		{LeftShift, Rational(br3), Rational(br1), Rational(br6)},
+		{LeftShift, Complex(br3, br0), Complex(br1, br0), Complex(br6, br0)},
+		{LeftShift, intConst(int32TypeN, 3), intConst(int32TypeN, 1), intConst(int32TypeN, 6)},
+		{LeftShift, uintConst(uint32TypeN, 3), uintConst(uint32TypeN, 1), uintConst(uint32TypeN, 6)},
+
+		{RightShift, Integer(bi5), Integer(bi1), Integer(bi2)},
+		{RightShift, Rational(br5), Rational(br1), Rational(br2)},
+		{RightShift, Complex(br5, br0), Complex(br1, br0), Complex(br2, br0)},
+		{RightShift, intConst(int32TypeN, 5), intConst(int32TypeN, 1), intConst(int32TypeN, 2)},
+		{RightShift, uintConst(uint32TypeN, 5), uintConst(uint32TypeN, 1), uintConst(uint32TypeN, 2)},
+	}
+	for _, test := range tests {
+		result, err := EvalBinary(test.op, test.x, test.y)
+		if got, want := result, test.expect; !constEqual(got, want) {
+			t.Errorf("EvalBinary(%v, %v, %v) result got %v, want %v", test.op, test.x, test.y, got, want)
+		}
+		expectErr(t, err, "", "EvalBinary(%v, %v, %v)", test.op, test.x, test.y)
+	}
+}
+
+// expectComp evaluates (x op y) and fails the test unless the evaluation
+// succeeds and the result equals the untyped boolean expect.
+func expectComp(t *testing.T, op BinaryOp, x, y Const, expect bool) {
+	result, err := EvalBinary(op, x, y)
+	want := Boolean(expect)
+	if !constEqual(result, want) {
+		t.Errorf("EvalBinary(%v, %v, %v) result got %v, want %v", op, x, y, result, want)
+	}
+	expectErr(t, err, "", "EvalBinary(%v, %v, %v)", op, x, y)
+}
+
+// TestConstEQNE verifies EQ and NE over pairs of unequal consts (including
+// kinds that only support equality, like bool, complex and struct): each const
+// equals itself and differs from the other, symmetrically.
+func TestConstEQNE(t *testing.T) {
+	tests := []struct {
+		x, y Const // x != y
+	}{
+		{Boolean(false), Boolean(true)},
+		{String("abc"), String("def")},
+		{Complex(br1, br1), Complex(br2, br2)},
+
+		{boolConst(boolTypeN, false), boolConst(boolTypeN, true)},
+		{complexConst(complex64TypeN, 1), complexConst(complex64TypeN, 2)},
+		{structNumConst(structAIntTypeN, 1), structNumConst(structAIntTypeN, 2)},
+	}
+	for _, test := range tests {
+		expectComp(t, EQ, test.x, test.x, true)
+		expectComp(t, EQ, test.x, test.y, false)
+		expectComp(t, EQ, test.y, test.x, false)
+		expectComp(t, EQ, test.y, test.y, true)
+
+		expectComp(t, NE, test.x, test.x, false)
+		expectComp(t, NE, test.x, test.y, true)
+		expectComp(t, NE, test.y, test.x, true)
+		expectComp(t, NE, test.y, test.y, false)
+	}
+}
+
+// TestConstOrdered verifies the full comparison matrix (EQ NE LT LE GT GE)
+// over ordered kinds, with each test pair arranged so that x < y.
+func TestConstOrdered(t *testing.T) {
+	tests := []struct {
+		x, y Const // x < y
+	}{
+		{String("abc"), String("def")},
+		{Integer(bi1), Integer(bi2)},
+		{Rational(br1), Rational(br2)},
+
+		{stringConst(stringTypeN, "abc"), stringConst(stringTypeN, "def")},
+		{bytesConst(bytesTypeN, "abc"), bytesConst(bytesTypeN, "def")},
+		{bytes3Const(bytes3TypeN, "abc"), bytes3Const(bytes3TypeN, "def")},
+		{intConst(int32TypeN, 1), intConst(int32TypeN, 2)},
+		{uintConst(uint32TypeN, 1), uintConst(uint32TypeN, 2)},
+		{floatConst(float32TypeN, 1), floatConst(float32TypeN, 2)},
+	}
+	for _, test := range tests {
+		expectComp(t, EQ, test.x, test.x, true)
+		expectComp(t, EQ, test.x, test.y, false)
+		expectComp(t, EQ, test.y, test.x, false)
+		expectComp(t, EQ, test.y, test.y, true)
+
+		expectComp(t, NE, test.x, test.x, false)
+		expectComp(t, NE, test.x, test.y, true)
+		expectComp(t, NE, test.y, test.x, true)
+		expectComp(t, NE, test.y, test.y, false)
+
+		expectComp(t, LT, test.x, test.x, false)
+		expectComp(t, LT, test.x, test.y, true)
+		expectComp(t, LT, test.y, test.x, false)
+		expectComp(t, LT, test.y, test.y, false)
+
+		expectComp(t, LE, test.x, test.x, true)
+		expectComp(t, LE, test.x, test.y, true)
+		expectComp(t, LE, test.y, test.x, false)
+		expectComp(t, LE, test.y, test.y, true)
+
+		expectComp(t, GT, test.x, test.x, false)
+		expectComp(t, GT, test.x, test.y, false)
+		expectComp(t, GT, test.y, test.x, true)
+		expectComp(t, GT, test.y, test.y, false)
+
+		expectComp(t, GE, test.x, test.x, true)
+		expectComp(t, GE, test.x, test.y, false)
+		expectComp(t, GE, test.y, test.x, true)
+		expectComp(t, GE, test.y, test.y, true)
+	}
+}
+
+// bo is shorthand for a slice of binary ops, keeping test literals compact.
+type bo []BinaryOp
+
+// TestConstBinaryOpError verifies failing binary evaluations: operand kinds
+// that don't support the op, and bounds/zero-divisor failures on typed
+// integers.
+func TestConstBinaryOpError(t *testing.T) {
+	// For each op in Bops and each x in C, (x op x) returns errstr.
+	tests := []struct {
+		Bops   bo
+		C      c
+		errstr string
+	}{
+		// Type not supported / can't convert errors
+		{bo{LogicAnd, LogicOr},
+			c{String("abc"),
+				stringConst(stringTypeN, "abc"),
+				bytesConst(bytesTypeN, "abc"), bytes3Const(bytes3TypeN, "abc"),
+				Integer(bi1), intConst(int32TypeN, 1), uintConst(uint32TypeN, 1),
+				Rational(br1), floatConst(float32TypeN, 1),
+				Complex(br1, br1), complexConst(complex64TypeN, 1),
+				structNumConst(structAIntType, 1), structNumConst(structAIntTypeN, 1)},
+			notSupported},
+		{bo{LT, LE, GT, GE},
+			c{Boolean(true), boolConst(boolTypeN, false),
+				Complex(br1, br1), complexConst(complex64TypeN, 1),
+				structNumConst(structAIntType, 1), structNumConst(structAIntTypeN, 1)},
+			notSupported},
+		{bo{Add},
+			c{structNumConst(structAIntType, 1), structNumConst(structAIntTypeN, 1)},
+			notSupported},
+		{bo{Sub, Mul, Div},
+			c{String("abc"), stringConst(stringTypeN, "abc"),
+				bytesConst(bytesTypeN, "abc"), bytes3Const(bytes3TypeN, "abc"),
+				structNumConst(structAIntType, 1), structNumConst(structAIntTypeN, 1)},
+			notSupported},
+		{bo{Mod, BitAnd, BitOr, BitXor, LeftShift, RightShift},
+			c{String("abc"), stringConst(stringTypeN, "abc"),
+				bytesConst(bytesTypeN, "abc"), bytes3Const(bytes3TypeN, "abc"),
+				structNumConst(structAIntType, 1), structNumConst(structAIntTypeN, 1)},
+			cantConvert},
+		// Bounds checking
+		{bo{Add}, c{uintConst(uint32TypeN, 1<<31)}, overflows},
+		{bo{Mul}, c{uintConst(uint32TypeN, 1<<16)}, overflows},
+		{bo{Div}, c{uintConst(uint32TypeN, 0)}, divByZero},
+		{bo{LeftShift}, c{uintConst(uint32TypeN, 32)}, overflows},
+	}
+	for _, test := range tests {
+		for _, op := range test.Bops {
+			for _, c := range test.C {
+				result, err := EvalBinary(op, c, c)
+				if result.IsValid() {
+					t.Errorf("EvalBinary(%v, %v, %v) result got %v, want invalid", op, c, c, result)
+				}
+				expectErr(t, err, test.errstr, "EvalBinary(%v, %v, %v)", op, c, c)
+			}
+		}
+	}
+}
diff --git a/lib/vdl/opconst/op.go b/lib/vdl/opconst/op.go
new file mode 100644
index 0000000..c4f65e1
--- /dev/null
+++ b/lib/vdl/opconst/op.go
@@ -0,0 +1,96 @@
+package opconst
+
// UnaryOp represents a unary operation to be performed on a Const.
type UnaryOp uint

// BinaryOp represents a binary operation to be performed on two Consts.
type BinaryOp uint

// The supported unary operations.
const (
	InvalidUnaryOp UnaryOp = iota
	LogicNot               //  ! logical not
	Pos                    //  + positive
	Neg                    //  - negate
	BitNot                 //  ^ bitwise not
)

// The supported binary operations.
const (
	InvalidBinaryOp BinaryOp = iota
	LogicAnd                 //  && logical and
	LogicOr                  //  || logical or
	EQ                       //  == equal
	NE                       //  != not equal
	LT                       //  <  less than
	LE                       //  <= less than or equal
	GT                       //  >  greater than
	GE                       //  >= greater than or equal
	Add                      //  +  add
	Sub                      //  -  subtract
	Mul                      //  *  multiply
	Div                      //  /  divide
	Mod                      //  %  modulo
	BitAnd                   //  &  bitwise and
	BitOr                    //  |  bitwise or
	BitXor                   //  ^  bitwise xor
	LeftShift                //  << left shift
	RightShift               //  >> right shift
)

// unaryOpTable maps each UnaryOp (by its iota index) to its source symbol and
// its human-readable description.
var unaryOpTable = [...]struct {
	symbol, desc string
}{
	InvalidUnaryOp: {"invalid", "invalid"},
	LogicNot:       {"!", "logic_not"},
	Pos:            {"+", "pos"},
	Neg:            {"-", "neg"},
	BitNot:         {"^", "bit_not"},
}

// binaryOpTable maps each BinaryOp (by its iota index) to its source symbol
// and its human-readable description.
var binaryOpTable = [...]struct {
	symbol, desc string
}{
	InvalidBinaryOp: {"invalid", "invalid"},
	LogicAnd:        {"&&", "logic_and"},
	LogicOr:         {"||", "logic_or"},
	EQ:              {"==", "eq"},
	NE:              {"!=", "ne"},
	LT:              {"<", "lt"},
	LE:              {"<=", "le"},
	GT:              {">", "gt"},
	GE:              {">=", "ge"},
	Add:             {"+", "add"},
	Sub:             {"-", "sub"},
	Mul:             {"*", "mul"},
	Div:             {"/", "div"},
	Mod:             {"%", "mod"},
	BitAnd:          {"&", "bit_and"},
	BitOr:           {"|", "bit_or"},
	BitXor:          {"^", "bit_xor"},
	LeftShift:       {"<<", "left_shift"},
	RightShift:      {">>", "right_shift"},
}

// String returns the human-readable description of op.
func (op UnaryOp) String() string { return unaryOpTable[op].desc }

// String returns the human-readable description of op.
func (op BinaryOp) String() string { return binaryOpTable[op].desc }

// ToUnaryOp converts s into a UnaryOp, or returns InvalidUnaryOp if it couldn't
// be converted.  Both the symbol form (e.g. "!") and the description form
// (e.g. "logic_not") are accepted.
func ToUnaryOp(s string) UnaryOp {
	for index := range unaryOpTable {
		if entry := &unaryOpTable[index]; s == entry.symbol || s == entry.desc {
			return UnaryOp(index)
		}
	}
	return InvalidUnaryOp
}

// ToBinaryOp converts s into a BinaryOp, or returns InvalidBinaryOp if it
// couldn't be converted.  Both the symbol form (e.g. "<<") and the description
// form (e.g. "left_shift") are accepted.
func ToBinaryOp(s string) BinaryOp {
	for index := range binaryOpTable {
		if entry := &binaryOpTable[index]; s == entry.symbol || s == entry.desc {
			return BinaryOp(index)
		}
	}
	return InvalidBinaryOp
}
diff --git a/lib/vdl/opconst/testutil_test.go b/lib/vdl/opconst/testutil_test.go
new file mode 100644
index 0000000..528f0d7
--- /dev/null
+++ b/lib/vdl/opconst/testutil_test.go
@@ -0,0 +1,482 @@
+package opconst
+
+// TODO(toddw): Merge with vdl/testutil_test.go.
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"v.io/v23/vdl"
+)
+
+// CallAndRecover calls the function f and returns the result of recover().
+// This minimizes the scope of the deferred recover, to ensure f is actually the
+// function that paniced.
// CallAndRecover invokes f and returns whatever value, if any, f panicked
// with.  The deferred recover is scoped to this helper alone, which guarantees
// that a non-nil result came from f itself and not from some other function up
// the call stack.  A nil result means f returned normally.
func CallAndRecover(f func()) (result interface{}) {
	defer func() { result = recover() }()
	f()
	return result
}
+
// expectErr checks err against wantstr: a non-empty wantstr must appear as a
// substring of the error's text, while an empty wantstr requires err to be
// nil.  The format/args pair prefixes any failure message.  Reports the
// failure via t.Errorf and returns false on mismatch, true otherwise.
func expectErr(t *testing.T, err error, wantstr string, format string, args ...interface{}) bool {
	msg := fmt.Sprintf(format, args...)
	gotstr := fmt.Sprint(err)
	switch {
	case wantstr != "" && !strings.Contains(gotstr, wantstr):
		t.Errorf(`%s got error %q, want substr %q`, msg, gotstr, wantstr)
		return false
	case wantstr == "" && err != nil:
		t.Errorf(`%s got error %q, want nil`, msg, gotstr)
		return false
	}
	return true
}
+
+func expectPanic(t *testing.T, f func(), wantstr string, format string, args ...interface{}) {
+	got := CallAndRecover(f)
+	gotstr := fmt.Sprint(got)
+	msg := fmt.Sprintf(format, args...)
+	if wantstr != "" && !strings.Contains(gotstr, wantstr) {
+		t.Errorf(`%s got panic %q, want substr %q`, msg, gotstr, wantstr)
+	}
+	if wantstr == "" && got != nil {
+		t.Errorf(`%s got panic %q, want nil`, msg, gotstr)
+	}
+}
+
// expectMismatchedKind asserts that f panics with a message containing
// "mismatched kind"; failures are reported via t.Errorf with no extra context.
func expectMismatchedKind(t *testing.T, f func()) {
	expectPanic(t, f, "mismatched kind", "")
}
+
// Define a bunch of regular Go types used in tests.  The leading "n" marks
// these as named (defined) types, as opposed to the built-in Go types.
type (
	// Scalars
	nInterface  interface{}
	nType       *vdl.Type
	nBool       bool
	nUint8      uint8
	nUint16     uint16
	nUint32     uint32
	nUint64     uint64
	nUint       uint
	nUintptr    uintptr
	nInt8       int8
	nInt16      int16
	nInt32      int32
	nInt64      int64
	nInt        int
	nFloat32    float32
	nFloat64    float64
	nComplex64  complex64
	nComplex128 complex128
	nString     string
	// Arrays
	nArray3Interface  [3]nInterface
	nArray3TypeObject [3]*vdl.Type
	nArray3Bool       [3]bool
	nArray3Uint8      [3]uint8
	nArray3Uint16     [3]uint16
	nArray3Uint32     [3]uint32
	nArray3Uint64     [3]uint64
	nArray3Uint       [3]uint
	nArray3Uintptr    [3]uintptr
	nArray3Int8       [3]int8
	nArray3Int16      [3]int16
	nArray3Int32      [3]int32
	nArray3Int64      [3]int64
	nArray3Int        [3]int
	nArray3Float32    [3]float32
	nArray3Float64    [3]float64
	nArray3Complex64  [3]complex64
	nArray3Complex128 [3]complex128
	nArray3String     [3]string
	// Structs
	nStructInterface  struct{ X nInterface }
	nStructTypeObject struct{ X *vdl.Type }
	nStructBool       struct{ X bool }
	nStructUint8      struct{ X uint8 }
	nStructUint16     struct{ X uint16 }
	nStructUint32     struct{ X uint32 }
	nStructUint64     struct{ X uint64 }
	nStructUint       struct{ X uint }
	nStructUintptr    struct{ X uintptr }
	nStructInt8       struct{ X int8 }
	nStructInt16      struct{ X int16 }
	nStructInt32      struct{ X int32 }
	nStructInt64      struct{ X int64 }
	nStructInt        struct{ X int }
	nStructFloat32    struct{ X float32 }
	nStructFloat64    struct{ X float64 }
	nStructComplex64  struct{ X complex64 }
	nStructComplex128 struct{ X complex128 }
	nStructString     struct{ X string }
	// Slices
	nSliceInterface  []nInterface
	nSliceTypeObject []*vdl.Type
	nSliceBool       []bool
	nSliceUint8      []uint8
	nSliceUint16     []uint16
	nSliceUint32     []uint32
	nSliceUint64     []uint64
	nSliceUint       []uint
	nSliceUintptr    []uintptr
	nSliceInt8       []int8
	nSliceInt16      []int16
	nSliceInt32      []int32
	nSliceInt64      []int64
	nSliceInt        []int
	nSliceFloat32    []float32
	nSliceFloat64    []float64
	nSliceComplex64  []complex64
	nSliceComplex128 []complex128
	nSliceString     []string
	// Sets
	nSetInterface  map[nInterface]struct{}
	nSetTypeObject map[*vdl.Type]struct{}
	nSetBool       map[bool]struct{}
	nSetUint8      map[uint8]struct{}
	nSetUint16     map[uint16]struct{}
	nSetUint32     map[uint32]struct{}
	nSetUint64     map[uint64]struct{}
	nSetUint       map[uint]struct{}
	nSetUintptr    map[uintptr]struct{}
	nSetInt8       map[int8]struct{}
	nSetInt16      map[int16]struct{}
	nSetInt32      map[int32]struct{}
	nSetInt64      map[int64]struct{}
	nSetInt        map[int]struct{}
	nSetFloat32    map[float32]struct{}
	nSetFloat64    map[float64]struct{}
	nSetComplex64  map[complex64]struct{}
	nSetComplex128 map[complex128]struct{}
	nSetString     map[string]struct{}
	// Maps
	nMapInterface  map[nInterface]nInterface
	nMapTypeObject map[*vdl.Type]*vdl.Type
	nMapBool       map[bool]bool
	nMapUint8      map[uint8]uint8
	nMapUint16     map[uint16]uint16
	nMapUint32     map[uint32]uint32
	nMapUint64     map[uint64]uint64
	nMapUint       map[uint]uint
	nMapUintptr    map[uintptr]uintptr
	nMapInt8       map[int8]int8
	nMapInt16      map[int16]int16
	nMapInt32      map[int32]int32
	nMapInt64      map[int64]int64
	nMapInt        map[int]int
	nMapFloat32    map[float32]float32
	nMapFloat64    map[float64]float64
	nMapComplex64  map[complex64]complex64
	nMapComplex128 map[complex128]complex128
	nMapString     map[string]string
	// Recursive types
	nRecurseSelf struct{ X []nRecurseSelf }
	nRecurseA    struct{ B []nRecurseB }
	nRecurseB    struct{ A []nRecurseA }

	// Composite types representing sets of numbers.
	nMapUint64Empty map[nUint64]struct{}
	// NOTE(review): the next three key types look like a copy/paste slip --
	// nMapInt64Empty, nMapFloat64Empty and nMapComplex64Empty all key on
	// nUint64 rather than nInt64/nFloat64/nComplex64, contrary to their
	// names.  Confirm against the tests that use them before changing.
	nMapInt64Empty     map[nUint64]struct{}
	nMapFloat64Empty   map[nUint64]struct{}
	nMapComplex64Empty map[nUint64]struct{}
	nMapUint64Bool     map[nUint64]nBool
	nMapInt64Bool      map[nInt64]nBool
	nMapFloat64Bool    map[nFloat64]nBool
	nMapComplex64Bool  map[nComplex64]nBool
	// Composite types representing sets of strings.
	nMapStringEmpty map[nString]struct{}
	nMapStringBool  map[nString]nBool
	nStructXYZBool  struct{ X, Y, Z nBool }
	nStructWXBool   struct{ W, X nBool }
	// Composite types representing maps of strings to numbers.
	nMapStringUint64    map[nString]nUint64
	nMapStringInt64     map[nString]nInt64
	nMapStringFloat64   map[nString]nFloat64
	nMapStringComplex64 map[nString]nComplex64
	nStructVWXUint64    struct{ V, W, X nUint64 }
	nStructVWXInt64     struct{ V, W, X nInt64 }
	nStructVWXFloat64   struct{ V, W, X nFloat64 }
	nStructVWXComplex64 struct{ V, W, X nComplex64 }
	nStructUVUint64     struct{ U, V nUint64 }
	nStructUVInt64      struct{ U, V nInt64 }
	nStructUVFloat64    struct{ U, V nFloat64 }
	nStructUVComplex64  struct{ U, V nComplex64 }
	// Types that cannot be converted to sets.  We represent sets as
	// map[key]struct{} on the Go side, but don't allow map[key]nEmpty.
	nEmpty           struct{}
	nMapStringnEmpty map[nString]nEmpty
	nStructXYZEmpty  struct{ X, Y, Z struct{} }
	nStructXYZnEmpty struct{ X, Y, Z nEmpty }
)
+
+func recurseSelfType() *vdl.Type {
+	var builder vdl.TypeBuilder
+	n := builder.Named("v.io/v23/vdl.nRecurseSelf")
+	n.AssignBase(builder.Struct().AppendField("X", builder.List().AssignElem(n)))
+	builder.Build()
+	t, err := n.Built()
+	if err != nil {
+		panic(err)
+	}
+	return t
+}
+
+func recurseABTypes() [2]*vdl.Type {
+	var builder vdl.TypeBuilder
+	a := builder.Named("v.io/v23/vdl.nRecurseA")
+	b := builder.Named("v.io/v23/vdl.nRecurseB")
+	a.AssignBase(builder.Struct().AppendField("B", builder.List().AssignElem(b)))
+	b.AssignBase(builder.Struct().AppendField("A", builder.List().AssignElem(a)))
+	builder.Build()
+	aT, err := a.Built()
+	if err != nil {
+		panic(err)
+	}
+	bT, err := b.Built()
+	if err != nil {
+		panic(err)
+	}
+	return [2]*vdl.Type{aT, bT}
+}
+
// recurseAType and recurseBType return the mutually-recursive named types
// nRecurseA and nRecurseB respectively.  Each call rebuilds both types via
// recurseABTypes.
func recurseAType() *vdl.Type { return recurseABTypes()[0] }
func recurseBType() *vdl.Type { return recurseABTypes()[1] }
+
// Define a bunch of *Type types used in tests.  The "TypeN" suffix marks a
// named (defined) type wrapping the corresponding unnamed type.
var (
	// Named scalar types
	boolTypeN = vdl.NamedType("nBool", vdl.BoolType)
	// NOTE(review): nByteType breaks the xxxTypeN naming pattern used by every
	// other variable in this block; consider renaming to byteTypeN if nothing
	// else in the package depends on the current name.
	nByteType       = vdl.NamedType("nByte", vdl.ByteType)
	uint16TypeN     = vdl.NamedType("nUint16", vdl.Uint16Type)
	uint32TypeN     = vdl.NamedType("nUint32", vdl.Uint32Type)
	uint64TypeN     = vdl.NamedType("nUint64", vdl.Uint64Type)
	int16TypeN      = vdl.NamedType("nInt16", vdl.Int16Type)
	int32TypeN      = vdl.NamedType("nInt32", vdl.Int32Type)
	int64TypeN      = vdl.NamedType("nInt64", vdl.Int64Type)
	float32TypeN    = vdl.NamedType("nFloat32", vdl.Float32Type)
	float64TypeN    = vdl.NamedType("nFloat64", vdl.Float64Type)
	complex64TypeN  = vdl.NamedType("nComplex64", vdl.Complex64Type)
	complex128TypeN = vdl.NamedType("nComplex128", vdl.Complex128Type)
	stringTypeN     = vdl.NamedType("nString", vdl.StringType)

	// Composite types representing strings and bytes.
	bytesType   = vdl.ListType(vdl.ByteType)
	bytesTypeN  = vdl.NamedType("nBytes", bytesType)
	bytes3Type  = vdl.ArrayType(3, vdl.ByteType)
	bytes3TypeN = vdl.NamedType("nBytes3", bytes3Type)
	// Composite types representing sequences of numbers.
	array3Uint64Type     = vdl.ArrayType(3, vdl.Uint64Type)
	array3Uint64TypeN    = vdl.NamedType("nArray3Uint64", vdl.ArrayType(3, uint64TypeN))
	array3Int64Type      = vdl.ArrayType(3, vdl.Int64Type)
	array3Int64TypeN     = vdl.NamedType("nArray3Int64", vdl.ArrayType(3, int64TypeN))
	array3Float64Type    = vdl.ArrayType(3, vdl.Float64Type)
	array3Float64TypeN   = vdl.NamedType("nArray3Float64", vdl.ArrayType(3, float64TypeN))
	array3Complex64Type  = vdl.ArrayType(3, vdl.Complex64Type)
	array3Complex64TypeN = vdl.NamedType("nArray3Complex64", vdl.ArrayType(3, complex64TypeN))
	listUint64Type       = vdl.ListType(vdl.Uint64Type)
	listUint64TypeN      = vdl.NamedType("nListUint64", vdl.ListType(uint64TypeN))
	listInt64Type        = vdl.ListType(vdl.Int64Type)
	listInt64TypeN       = vdl.NamedType("nListInt64", vdl.ListType(int64TypeN))
	listFloat64Type      = vdl.ListType(vdl.Float64Type)
	listFloat64TypeN     = vdl.NamedType("nListFloat64", vdl.ListType(float64TypeN))
	listComplex64Type    = vdl.ListType(vdl.Complex64Type)
	listComplex64TypeN   = vdl.NamedType("nListComplex64", vdl.ListType(complex64TypeN))
	// Composite types representing sets of numbers.
	setUint64Type         = vdl.SetType(vdl.Uint64Type)
	setUint64TypeN        = vdl.NamedType("nSetUint64", vdl.SetType(uint64TypeN))
	setInt64Type          = vdl.SetType(vdl.Int64Type)
	setInt64TypeN         = vdl.NamedType("nSetInt64", vdl.SetType(int64TypeN))
	setFloat64Type        = vdl.SetType(vdl.Float64Type)
	setFloat64TypeN       = vdl.NamedType("nSetFloat64", vdl.SetType(float64TypeN))
	setComplex64Type      = vdl.SetType(vdl.Complex64Type)
	setComplex64TypeN     = vdl.NamedType("nSetComplex64", vdl.SetType(complex64TypeN))
	mapUint64BoolType     = vdl.MapType(vdl.Uint64Type, vdl.BoolType)
	mapUint64BoolTypeN    = vdl.NamedType("nMapUint64Bool", vdl.MapType(uint64TypeN, boolTypeN))
	mapInt64BoolType      = vdl.MapType(vdl.Int64Type, vdl.BoolType)
	mapInt64BoolTypeN     = vdl.NamedType("nMapInt64Bool", vdl.MapType(int64TypeN, boolTypeN))
	mapFloat64BoolType    = vdl.MapType(vdl.Float64Type, vdl.BoolType)
	mapFloat64BoolTypeN   = vdl.NamedType("nMapFloat64Bool", vdl.MapType(float64TypeN, boolTypeN))
	mapComplex64BoolType  = vdl.MapType(vdl.Complex64Type, vdl.BoolType)
	mapComplex64BoolTypeN = vdl.NamedType("nMapComplex64Bool", vdl.MapType(complex64TypeN, boolTypeN))
	// Composite types representing sets of strings.
	setStringType      = vdl.SetType(vdl.StringType)
	setStringTypeN     = vdl.NamedType("nSetString", vdl.SetType(stringTypeN))
	mapStringBoolType  = vdl.MapType(vdl.StringType, vdl.BoolType)
	mapStringBoolTypeN = vdl.NamedType("nMapStringBool", vdl.MapType(stringTypeN, boolTypeN))
	structXYZBoolType  = vdl.StructType(vdl.Field{"X", vdl.BoolType}, vdl.Field{"Y", vdl.BoolType}, vdl.Field{"Z", vdl.BoolType})
	structXYZBoolTypeN = vdl.NamedType("nStructXYZBool", vdl.StructType(vdl.Field{"X", boolTypeN}, vdl.Field{"Y", boolTypeN}, vdl.Field{"Z", boolTypeN}))
	structWXBoolType   = vdl.StructType(vdl.Field{"W", vdl.BoolType}, vdl.Field{"X", vdl.BoolType})
	structWXBoolTypeN  = vdl.NamedType("nStructWXBool", vdl.StructType(vdl.Field{"W", boolTypeN}, vdl.Field{"X", boolTypeN}))
	// Composite types representing maps of strings to numbers.
	mapStringUint64Type     = vdl.MapType(vdl.StringType, vdl.Uint64Type)
	mapStringUint64TypeN    = vdl.NamedType("nMapStringUint64", vdl.MapType(stringTypeN, uint64TypeN))
	mapStringInt64Type      = vdl.MapType(vdl.StringType, vdl.Int64Type)
	mapStringInt64TypeN     = vdl.NamedType("nMapStringInt64", vdl.MapType(stringTypeN, int64TypeN))
	mapStringFloat64Type    = vdl.MapType(vdl.StringType, vdl.Float64Type)
	mapStringFloat64TypeN   = vdl.NamedType("nMapStringFloat64", vdl.MapType(stringTypeN, float64TypeN))
	mapStringComplex64Type  = vdl.MapType(vdl.StringType, vdl.Complex64Type)
	mapStringComplex64TypeN = vdl.NamedType("nMapStringComplex64", vdl.MapType(stringTypeN, complex64TypeN))
	structVWXUint64Type     = vdl.StructType(vdl.Field{"V", vdl.Uint64Type}, vdl.Field{"W", vdl.Uint64Type}, vdl.Field{"X", vdl.Uint64Type})
	structVWXUint64TypeN    = vdl.NamedType("nStructVWXUint64", vdl.StructType(vdl.Field{"V", uint64TypeN}, vdl.Field{"W", uint64TypeN}, vdl.Field{"X", uint64TypeN}))
	structVWXInt64Type      = vdl.StructType(vdl.Field{"V", vdl.Int64Type}, vdl.Field{"W", vdl.Int64Type}, vdl.Field{"X", vdl.Int64Type})
	structVWXInt64TypeN     = vdl.NamedType("nStructVWXInt64", vdl.StructType(vdl.Field{"V", int64TypeN}, vdl.Field{"W", int64TypeN}, vdl.Field{"X", int64TypeN}))
	structVWXFloat64Type    = vdl.StructType(vdl.Field{"V", vdl.Float64Type}, vdl.Field{"W", vdl.Float64Type}, vdl.Field{"X", vdl.Float64Type})
	structVWXFloat64TypeN   = vdl.NamedType("nStructVWXFloat64", vdl.StructType(vdl.Field{"V", float64TypeN}, vdl.Field{"W", float64TypeN}, vdl.Field{"X", float64TypeN}))
	structVWXComplex64Type  = vdl.StructType(vdl.Field{"V", vdl.Complex64Type}, vdl.Field{"W", vdl.Complex64Type}, vdl.Field{"X", vdl.Complex64Type})
	structVWXComplex64TypeN = vdl.NamedType("nStructVWXComplex64", vdl.StructType(vdl.Field{"V", complex64TypeN}, vdl.Field{"W", complex64TypeN}, vdl.Field{"X", complex64TypeN}))
	structUVUint64Type      = vdl.StructType(vdl.Field{"U", vdl.Uint64Type}, vdl.Field{"V", vdl.Uint64Type})
	structUVUint64TypeN     = vdl.NamedType("nStructUVUint64", vdl.StructType(vdl.Field{"U", uint64TypeN}, vdl.Field{"V", uint64TypeN}))
	structUVInt64Type       = vdl.StructType(vdl.Field{"U", vdl.Int64Type}, vdl.Field{"V", vdl.Int64Type})
	structUVInt64TypeN      = vdl.NamedType("nStructUVInt64", vdl.StructType(vdl.Field{"U", int64TypeN}, vdl.Field{"V", int64TypeN}))
	structUVFloat64Type     = vdl.StructType(vdl.Field{"U", vdl.Float64Type}, vdl.Field{"V", vdl.Float64Type})
	structUVFloat64TypeN    = vdl.NamedType("nStructUVFloat64", vdl.StructType(vdl.Field{"U", float64TypeN}, vdl.Field{"V", float64TypeN}))
	structUVComplex64Type   = vdl.StructType(vdl.Field{"U", vdl.Complex64Type}, vdl.Field{"V", vdl.Complex64Type})
	structUVComplex64TypeN  = vdl.NamedType("nStructUVComplex64", vdl.StructType(vdl.Field{"U", complex64TypeN}, vdl.Field{"V", complex64TypeN}))

	// Single-field numeric struct, used in operator error tests.
	structAIntType  = vdl.StructType(vdl.Field{"A", vdl.Int64Type})
	structAIntTypeN = vdl.NamedType("nStructA", structAIntType)

	// Types that cannot be converted to sets.  Although we represent sets as
	// map[key]struct{} on the Go side, we don't allow these as general
	// conversions for val.Value.
	emptyType           = vdl.StructType()
	emptyTypeN          = vdl.NamedType("nEmpty", vdl.StructType())
	mapStringEmptyType  = vdl.MapType(vdl.StringType, emptyType)
	mapStringEmptyTypeN = vdl.NamedType("nMapStringEmpty", vdl.MapType(stringTypeN, emptyTypeN))
	structXYZEmptyType  = vdl.StructType(vdl.Field{"X", emptyType}, vdl.Field{"Y", emptyType}, vdl.Field{"Z", emptyType})
	structXYZEmptyTypeN = vdl.NamedType("nStructXYZEmpty", vdl.StructType(vdl.Field{"X", emptyTypeN}, vdl.Field{"Y", emptyTypeN}, vdl.Field{"Z", emptyTypeN}))
)
+
// Helpers that build a single *vdl.Value of type t holding the given scalar.
// Each starts from the zero value of t and assigns x; bytes3Value uses
// CopyBytes since fixed-length arrays cannot be assigned a new length.
func anyValue(x *vdl.Value) *vdl.Value                  { return vdl.ZeroValue(vdl.AnyType).Assign(x) }
func boolValue(t *vdl.Type, x bool) *vdl.Value          { return vdl.ZeroValue(t).AssignBool(x) }
func byteValue(t *vdl.Type, x byte) *vdl.Value          { return vdl.ZeroValue(t).AssignByte(x) }
func uintValue(t *vdl.Type, x uint64) *vdl.Value        { return vdl.ZeroValue(t).AssignUint(x) }
func intValue(t *vdl.Type, x int64) *vdl.Value          { return vdl.ZeroValue(t).AssignInt(x) }
func floatValue(t *vdl.Type, x float64) *vdl.Value      { return vdl.ZeroValue(t).AssignFloat(x) }
func complexValue(t *vdl.Type, x complex128) *vdl.Value { return vdl.ZeroValue(t).AssignComplex(x) }
func stringValue(t *vdl.Type, x string) *vdl.Value      { return vdl.ZeroValue(t).AssignString(x) }
func bytesValue(t *vdl.Type, x string) *vdl.Value       { return vdl.ZeroValue(t).AssignBytes([]byte(x)) }
func bytes3Value(t *vdl.Type, x string) *vdl.Value      { return vdl.ZeroValue(t).CopyBytes([]byte(x)) }
+
+func setStringValue(t *vdl.Type, x ...string) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, vx := range x {
+		key := vdl.ZeroValue(t.Key()).AssignString(vx)
+		res.AssignSetKey(key)
+	}
+	return res
+}
+
// sb is a (string, bool) pair used to populate map and struct test values.
type sb struct {
	s string
	b bool
}
+
+func mapStringBoolValue(t *vdl.Type, x ...sb) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, sb := range x {
+		key := vdl.ZeroValue(t.Key()).AssignString(sb.s)
+		val := vdl.ZeroValue(t.Elem()).AssignBool(sb.b)
+		res.AssignMapIndex(key, val)
+	}
+	return res
+}
+
+func mapStringEmptyValue(t *vdl.Type, x ...string) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, vx := range x {
+		key := vdl.ZeroValue(t.Key()).AssignString(vx)
+		val := vdl.ZeroValue(t.Elem())
+		res.AssignMapIndex(key, val)
+	}
+	return res
+}
+
+func structBoolValue(t *vdl.Type, x ...sb) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, sb := range x {
+		_, index := t.FieldByName(sb.s)
+		res.StructField(index).AssignBool(sb.b)
+	}
+	return res
+}
+
+func assignNum(v *vdl.Value, num float64) *vdl.Value {
+	switch v.Kind() {
+	case vdl.Byte:
+		v.AssignByte(byte(num))
+	case vdl.Uint16, vdl.Uint32, vdl.Uint64:
+		v.AssignUint(uint64(num))
+	case vdl.Int16, vdl.Int32, vdl.Int64:
+		v.AssignInt(int64(num))
+	case vdl.Float32, vdl.Float64:
+		v.AssignFloat(num)
+	case vdl.Complex64, vdl.Complex128:
+		v.AssignComplex(complex(num, 0))
+	default:
+		panic(fmt.Errorf("val: assignNum unhandled %v", v.Type()))
+	}
+	return v
+}
+
+func seqNumValue(t *vdl.Type, x ...float64) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	if t.Kind() == vdl.List {
+		res.AssignLen(len(x))
+	}
+	for index, n := range x {
+		assignNum(res.Index(index), n)
+	}
+	return res
+}
+
+func setNumValue(t *vdl.Type, x ...float64) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, n := range x {
+		res.AssignSetKey(assignNum(vdl.ZeroValue(t.Key()), n))
+	}
+	return res
+}
+
// nb is a (number, bool) pair used to populate numeric-keyed map test values.
type nb struct {
	n float64
	b bool
}
+
+func mapNumBoolValue(t *vdl.Type, x ...nb) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, nb := range x {
+		key := assignNum(vdl.ZeroValue(t.Key()), nb.n)
+		val := vdl.ZeroValue(t.Elem()).AssignBool(nb.b)
+		res.AssignMapIndex(key, val)
+	}
+	return res
+}
+
// sn is a (string, number) pair used to populate string-keyed map and struct
// test values.
type sn struct {
	s string
	n float64
}
+
+func mapStringNumValue(t *vdl.Type, x ...sn) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, sn := range x {
+		key := vdl.ZeroValue(t.Key()).AssignString(sn.s)
+		val := assignNum(vdl.ZeroValue(t.Elem()), sn.n)
+		res.AssignMapIndex(key, val)
+	}
+	return res
+}
+
+func structNumValue(t *vdl.Type, x ...sn) *vdl.Value {
+	res := vdl.ZeroValue(t)
+	for _, sn := range x {
+		_, index := t.FieldByName(sn.s)
+		assignNum(res.StructField(index), sn.n)
+	}
+	return res
+}
diff --git a/lib/vdl/parse/const.go b/lib/vdl/parse/const.go
new file mode 100644
index 0000000..d141e01
--- /dev/null
+++ b/lib/vdl/parse/const.go
@@ -0,0 +1,156 @@
+package parse
+
+import (
+	"fmt"
+	"math/big"
+	"strconv"
+)
+
// ConstExpr is the interface for all nodes in an expression.
type ConstExpr interface {
	// String returns a human-readable representation of the expression.
	String() string
	// Pos returns the position of the start of the expression in the source.
	Pos() Pos
}

// ConstLit represents scalar literals in const expressions.  The supported
// types for Lit are:
//   string     - Represents all string constants.
//   *big.Int   - Represents all integer constants.
//   *big.Rat   - Represents all rational constants.
//   *BigImag   - Represents all imaginary constants.
type ConstLit struct {
	Lit interface{} // the literal value, one of the types listed above
	P   Pos         // source position of the literal
}

// BigImag represents a literal imaginary number.
type BigImag big.Rat

// ConstCompositeLit represents composite literals in const expressions.
type ConstCompositeLit struct {
	Type   Type    // explicit type of the literal, nil if elided
	KVList []KVLit // key/value entries, in source order
	P      Pos
}

// KVLit represents a key/value literal in composite literals.
type KVLit struct {
	Key   ConstExpr // nil for positional (key-less) entries
	Value ConstExpr
}

// ConstNamed represents named references to other consts.
type ConstNamed struct {
	Name string // possibly package-qualified name being referenced
	P    Pos
}

// ConstIndexed represents an index operation on a composite type.
type ConstIndexed struct {
	Expr      *ConstNamed // the named const being indexed
	IndexExpr ConstExpr   // the index expression inside the brackets
	P         Pos
}

// ConstTypeConv represents explicit type conversions.
type ConstTypeConv struct {
	Type Type      // target type of the conversion
	Expr ConstExpr // expression being converted
	P    Pos
}

// ConstTypeObject represents typeobject; a type used as a value.
type ConstTypeObject struct {
	Type Type
	P    Pos
}

// ConstUnaryOp represents all unary operations.
type ConstUnaryOp struct {
	Op   string // operator symbol, e.g. "!" or "-"
	Expr ConstExpr
	P    Pos
}

// ConstBinaryOp represents all binary operations.
type ConstBinaryOp struct {
	Op    string // operator symbol, e.g. "+" or "<<"
	Lexpr ConstExpr
	Rexpr ConstExpr
	P     Pos
}

// ConstDef represents a user-defined named const.
type ConstDef struct {
	NamePos        // name, position and documentation of the const
	Expr    ConstExpr
}
+
+// cvString returns a human-readable string representing the const value.
+func cvString(val interface{}) string {
+	switch tv := val.(type) {
+	case string:
+		return strconv.Quote(tv)
+	case *big.Int:
+		return tv.String()
+	case *big.Rat:
+		if tv.IsInt() {
+			return tv.Num().String() + ".0"
+		}
+		fv, _ := tv.Float64()
+		return strconv.FormatFloat(fv, 'g', -1, 64)
+	case *BigImag:
+		return cvString((*big.Rat)(tv)) + "i"
+	default:
+		panic(fmt.Errorf("vdl: unhandled const type %T value %v", val, val))
+	}
+}
+
// String implementations for each ConstExpr node.  Each returns a source-like
// textual form of the expression; composite forms recurse into their children.
func (c *ConstLit) String() string {
	return cvString(c.Lit)
}
func (c *ConstCompositeLit) String() string {
	var s string
	if c.Type != nil {
		// Prefix the explicit type when present, e.g. "T{...}".
		s += c.Type.String()
	}
	s += "{"
	for index, kv := range c.KVList {
		if index > 0 {
			s += ", "
		}
		if kv.Key != nil {
			s += kv.Key.String() + ": "
		}
		s += kv.Value.String()
	}
	return s + "}"
}
func (c *ConstNamed) String() string {
	return c.Name
}
func (c *ConstIndexed) String() string {
	return c.Expr.String() + "[" + c.IndexExpr.String() + "]"
}
func (c *ConstTypeConv) String() string {
	return c.Type.String() + "(" + c.Expr.String() + ")"
}
func (c *ConstTypeObject) String() string {
	return c.Type.String()
}
func (c *ConstUnaryOp) String() string {
	return c.Op + c.Expr.String()
}
func (c *ConstBinaryOp) String() string {
	// Parenthesized so nested operations print unambiguously.
	return "(" + c.Lexpr.String() + c.Op + c.Rexpr.String() + ")"
}
func (c *ConstDef) String() string { return fmt.Sprintf("%+v", *c) }

// Pos implementations simply return the stored source position.
func (c *ConstLit) Pos() Pos          { return c.P }
func (c *ConstCompositeLit) Pos() Pos { return c.P }
func (c *ConstNamed) Pos() Pos        { return c.P }
func (c *ConstIndexed) Pos() Pos      { return c.P }
func (c *ConstTypeConv) Pos() Pos     { return c.P }
func (c *ConstTypeObject) Pos() Pos   { return c.P }
func (c *ConstUnaryOp) Pos() Pos      { return c.P }
func (c *ConstBinaryOp) Pos() Pos     { return c.P }
diff --git a/lib/vdl/parse/grammar.y b/lib/vdl/parse/grammar.y
new file mode 100644
index 0000000..6d19942
--- /dev/null
+++ b/lib/vdl/parse/grammar.y
@@ -0,0 +1,646 @@
+// Yacc grammar file for the veyron VDL language.
+// http://goto/veyron:vdl
+//
+// Similar to Go, the formal grammar uses semicolons ';' as terminators, but
+// idiomatic usage may omit most semicolons using the following rules:
+//   1) During the tokenization phase, semicolons are always auto-inserted at
+//      the end of each line after certain tokens.  This is implemented in
+//      the lexer via the autoSemi function.
+//   2) Semicolons may be omitted before a closing ')' or '}'.  This is
+//      implemented via the osemi rule below.
+//
+// To generate the grammar.go source file containing the parser, run
+// grammar_gen.sh in this same directory, or run go generate on this package.
+
+////////////////////////////////////////////////////////////////////////
+// Declarations section.
+%{
+// This grammar.y.go file was auto-generated by yacc from grammar.y.
+
+package parse
+
+import (
+  "math/big"
+  "strings"
+)
+
+// intPos, ratPos and imagPos pair a literal value with its source position;
+// positions are tracked manually since Go yacc doesn't pass positional
+// information (see the %union comment below).
+type intPos struct {
+  int *big.Int
+  pos Pos
+}
+
+// ratPos is a rational literal together with its source position.
+type ratPos struct {
+  rat *big.Rat
+  pos Pos
+}
+
+// imagPos is an imaginary literal together with its source position.
+type imagPos struct {
+  imag *BigImag
+  pos  Pos
+}
+
+// typeListToStrList converts a slice of Type to a slice of StringPos.  Each
+// type must be a TypeNamed with an empty PackageName, otherwise errors are
+// reported, and ok=false is returned.
+func typeListToStrList(yylex yyLexer, typeList []Type) (strList []StringPos, ok bool) {
+  ok = true
+  for _, t := range typeList {
+    var tn *TypeNamed
+    if tn, ok = t.(*TypeNamed); !ok {
+      lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names)", t.String())
+      return
+    }
+    if strings.ContainsRune(tn.Name, '.') {
+      // A dotted name is a package-qualified reference, not a variable name.
+      ok = false
+      lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names)", tn.Name)
+      return
+    }
+    strList = append(strList, StringPos{tn.Name, tn.P})
+  }
+  return
+}
+%}
+
+// This union is turned into the struct type yySymType.  Most symbols include
+// positional information; this is necessary since Go yacc doesn't support
+// passing positional information, so we need to track it ourselves.
+%union {
+  pos        Pos
+  strpos     StringPos
+  intpos     intPos
+  ratpos     ratPos
+  imagpos    imagPos
+  namepos    NamePos
+  nameposes  []NamePos
+  typeexpr   Type
+  typeexprs  []Type
+  fields     []*Field
+  iface      *Interface
+  constexpr  ConstExpr
+  constexprs []ConstExpr
+  complit    *ConstCompositeLit
+  kvlit      KVLit
+  kvlits     []KVLit
+  errordef   ErrorDef
+}
+
+// Terminal tokens.  We leave single-char tokens as-is using their ascii code as
+// their id, to make the grammar more readable; multi-char tokens get their own
+// id.  The start* tokens are dummy tokens to kick off the parse.
+%token            startFileImports startFile startConfigImports startConfig
+%token            startExprs
+%token <pos>      ';' ':' ',' '.' '(' ')' '[' ']' '{' '}' '<' '>' '='
+%token <pos>      '!' '+' '-' '*' '/' '%' '|' '&' '^' '?'
+%token <pos>      tOROR tANDAND tLE tGE tNE tEQEQ tLSH tRSH
+%token <pos>      tCONST tENUM tERROR tIMPORT tINTERFACE tMAP tPACKAGE
+%token <pos>      tSET tSTREAM tSTRUCT tTYPE tTYPEOBJECT tUNION
+%token <strpos>   tIDENT tSTRLIT
+%token <intpos>   tINTLIT
+%token <ratpos>   tRATLIT
+%token <imagpos>  tIMAGLIT
+
+// Labeled rules holding typed values.
+%type <strpos>     nameref dotnameref
+%type <namepos>    label_spec
+%type <nameposes>  label_spec_list
+%type <typeexpr>   type type_no_typeobject otype
+%type <typeexprs>  type_comma_list streamargs
+%type <fields>     field_spec_list field_spec named_arg_list inargs outargs
+%type <iface>      iface_item_list iface_item
+%type <constexpr>  expr unary_expr operand
+%type <constexprs> tags expr_comma_list
+%type <complit>    comp_lit
+%type <kvlit>      kv_lit
+%type <kvlits>     kv_lit_list
+%type <errordef>   error_details error_detail_list error_detail
+
+// There are 5 precedence levels for operators, all left-associative, just like
+// Go.  Lines are listed in order of increasing precedence.
+%left tOROR
+%left tANDAND
+%left '<' '>' tLE tGE tNE tEQEQ
+%left '+' '-' '|' '^'
+%left '*' '/' '%' '&' tLSH tRSH
+
+// notPackage and notConfig are dummy precedence markers, referenced via
+// %prec by the empty alternatives of the package and config rules below.
+%left notPackage notConfig
+
+%start start
+
+%%
+////////////////////////////////////////////////////////////////////////
+// Rules section.
+
+// Note that vdl files and config files use an identical grammar, other than the
+// initial package or config clause respectively.  Error checking for config
+// files that include error, type or interface definitions occurs afterwards, to
+// improve error reporting.
+start:
+  startFileImports   package imports gen_imports_eof
+| startFile          package imports defs
+| startConfigImports config imports gen_imports_eof
+| startConfig        config imports defs
+| startExprs         expr_comma_list ';'
+  { lexStoreExprs(yylex, $2) }
+
+// Dummy rule to terminate the parse after the imports, regardless of whether
+// there are any defs.  Defs always start with either the tTYPE, tCONST or
+// tERROR tokens, and the rule handles all cases - either there's no trailing
+// text (the empty case, which would have resulted in EOF anyways), or there's
+// one or more defs, where we need to force an EOF.
+gen_imports_eof:
+  // Empty.
+  { lexGenEOF(yylex) }
+| tTYPE
+  { lexGenEOF(yylex) }
+| tCONST
+  { lexGenEOF(yylex) }
+| tERROR
+  { lexGenEOF(yylex) }
+
+// PACKAGE
+package:
+  %prec notPackage
+  { lexPosErrorf(yylex, Pos{}, "vdl file must start with package clause") }
+| tPACKAGE tIDENT ';'
+  { lexVDLFile(yylex).PackageDef = NamePos{Name:$2.String, Pos:$2.Pos} }
+
+// CONFIG
+config:
+  %prec notConfig
+  { lexPosErrorf(yylex, Pos{}, "config file must start with config clause") }
+| tIDENT '=' expr ';'
+  {
+    // We allow "config" as an identifier; it is not a keyword.  So we check
+    // manually to make sure the syntax is correct.
+    if $1.String != "config" {
+      lexPosErrorf(yylex, $1.Pos, "config file must start with config clause")
+      return 1 // Any non-zero code indicates an error
+    }
+    file := lexVDLFile(yylex)
+    file.PackageDef = NamePos{Name:"config", Pos:$1.Pos}
+    file.ConstDefs = []*ConstDef{{Expr:$3}}
+  }
+
+// IMPORTS
+imports:
+  // Empty.
+| imports import ';'
+
+import:
+  tIMPORT '(' ')'
+| tIMPORT '(' import_spec_list osemi ')'
+| tIMPORT import_spec
+
+import_spec_list:
+  import_spec
+| import_spec_list ';' import_spec
+
+import_spec:
+  tSTRLIT
+  {
+    imps := &lexVDLFile(yylex).Imports
+    *imps = append(*imps, &Import{Path:$1.String, NamePos:NamePos{Pos:$1.Pos}})
+  }
+| tIDENT tSTRLIT
+  {
+    imps := &lexVDLFile(yylex).Imports
+    *imps = append(*imps, &Import{Path:$2.String, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+  }
+
+// DEFINITIONS
+defs:
+  // Empty.
+| defs type_def ';'
+| defs const_def ';'
+| defs error_def ';'
+
+type_def:
+  tTYPE '(' ')'
+| tTYPE '(' type_spec_list osemi ')'
+| tTYPE type_spec
+| tTYPE interface_spec
+
+const_def:
+  tCONST '(' ')'
+| tCONST '(' const_spec_list osemi ')'
+| tCONST const_spec
+
+error_def:
+  tERROR '(' ')'
+| tERROR '(' error_spec_list osemi ')'
+| tERROR error_spec
+
+// TYPE DEFINITIONS
+type_spec_list:
+  type_spec
+| type_spec_list ';' type_spec
+
+type_spec:
+  tIDENT type
+  {
+    tds := &lexVDLFile(yylex).TypeDefs
+    *tds = append(*tds, &TypeDef{Type:$2, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+  }
+
+// The type_no_typeobject rule is necessary to avoid a shift/reduce conflict
+// between type conversions and typeobject const expressions.  E.g.
+//   type(expr)       // type conversion
+//   typeobject(type) // typeobject const expression
+//
+// We've chosen similar syntax to make it easier for the user to remember how to
+// use the feature, but since "typeobject" is itself a type, there is a problem.
+// We resolve the conflict by restricting the type conversion to the rule:
+//   type_no_typeobject '(' expr ')'
+//
+// Note that if we wanted to add general-purpose functions with the func(expr)
+// syntax, we'll need to pull nameref out of type_no_typeobject, and parse both
+// func(expr) and nameref(expr) into a generic structure.  We can't use that
+// same mechanism for typeobject, since the thing inside the parens is a value
+// expression for type conversions, but a type expression for typeobject.
+type_no_typeobject:
+  nameref
+  { $$ = &TypeNamed{Name:$1.String, P:$1.Pos} }
+| tERROR // Special-case to allow the "error" keyword as a named type.
+  { $$ = &TypeNamed{Name:"error", P:$1} }
+| '[' tINTLIT ']' type
+  { $$ = &TypeArray{Len:int($2.int.Int64()), Elem:$4, P:$1} }
+| '[' ']' type
+  { $$ = &TypeList{Elem:$3, P:$1} }
+| tENUM '{' label_spec_list osemi '}'
+  { $$ = &TypeEnum{Labels:$3, P:$1} }
+| tSET '[' type ']'
+  { $$ = &TypeSet{Key:$3, P:$1} }
+| tMAP '[' type ']' type
+  { $$ = &TypeMap{Key:$3, Elem:$5, P:$1} }
+| tSTRUCT '{' field_spec_list osemi '}'
+  { $$ = &TypeStruct{Fields:$3, P:$1} }
+| tSTRUCT '{' '}'
+  { $$ = &TypeStruct{P:$1} }
+| tUNION '{' field_spec_list osemi '}'
+  { $$ = &TypeUnion{Fields:$3, P:$1} }
+| tUNION '{' '}'
+  { $$ = &TypeUnion{P:$1} }
+| '?' type
+  { $$ = &TypeOptional{Base:$2, P:$1} }
+
+// The type rule expands to all the actual types, including typeobject.
+type:
+  type_no_typeobject
+  { $$ = $1}
+| tTYPEOBJECT
+  { $$ = &TypeNamed{Name:"typeobject", P:$1} }
+
+label_spec_list:
+  label_spec
+  { $$ = []NamePos{$1} }
+| label_spec_list ';' label_spec
+  { $$ = append($1, $3) }
+
+label_spec:
+  tIDENT
+  { $$ = NamePos{Name:$1.String, Pos:$1.Pos} }
+
+field_spec_list:
+  field_spec
+  { $$ = $1 }
+| field_spec_list ';' field_spec
+  { $$ = append($1, $3...) }
+
+// The field_spec rule is intended to capture the following patterns:
+//    var type
+//    var0, var1, var2 type
+// where var* refers to a variable name, and type refers to a type.  Each var
+// is expressed as an identifier.  An oddity here is that we use a type_list to
+// capture the list of variables rather than using a list of IDENTS.  This means
+// the grammar accepts invalid constructions, and we must validate afterwards.
+//
+// We do this to avoid a LALR reduce/reduce conflict with function arguments.
+// The problem is exhibited by the in-args of these two functions, where func1
+// has three args respectively named A, B, C all of type t1, and func2 has three
+// args with name and type t2, t3 and t4 respectively.  The func1 style is
+// captured by field_spec in named_arg_list, while the func2 style is captured
+// by type_list in args.
+//   func1(A, B, C t1)
+//   func2(t2, t3, t4)
+//
+// If we used an ident_list to capture "A, B, C" in func1, but used a type_list
+// to capture "t2, t3, t4" in func2, we'd have a reduce/reduce conflict since
+// yacc cannot determine whether to reduce as an ident_list or as a type_list;
+// we don't know until we've reached token t1 in func1, or token ')' in func2.
+//
+// The fix can be considered both beautiful and a huge hack.  To avoid the
+// conflict we force both forms to use type_list to capture both "A, B, C" and
+// "t2, t3, t4".  This avoids the conflict since we're now always reducing via
+// type_list, but allows invalid constructions like "[]int, []int []int".  So we
+// validate in the action and throw errors.
+//
+// An alternate fix would have been to remove the IDENT case from the type rule,
+// use ident_list to capture both cases, and manually "expand" the grammar to
+// distinguish the cases appropriately.  That would ensure we don't allow
+// constructions like "int, int int" in the grammar itself, but would lead to a
+// much more complicated grammar.  As a bonus, with the type_list solution we
+// can give better error messages.
+// field_spec validates afterwards: the vars captured as a type_comma_list
+// must each reduce to a plain (undotted) name, otherwise report an error.
+field_spec:
+  type_comma_list type
+  {
+    if names, ok := typeListToStrList(yylex, $1); ok {
+      for _, n := range names {
+        $$ = append($$, &Field{Type:$2, NamePos:NamePos{Name:n.String, Pos:n.Pos}})
+      }
+    } else {
+      lexPosErrorf(yylex, $2.Pos(), "perhaps you forgot a comma before %q?", $2.String())
+    }
+  }
+
+type_comma_list:
+  type
+  { $$ = []Type{$1} }
+| type_comma_list ',' type
+  { $$ = append($1, $3) }
+
+// INTERFACE DEFINITIONS
+interface_spec:
+  tIDENT tINTERFACE '{' '}'
+  {
+    ifs := &lexVDLFile(yylex).Interfaces
+    *ifs = append(*ifs, &Interface{NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+  }
+| tIDENT tINTERFACE '{' iface_item_list osemi '}'
+  {
+    $4.Name, $4.Pos = $1.String, $1.Pos
+    ifs := &lexVDLFile(yylex).Interfaces
+    *ifs = append(*ifs, $4)
+  }
+
+iface_item_list:
+  iface_item
+  { $$ = $1 }
+| iface_item_list ';' iface_item
+  {
+    $1.Embeds = append($1.Embeds, $3.Embeds...)
+    $1.Methods = append($1.Methods, $3.Methods...)
+    $$ = $1
+  }
+
+iface_item:
+  tIDENT inargs streamargs outargs tags
+  { $$ = &Interface{Methods: []*Method{{InArgs:$2, InStream:$3[0], OutStream:$3[1], OutArgs:$4, Tags:$5, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}}}} }
+| nameref
+  { $$ = &Interface{Embeds: []*NamePos{{Name:$1.String, Pos:$1.Pos}}} }
+
+inargs:
+  '(' ')'
+  { $$ = nil }
+| '(' named_arg_list ocomma ')'
+  { $$ = $2 }
+| '(' type_comma_list ocomma ')'
+  // Just like Go, we allow a list of types without variable names.  See the
+  // field_spec rule for a workaround to avoid a reduce/reduce conflict.
+  {
+    for _, t := range $2 {
+      $$ = append($$, &Field{Type:t, NamePos:NamePos{Pos:t.Pos()}})
+    }
+  }
+
+// The named_arg_list rule is just like the field_spec_list, but uses comma ','
+// as a delimiter rather than semicolon ';'.
+named_arg_list:
+  field_spec
+  { $$ = $1 }
+| named_arg_list ',' field_spec
+  { $$ = append($1, $3...) }
+
+// The outargs use special syntax to denote the error associated with each
+// method.  For parsing we accept these forms:
+//  error
+//  (string | error)
+//  (a, b string, c bool | error)
+//
+// TODO(toddw): Improve parser syntax errors.
+outargs:
+  tERROR
+  { $$ = nil }
+| '(' named_arg_list ocomma '|' tERROR ')'
+  { $$ = $2 }
+| '(' type_comma_list ocomma '|' tERROR ')'
+  // Just like Go, we allow a list of types without variable names.  See the
+  // field_spec rule for a workaround to avoid a reduce/reduce conflict.
+  {
+    for _, t := range $2 {
+      $$ = append($$, &Field{Type:t, NamePos:NamePos{Pos:t.Pos()}})
+    }
+  }
+
+// streamargs captures the optional stream<...> clause of a method.  The
+// result is always a two-element slice {in-stream type, out-stream type},
+// with nil entries for positions that were omitted.
+streamargs:
+  // Empty.
+  { $$ = []Type{nil, nil} }
+| tSTREAM '<' '>'
+  { $$ = []Type{nil, nil} }
+| tSTREAM '<' type '>'
+  { $$ = []Type{$3, nil} }
+| tSTREAM '<' type ',' type '>'
+  { $$ = []Type{$3, $5} }
+
+tags:
+  // Empty.
+  { $$ = nil }
+| '{' '}'
+  { $$ = nil }
+| '{' expr_comma_list ocomma '}'
+  { $$ = $2 }
+
+expr_comma_list:
+  expr
+  { $$ = []ConstExpr{$1} }
+| expr_comma_list ',' expr
+  { $$ = append($1, $3) }
+
+// CONST DEFINITIONS
+const_spec_list:
+  const_spec
+| const_spec_list ';' const_spec
+
+const_spec:
+  tIDENT '=' expr
+  {
+    cds := &lexVDLFile(yylex).ConstDefs
+    *cds = append(*cds, &ConstDef{Expr:$3, NamePos:NamePos{Name:$1.String, Pos:$1.Pos}})
+  }
+
+expr:
+  unary_expr
+  { $$ = $1 }
+| expr tOROR expr
+  { $$ = &ConstBinaryOp{"||", $1, $3, $2} }
+| expr tANDAND expr
+  { $$ = &ConstBinaryOp{"&&", $1, $3, $2} }
+| expr '<' expr
+  { $$ = &ConstBinaryOp{"<", $1, $3, $2} }
+| expr '>' expr
+  { $$ = &ConstBinaryOp{">", $1, $3, $2} }
+| expr tLE expr
+  { $$ = &ConstBinaryOp{"<=", $1, $3, $2} }
+| expr tGE expr
+  { $$ = &ConstBinaryOp{">=", $1, $3, $2} }
+| expr tNE expr
+  { $$ = &ConstBinaryOp{"!=", $1, $3, $2} }
+| expr tEQEQ expr
+  { $$ = &ConstBinaryOp{"==", $1, $3, $2} }
+| expr '+' expr
+  { $$ = &ConstBinaryOp{"+", $1, $3, $2} }
+| expr '-' expr
+  { $$ = &ConstBinaryOp{"-", $1, $3, $2} }
+| expr '*' expr
+  { $$ = &ConstBinaryOp{"*", $1, $3, $2} }
+| expr '/' expr
+  { $$ = &ConstBinaryOp{"/", $1, $3, $2} }
+| expr '%' expr
+  { $$ = &ConstBinaryOp{"%", $1, $3, $2} }
+| expr '|' expr
+  { $$ = &ConstBinaryOp{"|", $1, $3, $2} }
+| expr '&' expr
+  { $$ = &ConstBinaryOp{"&", $1, $3, $2} }
+| expr '^' expr
+  { $$ = &ConstBinaryOp{"^", $1, $3, $2} }
+| expr tLSH expr
+  { $$ = &ConstBinaryOp{"<<", $1, $3, $2} }
+| expr tRSH expr
+  { $$ = &ConstBinaryOp{">>", $1, $3, $2} }
+
+unary_expr:
+  operand
+  { $$ = $1 }
+| '!' unary_expr
+  { $$ = &ConstUnaryOp{"!", $2, $1} }
+| '+' unary_expr
+  { $$ = &ConstUnaryOp{"+", $2, $1} }
+| '-' unary_expr
+  { $$ = &ConstUnaryOp{"-", $2, $1} }
+| '^' unary_expr
+  { $$ = &ConstUnaryOp{"^", $2, $1} }
+| type_no_typeobject '(' expr ')'
+  { $$ = &ConstTypeConv{$1, $3, $1.Pos()} }
+| tTYPEOBJECT '(' type ')'
+  { $$ = &ConstTypeObject{$3, $1} }
+// TODO(bprosnitz) Add .real() and .imag() for complex.
+
+operand:
+  tSTRLIT
+  { $$ = &ConstLit{$1.String, $1.Pos} }
+| tINTLIT
+  { $$ = &ConstLit{$1.int, $1.pos} }
+| tRATLIT
+  { $$ = &ConstLit{$1.rat, $1.pos} }
+| tIMAGLIT
+  { $$ = &ConstLit{$1.imag, $1.pos} }
+| nameref
+  { $$ = &ConstNamed{$1.String, $1.Pos} }
+| comp_lit
+  { $$ = $1 }
+| comp_lit '.' tIDENT
+  { lexPosErrorf(yylex, $2, "cannot apply selector operator to unnamed constant")}
+| comp_lit '[' expr ']'
+  { lexPosErrorf(yylex, $2, "cannot apply index operator to unnamed constant")}
+| nameref '[' expr ']'
+  { $$ = &ConstIndexed{&ConstNamed{$1.String, $1.Pos}, $3, $1.Pos} }
+| '(' expr ')'
+  { $$ = $2 }
+
+comp_lit:
+  otype '{' '}'
+  { $$ = &ConstCompositeLit{$1, nil, $2} }
+| otype '{' kv_lit_list ocomma '}'
+  { $$ = &ConstCompositeLit{$1, $3, $2} }
+
+kv_lit_list:
+  kv_lit
+  { $$ = []KVLit{$1} }
+| kv_lit_list ',' kv_lit
+  { $$ = append($1, $3) }
+
+kv_lit:
+  expr
+  { $$ = KVLit{Value:$1} }
+| expr ':' expr
+  { $$ = KVLit{Key:$1, Value:$3} }
+
+// ERROR DEFINITIONS
+error_spec_list:
+  error_spec
+| error_spec_list ';' error_spec
+
+error_spec:
+  tIDENT inargs error_details
+  {
+    // Create *ErrorDef starting with a copy of error_details, filling in the
+    // name and params
+    ed := $3
+    ed.NamePos = NamePos{Name:$1.String, Pos:$1.Pos}
+    ed.Params = $2
+    eds := &lexVDLFile(yylex).ErrorDefs
+    *eds = append(*eds, &ed)
+  }
+
+error_details:
+  // Empty.
+  { $$ = ErrorDef{} }
+| '{' '}'
+  { $$ = ErrorDef{} }
+| '{' error_detail_list ocomma '}'
+  { $$ = $2 }
+
+error_detail_list:
+  error_detail
+  { $$ = $1 }
+| error_detail_list ',' error_detail
+  {
+    // Merge each ErrorDef in-order to build the final ErrorDef.
+    $$ = $1
+    switch {
+    case len($3.Actions) > 0:
+      $$.Actions = append($$.Actions, $3.Actions...)
+    case len($3.Formats) > 0:
+      $$.Formats = append($$.Formats, $3.Formats...)
+    }
+  }
+
+error_detail:
+  tIDENT
+  { $$ = ErrorDef{Actions: []StringPos{$1}} }
+| tSTRLIT ':' tSTRLIT
+  { $$ = ErrorDef{Formats: []LangFmt{{Lang: $1, Fmt: $3}}} }
+
+// MISC TOKENS
+
+// nameref describes a named reference to another type, interface or const.  We
+// allow the following forms:
+//   foo
+//   foo.bar            (and multi-dot variants)
+//   "pkg/path".foo
+//   "pkg/path".foo.bar (and multi-dot variants)
+nameref:
+  dotnameref
+  { $$ = $1 }
+| tSTRLIT '.' dotnameref
+  { $$ = StringPos{"\""+$1.String+"\"."+$3.String, $1.Pos} }
+
+// dotnameref describes just the dotted portion of nameref.
+dotnameref:
+  tIDENT
+  { $$ = $1 }
+| dotnameref '.' tIDENT
+  { $$ = StringPos{$1.String+"."+$3.String, $1.Pos} }
+
+otype:
+  // Empty.
+  { $$ = nil }
+| type
+  { $$ = $1 }
+
+osemi:
+  // Empty.
+| ';'
+
+ocomma:
+  // Empty.
+| ','
diff --git a/lib/vdl/parse/grammar.y.debug b/lib/vdl/parse/grammar.y.debug
new file mode 100644
index 0000000..a3fb1fc
--- /dev/null
+++ b/lib/vdl/parse/grammar.y.debug
@@ -0,0 +1,4347 @@
+***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
+* The main reason this file has been generated and submitted is to try to ensure
+* we never submit changes that cause shift/reduce or reduce/reduce conflicts.
+* The Go yacc tool doesn't support the %expect directive, and will happily
+* generate a parser even if such conflicts exist; it's up to the developer
+* running the tool to notice that an error message is reported.  The bottom of
+* this file contains stats, including the number of conflicts.  If you're
+* reviewing a change make sure it says 0 conflicts.
+*
+* If you're updating the grammar, just cut-and-paste this message from the old
+* file to the new one, so that this comment block persists.
+***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
+
+state 0
+	$accept: .start $end 
+
+	startFileImports  shift 2
+	startFile  shift 3
+	startConfigImports  shift 4
+	startConfig  shift 5
+	startExprs  shift 6
+	.  error
+
+	start  goto 1
+
+state 1
+	$accept:  start.$end 
+
+	$end  accept
+	.  error
+
+
+state 2
+	start:  startFileImports.package imports gen_imports_eof 
+	package: .    (10)
+
+	tPACKAGE  shift 8
+	.  reduce 10 (src line 161)
+
+	package  goto 7
+
+state 3
+	start:  startFile.package imports defs 
+	package: .    (10)
+
+	tPACKAGE  shift 8
+	.  reduce 10 (src line 161)
+
+	package  goto 9
+
+state 4
+	start:  startConfigImports.config imports gen_imports_eof 
+	config: .    (12)
+
+	tIDENT  shift 11
+	.  reduce 12 (src line 168)
+
+	config  goto 10
+
+state 5
+	start:  startConfig.config imports defs 
+	config: .    (12)
+
+	tIDENT  shift 11
+	.  reduce 12 (src line 168)
+
+	config  goto 12
+
+state 6
+	start:  startExprs.expr_comma_list ';' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 14
+	unary_expr  goto 15
+	operand  goto 16
+	expr_comma_list  goto 13
+	comp_lit  goto 28
+
+state 7
+	start:  startFileImports package.imports gen_imports_eof 
+	imports: .    (14)
+
+	.  reduce 14 (src line 185)
+
+	imports  goto 42
+
+state 8
+	package:  tPACKAGE.tIDENT ';' 
+
+	tIDENT  shift 43
+	.  error
+
+
+state 9
+	start:  startFile package.imports defs 
+	imports: .    (14)
+
+	.  reduce 14 (src line 185)
+
+	imports  goto 44
+
+state 10
+	start:  startConfigImports config.imports gen_imports_eof 
+	imports: .    (14)
+
+	.  reduce 14 (src line 185)
+
+	imports  goto 45
+
+state 11
+	config:  tIDENT.'=' expr ';' 
+
+	'='  shift 46
+	.  error
+
+
+state 12
+	start:  startConfig config.imports defs 
+	imports: .    (14)
+
+	.  reduce 14 (src line 185)
+
+	imports  goto 47
+
+state 13
+	start:  startExprs expr_comma_list.';' 
+	expr_comma_list:  expr_comma_list.',' expr 
+
+	';'  shift 48
+	','  shift 49
+	.  error
+
+
+state 14
+	expr_comma_list:  expr.    (83)
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 83 (src line 452)
+
+
+state 15
+	expr:  unary_expr.    (88)
+
+	.  reduce 88 (src line 470)
+
+
+state 16
+	unary_expr:  operand.    (107)
+
+	.  reduce 107 (src line 510)
+
+
+state 17
+	unary_expr:  '!'.unary_expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	unary_expr  goto 68
+	operand  goto 16
+	comp_lit  goto 28
+
+state 18
+	unary_expr:  '+'.unary_expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	unary_expr  goto 69
+	operand  goto 16
+	comp_lit  goto 28
+
+state 19
+	unary_expr:  '-'.unary_expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	unary_expr  goto 70
+	operand  goto 16
+	comp_lit  goto 28
+
+state 20
+	unary_expr:  '^'.unary_expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	unary_expr  goto 71
+	operand  goto 16
+	comp_lit  goto 28
+
+state 21
+	type:  type_no_typeobject.    (52)
+	unary_expr:  type_no_typeobject.'(' expr ')' 
+
+	'('  shift 72
+	.  reduce 52 (src line 287)
+
+
+state 22
+	type:  tTYPEOBJECT.    (53)
+	unary_expr:  tTYPEOBJECT.'(' type ')' 
+
+	'('  shift 73
+	.  reduce 53 (src line 290)
+
+
+state 23
+	operand:  tSTRLIT.    (114)
+	nameref:  tSTRLIT.'.' dotnameref 
+
+	'.'  shift 74
+	.  reduce 114 (src line 527)
+
+
+state 24
+	operand:  tINTLIT.    (115)
+
+	.  reduce 115 (src line 530)
+
+
+state 25
+	operand:  tRATLIT.    (116)
+
+	.  reduce 116 (src line 532)
+
+
+state 26
+	operand:  tIMAGLIT.    (117)
+
+	.  reduce 117 (src line 534)
+
+
+state 27
+	type_no_typeobject:  nameref.    (40)
+	operand:  nameref.    (118)
+	operand:  nameref.'[' expr ']' 
+
+	'('  reduce 40 (src line 260)
+	'['  shift 75
+	'{'  reduce 40 (src line 260)
+	.  reduce 118 (src line 536)
+
+
+state 28
+	operand:  comp_lit.    (119)
+	operand:  comp_lit.'.' tIDENT 
+	operand:  comp_lit.'[' expr ']' 
+
+	'.'  shift 76
+	'['  shift 77
+	.  reduce 119 (src line 538)
+
+
+state 29
+	operand:  '('.expr ')' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 78
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 30
+	type_no_typeobject:  tERROR.    (41)
+
+	.  reduce 41 (src line 263)
+
+
+state 31
+	type_no_typeobject:  '['.tINTLIT ']' type 
+	type_no_typeobject:  '['.']' type 
+
+	']'  shift 80
+	tINTLIT  shift 79
+	.  error
+
+
+state 32
+	type_no_typeobject:  tENUM.'{' label_spec_list osemi '}' 
+
+	'{'  shift 81
+	.  error
+
+
+state 33
+	type_no_typeobject:  tSET.'[' type ']' 
+
+	'['  shift 82
+	.  error
+
+
+state 34
+	type_no_typeobject:  tMAP.'[' type ']' type 
+
+	'['  shift 83
+	.  error
+
+
+state 35
+	type_no_typeobject:  tSTRUCT.'{' field_spec_list osemi '}' 
+	type_no_typeobject:  tSTRUCT.'{' '}' 
+
+	'{'  shift 84
+	.  error
+
+
+state 36
+	type_no_typeobject:  tUNION.'{' field_spec_list osemi '}' 
+	type_no_typeobject:  tUNION.'{' '}' 
+
+	'{'  shift 85
+	.  error
+
+
+state 37
+	type_no_typeobject:  '?'.type 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 86
+	type_no_typeobject  goto 87
+
+state 38
+	nameref:  dotnameref.    (140)
+	dotnameref:  dotnameref.'.' tIDENT 
+
+	'.'  shift 91
+	.  reduce 140 (src line 621)
+
+
+state 39
+	comp_lit:  otype.'{' '}' 
+	comp_lit:  otype.'{' kv_lit_list ocomma '}' 
+
+	'{'  shift 92
+	.  error
+
+
+state 40
+	dotnameref:  tIDENT.    (142)
+
+	.  reduce 142 (src line 628)
+
+
+state 41
+	otype:  type.    (145)
+
+	.  reduce 145 (src line 637)
+
+
+state 42
+	start:  startFileImports package imports.gen_imports_eof 
+	imports:  imports.import ';' 
+	gen_imports_eof: .    (6)
+
+	tCONST  shift 96
+	tERROR  shift 97
+	tIMPORT  shift 98
+	tTYPE  shift 95
+	.  reduce 6 (src line 150)
+
+	gen_imports_eof  goto 93
+	import  goto 94
+
+state 43
+	package:  tPACKAGE tIDENT.';' 
+
+	';'  shift 99
+	.  error
+
+
+state 44
+	start:  startFile package imports.defs 
+	imports:  imports.import ';' 
+	defs: .    (23)
+
+	tIMPORT  shift 98
+	.  reduce 23 (src line 211)
+
+	defs  goto 100
+	import  goto 94
+
+state 45
+	start:  startConfigImports config imports.gen_imports_eof 
+	imports:  imports.import ';' 
+	gen_imports_eof: .    (6)
+
+	tCONST  shift 96
+	tERROR  shift 97
+	tIMPORT  shift 98
+	tTYPE  shift 95
+	.  reduce 6 (src line 150)
+
+	gen_imports_eof  goto 101
+	import  goto 94
+
+state 46
+	config:  tIDENT '='.expr ';' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 102
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 47
+	start:  startConfig config imports.defs 
+	imports:  imports.import ';' 
+	defs: .    (23)
+
+	tIMPORT  shift 98
+	.  reduce 23 (src line 211)
+
+	defs  goto 103
+	import  goto 94
+
+state 48
+	start:  startExprs expr_comma_list ';'.    (5)
+
+	.  reduce 5 (src line 142)
+
+
+state 49
+	expr_comma_list:  expr_comma_list ','.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 104
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 50
+	expr:  expr tOROR.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 105
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 51
+	expr:  expr tANDAND.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 106
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 52
+	expr:  expr '<'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 107
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 53
+	expr:  expr '>'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 108
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 54
+	expr:  expr tLE.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 109
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 55
+	expr:  expr tGE.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 110
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 56
+	expr:  expr tNE.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 111
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 57
+	expr:  expr tEQEQ.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 112
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 58
+	expr:  expr '+'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 113
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 59
+	expr:  expr '-'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 114
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 60
+	expr:  expr '*'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 115
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 61
+	expr:  expr '/'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 116
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 62
+	expr:  expr '%'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 117
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 63
+	expr:  expr '|'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 118
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 64
+	expr:  expr '&'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 119
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 65
+	expr:  expr '^'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 120
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 66
+	expr:  expr tLSH.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 121
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 67
+	expr:  expr tRSH.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 122
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 68
+	unary_expr:  '!' unary_expr.    (108)
+
+	.  reduce 108 (src line 513)
+
+
+state 69
+	unary_expr:  '+' unary_expr.    (109)
+
+	.  reduce 109 (src line 515)
+
+
+state 70
+	unary_expr:  '-' unary_expr.    (110)
+
+	.  reduce 110 (src line 517)
+
+
+state 71
+	unary_expr:  '^' unary_expr.    (111)
+
+	.  reduce 111 (src line 519)
+
+
+state 72
+	unary_expr:  type_no_typeobject '('.expr ')' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 123
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 73
+	unary_expr:  tTYPEOBJECT '('.type ')' 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 124
+	type_no_typeobject  goto 87
+
+state 74
+	nameref:  tSTRLIT '.'.dotnameref 
+
+	tIDENT  shift 40
+	.  error
+
+	dotnameref  goto 125
+
+state 75
+	operand:  nameref '['.expr ']' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 126
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 76
+	operand:  comp_lit '.'.tIDENT 
+
+	tIDENT  shift 127
+	.  error
+
+
+state 77
+	operand:  comp_lit '['.expr ']' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 128
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 78
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+	operand:  '(' expr.')' 
+
+	')'  shift 129
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  error
+
+
+state 79
+	type_no_typeobject:  '[' tINTLIT.']' type 
+
+	']'  shift 130
+	.  error
+
+
+state 80
+	type_no_typeobject:  '[' ']'.type 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 131
+	type_no_typeobject  goto 87
+
+state 81
+	type_no_typeobject:  tENUM '{'.label_spec_list osemi '}' 
+
+	tIDENT  shift 134
+	.  error
+
+	label_spec  goto 133
+	label_spec_list  goto 132
+
+state 82
+	type_no_typeobject:  tSET '['.type ']' 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 135
+	type_no_typeobject  goto 87
+
+state 83
+	type_no_typeobject:  tMAP '['.type ']' type 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 136
+	type_no_typeobject  goto 87
+
+state 84
+	type_no_typeobject:  tSTRUCT '{'.field_spec_list osemi '}' 
+	type_no_typeobject:  tSTRUCT '{'.'}' 
+
+	'['  shift 31
+	'}'  shift 138
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 141
+	type_no_typeobject  goto 87
+	type_comma_list  goto 140
+	field_spec_list  goto 137
+	field_spec  goto 139
+
+state 85
+	type_no_typeobject:  tUNION '{'.field_spec_list osemi '}' 
+	type_no_typeobject:  tUNION '{'.'}' 
+
+	'['  shift 31
+	'}'  shift 143
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 141
+	type_no_typeobject  goto 87
+	type_comma_list  goto 140
+	field_spec_list  goto 142
+	field_spec  goto 139
+
+state 86
+	type_no_typeobject:  '?' type.    (51)
+
+	.  reduce 51 (src line 283)
+
+
+state 87
+	type:  type_no_typeobject.    (52)
+
+	.  reduce 52 (src line 287)
+
+
+state 88
+	type:  tTYPEOBJECT.    (53)
+
+	.  reduce 53 (src line 290)
+
+
+state 89
+	type_no_typeobject:  nameref.    (40)
+
+	.  reduce 40 (src line 260)
+
+
+state 90
+	nameref:  tSTRLIT.'.' dotnameref 
+
+	'.'  shift 74
+	.  error
+
+
+state 91
+	dotnameref:  dotnameref '.'.tIDENT 
+
+	tIDENT  shift 144
+	.  error
+
+
+state 92
+	comp_lit:  otype '{'.'}' 
+	comp_lit:  otype '{'.kv_lit_list ocomma '}' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'}'  shift 145
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 148
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+	kv_lit  goto 147
+	kv_lit_list  goto 146
+
+state 93
+	start:  startFileImports package imports gen_imports_eof.    (1)
+
+	.  reduce 1 (src line 137)
+
+
+state 94
+	imports:  imports import.';' 
+
+	';'  shift 149
+	.  error
+
+
+state 95
+	gen_imports_eof:  tTYPE.    (7)
+
+	.  reduce 7 (src line 153)
+
+
+state 96
+	gen_imports_eof:  tCONST.    (8)
+
+	.  reduce 8 (src line 155)
+
+
+state 97
+	gen_imports_eof:  tERROR.    (9)
+
+	.  reduce 9 (src line 157)
+
+
+state 98
+	import:  tIMPORT.'(' ')' 
+	import:  tIMPORT.'(' import_spec_list osemi ')' 
+	import:  tIMPORT.import_spec 
+
+	'('  shift 150
+	tIDENT  shift 153
+	tSTRLIT  shift 152
+	.  error
+
+	import_spec  goto 151
+
+state 99
+	package:  tPACKAGE tIDENT ';'.    (11)
+
+	.  reduce 11 (src line 164)
+
+
+state 100
+	start:  startFile package imports defs.    (2)
+	defs:  defs.type_def ';' 
+	defs:  defs.const_def ';' 
+	defs:  defs.error_def ';' 
+
+	tCONST  shift 158
+	tERROR  shift 159
+	tTYPE  shift 157
+	.  reduce 2 (src line 139)
+
+	type_def  goto 154
+	const_def  goto 155
+	error_def  goto 156
+
+state 101
+	start:  startConfigImports config imports gen_imports_eof.    (3)
+
+	.  reduce 3 (src line 140)
+
+
+state 102
+	config:  tIDENT '=' expr.';' 
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	';'  shift 160
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  error
+
+
+state 103
+	start:  startConfig config imports defs.    (4)
+	defs:  defs.type_def ';' 
+	defs:  defs.const_def ';' 
+	defs:  defs.error_def ';' 
+
+	tCONST  shift 158
+	tERROR  shift 159
+	tTYPE  shift 157
+	.  reduce 4 (src line 141)
+
+	type_def  goto 154
+	const_def  goto 155
+	error_def  goto 156
+
+state 104
+	expr_comma_list:  expr_comma_list ',' expr.    (84)
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 84 (src line 455)
+
+
+state 105
+	expr:  expr.tOROR expr 
+	expr:  expr tOROR expr.    (89)
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 89 (src line 473)
+
+
+state 106
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr tANDAND expr.    (90)
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 90 (src line 475)
+
+
+state 107
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr '<' expr.    (91)
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 91 (src line 477)
+
+
+state 108
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr '>' expr.    (92)
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 92 (src line 479)
+
+
+state 109
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr tLE expr.    (93)
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 93 (src line 481)
+
+
+state 110
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr tGE expr.    (94)
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 94 (src line 483)
+
+
+state 111
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr tNE expr.    (95)
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 95 (src line 485)
+
+
+state 112
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr tEQEQ expr.    (96)
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 96 (src line 487)
+
+
+state 113
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr '+' expr.    (97)
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'&'  shift 64
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 97 (src line 489)
+
+
+state 114
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr '-' expr.    (98)
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'&'  shift 64
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 98 (src line 491)
+
+
+state 115
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr '*' expr.    (99)
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	.  reduce 99 (src line 493)
+
+
+state 116
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr '/' expr.    (100)
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	.  reduce 100 (src line 495)
+
+
+state 117
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr '%' expr.    (101)
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	.  reduce 101 (src line 497)
+
+
+state 118
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr '|' expr.    (102)
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'&'  shift 64
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 102 (src line 499)
+
+
+state 119
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr '&' expr.    (103)
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	.  reduce 103 (src line 501)
+
+
+state 120
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr '^' expr.    (104)
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'&'  shift 64
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 104 (src line 503)
+
+
+state 121
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr tLSH expr.    (105)
+	expr:  expr.tRSH expr 
+
+	.  reduce 105 (src line 505)
+
+
+state 122
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+	expr:  expr tRSH expr.    (106)
+
+	.  reduce 106 (src line 507)
+
+
+state 123
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+	unary_expr:  type_no_typeobject '(' expr.')' 
+
+	')'  shift 161
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  error
+
+
+state 124
+	unary_expr:  tTYPEOBJECT '(' type.')' 
+
+	')'  shift 162
+	.  error
+
+
+state 125
+	nameref:  tSTRLIT '.' dotnameref.    (141)
+	dotnameref:  dotnameref.'.' tIDENT 
+
+	'.'  shift 91
+	.  reduce 141 (src line 624)
+
+
+state 126
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+	operand:  nameref '[' expr.']' 
+
+	']'  shift 163
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  error
+
+
+state 127
+	operand:  comp_lit '.' tIDENT.    (120)
+
+	.  reduce 120 (src line 540)
+
+
+state 128
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+	operand:  comp_lit '[' expr.']' 
+
+	']'  shift 164
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  error
+
+
+state 129
+	operand:  '(' expr ')'.    (123)
+
+	.  reduce 123 (src line 546)
+
+
+state 130
+	type_no_typeobject:  '[' tINTLIT ']'.type 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 165
+	type_no_typeobject  goto 87
+
+state 131
+	type_no_typeobject:  '[' ']' type.    (43)
+
+	.  reduce 43 (src line 267)
+
+
+state 132
+	type_no_typeobject:  tENUM '{' label_spec_list.osemi '}' 
+	label_spec_list:  label_spec_list.';' label_spec 
+	osemi: .    (146)
+
+	';'  shift 167
+	.  reduce 146 (src line 640)
+
+	osemi  goto 166
+
+state 133
+	label_spec_list:  label_spec.    (54)
+
+	.  reduce 54 (src line 293)
+
+
+state 134
+	label_spec:  tIDENT.    (56)
+
+	.  reduce 56 (src line 299)
+
+
+state 135
+	type_no_typeobject:  tSET '[' type.']' 
+
+	']'  shift 168
+	.  error
+
+
+state 136
+	type_no_typeobject:  tMAP '[' type.']' type 
+
+	']'  shift 169
+	.  error
+
+
+state 137
+	type_no_typeobject:  tSTRUCT '{' field_spec_list.osemi '}' 
+	field_spec_list:  field_spec_list.';' field_spec 
+	osemi: .    (146)
+
+	';'  shift 171
+	.  reduce 146 (src line 640)
+
+	osemi  goto 170
+
+state 138
+	type_no_typeobject:  tSTRUCT '{' '}'.    (48)
+
+	.  reduce 48 (src line 277)
+
+
+state 139
+	field_spec_list:  field_spec.    (57)
+
+	.  reduce 57 (src line 303)
+
+
+state 140
+	field_spec:  type_comma_list.type 
+	type_comma_list:  type_comma_list.',' type 
+
+	','  shift 173
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 172
+	type_no_typeobject  goto 87
+
+state 141
+	type_comma_list:  type.    (60)
+
+	.  reduce 60 (src line 355)
+
+
+state 142
+	type_no_typeobject:  tUNION '{' field_spec_list.osemi '}' 
+	field_spec_list:  field_spec_list.';' field_spec 
+	osemi: .    (146)
+
+	';'  shift 171
+	.  reduce 146 (src line 640)
+
+	osemi  goto 174
+
+state 143
+	type_no_typeobject:  tUNION '{' '}'.    (50)
+
+	.  reduce 50 (src line 281)
+
+
+state 144
+	dotnameref:  dotnameref '.' tIDENT.    (143)
+
+	.  reduce 143 (src line 631)
+
+
+state 145
+	comp_lit:  otype '{' '}'.    (124)
+
+	.  reduce 124 (src line 549)
+
+
+state 146
+	comp_lit:  otype '{' kv_lit_list.ocomma '}' 
+	kv_lit_list:  kv_lit_list.',' kv_lit 
+	ocomma: .    (148)
+
+	','  shift 176
+	.  reduce 148 (src line 644)
+
+	ocomma  goto 175
+
+state 147
+	kv_lit_list:  kv_lit.    (126)
+
+	.  reduce 126 (src line 555)
+
+
+state 148
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+	kv_lit:  expr.    (128)
+	kv_lit:  expr.':' expr 
+
+	':'  shift 177
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 128 (src line 561)
+
+
+state 149
+	imports:  imports import ';'.    (15)
+
+	.  reduce 15 (src line 187)
+
+
+state 150
+	import:  tIMPORT '('.')' 
+	import:  tIMPORT '('.import_spec_list osemi ')' 
+
+	')'  shift 178
+	tIDENT  shift 153
+	tSTRLIT  shift 152
+	.  error
+
+	import_spec_list  goto 179
+	import_spec  goto 180
+
+state 151
+	import:  tIMPORT import_spec.    (18)
+
+	.  reduce 18 (src line 192)
+
+
+state 152
+	import_spec:  tSTRLIT.    (21)
+
+	.  reduce 21 (src line 198)
+
+
+state 153
+	import_spec:  tIDENT.tSTRLIT 
+
+	tSTRLIT  shift 181
+	.  error
+
+
+state 154
+	defs:  defs type_def.';' 
+
+	';'  shift 182
+	.  error
+
+
+state 155
+	defs:  defs const_def.';' 
+
+	';'  shift 183
+	.  error
+
+
+state 156
+	defs:  defs error_def.';' 
+
+	';'  shift 184
+	.  error
+
+
+state 157
+	type_def:  tTYPE.'(' ')' 
+	type_def:  tTYPE.'(' type_spec_list osemi ')' 
+	type_def:  tTYPE.type_spec 
+	type_def:  tTYPE.interface_spec 
+
+	'('  shift 185
+	tIDENT  shift 188
+	.  error
+
+	type_spec  goto 186
+	interface_spec  goto 187
+
+state 158
+	const_def:  tCONST.'(' ')' 
+	const_def:  tCONST.'(' const_spec_list osemi ')' 
+	const_def:  tCONST.const_spec 
+
+	'('  shift 189
+	tIDENT  shift 191
+	.  error
+
+	const_spec  goto 190
+
+state 159
+	error_def:  tERROR.'(' ')' 
+	error_def:  tERROR.'(' error_spec_list osemi ')' 
+	error_def:  tERROR.error_spec 
+
+	'('  shift 192
+	tIDENT  shift 194
+	.  error
+
+	error_spec  goto 193
+
+state 160
+	config:  tIDENT '=' expr ';'.    (13)
+
+	.  reduce 13 (src line 171)
+
+
+state 161
+	unary_expr:  type_no_typeobject '(' expr ')'.    (112)
+
+	.  reduce 112 (src line 521)
+
+
+state 162
+	unary_expr:  tTYPEOBJECT '(' type ')'.    (113)
+
+	.  reduce 113 (src line 523)
+
+
+state 163
+	operand:  nameref '[' expr ']'.    (122)
+
+	.  reduce 122 (src line 544)
+
+
+state 164
+	operand:  comp_lit '[' expr ']'.    (121)
+
+	.  reduce 121 (src line 542)
+
+
+state 165
+	type_no_typeobject:  '[' tINTLIT ']' type.    (42)
+
+	.  reduce 42 (src line 265)
+
+
+state 166
+	type_no_typeobject:  tENUM '{' label_spec_list osemi.'}' 
+
+	'}'  shift 195
+	.  error
+
+
+state 167
+	label_spec_list:  label_spec_list ';'.label_spec 
+	osemi:  ';'.    (147)
+
+	tIDENT  shift 134
+	.  reduce 147 (src line 642)
+
+	label_spec  goto 196
+
+state 168
+	type_no_typeobject:  tSET '[' type ']'.    (45)
+
+	.  reduce 45 (src line 271)
+
+
+state 169
+	type_no_typeobject:  tMAP '[' type ']'.type 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 197
+	type_no_typeobject  goto 87
+
+state 170
+	type_no_typeobject:  tSTRUCT '{' field_spec_list osemi.'}' 
+
+	'}'  shift 198
+	.  error
+
+
+state 171
+	field_spec_list:  field_spec_list ';'.field_spec 
+	osemi:  ';'.    (147)
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  reduce 147 (src line 642)
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 141
+	type_no_typeobject  goto 87
+	type_comma_list  goto 140
+	field_spec  goto 199
+
+state 172
+	field_spec:  type_comma_list type.    (59)
+
+	.  reduce 59 (src line 343)
+
+
+state 173
+	type_comma_list:  type_comma_list ','.type 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 200
+	type_no_typeobject  goto 87
+
+state 174
+	type_no_typeobject:  tUNION '{' field_spec_list osemi.'}' 
+
+	'}'  shift 201
+	.  error
+
+
+state 175
+	comp_lit:  otype '{' kv_lit_list ocomma.'}' 
+
+	'}'  shift 202
+	.  error
+
+
+state 176
+	kv_lit_list:  kv_lit_list ','.kv_lit 
+	ocomma:  ','.    (149)
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'}'  reduce 149 (src line 646)
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 148
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+	kv_lit  goto 203
+
+state 177
+	kv_lit:  expr ':'.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 204
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 178
+	import:  tIMPORT '(' ')'.    (16)
+
+	.  reduce 16 (src line 189)
+
+
+state 179
+	import:  tIMPORT '(' import_spec_list.osemi ')' 
+	import_spec_list:  import_spec_list.';' import_spec 
+	osemi: .    (146)
+
+	';'  shift 206
+	.  reduce 146 (src line 640)
+
+	osemi  goto 205
+
+state 180
+	import_spec_list:  import_spec.    (19)
+
+	.  reduce 19 (src line 194)
+
+
+state 181
+	import_spec:  tIDENT tSTRLIT.    (22)
+
+	.  reduce 22 (src line 204)
+
+
+state 182
+	defs:  defs type_def ';'.    (24)
+
+	.  reduce 24 (src line 213)
+
+
+state 183
+	defs:  defs const_def ';'.    (25)
+
+	.  reduce 25 (src line 214)
+
+
+state 184
+	defs:  defs error_def ';'.    (26)
+
+	.  reduce 26 (src line 215)
+
+
+state 185
+	type_def:  tTYPE '('.')' 
+	type_def:  tTYPE '('.type_spec_list osemi ')' 
+
+	')'  shift 207
+	tIDENT  shift 210
+	.  error
+
+	type_spec_list  goto 208
+	type_spec  goto 209
+
+state 186
+	type_def:  tTYPE type_spec.    (29)
+
+	.  reduce 29 (src line 220)
+
+
+state 187
+	type_def:  tTYPE interface_spec.    (30)
+
+	.  reduce 30 (src line 221)
+
+
+state 188
+	type_spec:  tIDENT.type 
+	interface_spec:  tIDENT.tINTERFACE '{' '}' 
+	interface_spec:  tIDENT.tINTERFACE '{' iface_item_list osemi '}' 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tINTERFACE  shift 212
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 211
+	type_no_typeobject  goto 87
+
+state 189
+	const_def:  tCONST '('.')' 
+	const_def:  tCONST '('.const_spec_list osemi ')' 
+
+	')'  shift 213
+	tIDENT  shift 191
+	.  error
+
+	const_spec_list  goto 214
+	const_spec  goto 215
+
+state 190
+	const_def:  tCONST const_spec.    (33)
+
+	.  reduce 33 (src line 226)
+
+
+state 191
+	const_spec:  tIDENT.'=' expr 
+
+	'='  shift 216
+	.  error
+
+
+state 192
+	error_def:  tERROR '('.')' 
+	error_def:  tERROR '('.error_spec_list osemi ')' 
+
+	')'  shift 217
+	tIDENT  shift 194
+	.  error
+
+	error_spec_list  goto 218
+	error_spec  goto 219
+
+state 193
+	error_def:  tERROR error_spec.    (36)
+
+	.  reduce 36 (src line 231)
+
+
+state 194
+	error_spec:  tIDENT.inargs error_details 
+
+	'('  shift 221
+	.  error
+
+	inargs  goto 220
+
+state 195
+	type_no_typeobject:  tENUM '{' label_spec_list osemi '}'.    (44)
+
+	.  reduce 44 (src line 269)
+
+
+state 196
+	label_spec_list:  label_spec_list ';' label_spec.    (55)
+
+	.  reduce 55 (src line 296)
+
+
+state 197
+	type_no_typeobject:  tMAP '[' type ']' type.    (46)
+
+	.  reduce 46 (src line 273)
+
+
+state 198
+	type_no_typeobject:  tSTRUCT '{' field_spec_list osemi '}'.    (47)
+
+	.  reduce 47 (src line 275)
+
+
+state 199
+	field_spec_list:  field_spec_list ';' field_spec.    (58)
+
+	.  reduce 58 (src line 306)
+
+
+state 200
+	type_comma_list:  type_comma_list ',' type.    (61)
+
+	.  reduce 61 (src line 358)
+
+
+state 201
+	type_no_typeobject:  tUNION '{' field_spec_list osemi '}'.    (49)
+
+	.  reduce 49 (src line 279)
+
+
+state 202
+	comp_lit:  otype '{' kv_lit_list ocomma '}'.    (125)
+
+	.  reduce 125 (src line 552)
+
+
+state 203
+	kv_lit_list:  kv_lit_list ',' kv_lit.    (127)
+
+	.  reduce 127 (src line 558)
+
+
+state 204
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+	kv_lit:  expr ':' expr.    (129)
+
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 129 (src line 564)
+
+
+state 205
+	import:  tIMPORT '(' import_spec_list osemi.')' 
+
+	')'  shift 222
+	.  error
+
+
+state 206
+	import_spec_list:  import_spec_list ';'.import_spec 
+	osemi:  ';'.    (147)
+
+	tIDENT  shift 153
+	tSTRLIT  shift 152
+	.  reduce 147 (src line 642)
+
+	import_spec  goto 223
+
+state 207
+	type_def:  tTYPE '(' ')'.    (27)
+
+	.  reduce 27 (src line 217)
+
+
+state 208
+	type_def:  tTYPE '(' type_spec_list.osemi ')' 
+	type_spec_list:  type_spec_list.';' type_spec 
+	osemi: .    (146)
+
+	';'  shift 225
+	.  reduce 146 (src line 640)
+
+	osemi  goto 224
+
+state 209
+	type_spec_list:  type_spec.    (37)
+
+	.  reduce 37 (src line 234)
+
+
+state 210
+	type_spec:  tIDENT.type 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 211
+	type_no_typeobject  goto 87
+
+state 211
+	type_spec:  tIDENT type.    (39)
+
+	.  reduce 39 (src line 238)
+
+
+state 212
+	interface_spec:  tIDENT tINTERFACE.'{' '}' 
+	interface_spec:  tIDENT tINTERFACE.'{' iface_item_list osemi '}' 
+
+	'{'  shift 226
+	.  error
+
+
+state 213
+	const_def:  tCONST '(' ')'.    (31)
+
+	.  reduce 31 (src line 223)
+
+
+state 214
+	const_def:  tCONST '(' const_spec_list.osemi ')' 
+	const_spec_list:  const_spec_list.';' const_spec 
+	osemi: .    (146)
+
+	';'  shift 228
+	.  reduce 146 (src line 640)
+
+	osemi  goto 227
+
+state 215
+	const_spec_list:  const_spec.    (85)
+
+	.  reduce 85 (src line 459)
+
+
+state 216
+	const_spec:  tIDENT '='.expr 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 229
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 217
+	error_def:  tERROR '(' ')'.    (34)
+
+	.  reduce 34 (src line 228)
+
+
+state 218
+	error_def:  tERROR '(' error_spec_list.osemi ')' 
+	error_spec_list:  error_spec_list.';' error_spec 
+	osemi: .    (146)
+
+	';'  shift 231
+	.  reduce 146 (src line 640)
+
+	osemi  goto 230
+
+state 219
+	error_spec_list:  error_spec.    (130)
+
+	.  reduce 130 (src line 568)
+
+
+state 220
+	error_spec:  tIDENT inargs.error_details 
+	error_details: .    (133)
+
+	'{'  shift 233
+	.  reduce 133 (src line 584)
+
+	error_details  goto 232
+
+state 221
+	inargs:  '('.')' 
+	inargs:  '('.named_arg_list ocomma ')' 
+	inargs:  '('.type_comma_list ocomma ')' 
+
+	')'  shift 234
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 141
+	type_no_typeobject  goto 87
+	type_comma_list  goto 236
+	field_spec  goto 237
+	named_arg_list  goto 235
+
+state 222
+	import:  tIMPORT '(' import_spec_list osemi ')'.    (17)
+
+	.  reduce 17 (src line 191)
+
+
+state 223
+	import_spec_list:  import_spec_list ';' import_spec.    (20)
+
+	.  reduce 20 (src line 196)
+
+
+state 224
+	type_def:  tTYPE '(' type_spec_list osemi.')' 
+
+	')'  shift 238
+	.  error
+
+
+state 225
+	type_spec_list:  type_spec_list ';'.type_spec 
+	osemi:  ';'.    (147)
+
+	tIDENT  shift 210
+	.  reduce 147 (src line 642)
+
+	type_spec  goto 239
+
+state 226
+	interface_spec:  tIDENT tINTERFACE '{'.'}' 
+	interface_spec:  tIDENT tINTERFACE '{'.iface_item_list osemi '}' 
+
+	'}'  shift 240
+	tIDENT  shift 243
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 244
+	dotnameref  goto 38
+	iface_item_list  goto 241
+	iface_item  goto 242
+
+state 227
+	const_def:  tCONST '(' const_spec_list osemi.')' 
+
+	')'  shift 245
+	.  error
+
+
+state 228
+	const_spec_list:  const_spec_list ';'.const_spec 
+	osemi:  ';'.    (147)
+
+	tIDENT  shift 191
+	.  reduce 147 (src line 642)
+
+	const_spec  goto 246
+
+state 229
+	const_spec:  tIDENT '=' expr.    (87)
+	expr:  expr.tOROR expr 
+	expr:  expr.tANDAND expr 
+	expr:  expr.'<' expr 
+	expr:  expr.'>' expr 
+	expr:  expr.tLE expr 
+	expr:  expr.tGE expr 
+	expr:  expr.tNE expr 
+	expr:  expr.tEQEQ expr 
+	expr:  expr.'+' expr 
+	expr:  expr.'-' expr 
+	expr:  expr.'*' expr 
+	expr:  expr.'/' expr 
+	expr:  expr.'%' expr 
+	expr:  expr.'|' expr 
+	expr:  expr.'&' expr 
+	expr:  expr.'^' expr 
+	expr:  expr.tLSH expr 
+	expr:  expr.tRSH expr 
+
+	'<'  shift 52
+	'>'  shift 53
+	'+'  shift 58
+	'-'  shift 59
+	'*'  shift 60
+	'/'  shift 61
+	'%'  shift 62
+	'|'  shift 63
+	'&'  shift 64
+	'^'  shift 65
+	tOROR  shift 50
+	tANDAND  shift 51
+	tLE  shift 54
+	tGE  shift 55
+	tNE  shift 56
+	tEQEQ  shift 57
+	tLSH  shift 66
+	tRSH  shift 67
+	.  reduce 87 (src line 463)
+
+
+state 230
+	error_def:  tERROR '(' error_spec_list osemi.')' 
+
+	')'  shift 247
+	.  error
+
+
+state 231
+	error_spec_list:  error_spec_list ';'.error_spec 
+	osemi:  ';'.    (147)
+
+	tIDENT  shift 194
+	.  reduce 147 (src line 642)
+
+	error_spec  goto 248
+
+state 232
+	error_spec:  tIDENT inargs error_details.    (132)
+
+	.  reduce 132 (src line 572)
+
+
+state 233
+	error_details:  '{'.'}' 
+	error_details:  '{'.error_detail_list ocomma '}' 
+
+	'}'  shift 249
+	tIDENT  shift 252
+	tSTRLIT  shift 253
+	.  error
+
+	error_detail_list  goto 250
+	error_detail  goto 251
+
+state 234
+	inargs:  '(' ')'.    (68)
+
+	.  reduce 68 (src line 391)
+
+
+state 235
+	inargs:  '(' named_arg_list.ocomma ')' 
+	named_arg_list:  named_arg_list.',' field_spec 
+	ocomma: .    (148)
+
+	','  shift 255
+	.  reduce 148 (src line 644)
+
+	ocomma  goto 254
+
+state 236
+	field_spec:  type_comma_list.type 
+	type_comma_list:  type_comma_list.',' type 
+	inargs:  '(' type_comma_list.ocomma ')' 
+	ocomma: .    (148)
+
+	','  shift 256
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  reduce 148 (src line 644)
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 172
+	type_no_typeobject  goto 87
+	ocomma  goto 257
+
+state 237
+	named_arg_list:  field_spec.    (71)
+
+	.  reduce 71 (src line 407)
+
+
+state 238
+	type_def:  tTYPE '(' type_spec_list osemi ')'.    (28)
+
+	.  reduce 28 (src line 219)
+
+
+state 239
+	type_spec_list:  type_spec_list ';' type_spec.    (38)
+
+	.  reduce 38 (src line 236)
+
+
+state 240
+	interface_spec:  tIDENT tINTERFACE '{' '}'.    (62)
+
+	.  reduce 62 (src line 362)
+
+
+state 241
+	interface_spec:  tIDENT tINTERFACE '{' iface_item_list.osemi '}' 
+	iface_item_list:  iface_item_list.';' iface_item 
+	osemi: .    (146)
+
+	';'  shift 259
+	.  reduce 146 (src line 640)
+
+	osemi  goto 258
+
+state 242
+	iface_item_list:  iface_item.    (64)
+
+	.  reduce 64 (src line 375)
+
+
+state 243
+	iface_item:  tIDENT.inargs streamargs outargs tags 
+	dotnameref:  tIDENT.    (142)
+
+	'('  shift 221
+	.  reduce 142 (src line 628)
+
+	inargs  goto 260
+
+state 244
+	iface_item:  nameref.    (67)
+
+	.  reduce 67 (src line 388)
+
+
+state 245
+	const_def:  tCONST '(' const_spec_list osemi ')'.    (32)
+
+	.  reduce 32 (src line 225)
+
+
+state 246
+	const_spec_list:  const_spec_list ';' const_spec.    (86)
+
+	.  reduce 86 (src line 461)
+
+
+state 247
+	error_def:  tERROR '(' error_spec_list osemi ')'.    (35)
+
+	.  reduce 35 (src line 230)
+
+
+state 248
+	error_spec_list:  error_spec_list ';' error_spec.    (131)
+
+	.  reduce 131 (src line 570)
+
+
+state 249
+	error_details:  '{' '}'.    (134)
+
+	.  reduce 134 (src line 587)
+
+
+state 250
+	error_details:  '{' error_detail_list.ocomma '}' 
+	error_detail_list:  error_detail_list.',' error_detail 
+	ocomma: .    (148)
+
+	','  shift 262
+	.  reduce 148 (src line 644)
+
+	ocomma  goto 261
+
+state 251
+	error_detail_list:  error_detail.    (136)
+
+	.  reduce 136 (src line 592)
+
+
+state 252
+	error_detail:  tIDENT.    (138)
+
+	.  reduce 138 (src line 607)
+
+
+state 253
+	error_detail:  tSTRLIT.':' tSTRLIT 
+
+	':'  shift 263
+	.  error
+
+
+state 254
+	inargs:  '(' named_arg_list ocomma.')' 
+
+	')'  shift 264
+	.  error
+
+
+state 255
+	named_arg_list:  named_arg_list ','.field_spec 
+	ocomma:  ','.    (149)
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  reduce 149 (src line 646)
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 141
+	type_no_typeobject  goto 87
+	type_comma_list  goto 140
+	field_spec  goto 265
+
+state 256
+	type_comma_list:  type_comma_list ','.type 
+	ocomma:  ','.    (149)
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  reduce 149 (src line 646)
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 200
+	type_no_typeobject  goto 87
+
+state 257
+	inargs:  '(' type_comma_list ocomma.')' 
+
+	')'  shift 266
+	.  error
+
+
+state 258
+	interface_spec:  tIDENT tINTERFACE '{' iface_item_list osemi.'}' 
+
+	'}'  shift 267
+	.  error
+
+
+state 259
+	iface_item_list:  iface_item_list ';'.iface_item 
+	osemi:  ';'.    (147)
+
+	tIDENT  shift 243
+	tSTRLIT  shift 90
+	.  reduce 147 (src line 642)
+
+	nameref  goto 244
+	dotnameref  goto 38
+	iface_item  goto 268
+
+state 260
+	iface_item:  tIDENT inargs.streamargs outargs tags 
+	streamargs: .    (76)
+
+	tSTREAM  shift 270
+	.  reduce 76 (src line 434)
+
+	streamargs  goto 269
+
+state 261
+	error_details:  '{' error_detail_list ocomma.'}' 
+
+	'}'  shift 271
+	.  error
+
+
+state 262
+	error_detail_list:  error_detail_list ','.error_detail 
+	ocomma:  ','.    (149)
+
+	tIDENT  shift 252
+	tSTRLIT  shift 253
+	.  reduce 149 (src line 646)
+
+	error_detail  goto 272
+
+state 263
+	error_detail:  tSTRLIT ':'.tSTRLIT 
+
+	tSTRLIT  shift 273
+	.  error
+
+
+state 264
+	inargs:  '(' named_arg_list ocomma ')'.    (69)
+
+	.  reduce 69 (src line 394)
+
+
+state 265
+	named_arg_list:  named_arg_list ',' field_spec.    (72)
+
+	.  reduce 72 (src line 410)
+
+
+state 266
+	inargs:  '(' type_comma_list ocomma ')'.    (70)
+
+	.  reduce 70 (src line 396)
+
+
+state 267
+	interface_spec:  tIDENT tINTERFACE '{' iface_item_list osemi '}'.    (63)
+
+	.  reduce 63 (src line 368)
+
+
+state 268
+	iface_item_list:  iface_item_list ';' iface_item.    (65)
+
+	.  reduce 65 (src line 378)
+
+
+state 269
+	iface_item:  tIDENT inargs streamargs.outargs tags 
+
+	'('  shift 276
+	tERROR  shift 275
+	.  error
+
+	outargs  goto 274
+
+state 270
+	streamargs:  tSTREAM.'<' '>' 
+	streamargs:  tSTREAM.'<' type '>' 
+	streamargs:  tSTREAM.'<' type ',' type '>' 
+
+	'<'  shift 277
+	.  error
+
+
+state 271
+	error_details:  '{' error_detail_list ocomma '}'.    (135)
+
+	.  reduce 135 (src line 589)
+
+
+state 272
+	error_detail_list:  error_detail_list ',' error_detail.    (137)
+
+	.  reduce 137 (src line 595)
+
+
+state 273
+	error_detail:  tSTRLIT ':' tSTRLIT.    (139)
+
+	.  reduce 139 (src line 610)
+
+
+state 274
+	iface_item:  tIDENT inargs streamargs outargs.tags 
+	tags: .    (80)
+
+	'{'  shift 279
+	.  reduce 80 (src line 444)
+
+	tags  goto 278
+
+state 275
+	outargs:  tERROR.    (73)
+
+	.  reduce 73 (src line 420)
+
+
+state 276
+	outargs:  '('.named_arg_list ocomma '|' tERROR ')' 
+	outargs:  '('.type_comma_list ocomma '|' tERROR ')' 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 141
+	type_no_typeobject  goto 87
+	type_comma_list  goto 281
+	field_spec  goto 237
+	named_arg_list  goto 280
+
+state 277
+	streamargs:  tSTREAM '<'.'>' 
+	streamargs:  tSTREAM '<'.type '>' 
+	streamargs:  tSTREAM '<'.type ',' type '>' 
+
+	'['  shift 31
+	'>'  shift 282
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 283
+	type_no_typeobject  goto 87
+
+state 278
+	iface_item:  tIDENT inargs streamargs outargs tags.    (66)
+
+	.  reduce 66 (src line 385)
+
+
+state 279
+	tags:  '{'.'}' 
+	tags:  '{'.expr_comma_list ocomma '}' 
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'}'  shift 284
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 14
+	unary_expr  goto 15
+	operand  goto 16
+	expr_comma_list  goto 285
+	comp_lit  goto 28
+
+state 280
+	named_arg_list:  named_arg_list.',' field_spec 
+	outargs:  '(' named_arg_list.ocomma '|' tERROR ')' 
+	ocomma: .    (148)
+
+	','  shift 255
+	.  reduce 148 (src line 644)
+
+	ocomma  goto 286
+
+state 281
+	field_spec:  type_comma_list.type 
+	type_comma_list:  type_comma_list.',' type 
+	outargs:  '(' type_comma_list.ocomma '|' tERROR ')' 
+	ocomma: .    (148)
+
+	','  shift 256
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  reduce 148 (src line 644)
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 172
+	type_no_typeobject  goto 87
+	ocomma  goto 287
+
+state 282
+	streamargs:  tSTREAM '<' '>'.    (77)
+
+	.  reduce 77 (src line 437)
+
+
+state 283
+	streamargs:  tSTREAM '<' type.'>' 
+	streamargs:  tSTREAM '<' type.',' type '>' 
+
+	','  shift 289
+	'>'  shift 288
+	.  error
+
+
+state 284
+	tags:  '{' '}'.    (81)
+
+	.  reduce 81 (src line 447)
+
+
+state 285
+	tags:  '{' expr_comma_list.ocomma '}' 
+	expr_comma_list:  expr_comma_list.',' expr 
+	ocomma: .    (148)
+
+	','  shift 291
+	.  reduce 148 (src line 644)
+
+	ocomma  goto 290
+
+state 286
+	outargs:  '(' named_arg_list ocomma.'|' tERROR ')' 
+
+	'|'  shift 292
+	.  error
+
+
+state 287
+	outargs:  '(' type_comma_list ocomma.'|' tERROR ')' 
+
+	'|'  shift 293
+	.  error
+
+
+state 288
+	streamargs:  tSTREAM '<' type '>'.    (78)
+
+	.  reduce 78 (src line 439)
+
+
+state 289
+	streamargs:  tSTREAM '<' type ','.type '>' 
+
+	'['  shift 31
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 88
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 90
+	.  error
+
+	nameref  goto 89
+	dotnameref  goto 38
+	type  goto 294
+	type_no_typeobject  goto 87
+
+state 290
+	tags:  '{' expr_comma_list ocomma.'}' 
+
+	'}'  shift 295
+	.  error
+
+
+state 291
+	expr_comma_list:  expr_comma_list ','.expr 
+	ocomma:  ','.    (149)
+	otype: .    (144)
+
+	'('  shift 29
+	'['  shift 31
+	'}'  reduce 149 (src line 646)
+	'!'  shift 17
+	'+'  shift 18
+	'-'  shift 19
+	'^'  shift 20
+	'?'  shift 37
+	tENUM  shift 32
+	tERROR  shift 30
+	tMAP  shift 34
+	tSET  shift 33
+	tSTRUCT  shift 35
+	tTYPEOBJECT  shift 22
+	tUNION  shift 36
+	tIDENT  shift 40
+	tSTRLIT  shift 23
+	tINTLIT  shift 24
+	tRATLIT  shift 25
+	tIMAGLIT  shift 26
+	.  reduce 144 (src line 634)
+
+	nameref  goto 27
+	dotnameref  goto 38
+	type  goto 41
+	type_no_typeobject  goto 21
+	otype  goto 39
+	expr  goto 104
+	unary_expr  goto 15
+	operand  goto 16
+	comp_lit  goto 28
+
+state 292
+	outargs:  '(' named_arg_list ocomma '|'.tERROR ')' 
+
+	tERROR  shift 296
+	.  error
+
+
+state 293
+	outargs:  '(' type_comma_list ocomma '|'.tERROR ')' 
+
+	tERROR  shift 297
+	.  error
+
+
+state 294
+	streamargs:  tSTREAM '<' type ',' type.'>' 
+
+	'>'  shift 298
+	.  error
+
+
+state 295
+	tags:  '{' expr_comma_list ocomma '}'.    (82)
+
+	.  reduce 82 (src line 449)
+
+
+state 296
+	outargs:  '(' named_arg_list ocomma '|' tERROR.')' 
+
+	')'  shift 299
+	.  error
+
+
+state 297
+	outargs:  '(' type_comma_list ocomma '|' tERROR.')' 
+
+	')'  shift 300
+	.  error
+
+
+state 298
+	streamargs:  tSTREAM '<' type ',' type '>'.    (79)
+
+	.  reduce 79 (src line 441)
+
+
+state 299
+	outargs:  '(' named_arg_list ocomma '|' tERROR ')'.    (74)
+
+	.  reduce 74 (src line 423)
+
+
+state 300
+	outargs:  '(' type_comma_list ocomma '|' tERROR ')'.    (75)
+
+	.  reduce 75 (src line 425)
+
+
+59 terminals, 49 nonterminals
+150 grammar rules, 301/2000 states
+0 shift/reduce, 0 reduce/reduce conflicts reported
+98 working sets used
+memory: parser 590/30000
+142 extra closures
+1361 shift entries, 5 exceptions
+192 goto entries
+303 entries saved by goto default
+Optimizer space used: output 800/30000
+800 table entries, 148 zero
+maximum spread: 57, maximum offset: 291
diff --git a/lib/vdl/parse/grammar.y.go b/lib/vdl/parse/grammar.y.go
new file mode 100644
index 0000000..effcb3a
--- /dev/null
+++ b/lib/vdl/parse/grammar.y.go
@@ -0,0 +1,1289 @@
+//line grammar.y:18
+
+// This grammar.y.go file was auto-generated by yacc from grammar.y.
+
+package parse
+
+import __yyfmt__ "fmt"
+
+//line grammar.y:20
+import (
+	"math/big"
+	"strings"
+)
+
+type intPos struct {
+	int *big.Int
+	pos Pos
+}
+
+type ratPos struct {
+	rat *big.Rat
+	pos Pos
+}
+
+type imagPos struct {
+	imag *BigImag
+	pos  Pos
+}
+
+// typeListToStrList converts a slice of Type to a slice of StringPos.  Each
+// type must be a TypeNamed with an empty PackageName, otherwise errors are
+// reported, and ok=false is returned.
+func typeListToStrList(yylex yyLexer, typeList []Type) (strList []StringPos, ok bool) {
+	ok = true
+	for _, t := range typeList {
+		var tn *TypeNamed
+		if tn, ok = t.(*TypeNamed); !ok {
+			lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names)", t.String())
+			return
+		}
+		if strings.ContainsRune(tn.Name, '.') {
+			ok = false
+			lexPosErrorf(yylex, t.Pos(), "%s invalid (expected one or more variable names)", tn.Name)
+			return
+		}
+		strList = append(strList, StringPos{tn.Name, tn.P})
+	}
+	return
+}
+
+//line grammar.y:67
+type yySymType struct {
+	yys        int
+	pos        Pos
+	strpos     StringPos
+	intpos     intPos
+	ratpos     ratPos
+	imagpos    imagPos
+	namepos    NamePos
+	nameposes  []NamePos
+	typeexpr   Type
+	typeexprs  []Type
+	fields     []*Field
+	iface      *Interface
+	constexpr  ConstExpr
+	constexprs []ConstExpr
+	complit    *ConstCompositeLit
+	kvlit      KVLit
+	kvlits     []KVLit
+	errordef   ErrorDef
+}
+
+const startFileImports = 57346
+const startFile = 57347
+const startConfigImports = 57348
+const startConfig = 57349
+const startExprs = 57350
+const tOROR = 57351
+const tANDAND = 57352
+const tLE = 57353
+const tGE = 57354
+const tNE = 57355
+const tEQEQ = 57356
+const tLSH = 57357
+const tRSH = 57358
+const tCONST = 57359
+const tENUM = 57360
+const tERROR = 57361
+const tIMPORT = 57362
+const tINTERFACE = 57363
+const tMAP = 57364
+const tPACKAGE = 57365
+const tSET = 57366
+const tSTREAM = 57367
+const tSTRUCT = 57368
+const tTYPE = 57369
+const tTYPEOBJECT = 57370
+const tUNION = 57371
+const tIDENT = 57372
+const tSTRLIT = 57373
+const tINTLIT = 57374
+const tRATLIT = 57375
+const tIMAGLIT = 57376
+const notPackage = 57377
+const notConfig = 57378
+
+var yyToknames = []string{
+	"startFileImports",
+	"startFile",
+	"startConfigImports",
+	"startConfig",
+	"startExprs",
+	"';'",
+	"':'",
+	"','",
+	"'.'",
+	"'('",
+	"')'",
+	"'['",
+	"']'",
+	"'{'",
+	"'}'",
+	"'<'",
+	"'>'",
+	"'='",
+	"'!'",
+	"'+'",
+	"'-'",
+	"'*'",
+	"'/'",
+	"'%'",
+	"'|'",
+	"'&'",
+	"'^'",
+	"'?'",
+	"tOROR",
+	"tANDAND",
+	"tLE",
+	"tGE",
+	"tNE",
+	"tEQEQ",
+	"tLSH",
+	"tRSH",
+	"tCONST",
+	"tENUM",
+	"tERROR",
+	"tIMPORT",
+	"tINTERFACE",
+	"tMAP",
+	"tPACKAGE",
+	"tSET",
+	"tSTREAM",
+	"tSTRUCT",
+	"tTYPE",
+	"tTYPEOBJECT",
+	"tUNION",
+	"tIDENT",
+	"tSTRLIT",
+	"tINTLIT",
+	"tRATLIT",
+	"tIMAGLIT",
+	"notPackage",
+	"notConfig",
+}
+var yyStatenames = []string{}
+
+const yyEofCode = 1
+const yyErrCode = 2
+const yyMaxDepth = 200
+
+//line yacctab:1
+var yyExca = []int{
+	-1, 1,
+	1, -1,
+	-2, 0,
+	-1, 27,
+	13, 40,
+	17, 40,
+	-2, 118,
+	-1, 176,
+	18, 149,
+	-2, 144,
+	-1, 291,
+	18, 149,
+	-2, 144,
+}
+
+const yyNprod = 150
+const yyPrivate = 57344
+
+var yyTokenNames []string
+var yyStates []string
+
+const yyLast = 800
+
+var yyAct = []int{
+
+	14, 27, 13, 140, 139, 242, 251, 235, 220, 151,
+	186, 193, 175, 137, 147, 38, 190, 252, 253, 133,
+	166, 243, 90, 153, 152, 273, 181, 194, 191, 210,
+	78, 21, 134, 217, 192, 249, 144, 127, 80, 89,
+	40, 43, 213, 11, 240, 270, 8, 102, 98, 178,
+	104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
+	114, 115, 116, 117, 118, 119, 120, 121, 122, 87,
+	252, 253, 194, 123, 194, 89, 126, 79, 128, 243,
+	90, 191, 89, 150, 89, 89, 89, 89, 153, 152,
+	125, 207, 297, 148, 189, 185, 158, 276, 159, 142,
+	41, 296, 93, 293, 292, 87, 157, 216, 100, 46,
+	289, 29, 87, 31, 87, 87, 87, 87, 295, 288,
+	17, 18, 19, 153, 152, 298, 275, 277, 20, 37,
+	210, 271, 89, 267, 191, 188, 202, 201, 86, 32,
+	30, 198, 89, 34, 195, 33, 279, 35, 101, 22,
+	36, 40, 23, 24, 25, 26, 103, 233, 170, 226,
+	180, 92, 87, 174, 85, 84, 60, 61, 62, 169,
+	64, 89, 87, 89, 124, 89, 199, 148, 204, 66,
+	67, 131, 81, 135, 136, 141, 141, 196, 168, 15,
+	89, 203, 96, 130, 97, 98, 209, 83, 82, 75,
+	205, 87, 95, 87, 219, 87, 215, 68, 69, 70,
+	71, 76, 89, 221, 77, 300, 223, 229, 299, 266,
+	87, 264, 247, 89, 245, 236, 237, 238, 244, 224,
+	222, 165, 162, 73, 72, 227, 239, 91, 89, 230,
+	74, 172, 87, 248, 48, 246, 49, 291, 254, 257,
+	255, 262, 260, 87, 176, 263, 259, 89, 89, 231,
+	265, 244, 258, 261, 228, 268, 225, 206, 87, 272,
+	197, 184, 141, 183, 200, 182, 171, 167, 89, 89,
+	281, 237, 285, 89, 280, 218, 149, 87, 87, 211,
+	99, 89, 104, 286, 287, 214, 187, 208, 290, 58,
+	59, 60, 61, 62, 63, 64, 65, 42, 87, 87,
+	10, 211, 156, 87, 66, 67, 12, 44, 45, 155,
+	47, 87, 141, 2, 3, 4, 5, 6, 154, 7,
+	29, 179, 31, 9, 94, 284, 1, 172, 250, 17,
+	18, 19, 232, 146, 28, 278, 16, 20, 37, 241,
+	274, 269, 39, 132, 0, 0, 141, 200, 32, 30,
+	0, 0, 34, 0, 33, 0, 35, 0, 22, 36,
+	40, 23, 24, 25, 26, 0, 0, 141, 283, 0,
+	0, 0, 172, 29, 0, 31, 0, 0, 145, 0,
+	294, 0, 17, 18, 19, 0, 0, 0, 256, 0,
+	20, 37, 31, 0, 0, 0, 0, 0, 0, 0,
+	0, 32, 30, 0, 0, 34, 0, 33, 37, 35,
+	0, 22, 36, 40, 23, 24, 25, 26, 32, 30,
+	31, 0, 34, 0, 33, 282, 35, 0, 88, 36,
+	40, 90, 0, 0, 234, 31, 37, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 32, 30, 0, 0,
+	34, 37, 33, 0, 35, 0, 88, 36, 40, 90,
+	31, 32, 30, 0, 0, 34, 0, 33, 0, 35,
+	0, 88, 36, 40, 90, 173, 37, 0, 0, 31,
+	0, 0, 0, 0, 0, 0, 32, 30, 0, 212,
+	34, 0, 33, 0, 35, 37, 88, 36, 40, 90,
+	0, 0, 0, 0, 31, 32, 30, 143, 0, 34,
+	0, 33, 0, 35, 0, 88, 36, 40, 90, 31,
+	37, 0, 138, 0, 0, 0, 0, 0, 0, 0,
+	32, 30, 0, 0, 34, 37, 33, 0, 35, 0,
+	88, 36, 40, 90, 31, 32, 30, 0, 0, 34,
+	0, 33, 0, 35, 0, 88, 36, 40, 90, 0,
+	37, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+	32, 30, 0, 0, 34, 177, 33, 0, 35, 0,
+	88, 36, 40, 90, 52, 53, 0, 0, 58, 59,
+	60, 61, 62, 63, 64, 65, 0, 50, 51, 54,
+	55, 56, 57, 66, 67, 164, 0, 0, 52, 53,
+	0, 0, 58, 59, 60, 61, 62, 63, 64, 65,
+	0, 50, 51, 54, 55, 56, 57, 66, 67, 163,
+	0, 0, 52, 53, 0, 0, 58, 59, 60, 61,
+	62, 63, 64, 65, 0, 50, 51, 54, 55, 56,
+	57, 66, 67, 161, 0, 0, 0, 0, 52, 53,
+	0, 0, 58, 59, 60, 61, 62, 63, 64, 65,
+	160, 50, 51, 54, 55, 56, 57, 66, 67, 0,
+	52, 53, 0, 0, 58, 59, 60, 61, 62, 63,
+	64, 65, 0, 50, 51, 54, 55, 56, 57, 66,
+	67, 129, 0, 0, 0, 0, 52, 53, 0, 0,
+	58, 59, 60, 61, 62, 63, 64, 65, 0, 50,
+	51, 54, 55, 56, 57, 66, 67, 52, 53, 0,
+	0, 58, 59, 60, 61, 62, 63, 64, 65, 0,
+	50, 51, 54, 55, 56, 57, 66, 67, 52, 53,
+	0, 0, 58, 59, 60, 61, 62, 63, 64, 65,
+	0, 0, 51, 54, 55, 56, 57, 66, 67, 52,
+	53, 0, 0, 58, 59, 60, 61, 62, 63, 64,
+	65, 0, 0, 0, 54, 55, 56, 57, 66, 67,
+}
+var yyPact = []int{
+
+	319, -1000, 0, 0, -10, -10, 98, -1000, -12, -1000,
+	-1000, 88, -1000, 235, 718, -1000, -1000, 98, 98, 98,
+	98, 221, 220, 228, -1000, -1000, -1000, 184, 199, 98,
+	-1000, 22, 165, 183, 182, 148, 147, 539, 225, 144,
+	-1000, -1000, 152, 281, 5, 152, 98, 5, -1000, 98,
+	98, 98, 98, 98, 98, 98, 98, 98, 98, 98,
+	98, 98, 98, 98, 98, 98, 98, 98, -1000, -1000,
+	-1000, -1000, 98, 539, -13, 98, -16, 98, 697, 177,
+	539, -21, 539, 539, 514, 499, -1000, -1000, -1000, -1000,
+	228, -17, 370, -1000, 277, -1000, -1000, -1000, 70, -1000,
+	56, -1000, 671, 56, 718, 739, 760, 276, 276, 276,
+	276, 276, 276, 141, 141, -1000, -1000, -1000, 141, -1000,
+	141, -1000, -1000, 649, 218, 225, 623, -1000, 599, -1000,
+	539, -1000, 268, -1000, -1000, 172, 153, 267, -1000, -1000,
+	474, -1000, 267, -1000, -1000, -1000, 243, -1000, 575, -1000,
+	35, -1000, -1000, -28, 266, 264, 262, 82, 81, 21,
+	-1000, -1000, -1000, -1000, -1000, -1000, 126, -21, -1000, 539,
+	123, 539, -1000, 539, 119, 118, 98, 98, -1000, 258,
+	-1000, -1000, -1000, -1000, -1000, 77, -1000, -1000, 455, 28,
+	-1000, 86, 19, -1000, 200, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, 718, 216, -30, -1000, 257, -1000,
+	539, -1000, 142, -1000, 255, -1000, 98, -1000, 250, -1000,
+	140, 430, -1000, -1000, 213, -24, 26, 210, -25, 718,
+	208, -26, -1000, 17, -1000, 239, 387, -1000, -1000, -1000,
+	-1000, 247, -1000, 200, -1000, -1000, -1000, -1000, -1000, -1000,
+	240, -1000, -1000, 245, 207, 539, 539, 205, 115, -32,
+	-3, 113, -36, -29, -1000, -1000, -1000, -1000, -1000, 84,
+	108, -1000, -1000, -1000, 129, -1000, 539, 415, -1000, 317,
+	239, 387, -1000, 99, -1000, 236, 76, 75, -1000, 539,
+	100, 98, 59, 50, 105, -1000, 204, 201, -1000, -1000,
+	-1000,
+}
+var yyPgo = []int{
+
+	0, 1, 15, 19, 353, 100, 31, 352, 3, 351,
+	13, 4, 7, 8, 350, 349, 5, 0, 189, 346,
+	345, 2, 344, 14, 343, 342, 338, 6, 336, 329,
+	307, 102, 108, 310, 334, 331, 20, 9, 328, 319,
+	312, 297, 10, 296, 295, 16, 285, 11, 12,
+}
+var yyR1 = []int{
+
+	0, 28, 28, 28, 28, 28, 31, 31, 31, 31,
+	29, 29, 33, 33, 30, 30, 34, 34, 34, 35,
+	35, 37, 37, 32, 32, 32, 32, 38, 38, 38,
+	38, 39, 39, 39, 40, 40, 40, 41, 41, 42,
+	6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+	6, 6, 5, 5, 4, 4, 3, 10, 10, 11,
+	8, 8, 43, 43, 15, 15, 16, 16, 13, 13,
+	13, 12, 12, 14, 14, 14, 9, 9, 9, 9,
+	20, 20, 20, 21, 21, 44, 44, 45, 17, 17,
+	17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+	17, 17, 17, 17, 17, 17, 17, 18, 18, 18,
+	18, 18, 18, 18, 19, 19, 19, 19, 19, 19,
+	19, 19, 19, 19, 22, 22, 24, 24, 23, 23,
+	46, 46, 47, 25, 25, 25, 26, 26, 27, 27,
+	1, 1, 2, 2, 7, 7, 36, 36, 48, 48,
+}
+var yyR2 = []int{
+
+	0, 4, 4, 4, 4, 3, 0, 1, 1, 1,
+	0, 3, 0, 4, 0, 3, 3, 5, 2, 1,
+	3, 1, 2, 0, 3, 3, 3, 3, 5, 2,
+	2, 3, 5, 2, 3, 5, 2, 1, 3, 2,
+	1, 1, 4, 3, 5, 4, 5, 5, 3, 5,
+	3, 2, 1, 1, 1, 3, 1, 1, 3, 2,
+	1, 3, 4, 6, 1, 3, 5, 1, 2, 4,
+	4, 1, 3, 1, 6, 6, 0, 3, 4, 6,
+	0, 2, 4, 1, 3, 1, 3, 3, 1, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 1, 2, 2,
+	2, 2, 4, 4, 1, 1, 1, 1, 1, 1,
+	3, 4, 4, 3, 3, 5, 1, 3, 1, 3,
+	1, 3, 3, 0, 2, 4, 1, 3, 1, 3,
+	1, 3, 1, 3, 0, 1, 0, 1, 0, 1,
+}
+var yyChk = []int{
+
+	-1000, -28, 4, 5, 6, 7, 8, -29, 46, -29,
+	-33, 53, -33, -21, -17, -18, -19, 22, 23, 24,
+	30, -6, 51, 54, 55, 56, 57, -1, -22, 13,
+	42, 15, 41, 47, 45, 49, 52, 31, -2, -7,
+	53, -5, -30, 53, -30, -30, 21, -30, 9, 11,
+	32, 33, 19, 20, 34, 35, 36, 37, 23, 24,
+	25, 26, 27, 28, 29, 30, 38, 39, -18, -18,
+	-18, -18, 13, 13, 12, 15, 12, 15, -17, 55,
+	16, 17, 15, 15, 17, 17, -5, -6, 51, -1,
+	54, 12, 17, -31, -34, 50, 40, 42, 43, 9,
+	-32, -31, -17, -32, -17, -17, -17, -17, -17, -17,
+	-17, -17, -17, -17, -17, -17, -17, -17, -17, -17,
+	-17, -17, -17, -17, -5, -2, -17, 53, -17, 14,
+	16, -5, -4, -3, 53, -5, -5, -10, 18, -11,
+	-8, -5, -10, 18, 53, 18, -24, -23, -17, 9,
+	13, -37, 54, 53, -38, -39, -40, 50, 40, 42,
+	9, 14, 14, 16, 16, -5, -36, 9, 16, 16,
+	-36, 9, -5, 11, -36, -48, 11, 10, 14, -35,
+	-37, 54, 9, 9, 9, 13, -42, -43, 53, 13,
+	-45, 53, 13, -47, 53, 18, -3, -5, 18, -11,
+	-5, 18, 18, -23, -17, -36, 9, 14, -41, -42,
+	53, -5, 44, 14, -44, -45, 21, 14, -46, -47,
+	-13, 13, 14, -37, -36, 9, 17, -36, 9, -17,
+	-36, 9, -25, 17, 14, -12, -8, -11, 14, -42,
+	18, -15, -16, 53, -1, 14, -45, 14, -47, 18,
+	-26, -27, 53, 54, -48, 11, 11, -48, -36, 9,
+	-13, -48, 11, 10, 14, -11, 14, 18, -16, -9,
+	48, 18, -27, 54, -14, 42, 13, 19, -20, 17,
+	-12, -8, 20, -5, 18, -21, -48, -48, 20, 11,
+	-48, 11, 28, 28, -5, 18, 42, 42, 20, 14,
+	14,
+}
+var yyDef = []int{
+
+	0, -2, 10, 10, 12, 12, 144, 14, 0, 14,
+	14, 0, 14, 0, 83, 88, 107, 144, 144, 144,
+	144, 52, 53, 114, 115, 116, 117, -2, 119, 144,
+	41, 0, 0, 0, 0, 0, 0, 0, 140, 0,
+	142, 145, 6, 0, 23, 6, 144, 23, 5, 144,
+	144, 144, 144, 144, 144, 144, 144, 144, 144, 144,
+	144, 144, 144, 144, 144, 144, 144, 144, 108, 109,
+	110, 111, 144, 0, 0, 144, 0, 144, 0, 0,
+	0, 0, 0, 0, 0, 0, 51, 52, 53, 40,
+	0, 0, 144, 1, 0, 7, 8, 9, 0, 11,
+	2, 3, 0, 4, 84, 89, 90, 91, 92, 93,
+	94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
+	104, 105, 106, 0, 0, 141, 0, 120, 0, 123,
+	0, 43, 146, 54, 56, 0, 0, 146, 48, 57,
+	0, 60, 146, 50, 143, 124, 148, 126, 128, 15,
+	0, 18, 21, 0, 0, 0, 0, 0, 0, 0,
+	13, 112, 113, 122, 121, 42, 0, 147, 45, 0,
+	0, 147, 59, 0, 0, 0, -2, 144, 16, 146,
+	19, 22, 24, 25, 26, 0, 29, 30, 0, 0,
+	33, 0, 0, 36, 0, 44, 55, 46, 47, 58,
+	61, 49, 125, 127, 129, 0, 147, 27, 146, 37,
+	0, 39, 0, 31, 146, 85, 144, 34, 146, 130,
+	133, 0, 17, 20, 0, 147, 0, 0, 147, 87,
+	0, 147, 132, 0, 68, 148, 148, 71, 28, 38,
+	62, 146, 64, 142, 67, 32, 86, 35, 131, 134,
+	148, 136, 138, 0, 0, 149, 149, 0, 0, 147,
+	76, 0, 149, 0, 69, 72, 70, 63, 65, 0,
+	0, 135, 137, 139, 80, 73, 0, 0, 66, 144,
+	148, 148, 77, 0, 81, 148, 0, 0, 78, 0,
+	0, -2, 0, 0, 0, 82, 0, 0, 79, 74,
+	75,
+}
+var yyTok1 = []int{
+
+	1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 22, 3, 3, 3, 27, 29, 3,
+	13, 14, 25, 23, 11, 24, 12, 26, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 10, 9,
+	19, 21, 20, 31, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 15, 3, 16, 30, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 17, 28, 18,
+}
+var yyTok2 = []int{
+
+	2, 3, 4, 5, 6, 7, 8, 32, 33, 34,
+	35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+	45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+	55, 56, 57, 58, 59,
+}
+var yyTok3 = []int{
+	0,
+}
+
+//line yaccpar:1
+
+/*	parser for yacc output	*/
+
+var yyDebug = 0
+
+type yyLexer interface {
+	Lex(lval *yySymType) int
+	Error(s string)
+}
+
+const yyFlag = -1000
+
+func yyTokname(c int) string {
+	// 4 is TOKSTART above
+	if c >= 4 && c-4 < len(yyToknames) {
+		if yyToknames[c-4] != "" {
+			return yyToknames[c-4]
+		}
+	}
+	return __yyfmt__.Sprintf("tok-%v", c)
+}
+
+func yyStatname(s int) string {
+	if s >= 0 && s < len(yyStatenames) {
+		if yyStatenames[s] != "" {
+			return yyStatenames[s]
+		}
+	}
+	return __yyfmt__.Sprintf("state-%v", s)
+}
+
+func yylex1(lex yyLexer, lval *yySymType) int {
+	c := 0
+	char := lex.Lex(lval)
+	if char <= 0 {
+		c = yyTok1[0]
+		goto out
+	}
+	if char < len(yyTok1) {
+		c = yyTok1[char]
+		goto out
+	}
+	if char >= yyPrivate {
+		if char < yyPrivate+len(yyTok2) {
+			c = yyTok2[char-yyPrivate]
+			goto out
+		}
+	}
+	for i := 0; i < len(yyTok3); i += 2 {
+		c = yyTok3[i+0]
+		if c == char {
+			c = yyTok3[i+1]
+			goto out
+		}
+	}
+
+out:
+	if c == 0 {
+		c = yyTok2[1] /* unknown char */
+	}
+	if yyDebug >= 3 {
+		__yyfmt__.Printf("lex %s(%d)\n", yyTokname(c), uint(char))
+	}
+	return c
+}
+
+func yyParse(yylex yyLexer) int {
+	var yyn int
+	var yylval yySymType
+	var yyVAL yySymType
+	yyS := make([]yySymType, yyMaxDepth)
+
+	Nerrs := 0   /* number of errors */
+	Errflag := 0 /* error recovery flag */
+	yystate := 0
+	yychar := -1
+	yyp := -1
+	goto yystack
+
+ret0:
+	return 0
+
+ret1:
+	return 1
+
+yystack:
+	/* put a state and value onto the stack */
+	if yyDebug >= 4 {
+		__yyfmt__.Printf("char %v in %v\n", yyTokname(yychar), yyStatname(yystate))
+	}
+
+	yyp++
+	if yyp >= len(yyS) {
+		nyys := make([]yySymType, len(yyS)*2)
+		copy(nyys, yyS)
+		yyS = nyys
+	}
+	yyS[yyp] = yyVAL
+	yyS[yyp].yys = yystate
+
+yynewstate:
+	yyn = yyPact[yystate]
+	if yyn <= yyFlag {
+		goto yydefault /* simple state */
+	}
+	if yychar < 0 {
+		yychar = yylex1(yylex, &yylval)
+	}
+	yyn += yychar
+	if yyn < 0 || yyn >= yyLast {
+		goto yydefault
+	}
+	yyn = yyAct[yyn]
+	if yyChk[yyn] == yychar { /* valid shift */
+		yychar = -1
+		yyVAL = yylval
+		yystate = yyn
+		if Errflag > 0 {
+			Errflag--
+		}
+		goto yystack
+	}
+
+yydefault:
+	/* default state action */
+	yyn = yyDef[yystate]
+	if yyn == -2 {
+		if yychar < 0 {
+			yychar = yylex1(yylex, &yylval)
+		}
+
+		/* look through exception table */
+		xi := 0
+		for {
+			if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate {
+				break
+			}
+			xi += 2
+		}
+		for xi += 2; ; xi += 2 {
+			yyn = yyExca[xi+0]
+			if yyn < 0 || yyn == yychar {
+				break
+			}
+		}
+		yyn = yyExca[xi+1]
+		if yyn < 0 {
+			goto ret0
+		}
+	}
+	if yyn == 0 {
+		/* error ... attempt to resume parsing */
+		switch Errflag {
+		case 0: /* brand new error */
+			yylex.Error("syntax error")
+			Nerrs++
+			if yyDebug >= 1 {
+				__yyfmt__.Printf("%s", yyStatname(yystate))
+				__yyfmt__.Printf(" saw %s\n", yyTokname(yychar))
+			}
+			fallthrough
+
+		case 1, 2: /* incompletely recovered error ... try again */
+			Errflag = 3
+
+			/* find a state where "error" is a legal shift action */
+			for yyp >= 0 {
+				yyn = yyPact[yyS[yyp].yys] + yyErrCode
+				if yyn >= 0 && yyn < yyLast {
+					yystate = yyAct[yyn] /* simulate a shift of "error" */
+					if yyChk[yystate] == yyErrCode {
+						goto yystack
+					}
+				}
+
+				/* the current p has no shift on "error", pop stack */
+				if yyDebug >= 2 {
+					__yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys)
+				}
+				yyp--
+			}
+			/* there is no state on the stack with an error shift ... abort */
+			goto ret1
+
+		case 3: /* no shift yet; clobber input char */
+			if yyDebug >= 2 {
+				__yyfmt__.Printf("error recovery discards %s\n", yyTokname(yychar))
+			}
+			if yychar == yyEofCode {
+				goto ret1
+			}
+			yychar = -1
+			goto yynewstate /* try again in the same state */
+		}
+	}
+
+	/* reduction by production yyn */
+	if yyDebug >= 2 {
+		__yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate))
+	}
+
+	yynt := yyn
+	yypt := yyp
+	_ = yypt // guard against "declared and not used"
+
+	yyp -= yyR2[yyn]
+	yyVAL = yyS[yyp+1]
+
+	/* consult goto table to find next state */
+	yyn = yyR1[yyn]
+	yyg := yyPgo[yyn]
+	yyj := yyg + yyS[yyp].yys + 1
+
+	if yyj >= yyLast {
+		yystate = yyAct[yyg]
+	} else {
+		yystate = yyAct[yyj]
+		if yyChk[yystate] != -yyn {
+			yystate = yyAct[yyg]
+		}
+	}
+	// dummy call; replaced with literal code
+	switch yynt {
+
+	case 5:
+		//line grammar.y:143
+		{
+			lexStoreExprs(yylex, yyS[yypt-1].constexprs)
+		}
+	case 6:
+		//line grammar.y:152
+		{
+			lexGenEOF(yylex)
+		}
+	case 7:
+		//line grammar.y:154
+		{
+			lexGenEOF(yylex)
+		}
+	case 8:
+		//line grammar.y:156
+		{
+			lexGenEOF(yylex)
+		}
+	case 9:
+		//line grammar.y:158
+		{
+			lexGenEOF(yylex)
+		}
+	case 10:
+		//line grammar.y:163
+		{
+			lexPosErrorf(yylex, Pos{}, "vdl file must start with package clause")
+		}
+	case 11:
+		//line grammar.y:165
+		{
+			lexVDLFile(yylex).PackageDef = NamePos{Name: yyS[yypt-1].strpos.String, Pos: yyS[yypt-1].strpos.Pos}
+		}
+	case 12:
+		//line grammar.y:170
+		{
+			lexPosErrorf(yylex, Pos{}, "config file must start with config clause")
+		}
+	case 13:
+		//line grammar.y:172
+		{
+			// We allow "config" as an identifier; it is not a keyword.  So we check
+			// manually to make sure the syntax is correct.
+			if yyS[yypt-3].strpos.String != "config" {
+				lexPosErrorf(yylex, yyS[yypt-3].strpos.Pos, "config file must start with config clause")
+				return 1 // Any non-zero code indicates an error
+			}
+			file := lexVDLFile(yylex)
+			file.PackageDef = NamePos{Name: "config", Pos: yyS[yypt-3].strpos.Pos}
+			file.ConstDefs = []*ConstDef{{Expr: yyS[yypt-1].constexpr}}
+		}
+	case 21:
+		//line grammar.y:200
+		{
+			imps := &lexVDLFile(yylex).Imports
+			*imps = append(*imps, &Import{Path: yyS[yypt-0].strpos.String, NamePos: NamePos{Pos: yyS[yypt-0].strpos.Pos}})
+		}
+	case 22:
+		//line grammar.y:205
+		{
+			imps := &lexVDLFile(yylex).Imports
+			*imps = append(*imps, &Import{Path: yyS[yypt-0].strpos.String, NamePos: NamePos{Name: yyS[yypt-1].strpos.String, Pos: yyS[yypt-1].strpos.Pos}})
+		}
+	case 39:
+		//line grammar.y:240
+		{
+			tds := &lexVDLFile(yylex).TypeDefs
+			*tds = append(*tds, &TypeDef{Type: yyS[yypt-0].typeexpr, NamePos: NamePos{Name: yyS[yypt-1].strpos.String, Pos: yyS[yypt-1].strpos.Pos}})
+		}
+	case 40:
+		//line grammar.y:262
+		{
+			yyVAL.typeexpr = &TypeNamed{Name: yyS[yypt-0].strpos.String, P: yyS[yypt-0].strpos.Pos}
+		}
+	case 41:
+		//line grammar.y:264
+		{
+			yyVAL.typeexpr = &TypeNamed{Name: "error", P: yyS[yypt-0].pos}
+		}
+	case 42:
+		//line grammar.y:266
+		{
+			yyVAL.typeexpr = &TypeArray{Len: int(yyS[yypt-2].intpos.int.Int64()), Elem: yyS[yypt-0].typeexpr, P: yyS[yypt-3].pos}
+		}
+	case 43:
+		//line grammar.y:268
+		{
+			yyVAL.typeexpr = &TypeList{Elem: yyS[yypt-0].typeexpr, P: yyS[yypt-2].pos}
+		}
+	case 44:
+		//line grammar.y:270
+		{
+			yyVAL.typeexpr = &TypeEnum{Labels: yyS[yypt-2].nameposes, P: yyS[yypt-4].pos}
+		}
+	case 45:
+		//line grammar.y:272
+		{
+			yyVAL.typeexpr = &TypeSet{Key: yyS[yypt-1].typeexpr, P: yyS[yypt-3].pos}
+		}
+	case 46:
+		//line grammar.y:274
+		{
+			yyVAL.typeexpr = &TypeMap{Key: yyS[yypt-2].typeexpr, Elem: yyS[yypt-0].typeexpr, P: yyS[yypt-4].pos}
+		}
+	case 47:
+		//line grammar.y:276
+		{
+			yyVAL.typeexpr = &TypeStruct{Fields: yyS[yypt-2].fields, P: yyS[yypt-4].pos}
+		}
+	case 48:
+		//line grammar.y:278
+		{
+			yyVAL.typeexpr = &TypeStruct{P: yyS[yypt-2].pos}
+		}
+	case 49:
+		//line grammar.y:280
+		{
+			yyVAL.typeexpr = &TypeUnion{Fields: yyS[yypt-2].fields, P: yyS[yypt-4].pos}
+		}
+	case 50:
+		//line grammar.y:282
+		{
+			yyVAL.typeexpr = &TypeUnion{P: yyS[yypt-2].pos}
+		}
+	case 51:
+		//line grammar.y:284
+		{
+			yyVAL.typeexpr = &TypeOptional{Base: yyS[yypt-0].typeexpr, P: yyS[yypt-1].pos}
+		}
+	case 52:
+		//line grammar.y:289
+		{
+			yyVAL.typeexpr = yyS[yypt-0].typeexpr
+		}
+	case 53:
+		//line grammar.y:291
+		{
+			yyVAL.typeexpr = &TypeNamed{Name: "typeobject", P: yyS[yypt-0].pos}
+		}
+	case 54:
+		//line grammar.y:295
+		{
+			yyVAL.nameposes = []NamePos{yyS[yypt-0].namepos}
+		}
+	case 55:
+		//line grammar.y:297
+		{
+			yyVAL.nameposes = append(yyS[yypt-2].nameposes, yyS[yypt-0].namepos)
+		}
+	case 56:
+		//line grammar.y:301
+		{
+			yyVAL.namepos = NamePos{Name: yyS[yypt-0].strpos.String, Pos: yyS[yypt-0].strpos.Pos}
+		}
+	case 57:
+		//line grammar.y:305
+		{
+			yyVAL.fields = yyS[yypt-0].fields
+		}
+	case 58:
+		//line grammar.y:307
+		{
+			yyVAL.fields = append(yyS[yypt-2].fields, yyS[yypt-0].fields...)
+		}
+	case 59:
+		//line grammar.y:345
+		{
+			if names, ok := typeListToStrList(yylex, yyS[yypt-1].typeexprs); ok {
+				for _, n := range names {
+					yyVAL.fields = append(yyVAL.fields, &Field{Type: yyS[yypt-0].typeexpr, NamePos: NamePos{Name: n.String, Pos: n.Pos}})
+				}
+			} else {
+				lexPosErrorf(yylex, yyS[yypt-0].typeexpr.Pos(), "perhaps you forgot a comma before %q?", yyS[yypt-0].typeexpr.String())
+			}
+		}
+	case 60:
+		//line grammar.y:357
+		{
+			yyVAL.typeexprs = []Type{yyS[yypt-0].typeexpr}
+		}
+	case 61:
+		//line grammar.y:359
+		{
+			yyVAL.typeexprs = append(yyS[yypt-2].typeexprs, yyS[yypt-0].typeexpr)
+		}
+	case 62:
+		//line grammar.y:364
+		{
+			ifs := &lexVDLFile(yylex).Interfaces
+			*ifs = append(*ifs, &Interface{NamePos: NamePos{Name: yyS[yypt-3].strpos.String, Pos: yyS[yypt-3].strpos.Pos}})
+		}
+	case 63:
+		//line grammar.y:369
+		{
+			yyS[yypt-2].iface.Name, yyS[yypt-2].iface.Pos = yyS[yypt-5].strpos.String, yyS[yypt-5].strpos.Pos
+			ifs := &lexVDLFile(yylex).Interfaces
+			*ifs = append(*ifs, yyS[yypt-2].iface)
+		}
+	case 64:
+		//line grammar.y:377
+		{
+			yyVAL.iface = yyS[yypt-0].iface
+		}
+	case 65:
+		//line grammar.y:379
+		{
+			yyS[yypt-2].iface.Embeds = append(yyS[yypt-2].iface.Embeds, yyS[yypt-0].iface.Embeds...)
+			yyS[yypt-2].iface.Methods = append(yyS[yypt-2].iface.Methods, yyS[yypt-0].iface.Methods...)
+			yyVAL.iface = yyS[yypt-2].iface
+		}
+	case 66:
+		//line grammar.y:387
+		{
+			yyVAL.iface = &Interface{Methods: []*Method{{InArgs: yyS[yypt-3].fields, InStream: yyS[yypt-2].typeexprs[0], OutStream: yyS[yypt-2].typeexprs[1], OutArgs: yyS[yypt-1].fields, Tags: yyS[yypt-0].constexprs, NamePos: NamePos{Name: yyS[yypt-4].strpos.String, Pos: yyS[yypt-4].strpos.Pos}}}}
+		}
+	case 67:
+		//line grammar.y:389
+		{
+			yyVAL.iface = &Interface{Embeds: []*NamePos{{Name: yyS[yypt-0].strpos.String, Pos: yyS[yypt-0].strpos.Pos}}}
+		}
+	case 68:
+		//line grammar.y:393
+		{
+			yyVAL.fields = nil
+		}
+	case 69:
+		//line grammar.y:395
+		{
+			yyVAL.fields = yyS[yypt-2].fields
+		}
+	case 70:
+		//line grammar.y:399
+		{
+			for _, t := range yyS[yypt-2].typeexprs {
+				yyVAL.fields = append(yyVAL.fields, &Field{Type: t, NamePos: NamePos{Pos: t.Pos()}})
+			}
+		}
+	case 71:
+		//line grammar.y:409
+		{
+			yyVAL.fields = yyS[yypt-0].fields
+		}
+	case 72:
+		//line grammar.y:411
+		{
+			yyVAL.fields = append(yyS[yypt-2].fields, yyS[yypt-0].fields...)
+		}
+	case 73:
+		//line grammar.y:422
+		{
+			yyVAL.fields = nil
+		}
+	case 74:
+		//line grammar.y:424
+		{
+			yyVAL.fields = yyS[yypt-4].fields
+		}
+	case 75:
+		//line grammar.y:428
+		{
+			for _, t := range yyS[yypt-4].typeexprs {
+				yyVAL.fields = append(yyVAL.fields, &Field{Type: t, NamePos: NamePos{Pos: t.Pos()}})
+			}
+		}
+	case 76:
+		//line grammar.y:436
+		{
+			yyVAL.typeexprs = []Type{nil, nil}
+		}
+	case 77:
+		//line grammar.y:438
+		{
+			yyVAL.typeexprs = []Type{nil, nil}
+		}
+	case 78:
+		//line grammar.y:440
+		{
+			yyVAL.typeexprs = []Type{yyS[yypt-1].typeexpr, nil}
+		}
+	case 79:
+		//line grammar.y:442
+		{
+			yyVAL.typeexprs = []Type{yyS[yypt-3].typeexpr, yyS[yypt-1].typeexpr}
+		}
+	case 80:
+		//line grammar.y:446
+		{
+			yyVAL.constexprs = nil
+		}
+	case 81:
+		//line grammar.y:448
+		{
+			yyVAL.constexprs = nil
+		}
+	case 82:
+		//line grammar.y:450
+		{
+			yyVAL.constexprs = yyS[yypt-2].constexprs
+		}
+	case 83:
+		//line grammar.y:454
+		{
+			yyVAL.constexprs = []ConstExpr{yyS[yypt-0].constexpr}
+		}
+	case 84:
+		//line grammar.y:456
+		{
+			yyVAL.constexprs = append(yyS[yypt-2].constexprs, yyS[yypt-0].constexpr)
+		}
+	case 87:
+		//line grammar.y:465
+		{
+			cds := &lexVDLFile(yylex).ConstDefs
+			*cds = append(*cds, &ConstDef{Expr: yyS[yypt-0].constexpr, NamePos: NamePos{Name: yyS[yypt-2].strpos.String, Pos: yyS[yypt-2].strpos.Pos}})
+		}
+	case 88:
+		//line grammar.y:472
+		{
+			yyVAL.constexpr = yyS[yypt-0].constexpr
+		}
+	case 89:
+		//line grammar.y:474
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"||", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 90:
+		//line grammar.y:476
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"&&", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 91:
+		//line grammar.y:478
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"<", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 92:
+		//line grammar.y:480
+		{
+			yyVAL.constexpr = &ConstBinaryOp{">", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 93:
+		//line grammar.y:482
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"<=", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 94:
+		//line grammar.y:484
+		{
+			yyVAL.constexpr = &ConstBinaryOp{">=", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 95:
+		//line grammar.y:486
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"!=", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 96:
+		//line grammar.y:488
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"==", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 97:
+		//line grammar.y:490
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"+", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 98:
+		//line grammar.y:492
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"-", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 99:
+		//line grammar.y:494
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"*", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 100:
+		//line grammar.y:496
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"/", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 101:
+		//line grammar.y:498
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"%", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 102:
+		//line grammar.y:500
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"|", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 103:
+		//line grammar.y:502
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"&", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 104:
+		//line grammar.y:504
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"^", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 105:
+		//line grammar.y:506
+		{
+			yyVAL.constexpr = &ConstBinaryOp{"<<", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 106:
+		//line grammar.y:508
+		{
+			yyVAL.constexpr = &ConstBinaryOp{">>", yyS[yypt-2].constexpr, yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 107:
+		//line grammar.y:512
+		{
+			yyVAL.constexpr = yyS[yypt-0].constexpr
+		}
+	case 108:
+		//line grammar.y:514
+		{
+			yyVAL.constexpr = &ConstUnaryOp{"!", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 109:
+		//line grammar.y:516
+		{
+			yyVAL.constexpr = &ConstUnaryOp{"+", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 110:
+		//line grammar.y:518
+		{
+			yyVAL.constexpr = &ConstUnaryOp{"-", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 111:
+		//line grammar.y:520
+		{
+			yyVAL.constexpr = &ConstUnaryOp{"^", yyS[yypt-0].constexpr, yyS[yypt-1].pos}
+		}
+	case 112:
+		//line grammar.y:522
+		{
+			yyVAL.constexpr = &ConstTypeConv{yyS[yypt-3].typeexpr, yyS[yypt-1].constexpr, yyS[yypt-3].typeexpr.Pos()}
+		}
+	case 113:
+		//line grammar.y:524
+		{
+			yyVAL.constexpr = &ConstTypeObject{yyS[yypt-1].typeexpr, yyS[yypt-3].pos}
+		}
+	case 114:
+		//line grammar.y:529
+		{
+			yyVAL.constexpr = &ConstLit{yyS[yypt-0].strpos.String, yyS[yypt-0].strpos.Pos}
+		}
+	case 115:
+		//line grammar.y:531
+		{
+			yyVAL.constexpr = &ConstLit{yyS[yypt-0].intpos.int, yyS[yypt-0].intpos.pos}
+		}
+	case 116:
+		//line grammar.y:533
+		{
+			yyVAL.constexpr = &ConstLit{yyS[yypt-0].ratpos.rat, yyS[yypt-0].ratpos.pos}
+		}
+	case 117:
+		//line grammar.y:535
+		{
+			yyVAL.constexpr = &ConstLit{yyS[yypt-0].imagpos.imag, yyS[yypt-0].imagpos.pos}
+		}
+	case 118:
+		//line grammar.y:537
+		{
+			yyVAL.constexpr = &ConstNamed{yyS[yypt-0].strpos.String, yyS[yypt-0].strpos.Pos}
+		}
+	case 119:
+		//line grammar.y:539
+		{
+			yyVAL.constexpr = yyS[yypt-0].complit
+		}
+	case 120:
+		//line grammar.y:541
+		{
+			lexPosErrorf(yylex, yyS[yypt-1].pos, "cannot apply selector operator to unnamed constant")
+		}
+	case 121:
+		//line grammar.y:543
+		{
+			lexPosErrorf(yylex, yyS[yypt-2].pos, "cannot apply index operator to unnamed constant")
+		}
+	case 122:
+		//line grammar.y:545
+		{
+			yyVAL.constexpr = &ConstIndexed{&ConstNamed{yyS[yypt-3].strpos.String, yyS[yypt-3].strpos.Pos}, yyS[yypt-1].constexpr, yyS[yypt-3].strpos.Pos}
+		}
+	case 123:
+		//line grammar.y:547
+		{
+			yyVAL.constexpr = yyS[yypt-1].constexpr
+		}
+	case 124:
+		//line grammar.y:551
+		{
+			yyVAL.complit = &ConstCompositeLit{yyS[yypt-2].typeexpr, nil, yyS[yypt-1].pos}
+		}
+	case 125:
+		//line grammar.y:553
+		{
+			yyVAL.complit = &ConstCompositeLit{yyS[yypt-4].typeexpr, yyS[yypt-2].kvlits, yyS[yypt-3].pos}
+		}
+	case 126:
+		//line grammar.y:557
+		{
+			yyVAL.kvlits = []KVLit{yyS[yypt-0].kvlit}
+		}
+	case 127:
+		//line grammar.y:559
+		{
+			yyVAL.kvlits = append(yyS[yypt-2].kvlits, yyS[yypt-0].kvlit)
+		}
+	case 128:
+		//line grammar.y:563
+		{
+			yyVAL.kvlit = KVLit{Value: yyS[yypt-0].constexpr}
+		}
+	case 129:
+		//line grammar.y:565
+		{
+			yyVAL.kvlit = KVLit{Key: yyS[yypt-2].constexpr, Value: yyS[yypt-0].constexpr}
+		}
+	case 132:
+		//line grammar.y:574
+		{
+			// Create *ErrorDef starting with a copy of error_details, filling in the
+			// name and params
+			ed := yyS[yypt-0].errordef
+			ed.NamePos = NamePos{Name: yyS[yypt-2].strpos.String, Pos: yyS[yypt-2].strpos.Pos}
+			ed.Params = yyS[yypt-1].fields
+			eds := &lexVDLFile(yylex).ErrorDefs
+			*eds = append(*eds, &ed)
+		}
+	case 133:
+		//line grammar.y:586
+		{
+			yyVAL.errordef = ErrorDef{}
+		}
+	case 134:
+		//line grammar.y:588
+		{
+			yyVAL.errordef = ErrorDef{}
+		}
+	case 135:
+		//line grammar.y:590
+		{
+			yyVAL.errordef = yyS[yypt-2].errordef
+		}
+	case 136:
+		//line grammar.y:594
+		{
+			yyVAL.errordef = yyS[yypt-0].errordef
+		}
+	case 137:
+		//line grammar.y:596
+		{
+			// Merge each ErrorDef in-order to build the final ErrorDef.
+			yyVAL.errordef = yyS[yypt-2].errordef
+			switch {
+			case len(yyS[yypt-0].errordef.Actions) > 0:
+				yyVAL.errordef.Actions = append(yyVAL.errordef.Actions, yyS[yypt-0].errordef.Actions...)
+			case len(yyS[yypt-0].errordef.Formats) > 0:
+				yyVAL.errordef.Formats = append(yyVAL.errordef.Formats, yyS[yypt-0].errordef.Formats...)
+			}
+		}
+	case 138:
+		//line grammar.y:609
+		{
+			yyVAL.errordef = ErrorDef{Actions: []StringPos{yyS[yypt-0].strpos}}
+		}
+	case 139:
+		//line grammar.y:611
+		{
+			yyVAL.errordef = ErrorDef{Formats: []LangFmt{{Lang: yyS[yypt-2].strpos, Fmt: yyS[yypt-0].strpos}}}
+		}
+	case 140:
+		//line grammar.y:623
+		{
+			yyVAL.strpos = yyS[yypt-0].strpos
+		}
+	case 141:
+		//line grammar.y:625
+		{
+			yyVAL.strpos = StringPos{"\"" + yyS[yypt-2].strpos.String + "\"." + yyS[yypt-0].strpos.String, yyS[yypt-2].strpos.Pos}
+		}
+	case 142:
+		//line grammar.y:630
+		{
+			yyVAL.strpos = yyS[yypt-0].strpos
+		}
+	case 143:
+		//line grammar.y:632
+		{
+			yyVAL.strpos = StringPos{yyS[yypt-2].strpos.String + "." + yyS[yypt-0].strpos.String, yyS[yypt-2].strpos.Pos}
+		}
+	case 144:
+		//line grammar.y:636
+		{
+			yyVAL.typeexpr = nil
+		}
+	case 145:
+		//line grammar.y:638
+		{
+			yyVAL.typeexpr = yyS[yypt-0].typeexpr
+		}
+	}
+	goto yystack /* stack new state and value */
+}
diff --git a/lib/vdl/parse/grammar_gen.sh b/lib/vdl/parse/grammar_gen.sh
new file mode 100755
index 0000000..e2efbbe
--- /dev/null
+++ b/lib/vdl/parse/grammar_gen.sh
@@ -0,0 +1,27 @@
#!/bin/bash

# Generate the grammar.go source file, which contains the parser, by running
# this shell script in the same directory, or by running go generate.  This also
# generates grammar.y.debug, which contains a list of all states produced for
# the parser, and some stats.

# Abort on the first failing command so we never leave behind a half-generated
# parser or debug file.
set -e

# Run the Go yacc tool: the parser goes to grammar.y.go, the verbose state
# listing to a temporary file that is post-processed below.
go tool yacc -o grammar.y.go -v grammar.y.debug.tmp grammar.y
# yacc output isn't gofmt-clean; reformat it in place.
gofmt -l -w grammar.y.go
# Prepend the warning header (fed via stdin, hence the "-") to the state
# listing to produce the final grammar.y.debug file.
cat - grammar.y.debug.tmp > grammar.y.debug <<EOF
***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
* The main reason this file has been generated and submitted is to try to ensure
* we never submit changes that cause shift/reduce or reduce/reduce conflicts.
* The Go yacc tool doesn't support the %expect directive, and will happily
* generate a parser even if such conflicts exist; it's up to the developer
* running the tool to notice that an error message is reported.  The bottom of
* this file contains stats, including the number of conflicts.  If you're
* reviewing a change make sure it says 0 conflicts.
*
* If you're updating the grammar, just cut-and-paste this message from the old
* file to the new one, so that this comment block persists.
***** PLEASE READ THIS! DO NOT DELETE THIS BLOCK! *****
EOF

# The raw yacc listing is no longer needed once the header has been prepended.
rm grammar.y.debug.tmp
diff --git a/lib/vdl/parse/parse.go b/lib/vdl/parse/parse.go
new file mode 100644
index 0000000..c544f97
--- /dev/null
+++ b/lib/vdl/parse/parse.go
@@ -0,0 +1,679 @@
+// Package parse provides utilities to parse vdl files into a parse tree.  The
+// Parse function is the main entry point.
+package parse
+
+//go:generate ./grammar_gen.sh
+
+// This is the only file in this package that uses the yacc-generated parser
+// with entrypoint yyParse.  The result of the parse is the simple parse.File
+// representation, which is used by the compilation stage.
+//
+// TODO(toddw): The yacc-generated parser returns pretty lousy error messages;
+// basically "syntax error" is the only string returned.  Improve them.
+import (
+	"fmt"
+	"io"
+	"log"
+	"math/big"
+	"path"
+	"strconv"
+	"strings"
+	"text/scanner"
+
+	"v.io/v23/vdl/vdlutil"
+)
+
// Opts specifies vdl parsing options, controlling how much of the input is
// parsed.
type Opts struct {
	ImportsOnly bool // Only parse imports; skip everything else.
}
+
+// ParseFile takes a file name, the contents of the vdl file src, and the
+// accumulated errors, and parses the vdl into a parse.File containing the parse
+// tree.  Returns nil if any errors are encountered, with errs containing more
+// information.  Otherwise returns the parsed File.
+func ParseFile(fileName string, src io.Reader, opts Opts, errs *vdlutil.Errors) *File {
+	start := startFile
+	if opts.ImportsOnly {
+		start = startFileImports
+	}
+	return parse(fileName, src, start, errs)
+}
+
+// ParseConfig takes a file name, the contents of the config file src, and the
+// accumulated errors, and parses the config into a parse.Config containing the
+// parse tree.  Returns nil if any errors are encountered, with errs containing
+// more information.  Otherwise returns the parsed Config.
+func ParseConfig(fileName string, src io.Reader, opts Opts, errs *vdlutil.Errors) *Config {
+	start := startConfig
+	if opts.ImportsOnly {
+		start = startConfigImports
+	}
+	// Since the syntax is so similar between config files and vdl files, we just
+	// parse it as a vdl file and populate Config afterwards.
+	file := parse(fileName, src, start, errs)
+	if file == nil {
+		return nil
+	}
+	if len(file.ErrorDefs) > 0 || len(file.TypeDefs) > 0 || len(file.Interfaces) > 0 {
+		errs.Errorf("%s: config files may not contain error, type or interface definitions", fileName)
+		return nil
+	}
+	config := &Config{
+		FileName:  fileName,
+		ConfigDef: file.PackageDef,
+		Imports:   file.Imports,
+		Config:    file.ConstDefs[0].Expr,
+		ConstDefs: file.ConstDefs[1:],
+	}
+	if len(config.ConstDefs) == 0 {
+		config.ConstDefs = nil
+	}
+	if opts.ImportsOnly {
+		// Clear out the const expression from the config clause.
+		config.Config = nil
+		config.ConstDefs = nil
+	}
+	return config
+}
+
+func parse(fileName string, src io.Reader, startTok int, errs *vdlutil.Errors) *File {
+	if errs == nil {
+		log.Fatal("Nil errors specified for Parse")
+	}
+	origErrs := errs.NumErrors()
+	lex := newLexer(fileName, src, startTok, errs)
+	if errCode := yyParse(lex); errCode != 0 {
+		errs.Errorf("%s: yyParse returned error code %v", fileName, errCode)
+	}
+	lex.attachComments()
+	if startTok == startFile || startTok == startConfig {
+		vdlutil.Vlog.Printf("PARSE RESULTS\n\n%v\n\n", lex.vdlFile)
+	}
+	if origErrs != errs.NumErrors() {
+		return nil
+	}
+	return lex.vdlFile
+}
+
+// ParseExprs parses data into a slice of parsed const expressions.  The input
+// data is specified in VDL syntax, with commas separating multiple expressions.
+// There must be at least one expression specified in data.  Errors are returned
+// in errs.
+func ParseExprs(data string, errs *vdlutil.Errors) []ConstExpr {
+	const name = "exprs"
+	lex := newLexer(name, strings.NewReader(data), startExprs, errs)
+	if errCode := yyParse(lex); errCode != 0 {
+		errs.Errorf("vdl: yyParse returned error code %d", errCode)
+	}
+	return lex.exprs
+}
+
// lexer implements the yyLexer interface for the yacc-generated parser.
//
// An oddity: lexer also holds the result of the parse.  Most yacc examples hold
// parse results in package-scoped (global) variables, but doing that would mean
// we wouldn't be able to run separate parses concurrently.  To enable that we'd
// need each invocation of yyParse to mutate its own result, but unfortunately
// the Go yacc tool doesn't provide any way to pass extra arguments to yyParse.
//
// So we cheat and hold the parse result in the lexer, and in the yacc rules we
// call lexVDLFile(yylex) to convert from the yyLexer interface back to the
// concrete lexer type, and retrieve a pointer to the parse result.
type lexer struct {
	// Fields for lexing / scanning the input source file.
	name     string          // File name reported in error messages.
	scanner  scanner.Scanner // Underlying text/scanner tokenizer.
	errs     *vdlutil.Errors // Accumulated errors, shared with the caller.
	startTok int             // One of our dummy start tokens.
	started  bool            // Has the dummy start token already been emitted?
	sawEOF   bool            // Have we already seen the end-of-file?
	prevTok  token           // Previous token, used for auto-semicolons and errors.

	// Fields holding the result of file and config parsing.
	comments commentMap
	vdlFile  *File

	// Field holding the result of expr parsing.
	exprs []ConstExpr
}
+
+func newLexer(fileName string, src io.Reader, startTok int, errs *vdlutil.Errors) *lexer {
+	l := &lexer{name: fileName, errs: errs, startTok: startTok, vdlFile: &File{BaseName: path.Base(fileName)}}
+	l.comments.init()
+	l.scanner.Init(src)
+	// Don't produce character literal tokens, but do scan comments.
+	l.scanner.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments
+	// Don't treat '\n' as whitespace, so we can auto-insert semicolons.
+	l.scanner.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
+	l.scanner.Error = func(s *scanner.Scanner, msg string) {
+		l.Error(msg)
+	}
+	return l
+}
+
// token is a single scanned token: the text/scanner classification rune (or
// the literal rune for punctuation), the raw token text, and its position.
type token struct {
	t    rune
	text string
	pos  Pos
}

// String renders the token for debug output as "pos U+hhhh text".
func (t token) String() string {
	return fmt.Sprintf("%v %U %s", t.pos, t.t, t.text)
}
+
+// The lex* functions below all convert the yyLexer input arg into a concrete
+// lexer as their first step.  The type conversion is always safe since we're
+// the ones who called yyParse, and thus know the concrete type is always lexer.
+
+// lexVDLFile retrieves the File parse result from the yyLexer interface.  This
+// is called in the yacc rules to fill in the parse result.
+func lexVDLFile(yylex yyLexer) *File {
+	return yylex.(*lexer).vdlFile
+}
+
+// lexPosErrorf adds an error with positional information, on a type
+// implementing the yyLexer interface.  This is called in the yacc rules to
+// throw errors.
+func lexPosErrorf(yylex yyLexer, pos Pos, format string, v ...interface{}) {
+	yylex.(*lexer).posErrorf(pos, format, v...)
+}
+
+// lexGenEOF tells the lexer to generate EOF tokens from now on, as if the end
+// of file had been seen.  This is called in the yacc rules to terminate the
+// parse even if the file still has tokens.
+func lexGenEOF(yylex yyLexer) {
+	yylex.(*lexer).sawEOF = true
+}
+
+// lexStoreExprs stores the parsed exprs in the lexer.
+func lexStoreExprs(yylex yyLexer, exprs []ConstExpr) {
+	yylex.(*lexer).exprs = exprs
+}
+
// keywords maps each reserved VDL word to its yacc token id; identifiers not
// in this map are emitted as tIDENT.
var keywords = map[string]int{
	"const":      tCONST,
	"enum":       tENUM,
	"error":      tERROR,
	"import":     tIMPORT,
	"interface":  tINTERFACE,
	"map":        tMAP,
	"package":    tPACKAGE,
	"set":        tSET,
	"stream":     tSTREAM,
	"struct":     tSTRUCT,
	"type":       tTYPE,
	"typeobject": tTYPEOBJECT,
	"union":      tUNION,
}

// nextRune pairs a candidate second rune with the token id to emit when it
// immediately follows the current rune (e.g. '=' after '!' yields tNE).
type nextRune struct {
	t  rune
	id int
}

// knownPunct is a map of our known punctuation.  We support 1 and 2 rune
// combinations, where 2 rune combos must be immediately adjacent with no
// intervening whitespace.  The 2-rune combos always take precedence over the
// 1-rune combos.  Every entry is a valid 1-rune combo, which is returned as-is
// without a special token id; the ascii value represents itself.
var knownPunct = map[rune][]nextRune{
	';': nil,
	':': nil,
	',': nil,
	'.': nil,
	'*': nil,
	'(': nil,
	')': nil,
	'[': nil,
	']': nil,
	'{': nil,
	'}': nil,
	'+': nil,
	'-': nil,
	'/': nil,
	'%': nil,
	'^': nil,
	'?': nil,
	'!': {{'=', tNE}},
	'=': {{'=', tEQEQ}},
	'<': {{'=', tLE}, {'<', tLSH}},
	'>': {{'=', tGE}, {'>', tRSH}},
	'|': {{'|', tOROR}},
	'&': {{'&', tANDAND}},
}

// autoSemi determines whether to automatically add a semicolon, based on the
// rule that semicolons are always added at the end of each line after certain
// tokens.  The Go auto-semicolon rule is described here:
//   http://golang.org/ref/spec#Semicolons
func autoSemi(prevTok token) bool {
	// The pos.IsValid check rejects the zero-value prevTok before any real
	// token has been scanned.
	return prevAutoSemi[prevTok.t] && prevTok.pos.IsValid()
}

// prevAutoSemi lists the token classes that trigger auto-semicolon insertion
// when they end a line; all other tokens do not.
var prevAutoSemi = map[rune]bool{
	scanner.Ident:     true,
	scanner.Int:       true,
	scanner.Float:     true,
	scanner.String:    true,
	scanner.RawString: true,
	')':               true,
	']':               true,
	'}':               true,
	'>':               true,
}

const yaccEOF int = 0 // yacc interprets 0 as the end-of-file marker

func init() {
	// yyDebug is defined in the yacc-generated grammar.go file.  Setting it to 1
	// only produces output on syntax errors; set it to 4 to generate full debug
	// output.  Sadly yacc doesn't give position information describing the error.
	yyDebug = 1
}
+
+// A note on the comment-tracking strategy.  During lexing we generate
+// commentBlocks, defined as a sequence of adjacent or abutting comments (either
+// // or /**/) with no intervening tokens.  Adjacent means that the previous
+// comment ends on the line immediately before the next one starts, and abutting
+// means that the previous comment ends on the same line as the next one starts.
+//
+// At the end of the parse we try to attach comment blocks to parse tree items.
+// We use a heuristic that works for common cases, but isn't perfect - it
+// mis-associates some styles of comments, and we don't ensure all comment
+// blocks will be associated to an item.
+
// commentBlock accumulates a run of adjacent or abutting comments (see the
// comment-tracking note above) as a single text blob, tracking the first and
// last source lines the block spans.
type commentBlock struct {
	text      string
	firstLine int
	lastLine  int
}
+
+// update returns true and adds tok to this block if tok is adjacent or
+// abutting, otherwise it returns false without mutating the block.  Since we're
+// handling newlines explicitly in the lexer, we never get comment tokens with
+// trailing newlines.  We can get embedded newlines via /**/ style comments.
+func (cb *commentBlock) update(tok token) bool {
+	if cb.text == "" {
+		// First update in this block.
+		cb.text = tok.text
+		cb.firstLine = tok.pos.Line
+		cb.lastLine = tok.pos.Line + strings.Count(tok.text, "\n")
+		return true
+	}
+	if cb.lastLine >= tok.pos.Line-1 {
+		// The tok is adjacent or abutting.
+		if cb.lastLine == tok.pos.Line-1 {
+			// The tok is adjacent - need a newline.
+			cb.text += "\n"
+			cb.lastLine++
+		}
+		cb.text += tok.text
+		cb.lastLine += strings.Count(tok.text, "\n")
+		return true
+	}
+	return false
+}
+
// commentMap keeps track of blocks of comments in a file.  We store comment
// blocks in maps by first line, and by last line.  Note that technically there
// could be more than one commentBlock ending on the same line, due to /**/
// style comments.  We ignore this rare case and just keep the first one.
type commentMap struct {
	byFirst      map[int]commentBlock // Finished blocks, keyed by starting line.
	byLast       map[int]commentBlock // The same blocks, keyed by ending line.
	cur          commentBlock         // Block currently being accumulated.
	prevTokenPos Pos                  // Position of the last non-comment token.
}
+
+func (cm *commentMap) init() {
+	cm.byFirst = make(map[int]commentBlock)
+	cm.byLast = make(map[int]commentBlock)
+}
+
// addComment adds a comment token to the map, either appending to the current
// block or ending the current block and starting a new one.
func (cm *commentMap) addComment(tok token) {
	if !cm.cur.update(tok) {
		// tok doesn't belong to the current block; finish it and start fresh.
		cm.endBlock()
		if !cm.cur.update(tok) {
			// update never fails on an empty block, so this is unreachable
			// unless the invariant above is broken.
			panic(fmt.Errorf("vdl: couldn't update current comment block with token %v", tok))
		}
	}
	// Here's an example of why we need the special case endBlock logic.
	//
	//   type Foo struct {
	//     // doc1
	//     A int // doc2
	//     // doc3
	//     B int
	//   }
	//
	// The problem is that without the special-case, we'd group doc2 and doc3
	// together into the same block.  That may actually be correct some times, but
	// it's more common for doc3 to be semantically associated with field B.  Thus
	// if we've already seen any token on the same line as this comment block, we
	// end the block immediately.  This means that comments appearing on the same
	// line as any other token are forced to be a single comment block.
	if cm.prevTokenPos.Line == tok.pos.Line {
		cm.endBlock()
	}
}
+
+func (cm *commentMap) handleToken(tok token) {
+	cm.endBlock()
+	cm.prevTokenPos = tok.pos
+}
+
+// endBlock adds the the current comment block to the map, and resets it in
+// preparation for new comments to be added.  In the rare case where we see
+// comment blocks that either start or end on the same line, we just keep the
+// first comment block that was inserted.
+func (cm *commentMap) endBlock() {
+	_, inFirst := cm.byFirst[cm.cur.firstLine]
+	_, inLast := cm.byLast[cm.cur.lastLine]
+	if cm.cur.text != "" && !inFirst && !inLast {
+		cm.byFirst[cm.cur.firstLine] = cm.cur
+		cm.byLast[cm.cur.lastLine] = cm.cur
+	}
+	cm.cur.text = ""
+	cm.cur.firstLine = 0
+	cm.cur.lastLine = 0
+}
+
+// getDoc returns the documentation string associated with pos.  Our rule is the
+// last line of the documentation must end on the line immediately before pos.
+// Once a comment block has been returned it isn't eligible to be attached to
+// any other item, and is deleted from the map.
+//
+// The returned string is either empty, or is newline terminated.
+func (cm *commentMap) getDoc(pos Pos) string {
+	block := cm.byLast[pos.Line-1]
+	if block.text == "" {
+		return ""
+	}
+	doc := block.text + "\n"
+	delete(cm.byFirst, block.firstLine)
+	delete(cm.byLast, block.lastLine)
+	return doc
+}
+
+// getDocSuffix returns the suffix documentation associated with pos.  Our rule
+// is the first line of the documentation must be on the same line as pos.  Once
+// a comment block as been returned it isn't eligible to be attached to any
+// other item, and is deleted from the map.
+//
+// The returned string is either empty, or has a leading space.
+func (cm *commentMap) getDocSuffix(pos Pos) string {
+	block := cm.byFirst[pos.Line]
+	if block.text == "" {
+		return ""
+	}
+	doc := " " + block.text
+	delete(cm.byFirst, block.firstLine)
+	delete(cm.byLast, block.lastLine)
+	return doc
+}
+
+func attachTypeComments(t Type, cm *commentMap, suffix bool) {
+	switch tu := t.(type) {
+	case *TypeEnum:
+		for _, label := range tu.Labels {
+			if suffix {
+				label.DocSuffix = cm.getDocSuffix(label.Pos)
+			} else {
+				label.Doc = cm.getDoc(label.Pos)
+			}
+		}
+	case *TypeArray:
+		attachTypeComments(tu.Elem, cm, suffix)
+	case *TypeList:
+		attachTypeComments(tu.Elem, cm, suffix)
+	case *TypeSet:
+		attachTypeComments(tu.Key, cm, suffix)
+	case *TypeMap:
+		attachTypeComments(tu.Key, cm, suffix)
+		attachTypeComments(tu.Elem, cm, suffix)
+	case *TypeStruct:
+		for _, field := range tu.Fields {
+			if suffix {
+				field.DocSuffix = cm.getDocSuffix(field.Pos)
+			} else {
+				field.Doc = cm.getDoc(field.Pos)
+			}
+			attachTypeComments(field.Type, cm, suffix)
+		}
+	case *TypeUnion:
+		for _, field := range tu.Fields {
+			if suffix {
+				field.DocSuffix = cm.getDocSuffix(field.Pos)
+			} else {
+				field.Doc = cm.getDoc(field.Pos)
+			}
+			attachTypeComments(field.Type, cm, suffix)
+		}
+	case *TypeOptional:
+		attachTypeComments(tu.Base, cm, suffix)
+	case *TypeNamed:
+		// Terminate the recursion at named types.
+	default:
+		panic(fmt.Errorf("vdl: unhandled type %#v", t))
+	}
+}
+
// attachComments causes all comments collected during the parse to be attached
// to the appropriate parse tree items.  This should only be called after the
// parse has completed.
//
// Suffix docs are attached before leading docs.  Both lookups consume blocks
// from the same shared maps, so this order decides which association wins when
// a block could serve as either.
func (l *lexer) attachComments() {
	f := l.vdlFile
	// First attach all suffix docs - these occur on the same line.
	f.PackageDef.DocSuffix = l.comments.getDocSuffix(f.PackageDef.Pos)
	for _, x := range f.Imports {
		x.DocSuffix = l.comments.getDocSuffix(x.Pos)
	}
	for _, x := range f.ErrorDefs {
		x.DocSuffix = l.comments.getDocSuffix(x.Pos)
	}
	for _, x := range f.TypeDefs {
		x.DocSuffix = l.comments.getDocSuffix(x.Pos)
		attachTypeComments(x.Type, &l.comments, true)
	}
	for _, x := range f.ConstDefs {
		x.DocSuffix = l.comments.getDocSuffix(x.Pos)
	}
	for _, x := range f.Interfaces {
		x.DocSuffix = l.comments.getDocSuffix(x.Pos)
		for _, y := range x.Embeds {
			y.DocSuffix = l.comments.getDocSuffix(y.Pos)
		}
		for _, y := range x.Methods {
			y.DocSuffix = l.comments.getDocSuffix(y.Pos)
		}
	}
	// Now attach the docs - these occur on the line immediately before.
	f.PackageDef.Doc = l.comments.getDoc(f.PackageDef.Pos)
	for _, x := range f.Imports {
		x.Doc = l.comments.getDoc(x.Pos)
	}
	for _, x := range f.ErrorDefs {
		x.Doc = l.comments.getDoc(x.Pos)
	}
	for _, x := range f.TypeDefs {
		x.Doc = l.comments.getDoc(x.Pos)
		attachTypeComments(x.Type, &l.comments, false)
	}
	for _, x := range f.ConstDefs {
		x.Doc = l.comments.getDoc(x.Pos)
	}
	for _, x := range f.Interfaces {
		x.Doc = l.comments.getDoc(x.Pos)
		for _, y := range x.Embeds {
			y.Doc = l.comments.getDoc(y.Pos)
		}
		for _, y := range x.Methods {
			y.Doc = l.comments.getDoc(y.Pos)
		}
	}
}
+
+// nextToken uses the text/scanner package to scan the input for the next token.
+func (l *lexer) nextToken() (tok token) {
+	tok.t = l.scanner.Scan()
+	tok.text = l.scanner.TokenText()
+	// Both Pos and scanner.Position start line and column numbering at 1.
+	tok.pos = Pos{Line: l.scanner.Position.Line, Col: l.scanner.Position.Column}
+	return
+}
+
+// handleImag handles imaginary literals "[number]i" by peeking ahead.
+func (l *lexer) handleImag(tok token, lval *yySymType) bool {
+	if l.scanner.Peek() != 'i' {
+		return false
+	}
+	l.scanner.Next()
+
+	rat := new(big.Rat)
+	if _, ok := rat.SetString(tok.text); !ok {
+		l.posErrorf(tok.pos, "can't convert token [%v] to imaginary literal", tok)
+	}
+	lval.imagpos.pos = tok.pos
+	lval.imagpos.imag = (*BigImag)(rat)
+	return true
+}
+
// translateToken takes the token we just scanned, and translates it into a
// token usable by yacc (lval and id).  The done return arg is true when a real
// yacc token was generated, or false if we need another next/translate pass.
func (l *lexer) translateToken(tok token, lval *yySymType) (id int, done bool) {
	switch tok.t {
	case scanner.EOF:
		l.sawEOF = true
		// EOF at end of line triggers the same auto-semicolon rule as '\n'.
		if autoSemi(l.prevTok) {
			return ';', true
		}
		return yaccEOF, true

	case '\n':
		if autoSemi(l.prevTok) {
			return ';', true
		}
		// Returning done=false ensures next/translate will be called again so that
		// this newline is skipped; id=yaccEOF is a dummy value that's ignored.
		return yaccEOF, false

	case scanner.String, scanner.RawString:
		var err error
		lval.strpos.Pos = tok.pos
		lval.strpos.String, err = strconv.Unquote(tok.text)
		if err != nil {
			l.posErrorf(tok.pos, "can't convert token [%v] to string literal", tok)
		}
		return tSTRLIT, true

	case scanner.Int:
		// "123i" is an imaginary literal, not an int; check before converting.
		if l.handleImag(tok, lval) {
			return tIMAGLIT, true
		}
		// Base 0 lets big.Int accept decimal, 0x hex and 0 octal prefixes.
		lval.intpos.pos = tok.pos
		lval.intpos.int = new(big.Int)
		if _, ok := lval.intpos.int.SetString(tok.text, 0); !ok {
			l.posErrorf(tok.pos, "can't convert token [%v] to integer literal", tok)
		}
		return tINTLIT, true

	case scanner.Float:
		if l.handleImag(tok, lval) {
			return tIMAGLIT, true
		}
		lval.ratpos.pos = tok.pos
		lval.ratpos.rat = new(big.Rat)
		if _, ok := lval.ratpos.rat.SetString(tok.text); !ok {
			l.posErrorf(tok.pos, "can't convert token [%v] to float literal", tok)
		}
		return tRATLIT, true

	case scanner.Ident:
		// Either the identifier is a known keyword, or we pass it through as IDENT.
		if keytok, ok := keywords[tok.text]; ok {
			lval.pos = tok.pos
			return keytok, true
		}
		lval.strpos.Pos = tok.pos
		lval.strpos.String = tok.text
		return tIDENT, true

	case scanner.Comment:
		l.comments.addComment(tok)
		// Comments aren't considered tokens, just like the '\n' case.
		return yaccEOF, false

	default:
		// Either the rune is in our known punctuation whitelist, or we've hit a
		// syntax error.
		if nextRunes, ok := knownPunct[tok.t]; ok {
			// Peek at the next rune and compare against our list of next runes.  If
			// we find a match we return the id in next, otherwise just return the
			// original rune.  This means that 2-rune tokens always take precedence
			// over 1-rune tokens.  Either way the pos is set to the original rune.
			lval.pos = tok.pos
			peek := l.scanner.Peek()
			for _, next := range nextRunes {
				if peek == next.t {
					l.scanner.Next()
					return next.id, true
				}
			}
			return int(tok.t), true
		}
		// Unknown rune: report it and force the parse to wind down via EOF.
		l.posErrorf(tok.pos, "unexpected token [%v]", tok)
		l.sawEOF = true
		return yaccEOF, true
	}
}
+
// Lex is part of the yyLexer interface, called by the yacc-generated parser.
// It returns the next yacc token id, filling lval with its semantic value.
func (l *lexer) Lex(lval *yySymType) int {
	// Emit a dummy start token indicating what type of parse we're performing.
	if !l.started {
		l.started = true
		switch l.startTok {
		case startFileImports, startFile, startConfigImports, startConfig, startExprs:
			return l.startTok
		default:
			panic(fmt.Errorf("vdl: unhandled parse start token %d", l.startTok))
		}
	}
	// Always return EOF after we've scanned it.  This ensures we emit EOF on the
	// next Lex call after scanning EOF and adding an auto-semicolon.
	if l.sawEOF {
		return yaccEOF
	}
	// Run next/translate in a loop to handle newline-triggered auto-semicolons;
	// nextToken needs to generate newline tokens so that we can trigger the
	// auto-semicolon logic, but if the newline doesn't generate an auto-semicolon
	// we should skip the token and move on to the next one.
	for {
		tok := l.nextToken()
		if id, done := l.translateToken(tok, lval); done {
			l.prevTok = tok
			l.comments.handleToken(tok)
			return id
		}
	}
}
+
// Error is part of the yyLexer interface, called by the yacc-generated parser.
// Unfortunately yacc doesn't give good error information - we dump the position
// of the previous scanned token as an approximation of where the error is.
func (l *lexer) Error(s string) {
	l.posErrorf(l.prevTok.pos, "%s", s)
}
+
+// posErrorf generates an error with file and pos info.
+func (l *lexer) posErrorf(pos Pos, format string, v ...interface{}) {
+	var posstr string
+	if pos.IsValid() {
+		posstr = pos.String()
+	}
+	l.errs.Errorf(l.name+":"+posstr+" "+format, v...)
+}
diff --git a/lib/vdl/parse/parse_test.go b/lib/vdl/parse/parse_test.go
new file mode 100644
index 0000000..ca4f798
--- /dev/null
+++ b/lib/vdl/parse/parse_test.go
@@ -0,0 +1,1389 @@
+package parse_test
+
+// TODO(toddw): Add tests for imaginary literals.
+
+import (
+	"math/big"
+	"reflect"
+	"strings"
+	"testing"
+
+	"v.io/v23/vdl/parse"
+	"v.io/v23/vdl/vdltest"
+	"v.io/v23/vdl/vdlutil"
+)
+
+func pos(line, col int) parse.Pos {
+	return parse.Pos{line, col}
+}
+
+func sp(str string, line, col int) parse.StringPos {
+	return parse.StringPos{String: str, Pos: pos(line, col)}
+}
+
+func lf(l, f parse.StringPos) parse.LangFmt {
+	return parse.LangFmt{Lang: l, Fmt: f}
+}
+
+func np(name string, line, col int) parse.NamePos {
+	return parse.NamePos{Name: name, Pos: pos(line, col)}
+}
+
+func npptr(name string, line, col int) *parse.NamePos {
+	ret := np(name, line, col)
+	return &ret
+}
+
+func tn(name string, line, col int) *parse.TypeNamed {
+	return &parse.TypeNamed{Name: name, P: pos(line, col)}
+}
+
+func cn(name string, line, col int) *parse.ConstNamed {
+	return &parse.ConstNamed{Name: name, P: pos(line, col)}
+}
+
+func cl(lit interface{}, line, col int) *parse.ConstLit {
+	return &parse.ConstLit{Lit: lit, P: pos(line, col)}
+}
+
// Tests of vdl imports and file parsing.
// Each case feeds src to the parser and compares against expect; expect is nil
// (and errors non-empty) when the parse is expected to fail.
type vdlTest struct {
	name   string
	src    string
	expect *parse.File
	errors []string
}
+
+func testParseVDL(t *testing.T, test vdlTest, opts parse.Opts) {
+	errs := vdlutil.NewErrors(-1)
+	actual := parse.ParseFile("testfile", strings.NewReader(test.src), opts, errs)
+	vdltest.ExpectResult(t, errs, test.name, test.errors...)
+	if !reflect.DeepEqual(test.expect, actual) {
+		t.Errorf("%v\nEXPECT %+v\nACTUAL %+v", test.name, test.expect, actual)
+	}
+}
+
// TestParseVDLImports exercises the imports-only parser, both on dedicated
// import test cases and on the successful full-file cases with the post-import
// expectations cleared.
func TestParseVDLImports(t *testing.T) {
	for _, test := range vdlImportsTests {
		testParseVDL(t, test, parse.Opts{ImportsOnly: true})
	}
	for _, test := range vdlFileTests {
		// We only run the success tests from vdlFileTests on the imports only
		// parser, since the failure tests are testing failures in stuff after the
		// imports, which won't cause failures in the imports only parser.
		//
		// The imports-only parser isn't supposed to fill in fields after the
		// imports, so we clear them from the expected result.  We must copy the
		// file to ensure the actual vdlTests isn't overwritten since the
		// full-parser tests needs the full expectations.  The test itself doesn't
		// need to be copied, since it's already copied in the range-for.
		if test.expect != nil {
			copyFile := *test.expect
			test.expect = &copyFile
			test.expect.TypeDefs = nil
			test.expect.ConstDefs = nil
			test.expect.ErrorDefs = nil
			test.expect.Interfaces = nil
			testParseVDL(t, test, parse.Opts{ImportsOnly: true})
		}
	}
}
+
+func TestParseVDLFile(t *testing.T) {
+	for _, test := range append(vdlImportsTests, vdlFileTests...) {
+		testParseVDL(t, test, parse.Opts{ImportsOnly: false})
+	}
+}
+
// Tests of config imports and file parsing.
// Mirrors vdlTest, but for config files: expect is nil (and errors non-empty)
// when the parse is expected to fail.
type configTest struct {
	name   string
	src    string
	expect *parse.Config
	errors []string
}
+
+func testParseConfig(t *testing.T, test configTest, opts parse.Opts) {
+	errs := vdlutil.NewErrors(-1)
+	actual := parse.ParseConfig("testfile", strings.NewReader(test.src), opts, errs)
+	vdltest.ExpectResult(t, errs, test.name, test.errors...)
+	if !reflect.DeepEqual(test.expect, actual) {
+		t.Errorf("%v\nEXPECT %+v\nACTUAL %+v", test.name, test.expect, actual)
+	}
+}
+
// TestParseConfigImports exercises the imports-only parser on the successful
// config cases, with the post-import expectations cleared.
func TestParseConfigImports(t *testing.T) {
	for _, test := range configTests {
		// We only run the success tests from configTests on the imports only
		// parser, since the failure tests are testing failures in stuff after the
		// imports, which won't cause failures in the imports only parser.
		//
		// The imports-only parser isn't supposed to fill in fields after the
		// imports, so we clear them from the expected result.  We must copy the
		// file to ensure the actual configTests isn't overwritten since the
		// full-parser tests needs the full expectations.  The test itself doesn't
		// need to be copied, since it's already copied in the range-for.
		if test.expect != nil {
			copyConfig := *test.expect
			test.expect = &copyConfig
			test.expect.Config = nil
			test.expect.ConstDefs = nil
			testParseConfig(t, test, parse.Opts{ImportsOnly: true})
		}
	}
}
+
+func TestParseConfig(t *testing.T) {
+	for _, test := range configTests {
+		testParseConfig(t, test, parse.Opts{ImportsOnly: false})
+	}
+}
+
+// vdlImportsTests contains tests of stuff up to and including the imports.
+var vdlImportsTests = []vdlTest{
+	// Empty file isn't allowed (need at least a package clause).
+	{
+		"FAILEmptyFile",
+		"",
+		nil,
+		[]string{"vdl file must start with package clause"}},
+
+	// Comment tests.
+	{
+		"PackageDocOneLiner",
+		`// One liner
+// Another line
+package testpkg`,
+		&parse.File{BaseName: "testfile", PackageDef: parse.NamePos{Name: "testpkg", Pos: pos(3, 9), Doc: `// One liner
+// Another line
+`}},
+		nil},
+	{
+		"PackageDocMultiLiner",
+		`/* Multi liner
+Another line
+*/
+package testpkg`,
+		&parse.File{BaseName: "testfile", PackageDef: parse.NamePos{Name: "testpkg", Pos: pos(4, 9), Doc: `/* Multi liner
+Another line
+*/
+`}},
+		nil},
+	{
+		"NotPackageDoc",
+		`// Extra newline, not package doc
+
+package testpkg`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 3, 9)},
+		nil},
+	{
+		"FAILUnterminatedComment",
+		`/* Unterminated
+Another line
+package testpkg`,
+		nil,
+		[]string{"comment not terminated"}},
+
+	// Package tests.
+	{
+		"Package",
+		"package testpkg;",
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+		nil},
+	{
+		"PackageNoSemi",
+		"package testpkg",
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+		nil},
+	{
+		"FAILBadPackageName",
+		"package foo.bar",
+		nil,
+		[]string{"testfile:1:12 syntax error"}},
+
+	// Import tests.
+	{
+		"EmptyImport",
+		`package testpkg;
+import (
+)`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+		nil},
+	{
+		"OneImport",
+		`package testpkg;
+import "foo/bar";`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 2, 8)}}},
+		nil},
+	{
+		"OneImportLocalNameNoSemi",
+		`package testpkg
+import baz "foo/bar"`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("baz", 2, 8)}}},
+		nil},
+	{
+		"OneImportParens",
+		`package testpkg
+import (
+  "foo/bar";
+)`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}}},
+		nil},
+	{
+		"OneImportParensNoSemi",
+		`package testpkg
+import (
+  "foo/bar"
+)`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}}},
+		nil},
+	{
+		"MixedImports",
+		`package testpkg
+import "foo/bar"
+import (
+  "baz";"a/b"
+  "c/d"
+)
+import "z"`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Imports: []*parse.Import{
+				{Path: "foo/bar", NamePos: np("", 2, 8)},
+				{Path: "baz", NamePos: np("", 4, 3)},
+				{Path: "a/b", NamePos: np("", 4, 9)},
+				{Path: "c/d", NamePos: np("", 5, 3)},
+				{Path: "z", NamePos: np("", 7, 8)}}},
+		nil},
+	{
+		"FAILImportParensNotClosed",
+		`package testpkg
+import (
+  "foo/bar"`,
+		nil,
+		[]string{"testfile:3:12 syntax error"}},
+}
+
+// vdlFileTests contains tests of stuff after the imports.
+var vdlFileTests = []vdlTest{
+	// Data type tests.
+	{
+		"TypeNamed",
+		`package testpkg
+type foo bar`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: tn("bar", 2, 10)}}},
+		nil},
+	{
+		"TypeNamedQualified",
+		`package testpkg
+type foo bar.baz`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: tn("bar.baz", 2, 10)}}},
+		nil},
+	{
+		"TypeNamedQualifiedPath",
+		`package testpkg
+type foo "a/b/c/bar".baz`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: tn(`"a/b/c/bar".baz`, 2, 10)}}},
+		nil},
+	{
+		"TypeEnum",
+		`package testpkg
+type foo enum{A;B;C}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeEnum{
+					Labels: []parse.NamePos{np("A", 2, 15), np("B", 2, 17), np("C", 2, 19)},
+					P:      pos(2, 10)}}}},
+		nil},
+	{
+		"TypeEnumNewlines",
+		`package testpkg
+type foo enum {
+  A
+  B
+  C
+}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeEnum{
+					Labels: []parse.NamePos{np("A", 3, 3), np("B", 4, 3), np("C", 5, 3)},
+					P:      pos(2, 10)}}}},
+		nil},
+	{
+		"TypeArray",
+		`package testpkg
+type foo [2]bar`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeArray{
+					Len: 2, Elem: tn("bar", 2, 13), P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeList",
+		`package testpkg
+type foo []bar`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeList{
+					Elem: tn("bar", 2, 12), P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeSet",
+		`package testpkg
+type foo set[bar]`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeSet{
+					Key: tn("bar", 2, 14), P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeMap",
+		`package testpkg
+type foo map[bar]baz`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeMap{
+					Key: tn("bar", 2, 14), Elem: tn("baz", 2, 18), P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeStructOneField",
+		`package testpkg
+type foo struct{a b;}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+					Fields: []*parse.Field{{NamePos: np("a", 2, 17), Type: tn("b", 2, 19)}},
+					P:      pos(2, 10)}}}},
+		nil},
+	{
+		"TypeStructOneFieldNoSemi",
+		`package testpkg
+type foo struct{a b}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+					Fields: []*parse.Field{{NamePos: np("a", 2, 17), Type: tn("b", 2, 19)}},
+					P:      pos(2, 10)}}}},
+		nil},
+	{
+		"TypeStructOneFieldNewline",
+		`package testpkg
+type foo struct{
+  a b;
+}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+					Fields: []*parse.Field{{NamePos: np("a", 3, 3), Type: tn("b", 3, 5)}},
+					P:      pos(2, 10)}}}},
+		nil},
+	{
+		"TypeStructOneFieldNewlineNoSemi",
+		`package testpkg
+type foo struct{
+  a b
+}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+					Fields: []*parse.Field{{NamePos: np("a", 3, 3), Type: tn("b", 3, 5)}},
+					P:      pos(2, 10)}}}},
+		nil},
+	{
+		"TypeStructOneFieldList",
+		`package testpkg
+type foo struct{a,b,c d}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+					Fields: []*parse.Field{
+						{NamePos: np("a", 2, 17), Type: tn("d", 2, 23)},
+						{NamePos: np("b", 2, 19), Type: tn("d", 2, 23)},
+						{NamePos: np("c", 2, 21), Type: tn("d", 2, 23)}},
+					P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeStructMixed",
+		`package testpkg
+type foo struct{
+  a b;c,d e
+  f,g h
+}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeStruct{
+					Fields: []*parse.Field{
+						{NamePos: np("a", 3, 3), Type: tn("b", 3, 5)},
+						{NamePos: np("c", 3, 7), Type: tn("e", 3, 11)},
+						{NamePos: np("d", 3, 9), Type: tn("e", 3, 11)},
+						{NamePos: np("f", 4, 3), Type: tn("h", 4, 7)},
+						{NamePos: np("g", 4, 5), Type: tn("h", 4, 7)}},
+					P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeUnion",
+		`package testpkg
+type foo union{A a;B b;C c}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+					Fields: []*parse.Field{
+						{NamePos: np("A", 2, 16), Type: tn("a", 2, 18)},
+						{NamePos: np("B", 2, 20), Type: tn("b", 2, 22)},
+						{NamePos: np("C", 2, 24), Type: tn("c", 2, 26)}},
+					P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeUnionNewlines",
+		`package testpkg
+type foo union{
+  A a
+  B b
+  C c
+}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+					Fields: []*parse.Field{
+						{NamePos: np("A", 3, 3), Type: tn("a", 3, 5)},
+						{NamePos: np("B", 4, 3), Type: tn("b", 4, 5)},
+						{NamePos: np("C", 5, 3), Type: tn("c", 5, 5)}},
+					P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeOptional",
+		`package testpkg
+type foo union{A a;B ?b;C ?c}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+					Fields: []*parse.Field{
+						{NamePos: np("A", 2, 16), Type: tn("a", 2, 18)},
+						{NamePos: np("B", 2, 20),
+							Type: &parse.TypeOptional{Base: tn("b", 2, 23), P: pos(2, 22)}},
+						{NamePos: np("C", 2, 25),
+							Type: &parse.TypeOptional{Base: tn("c", 2, 28), P: pos(2, 27)}}},
+					P: pos(2, 10)}}}},
+		nil},
+	{
+		"TypeOptionalNewlines",
+		`package testpkg
+type foo union{
+  A a
+  B ?b
+  C ?c
+}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			TypeDefs: []*parse.TypeDef{
+				{NamePos: np("foo", 2, 6), Type: &parse.TypeUnion{
+					Fields: []*parse.Field{
+						{NamePos: np("A", 3, 3), Type: tn("a", 3, 5)},
+						{NamePos: np("B", 4, 3),
+							Type: &parse.TypeOptional{Base: tn("b", 4, 6), P: pos(4, 5)}},
+						{NamePos: np("C", 5, 3),
+							Type: &parse.TypeOptional{Base: tn("c", 5, 6), P: pos(5, 5)}}},
+					P: pos(2, 10)}}}},
+		nil},
+	{
+		"FAILTypeStructNotClosed",
+		`package testpkg
+type foo struct{
+  a b`,
+		nil,
+		[]string{"testfile:3:6 syntax error"}},
+	{
+		"FAILTypeStructUnnamedField",
+		`package testpkg
+type foo struct{a}`,
+		nil,
+		[]string{"testfile:2:18 syntax error"}},
+	{
+		"FAILTypeStructUnnamedFieldList",
+		`package testpkg
+type foo struct{a, b}`,
+		nil,
+		[]string{"testfile:2:21 syntax error"}},
+
+	// Const definition tests.
+	{
+		"BoolConst",
+		`package testpkg
+const foo = true
+const bar = false`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: cn("true", 2, 13)},
+				{NamePos: np("bar", 3, 7), Expr: cn("false", 3, 13)}}},
+		nil},
+	{
+		"StringConst",
+		"package testpkg\nconst foo = \"abc\"\nconst bar = `def`",
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: cl("abc", 2, 13)},
+				{NamePos: np("bar", 3, 7), Expr: cl("def", 3, 13)}}},
+		nil},
+	{
+		"IntegerConst",
+		`package testpkg
+const foo = 123`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: cl(big.NewInt(123), 2, 13)}}},
+		nil},
+	{
+		"FloatConst",
+		`package testpkg
+const foo = 1.5`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: cl(big.NewRat(3, 2), 2, 13)}}},
+		nil},
+	{
+		"NamedConst",
+		`package testpkg
+const foo = baz
+const bar = pkg.box`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: cn("baz", 2, 13)},
+				{NamePos: np("bar", 3, 7), Expr: cn("pkg.box", 3, 13)}}},
+		nil},
+	{
+		"NamedConstQualified",
+		`package testpkg
+const foo = bar.baz`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: cn("bar.baz", 2, 13)}}},
+		nil},
+	{
+		"NamedConstQualifiedPath",
+		`package testpkg
+const foo = "a/b/c/bar".baz`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: cn(`"a/b/c/bar".baz`, 2, 13)}}},
+		nil},
+	{
+		"CompLitConst",
+		`package testpkg
+const foo = {"a","b"}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+					KVList: []parse.KVLit{
+						{Value: cl("a", 2, 14)},
+						{Value: cl("b", 2, 18)}},
+					P: pos(2, 13)}}}},
+		nil},
+	{
+		"CompLitKVConst",
+		`package testpkg
+const foo = {"a":1,"b":2}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+					KVList: []parse.KVLit{
+						{cl("a", 2, 14), cl(big.NewInt(1), 2, 18)},
+						{cl("b", 2, 20), cl(big.NewInt(2), 2, 24)}},
+					P: pos(2, 13)}}}},
+		nil},
+	{
+		"CompLitTypedConst",
+		`package testpkg
+const foo = bar{"a","b"}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+					Type: tn("bar", 2, 13),
+					KVList: []parse.KVLit{
+						{Value: cl("a", 2, 17)},
+						{Value: cl("b", 2, 21)}},
+					P: pos(2, 16)}}}},
+		nil},
+	{
+		"CompLitKVTypedConst",
+		`package testpkg
+const foo = bar{"a":1,"b":2}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: &parse.ConstCompositeLit{
+					Type: tn("bar", 2, 13),
+					KVList: []parse.KVLit{
+						{cl("a", 2, 17), cl(big.NewInt(1), 2, 21)},
+						{cl("b", 2, 23), cl(big.NewInt(2), 2, 27)}},
+					P: pos(2, 16)}}}},
+		nil},
+	{
+		"UnaryOpConst",
+		`package testpkg
+const foo = !false
+const bar = +1
+const baz = -2
+const box = ^3`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: &parse.ConstUnaryOp{"!",
+					cn("false", 2, 14), pos(2, 13)}},
+				{NamePos: np("bar", 3, 7), Expr: &parse.ConstUnaryOp{"+",
+					cl(big.NewInt(1), 3, 14), pos(3, 13)}},
+				{NamePos: np("baz", 4, 7), Expr: &parse.ConstUnaryOp{"-",
+					cl(big.NewInt(2), 4, 14), pos(4, 13)}},
+				{NamePos: np("box", 5, 7), Expr: &parse.ConstUnaryOp{"^",
+					cl(big.NewInt(3), 5, 14), pos(5, 13)}}}},
+		nil},
+	{
+		"TypeConvConst",
+		`package testpkg
+const foo = baz(true)
+const bar = pkg.box(false)`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: &parse.ConstTypeConv{tn("baz", 2, 13),
+					cn("true", 2, 17), pos(2, 13)}},
+				{NamePos: np("bar", 3, 7), Expr: &parse.ConstTypeConv{tn("pkg.box", 3, 13),
+					cn("false", 3, 21), pos(3, 13)}}}},
+		nil},
+	{
+		"TypeObjectConst",
+		`package testpkg
+const foo = typeobject(bool)
+const bar = typeobject(pkg.box)`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("foo", 2, 7), Expr: &parse.ConstTypeObject{tn("bool", 2, 24),
+					pos(2, 13)}},
+				{NamePos: np("bar", 3, 7), Expr: &parse.ConstTypeObject{tn("pkg.box", 3, 24),
+					pos(3, 13)}}}},
+		nil},
+	{
+		"BinaryOpConst",
+		`package testpkg
+const a = true || false
+const b = true && false
+const c = 1 < 2
+const d = 3 > 4
+const e = 5 <= 6
+const f = 7 >= 8
+const g = 9 != 8
+const h = 7 == 6
+const i = 5 + 4
+const j = 3 - 2
+const k = 1 * 2
+const l = 3 / 4
+const m = 5 % 6
+const n = 7 | 8
+const o = 9 & 8
+const p = 7 ^ 6
+const q = 5 << 4
+const r = 3 >> 2`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("a", 2, 7),
+					Expr: &parse.ConstBinaryOp{
+						"||", cn("true", 2, 11), cn("false", 2, 19), pos(2, 16)}},
+				{NamePos: np("b", 3, 7),
+					Expr: &parse.ConstBinaryOp{
+						"&&", cn("true", 3, 11), cn("false", 3, 19), pos(3, 16)}},
+				{NamePos: np("c", 4, 7),
+					Expr: &parse.ConstBinaryOp{"<", cl(big.NewInt(1), 4, 11),
+						cl(big.NewInt(2), 4, 15), pos(4, 13)}},
+				{NamePos: np("d", 5, 7),
+					Expr: &parse.ConstBinaryOp{">", cl(big.NewInt(3), 5, 11),
+						cl(big.NewInt(4), 5, 15), pos(5, 13)}},
+				{NamePos: np("e", 6, 7),
+					Expr: &parse.ConstBinaryOp{"<=", cl(big.NewInt(5), 6, 11),
+						cl(big.NewInt(6), 6, 16), pos(6, 13)}},
+				{NamePos: np("f", 7, 7),
+					Expr: &parse.ConstBinaryOp{">=", cl(big.NewInt(7), 7, 11),
+						cl(big.NewInt(8), 7, 16), pos(7, 13)}},
+				{NamePos: np("g", 8, 7),
+					Expr: &parse.ConstBinaryOp{"!=", cl(big.NewInt(9), 8, 11),
+						cl(big.NewInt(8), 8, 16), pos(8, 13)}},
+				{NamePos: np("h", 9, 7),
+					Expr: &parse.ConstBinaryOp{"==", cl(big.NewInt(7), 9, 11),
+						cl(big.NewInt(6), 9, 16), pos(9, 13)}},
+				{NamePos: np("i", 10, 7),
+					Expr: &parse.ConstBinaryOp{"+", cl(big.NewInt(5), 10, 11),
+						cl(big.NewInt(4), 10, 15), pos(10, 13)}},
+				{NamePos: np("j", 11, 7),
+					Expr: &parse.ConstBinaryOp{"-", cl(big.NewInt(3), 11, 11),
+						cl(big.NewInt(2), 11, 15), pos(11, 13)}},
+				{NamePos: np("k", 12, 7),
+					Expr: &parse.ConstBinaryOp{"*", cl(big.NewInt(1), 12, 11),
+						cl(big.NewInt(2), 12, 15), pos(12, 13)}},
+				{NamePos: np("l", 13, 7),
+					Expr: &parse.ConstBinaryOp{"/", cl(big.NewInt(3), 13, 11),
+						cl(big.NewInt(4), 13, 15), pos(13, 13)}},
+				{NamePos: np("m", 14, 7),
+					Expr: &parse.ConstBinaryOp{"%", cl(big.NewInt(5), 14, 11),
+						cl(big.NewInt(6), 14, 15), pos(14, 13)}},
+				{NamePos: np("n", 15, 7),
+					Expr: &parse.ConstBinaryOp{"|", cl(big.NewInt(7), 15, 11),
+						cl(big.NewInt(8), 15, 15), pos(15, 13)}},
+				{NamePos: np("o", 16, 7),
+					Expr: &parse.ConstBinaryOp{"&", cl(big.NewInt(9), 16, 11),
+						cl(big.NewInt(8), 16, 15), pos(16, 13)}},
+				{NamePos: np("p", 17, 7),
+					Expr: &parse.ConstBinaryOp{"^", cl(big.NewInt(7), 17, 11),
+						cl(big.NewInt(6), 17, 15), pos(17, 13)}},
+				{NamePos: np("q", 18, 7),
+					Expr: &parse.ConstBinaryOp{"<<", cl(big.NewInt(5), 18, 11),
+						cl(big.NewInt(4), 18, 16), pos(18, 13)}},
+				{NamePos: np("r", 19, 7),
+					Expr: &parse.ConstBinaryOp{">>", cl(big.NewInt(3), 19, 11),
+						cl(big.NewInt(2), 19, 16), pos(19, 13)}}}},
+		nil},
+	{
+		"FAILConstOnlyName",
+		`package testpkg
+const foo`,
+		nil,
+		[]string{"testfile:2:10 syntax error"}},
+	{
+		"FAILConstNoEquals",
+		`package testpkg
+const foo bar`,
+		nil,
+		[]string{"testfile:2:11 syntax error"}},
+	{
+		"FAILConstNoValue",
+		`package testpkg
+const foo =`,
+		nil,
+		[]string{"testfile:2:12 syntax error"}},
+
+	// Error definition tests.
+	{
+		"ErrorEmpty",
+		`package testpkg
+error()`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9)},
+		nil},
+	{
+		"ErrorDefNoParamsNoDetails1",
+		`package testpkg
+error ErrFoo()`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{NamePos: np("ErrFoo", 2, 7)}}},
+		nil},
+	{
+		"ErrorDefNoParamsNoDetails2",
+		`package testpkg
+error ErrFoo() {}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{NamePos: np("ErrFoo", 2, 7)}}},
+		nil},
+	{
+		"ErrorDefNoParamsWithDetails1",
+		`package testpkg
+error ErrFoo() {NoRetry}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Actions: []parse.StringPos{sp("NoRetry", 2, 17)}}}},
+		nil},
+	{
+		"ErrorDefNoParamsWithDetails2",
+		`package testpkg
+error ErrFoo() {"en":"a"}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Formats: []parse.LangFmt{lf(sp("en", 2, 17), sp("a", 2, 22))}}}},
+		nil},
+	{
+		"ErrorDefNoParamsWithDetails3",
+		`package testpkg
+error ErrFoo() {NoRetry, "en":"a", "zh":"b"}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Actions: []parse.StringPos{sp("NoRetry", 2, 17)},
+				Formats: []parse.LangFmt{
+					lf(sp("en", 2, 26), sp("a", 2, 31)),
+					lf(sp("zh", 2, 36), sp("b", 2, 41)),
+				}}}},
+		nil},
+	{
+		"ErrorDefWithParamsNoDetails1",
+		`package testpkg
+error ErrFoo(x int, y bool)`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Params: []*parse.Field{
+					{NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+					{NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}}}}},
+		nil},
+	{
+		"ErrorDefWithParamsNoDetails2",
+		`package testpkg
+error ErrFoo(x int, y bool) {}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Params: []*parse.Field{
+					{NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+					{NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}}}}},
+		nil},
+	{
+		"ErrorDefWithParamsWithDetails1",
+		`package testpkg
+error ErrFoo(x int, y bool) {NoRetry}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Params: []*parse.Field{
+					{NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+					{NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}},
+				Actions: []parse.StringPos{sp("NoRetry", 2, 30)}}}},
+		nil},
+	{
+		"ErrorDefWithParamsWithDetails2",
+		`package testpkg
+error ErrFoo(x int, y bool) {"en":"a"}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Params: []*parse.Field{
+					{NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+					{NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}},
+				Formats: []parse.LangFmt{lf(sp("en", 2, 30), sp("a", 2, 35))}}}},
+		nil},
+	{
+		"ErrorDefWithParamsWithDetails3",
+		`package testpkg
+error ErrFoo(x int, y bool) {NoRetry, "en":"a", "zh":"b"}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{{
+				NamePos: np("ErrFoo", 2, 7),
+				Params: []*parse.Field{
+					{NamePos: np("x", 2, 14), Type: tn("int", 2, 16)},
+					{NamePos: np("y", 2, 21), Type: tn("bool", 2, 23)}},
+				Actions: []parse.StringPos{sp("NoRetry", 2, 30)},
+				Formats: []parse.LangFmt{
+					lf(sp("en", 2, 39), sp("a", 2, 44)),
+					lf(sp("zh", 2, 49), sp("b", 2, 54)),
+				}}}},
+		nil},
+	{
+		"ErrorDefMulti",
+		`package testpkg
+error (
+  ErrFoo()
+  ErrBar() {NoRetry, "en":"a", "zh":"b"}
+  ErrBaz(x int, y bool) {NoRetry, "en":"a", "zh":"b"}
+)`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			ErrorDefs: []*parse.ErrorDef{
+				{
+					NamePos: np("ErrFoo", 3, 3),
+				},
+				{
+					NamePos: np("ErrBar", 4, 3),
+					Actions: []parse.StringPos{sp("NoRetry", 4, 13)},
+					Formats: []parse.LangFmt{
+						lf(sp("en", 4, 22), sp("a", 4, 27)),
+						lf(sp("zh", 4, 32), sp("b", 4, 37)),
+					},
+				},
+				{
+					NamePos: np("ErrBaz", 5, 3),
+					Params: []*parse.Field{
+						{NamePos: np("x", 5, 10), Type: tn("int", 5, 12)},
+						{NamePos: np("y", 5, 17), Type: tn("bool", 5, 19)}},
+					Actions: []parse.StringPos{sp("NoRetry", 5, 26)},
+					Formats: []parse.LangFmt{
+						lf(sp("en", 5, 35), sp("a", 5, 40)),
+						lf(sp("zh", 5, 45), sp("b", 5, 50)),
+					},
+				},
+			}},
+		nil},
+
+	// Interface tests.
+	{
+		"InterfaceEmpty",
+		`package testpkg
+type foo interface{}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6)}}},
+		nil},
+	{
+		"InterfaceOneMethodOneInUnnamedOut",
+		`package testpkg
+type foo interface{meth1(a b) (c | error)}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+				Methods: []*parse.Method{{NamePos: np("meth1", 2, 20),
+					InArgs:  []*parse.Field{{NamePos: np("a", 2, 26), Type: tn("b", 2, 28)}},
+					OutArgs: []*parse.Field{{NamePos: np("", 2, 32), Type: tn("c", 2, 32)}}}}}}},
+		nil},
+	{
+		"InterfaceErrors",
+		`package testpkg
+type foo interface{meth1(err error) error}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+				Methods: []*parse.Method{{NamePos: np("meth1", 2, 20),
+					InArgs: []*parse.Field{{NamePos: np("err", 2, 26), Type: tn("error", 2, 30)}}}}}}},
+		nil},
+	{
+		"InterfaceMixedMethods",
+		`package testpkg
+type foo interface{
+  meth1(a b) (c | error);meth2() error
+  meth3(e f, g, h i) (j k, l, m n | error)
+}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+				Methods: []*parse.Method{
+					{NamePos: np("meth1", 3, 3),
+						InArgs:  []*parse.Field{{NamePos: np("a", 3, 9), Type: tn("b", 3, 11)}},
+						OutArgs: []*parse.Field{{NamePos: np("", 3, 15), Type: tn("c", 3, 15)}}},
+					{NamePos: np("meth2", 3, 26)},
+					{NamePos: np("meth3", 4, 3),
+						InArgs: []*parse.Field{
+							{NamePos: np("e", 4, 9), Type: tn("f", 4, 11)},
+							{NamePos: np("g", 4, 14), Type: tn("i", 4, 19)},
+							{NamePos: np("h", 4, 17), Type: tn("i", 4, 19)}},
+						OutArgs: []*parse.Field{
+							{NamePos: np("j", 4, 23), Type: tn("k", 4, 25)},
+							{NamePos: np("l", 4, 28), Type: tn("n", 4, 33)},
+							{NamePos: np("m", 4, 31), Type: tn("n", 4, 33)}}}}}}},
+		nil},
+	{
+		"InterfaceEmbed",
+		`package testpkg
+type foo interface{bar}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+				Embeds: []*parse.NamePos{npptr("bar", 2, 20)}}}},
+		nil},
+	{
+		"InterfaceEmbedQualified",
+		`package testpkg
+type foo interface{bar.baz}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+				Embeds: []*parse.NamePos{npptr("bar.baz", 2, 20)}}}},
+		nil},
+	{
+		"InterfaceEmbedQualifiedPath",
+		`package testpkg
+type foo interface{"a/b/c/bar".baz}`,
+		&parse.File{BaseName: "testfile", PackageDef: np("testpkg", 1, 9),
+			Interfaces: []*parse.Interface{{NamePos: np("foo", 2, 6),
+				Embeds: []*parse.NamePos{npptr(`"a/b/c/bar".baz`, 2, 20)}}}},
+		nil},
+	{
+		"FAILInterfaceUnclosedInterface",
+		`package testpkg
+type foo interface{
+  meth1()`,
+		nil,
+		[]string{"testfile:3:10 syntax error"}},
+	{
+		"FAILInterfaceUnclosedArgs",
+		`package testpkg
+type foo interface{
+  meth1(
+}`,
+		nil,
+		[]string{"testfile:4:1 syntax error"}},
+	{
+		"FAILInterfaceVariableNames",
+		`package testpkg
+type foo interface{
+  meth1([]a, []b []c)
+}`,
+		nil,
+		[]string{"expected one or more variable names",
+			"testfile:3:18 perhaps you forgot a comma"}},
+}
+
+// configTests contains tests of config files.
+var configTests = []configTest{
+	// Empty file isn't allowed (need at least a package clause).
+	{
+		"FAILEmptyFile",
+		"",
+		nil,
+		[]string{"config file must start with config clause"}},
+
+	// Comment tests.
+	{
+		"ConfigDocOneLiner",
+		`// One liner
+// Another line
+config = true`,
+		&parse.Config{FileName: "testfile", ConfigDef: parse.NamePos{Name: "config", Pos: pos(3, 1), Doc: `// One liner
+// Another line
+`},
+			Config: cn("true", 3, 10)},
+		nil},
+	{
+		"ConfigDocMultiLiner",
+		`/* Multi liner
+Another line
+*/
+config = true`,
+		&parse.Config{FileName: "testfile", ConfigDef: parse.NamePos{Name: "config", Pos: pos(4, 1), Doc: `/* Multi liner
+Another line
+*/
+`},
+			Config: cn("true", 4, 10)},
+		nil},
+	{
+		"NotConfigDoc",
+		`// Extra newline, not config doc
+
+config = true`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 3, 1),
+			Config: cn("true", 3, 10)},
+		nil},
+	{
+		"FAILUnterminatedComment",
+		`/* Unterminated
+Another line
+config = true`,
+		nil,
+		[]string{"comment not terminated"}},
+
+	// Config tests.
+	{
+		"Config",
+		"config = true;",
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cn("true", 1, 10)},
+		nil},
+	{
+		"ConfigNoSemi",
+		"config = true",
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cn("true", 1, 10)},
+		nil},
+	{
+		"ConfigNamedConfig",
+		"config = config",
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cn("config", 1, 10)},
+		nil},
+	{
+		"FAILConfigNoEqual",
+		"config true",
+		nil,
+		[]string{"testfile:1:8 syntax error"}},
+
+	// Import tests.
+	{
+		"EmptyImport",
+		`config = foo
+import (
+)`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cn("foo", 1, 10)},
+		nil},
+	{
+		"OneImport",
+		`config = foo
+import "foo/bar";`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 2, 8)}},
+			Config:  cn("foo", 1, 10)},
+		nil},
+	{
+		"OneImportLocalNameNoSemi",
+		`config = foo
+import baz "foo/bar"`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("baz", 2, 8)}},
+			Config:  cn("foo", 1, 10)},
+		nil},
+	{
+		"OneImportParens",
+		`config = foo
+import (
+  "foo/bar";
+)`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}},
+			Config:  cn("foo", 1, 10)},
+		nil},
+	{
+		"OneImportParensNoSemi",
+		`config = foo
+import (
+  "foo/bar"
+)`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("", 3, 3)}},
+			Config:  cn("foo", 1, 10)},
+		nil},
+	{
+		"OneImportParensNamed",
+		`config = foo
+import (
+  baz "foo/bar"
+)`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{{Path: "foo/bar", NamePos: np("baz", 3, 3)}},
+			Config:  cn("foo", 1, 10)},
+		nil},
+	{
+		"MixedImports",
+		`config = foo
+import "foo/bar"
+import (
+  "baz";"a/b"
+  "c/d"
+)
+import "z"`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{
+				{Path: "foo/bar", NamePos: np("", 2, 8)},
+				{Path: "baz", NamePos: np("", 4, 3)},
+				{Path: "a/b", NamePos: np("", 4, 9)},
+				{Path: "c/d", NamePos: np("", 5, 3)},
+				{Path: "z", NamePos: np("", 7, 8)}},
+			Config: cn("foo", 1, 10)},
+		nil},
+	{
+		"FAILImportParensNotClosed",
+		`config = foo
+import (
+  "foo/bar"`,
+		nil,
+		[]string{"testfile:3:12 syntax error"}},
+
+	// Inline config tests.
+	{
+		"BoolConst",
+		`config = true`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cn("true", 1, 10)},
+		nil},
+	{
+		"StringConst",
+		`config = "abc"`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cl("abc", 1, 10)},
+		nil},
+	{
+		"IntegerConst",
+		`config = 123`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cl(big.NewInt(123), 1, 10)},
+		nil},
+	{
+		"FloatConst",
+		`config = 1.5`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cl(big.NewRat(3, 2), 1, 10)},
+		nil},
+	{
+		"NamedConst",
+		`config = pkg.foo`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: cn("pkg.foo", 1, 10)},
+		nil},
+	{
+		"CompLitConst",
+		`config = {"a","b"}`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: &parse.ConstCompositeLit{
+				KVList: []parse.KVLit{
+					{Value: cl("a", 1, 11)},
+					{Value: cl("b", 1, 15)}},
+				P: pos(1, 10)}},
+		nil},
+	{
+		"CompLitKVConst",
+		`config = {"a":1,"b":2}`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: &parse.ConstCompositeLit{
+				KVList: []parse.KVLit{
+					{cl("a", 1, 11), cl(big.NewInt(1), 1, 15)},
+					{cl("b", 1, 17), cl(big.NewInt(2), 1, 21)}},
+				P: pos(1, 10)}},
+		nil},
+	{
+		"CompLitTypedConst",
+		`config = foo{"a","b"}`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: &parse.ConstCompositeLit{
+				Type: tn("foo", 1, 10),
+				KVList: []parse.KVLit{
+					{Value: cl("a", 1, 14)},
+					{Value: cl("b", 1, 18)}},
+				P: pos(1, 13)}},
+		nil},
+	{
+		"CompLitKVTypedConst",
+		`config = foo{"a":1,"b":2}`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Config: &parse.ConstCompositeLit{
+				Type: tn("foo", 1, 10),
+				KVList: []parse.KVLit{
+					{cl("a", 1, 14), cl(big.NewInt(1), 1, 18)},
+					{cl("b", 1, 20), cl(big.NewInt(2), 1, 24)}},
+				P: pos(1, 13)}},
+		nil},
+	{
+		"FAILConstNoEquals",
+		`config 123`,
+		nil,
+		[]string{"testfile:1:8 syntax error"}},
+	{
+		"FAILConstNoValue",
+		`config =`,
+		nil,
+		[]string{"testfile:1:9 syntax error"}},
+
+	// Out-of-line config tests.
+	{
+		"BoolOutOfLineConfig",
+		`config = config
+import "foo"
+const config = true`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{{Path: "foo", NamePos: np("", 2, 8)}},
+			Config:  cn("config", 1, 10),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("config", 3, 7), Expr: cn("true", 3, 16)}}},
+		nil},
+	{
+		"BoolOutOfLineBar",
+		`config = bar
+import "foo"
+const bar = true`,
+		&parse.Config{FileName: "testfile", ConfigDef: np("config", 1, 1),
+			Imports: []*parse.Import{{Path: "foo", NamePos: np("", 2, 8)}},
+			Config:  cn("bar", 1, 10),
+			ConstDefs: []*parse.ConstDef{
+				{NamePos: np("bar", 3, 7), Expr: cn("true", 3, 13)}}},
+		nil},
+
+	// Error, type and interface definitions are disallowed in config files.
+	{
+		"FAILError",
+		`config = true
+error foo()`,
+		nil,
+		[]string{"config files may not contain error, type or interface definitions"}},
+	{
+		"FAILType",
+		`config = true
+type foo bool`,
+		nil,
+		[]string{"config files may not contain error, type or interface definitions"}},
+	{
+		"FAILInterface",
+		`config = true
+type foo interface{}`,
+		nil,
+		[]string{"config files may not contain error, type or interface definitions"}},
+}
+
+func configImports(imports ...string) *parse.Config {
+	config := new(parse.Config)
+	for _, i := range imports {
+		config.Imports = append(config.Imports, &parse.Import{Path: i})
+	}
+	return config
+}
+
+func TestConfigHasImport(t *testing.T) {
+	config := configImports("a", "b/c")
+	tests := []struct {
+		Path string
+		Want bool
+	}{
+		{"a", true},
+		{"b/c", true},
+		{"b", false},
+		{"c", false},
+		{"d", false},
+	}
+	for _, test := range tests {
+		if got, want := config.HasImport(test.Path), test.Want; got != want {
+			t.Errorf("HasImport(%q) got %v, want %v", test.Path, got, want)
+		}
+	}
+}
+
+func TestConfigAddImports(t *testing.T) {
+	tests := []struct {
+		Base    *parse.Config
+		Imports []string
+		Want    *parse.Config
+	}{
+		{configImports(), []string{"a", "b/c"}, configImports("a", "b/c")},
+		{configImports("a"), []string{"a", "b/c"}, configImports("a", "b/c")},
+		{configImports("a", "b/c"), []string{"a", "b/c"}, configImports("a", "b/c")},
+		{configImports("a", "b/c"), []string{"a", "b/c", "d"}, configImports("a", "b/c", "d")},
+	}
+	for _, test := range tests {
+		test.Base.AddImports(test.Imports...)
+		if got, want := test.Base, test.Want; !reflect.DeepEqual(got, want) {
+			t.Errorf("AddImports(%q) got %v, want %v", test.Imports, got, want)
+		}
+	}
+}
+
+func TestParseExprs(t *testing.T) {
+	tests := []struct {
+		Data  string
+		Exprs []parse.ConstExpr
+		Err   string
+	}{
+		{``, nil, "syntax error"},
+		{`true`, []parse.ConstExpr{cn("true", 1, 1)}, ""},
+		{`false`, []parse.ConstExpr{cn("false", 1, 1)}, ""},
+		{`abc`, []parse.ConstExpr{cn("abc", 1, 1)}, ""},
+		{`"abc"`, []parse.ConstExpr{cl("abc", 1, 1)}, ""},
+		{`1`, []parse.ConstExpr{cl(big.NewInt(1), 1, 1)}, ""},
+		{`123`, []parse.ConstExpr{cl(big.NewInt(123), 1, 1)}, ""},
+		{`1.0`, []parse.ConstExpr{cl(big.NewRat(1, 1), 1, 1)}, ""},
+		{`1.5`, []parse.ConstExpr{cl(big.NewRat(3, 2), 1, 1)}, ""},
+		{`{1,2}`, []parse.ConstExpr{
+			&parse.ConstCompositeLit{
+				KVList: []parse.KVLit{
+					{Value: cl(big.NewInt(1), 1, 2)},
+					{Value: cl(big.NewInt(2), 1, 4)},
+				},
+				P: pos(1, 1),
+			},
+		}, ""},
+		{`1+2`, []parse.ConstExpr{
+			&parse.ConstBinaryOp{"+",
+				cl(big.NewInt(1), 1, 1),
+				cl(big.NewInt(2), 1, 3),
+				pos(1, 2),
+			},
+		}, ""},
+		{`1,"abc"`, []parse.ConstExpr{
+			cl(big.NewInt(1), 1, 1),
+			cl("abc", 1, 3),
+		}, ""},
+	}
+	for _, test := range tests {
+		errs := vdlutil.NewErrors(-1)
+		exprs := parse.ParseExprs(test.Data, errs)
+		vdltest.ExpectResult(t, errs, test.Data, test.Err)
+		if got, want := exprs, test.Exprs; !reflect.DeepEqual(got, want) {
+			t.Errorf("%s got %v, want %v", test.Data, got, want)
+		}
+	}
+}
diff --git a/lib/vdl/parse/result.go b/lib/vdl/parse/result.go
new file mode 100644
index 0000000..7fd0e37
--- /dev/null
+++ b/lib/vdl/parse/result.go
@@ -0,0 +1,186 @@
+package parse
+
+import (
+	"fmt"
+	"path"
+	"strconv"
+	"strings"
+
+	"v.io/v23/vdl/vdlutil"
+)
+
// Pos captures positional information during parsing.
type Pos struct {
	Line int // Line number, starting at 1
	Col  int // Column number (character count), starting at 1
}

// StringPos holds a string and a Pos.
type StringPos struct {
	String string
	Pos    Pos
}

// IsValid returns true iff this Pos has been initialized.  The zero Pos is
// invalid.
func (p Pos) IsValid() bool {
	return p.Line > 0 && p.Col > 0
}

// String returns "line:col" for a valid Pos, and "[no pos]" otherwise.
func (p Pos) String() string {
	if p.IsValid() {
		return fmt.Sprintf("%v:%v", p.Line, p.Col)
	}
	return "[no pos]"
}
+
+// InferPackageName returns the package name from a group of files.  Every file
+// must specify the same package name, otherwise an error is reported in errs.
+func InferPackageName(files []*File, errs *vdlutil.Errors) (pkgName string) {
+	var firstFile string
+	for _, f := range files {
+		switch {
+		case pkgName == "":
+			firstFile = f.BaseName
+			pkgName = f.PackageDef.Name
+		case pkgName != f.PackageDef.Name:
+			errs.Errorf("Files in the same directory must be in the same package; %v has package %v, but %v has package %v", firstFile, pkgName, f.BaseName, f.PackageDef.Name)
+		}
+	}
+	return
+}
+
// Representation of the components of a vdl file.  These data types represent
// the parse tree generated by the parser.

// File represents a parsed vdl file.
type File struct {
	BaseName   string       // Base name of the vdl file, e.g. "foo.vdl"
	PackageDef NamePos      // Name, position and docs of the "package" clause
	Imports    []*Import    // Imports listed in this file.
	ErrorDefs  []*ErrorDef  // Errors defined in this file
	TypeDefs   []*TypeDef   // Types defined in this file
	ConstDefs  []*ConstDef  // Consts defined in this file
	Interfaces []*Interface // Interfaces defined in this file
}

// Config represents a parsed config file.  Config files use a similar syntax as
// vdl files, with similar concepts.
type Config struct {
	FileName  string      // Config file name, e.g. "a/b/foo.config"
	ConfigDef NamePos     // Name, position and docs of the "config" clause
	Imports   []*Import   // Imports listed in this file.
	Config    ConstExpr   // Const expression exported from this config.
	ConstDefs []*ConstDef // Consts defined in this file.
}
+
+// AddImports adds the path imports that don't already exist to c.
+func (c *Config) AddImports(path ...string) {
+	for _, p := range path {
+		if !c.HasImport(p) {
+			c.Imports = append(c.Imports, &Import{Path: p})
+		}
+	}
+}
+
+// HasImport returns true iff path exists in c.Imports.
+func (c *Config) HasImport(path string) bool {
+	for _, imp := range c.Imports {
+		if imp.Path == path {
+			return true
+		}
+	}
+	return false
+}
+
// Import represents an import definition, which is used to import other
// packages into a vdl file.  An example of the syntax in the vdl file:
//   import foo "some/package/path"
type Import struct {
	NamePos        // e.g. foo (from above), or typically empty
	Path    string // e.g. "some/package/path" (from above)
}
+
+// LocalName returns the name used locally within the File to refer to the
+// imported package.
+func (i *Import) LocalName() string {
+	if i.Name != "" {
+		return i.Name
+	}
+	return path.Base(i.Path)
+}
+
// ErrorDef represents an error definition.
type ErrorDef struct {
	NamePos             // error name, pos and doc
	Params  []*Field    // list of positional parameters
	Actions []StringPos // list of action code identifiers
	Formats []LangFmt   // list of language / format pairs
}

// LangFmt represents a language / format string pair.
type LangFmt struct {
	Lang StringPos // IETF language tag, e.g. "en-US"
	Fmt  StringPos // i18n format string in the given language
}
+
+// Pos returns the position of the LangFmt.
+func (x LangFmt) Pos() Pos {
+	if x.Lang.Pos.IsValid() {
+		return x.Lang.Pos
+	}
+	return x.Fmt.Pos
+}
+
// Interface represents a set of embedded interfaces and methods.
type Interface struct {
	NamePos            // interface name, pos and doc
	Embeds  []*NamePos // names of embedded interfaces
	Methods []*Method  // list of methods
}

// Method represents a method in an interface.
type Method struct {
	NamePos               // method name, pos and doc
	InArgs    []*Field    // list of positional in-args
	OutArgs   []*Field    // list of positional out-args
	InStream  Type        // in-stream type, may be nil
	OutStream Type        // out-stream type, may be nil
	Tags      []ConstExpr // list of method tags
}

// Field represents fields in structs as well as method arguments.
type Field struct {
	NamePos      // field name, pos and doc
	Type    Type // field type, never nil
}

// NamePos represents a name, its associated position and documentation.  It is
// embedded in most parse-tree nodes to carry their identifier and location.
type NamePos struct {
	Name      string
	Pos       Pos    // position of first character in name
	Doc       string // docs that occur before the item
	DocSuffix string // docs that occur on the same line after the item
}
+
// The String methods return a human-readable description of each parse node,
// dumping all fields via %+v; they are intended for debugging and tests.
func (x *File) String() string      { return fmt.Sprintf("%+v", *x) }
func (x *Import) String() string    { return fmt.Sprintf("%+v", *x) }
func (x *ErrorDef) String() string  { return fmt.Sprintf("%+v", *x) }
func (x *Interface) String() string { return fmt.Sprintf("%+v", *x) }
func (x *Method) String() string    { return fmt.Sprintf("%+v", *x) }
func (x *Field) String() string     { return fmt.Sprintf("%+v", *x) }
func (x *NamePos) String() string   { return fmt.Sprintf("%+v", *x) }
+
// QuoteStripDoc takes a Doc string, which includes comment markers /**/ and
// double-slash, and returns a raw-quoted string.  Leading and trailing
// newlines are stripped before quoting.
//
// TODO(toddw): This should remove comment markers.  This is non-trivial, since
// we should handle removing leading whitespace "rectangles", and might want to
// retain inline /**/ or adjacent /**/ on the same line.  For now we just leave
// them in the output.
func QuoteStripDoc(doc string) string {
	trimmed := strings.Trim(doc, "\n")
	// Check backquotability of the trimmed string, not the original: doc
	// commonly carries leading/trailing newlines, which would always
	// disqualify it from backquoting even though the trimmed string we
	// actually return is backquotable.
	if strconv.CanBackquote(trimmed) {
		return "`" + trimmed + "`"
	}
	return strconv.Quote(trimmed)
}
diff --git a/lib/vdl/parse/type.go b/lib/vdl/parse/type.go
new file mode 100644
index 0000000..8c2a9e3
--- /dev/null
+++ b/lib/vdl/parse/type.go
@@ -0,0 +1,139 @@
+package parse
+
+import (
+	"fmt"
+)
+
// Type is an interface representing symbolic occurrences of types in VDL files.
// It is implemented by TypeNamed, TypeEnum, TypeArray, TypeList, TypeSet,
// TypeMap, TypeStruct, TypeUnion and TypeOptional below.
type Type interface {
	// String returns a human-readable description of the type.
	String() string
	// Kind returns a short human-readable string describing the kind of type.
	Kind() string
	// Pos returns the position of the first character in the type.
	Pos() Pos
}
+
// TypeNamed captures named references to other types.  Both built-in primitives
// and user-defined named types use this representation.
type TypeNamed struct {
	Name string // name of the referenced type
	P    Pos    // position of the first character of the type
}

// TypeEnum represents enum types.
type TypeEnum struct {
	Labels []NamePos // one entry per enum label
	P      Pos
}

// TypeArray represents array types.
type TypeArray struct {
	Len  int  // fixed array length
	Elem Type // element type
	P    Pos
}

// TypeList represents list types.
type TypeList struct {
	Elem Type // element type
	P    Pos
}

// TypeSet represents set types.
type TypeSet struct {
	Key Type // key type
	P   Pos
}

// TypeMap represents map types.
type TypeMap struct {
	Key  Type // key type
	Elem Type // element type
	P    Pos
}

// TypeStruct represents struct types.
type TypeStruct struct {
	Fields []*Field // struct fields
	P      Pos
}

// TypeUnion represents union types.
type TypeUnion struct {
	Fields []*Field // union fields
	P      Pos
}

// TypeOptional represents optional types.
type TypeOptional struct {
	Base Type // the underlying non-optional type
	P    Pos
}

// TypeDef represents a user-defined named type.
type TypeDef struct {
	NamePos      // name assigned by the user, pos and doc
	Type    Type // the underlying type of the type definition.
}
+
// Pos implementations for each type; each returns the position of the first
// character of the type, as stored in the P field.
func (t *TypeNamed) Pos() Pos    { return t.P }
func (t *TypeEnum) Pos() Pos     { return t.P }
func (t *TypeArray) Pos() Pos    { return t.P }
func (t *TypeList) Pos() Pos     { return t.P }
func (t *TypeSet) Pos() Pos      { return t.P }
func (t *TypeMap) Pos() Pos      { return t.P }
func (t *TypeStruct) Pos() Pos   { return t.P }
func (t *TypeUnion) Pos() Pos    { return t.P }
func (t *TypeOptional) Pos() Pos { return t.P }
+
// Kind implementations for each type; each returns a short fixed string
// describing the kind of type, per the Type interface.
func (t *TypeNamed) Kind() string    { return "named" }
func (t *TypeEnum) Kind() string     { return "enum" }
func (t *TypeArray) Kind() string    { return "array" }
func (t *TypeList) Kind() string     { return "list" }
func (t *TypeSet) Kind() string      { return "set" }
func (t *TypeMap) Kind() string      { return "map" }
func (t *TypeStruct) Kind() string   { return "struct" }
func (t *TypeUnion) Kind() string    { return "union" }
func (t *TypeOptional) Kind() string { return "optional" }
+
+func (t *TypeNamed) String() string { return t.Name }
+func (t *TypeEnum) String() string {
+	result := "enum{"
+	for index, label := range t.Labels {
+		if index > 0 {
+			result += ";"
+		}
+		result += label.Name
+	}
+	return result + "}"
+}
// The String methods for composite types mirror the VDL source syntax, e.g.
// "[3]int32", "[]string", "set[int32]" and "map[string]int64".
func (t *TypeArray) String() string { return fmt.Sprintf("[%v]%v", t.Len, t.Elem) }
func (t *TypeList) String() string  { return fmt.Sprintf("[]%v", t.Elem) }
func (t *TypeSet) String() string   { return fmt.Sprintf("set[%v]", t.Key) }
func (t *TypeMap) String() string   { return fmt.Sprintf("map[%v]%v", t.Key, t.Elem) }
+func (t *TypeStruct) String() string {
+	result := "struct{"
+	for index, field := range t.Fields {
+		if index > 0 {
+			result += ";"
+		}
+		result += field.Name + " " + field.Type.String()
+	}
+	return result + "}"
+}
+func (t *TypeUnion) String() string {
+	result := "union{"
+	for index, field := range t.Fields {
+		if index > 0 {
+			result += ";"
+		}
+		result += field.Name + " " + field.Type.String()
+	}
+	return result + "}"
+}
// String returns a description of the optional type, e.g. "?Foo".
func (t *TypeOptional) String() string { return fmt.Sprintf("?%v", t.Base) }

// String returns "(pos name type)" for a type definition; used for debugging.
func (t *TypeDef) String() string {
	return fmt.Sprintf("(%v %v %v)", t.Pos, t.Name, t.Type)
}
diff --git a/lib/vdl/testdata/arith/advanced.vdl b/lib/vdl/testdata/arith/advanced.vdl
new file mode 100644
index 0000000..88fffe1
--- /dev/null
+++ b/lib/vdl/testdata/arith/advanced.vdl
@@ -0,0 +1,20 @@
+package arith
+
+import (
+	"v.io/v23/vdl/testdata/arith/exp"
+)
+
+// Trigonometry is an interface that specifies a couple trigonometric functions.
+type Trigonometry interface {
+	Sine(angle float64) (float64 | error)
+	Cosine(angle float64) (float64 | error)
+}
+
+// AdvancedMath is an interface for more advanced math than arith.  It embeds
+// interfaces defined both in the same file and in an external package; and in
+// turn it is embedded by arith.Calculator (which is in the same package but
+// different file) to verify that embedding works in all these scenarios.
+type AdvancedMath interface {
+	Trigonometry
+	exp.Exp
+}
diff --git a/lib/vdl/testdata/arith/advanced.vdl.go b/lib/vdl/testdata/arith/advanced.vdl.go
new file mode 100644
index 0000000..1b311db
--- /dev/null
+++ b/lib/vdl/testdata/arith/advanced.vdl.go
@@ -0,0 +1,279 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: advanced.vdl
+
+package arith
+
+import (
+	// VDL system imports
+	"v.io/v23"
+	"v.io/v23/context"
+	"v.io/v23/ipc"
+
+	// VDL user imports
+	"v.io/v23/vdl/testdata/arith/exp"
+)
+
+// TrigonometryClientMethods is the client interface
+// containing Trigonometry methods.
+//
+// Trigonometry is an interface that specifies a couple trigonometric functions.
+type TrigonometryClientMethods interface {
+	Sine(ctx *context.T, angle float64, opts ...ipc.CallOpt) (float64, error)
+	Cosine(ctx *context.T, angle float64, opts ...ipc.CallOpt) (float64, error)
+}
+
+// TrigonometryClientStub adds universal methods to TrigonometryClientMethods.
+type TrigonometryClientStub interface {
+	TrigonometryClientMethods
+	ipc.UniversalServiceMethods
+}
+
+// TrigonometryClient returns a client stub for Trigonometry.
+func TrigonometryClient(name string, opts ...ipc.BindOpt) TrigonometryClientStub {
+	var client ipc.Client
+	for _, opt := range opts {
+		if clientOpt, ok := opt.(ipc.Client); ok {
+			client = clientOpt
+		}
+	}
+	return implTrigonometryClientStub{name, client}
+}
+
+type implTrigonometryClientStub struct {
+	name   string
+	client ipc.Client
+}
+
+func (c implTrigonometryClientStub) c(ctx *context.T) ipc.Client {
+	if c.client != nil {
+		return c.client
+	}
+	return v23.GetClient(ctx)
+}
+
+func (c implTrigonometryClientStub) Sine(ctx *context.T, i0 float64, opts ...ipc.CallOpt) (o0 float64, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Sine", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+func (c implTrigonometryClientStub) Cosine(ctx *context.T, i0 float64, opts ...ipc.CallOpt) (o0 float64, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Cosine", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+// TrigonometryServerMethods is the interface a server writer
+// implements for Trigonometry.
+//
+// Trigonometry is an interface that specifies a couple trigonometric functions.
+type TrigonometryServerMethods interface {
+	Sine(ctx ipc.ServerContext, angle float64) (float64, error)
+	Cosine(ctx ipc.ServerContext, angle float64) (float64, error)
+}
+
+// TrigonometryServerStubMethods is the server interface containing
+// Trigonometry methods, as expected by ipc.Server.
+// There is no difference between this interface and TrigonometryServerMethods
+// since there are no streaming methods.
+type TrigonometryServerStubMethods TrigonometryServerMethods
+
+// TrigonometryServerStub adds universal methods to TrigonometryServerStubMethods.
+type TrigonometryServerStub interface {
+	TrigonometryServerStubMethods
+	// Describe the Trigonometry interfaces.
+	Describe__() []ipc.InterfaceDesc
+}
+
+// TrigonometryServer returns a server stub for Trigonometry.
+// It converts an implementation of TrigonometryServerMethods into
+// an object that may be used by ipc.Server.
+func TrigonometryServer(impl TrigonometryServerMethods) TrigonometryServerStub {
+	stub := implTrigonometryServerStub{
+		impl: impl,
+	}
+	// Initialize GlobState; always check the stub itself first, to handle the
+	// case where the user has the Glob method defined in their VDL source.
+	if gs := ipc.NewGlobState(stub); gs != nil {
+		stub.gs = gs
+	} else if gs := ipc.NewGlobState(impl); gs != nil {
+		stub.gs = gs
+	}
+	return stub
+}
+
+type implTrigonometryServerStub struct {
+	impl TrigonometryServerMethods
+	gs   *ipc.GlobState
+}
+
+func (s implTrigonometryServerStub) Sine(ctx ipc.ServerContext, i0 float64) (float64, error) {
+	return s.impl.Sine(ctx, i0)
+}
+
+func (s implTrigonometryServerStub) Cosine(ctx ipc.ServerContext, i0 float64) (float64, error) {
+	return s.impl.Cosine(ctx, i0)
+}
+
+func (s implTrigonometryServerStub) Globber() *ipc.GlobState {
+	return s.gs
+}
+
+func (s implTrigonometryServerStub) Describe__() []ipc.InterfaceDesc {
+	return []ipc.InterfaceDesc{TrigonometryDesc}
+}
+
+// TrigonometryDesc describes the Trigonometry interface.
+var TrigonometryDesc ipc.InterfaceDesc = descTrigonometry
+
+// descTrigonometry hides the desc to keep godoc clean.
+var descTrigonometry = ipc.InterfaceDesc{
+	Name:    "Trigonometry",
+	PkgPath: "v.io/v23/vdl/testdata/arith",
+	Doc:     "// Trigonometry is an interface that specifies a couple trigonometric functions.",
+	Methods: []ipc.MethodDesc{
+		{
+			Name: "Sine",
+			InArgs: []ipc.ArgDesc{
+				{"angle", ``}, // float64
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"", ``}, // float64
+			},
+		},
+		{
+			Name: "Cosine",
+			InArgs: []ipc.ArgDesc{
+				{"angle", ``}, // float64
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"", ``}, // float64
+			},
+		},
+	},
+}
+
+// AdvancedMathClientMethods is the client interface
+// containing AdvancedMath methods.
+//
+// AdvancedMath is an interface for more advanced math than arith.  It embeds
+// interfaces defined both in the same file and in an external package; and in
+// turn it is embedded by arith.Calculator (which is in the same package but
+// different file) to verify that embedding works in all these scenarios.
+type AdvancedMathClientMethods interface {
+	// Trigonometry is an interface that specifies a couple trigonometric functions.
+	TrigonometryClientMethods
+	exp.ExpClientMethods
+}
+
+// AdvancedMathClientStub adds universal methods to AdvancedMathClientMethods.
+type AdvancedMathClientStub interface {
+	AdvancedMathClientMethods
+	ipc.UniversalServiceMethods
+}
+
+// AdvancedMathClient returns a client stub for AdvancedMath.
+func AdvancedMathClient(name string, opts ...ipc.BindOpt) AdvancedMathClientStub {
+	var client ipc.Client
+	for _, opt := range opts {
+		if clientOpt, ok := opt.(ipc.Client); ok {
+			client = clientOpt
+		}
+	}
+	return implAdvancedMathClientStub{name, client, TrigonometryClient(name, client), exp.ExpClient(name, client)}
+}
+
+type implAdvancedMathClientStub struct {
+	name   string
+	client ipc.Client
+
+	TrigonometryClientStub
+	exp.ExpClientStub
+}
+
+func (c implAdvancedMathClientStub) c(ctx *context.T) ipc.Client {
+	if c.client != nil {
+		return c.client
+	}
+	return v23.GetClient(ctx)
+}
+
+// AdvancedMathServerMethods is the interface a server writer
+// implements for AdvancedMath.
+//
+// AdvancedMath is an interface for more advanced math than arith.  It embeds
+// interfaces defined both in the same file and in an external package; and in
+// turn it is embedded by arith.Calculator (which is in the same package but
+// different file) to verify that embedding works in all these scenarios.
+type AdvancedMathServerMethods interface {
+	// Trigonometry is an interface that specifies a couple trigonometric functions.
+	TrigonometryServerMethods
+	exp.ExpServerMethods
+}
+
+// AdvancedMathServerStubMethods is the server interface containing
+// AdvancedMath methods, as expected by ipc.Server.
+// There is no difference between this interface and AdvancedMathServerMethods
+// since there are no streaming methods.
+type AdvancedMathServerStubMethods AdvancedMathServerMethods
+
+// AdvancedMathServerStub adds universal methods to AdvancedMathServerStubMethods.
+type AdvancedMathServerStub interface {
+	AdvancedMathServerStubMethods
+	// Describe the AdvancedMath interfaces.
+	Describe__() []ipc.InterfaceDesc
+}
+
+// AdvancedMathServer returns a server stub for AdvancedMath.
+// It converts an implementation of AdvancedMathServerMethods into
+// an object that may be used by ipc.Server.
+func AdvancedMathServer(impl AdvancedMathServerMethods) AdvancedMathServerStub {
+	stub := implAdvancedMathServerStub{
+		impl: impl,
+		TrigonometryServerStub: TrigonometryServer(impl),
+		ExpServerStub:          exp.ExpServer(impl),
+	}
+	// Initialize GlobState; always check the stub itself first, to handle the
+	// case where the user has the Glob method defined in their VDL source.
+	if gs := ipc.NewGlobState(stub); gs != nil {
+		stub.gs = gs
+	} else if gs := ipc.NewGlobState(impl); gs != nil {
+		stub.gs = gs
+	}
+	return stub
+}
+
+type implAdvancedMathServerStub struct {
+	impl AdvancedMathServerMethods
+	TrigonometryServerStub
+	exp.ExpServerStub
+	gs *ipc.GlobState
+}
+
+func (s implAdvancedMathServerStub) Globber() *ipc.GlobState {
+	return s.gs
+}
+
+func (s implAdvancedMathServerStub) Describe__() []ipc.InterfaceDesc {
+	return []ipc.InterfaceDesc{AdvancedMathDesc, TrigonometryDesc, exp.ExpDesc}
+}
+
+// AdvancedMathDesc describes the AdvancedMath interface.
+var AdvancedMathDesc ipc.InterfaceDesc = descAdvancedMath
+
+// descAdvancedMath hides the desc to keep godoc clean.
+var descAdvancedMath = ipc.InterfaceDesc{
+	Name:    "AdvancedMath",
+	PkgPath: "v.io/v23/vdl/testdata/arith",
+	Doc:     "// AdvancedMath is an interface for more advanced math than arith.  It embeds\n// interfaces defined both in the same file and in an external package; and in\n// turn it is embedded by arith.Calculator (which is in the same package but\n// different file) to verify that embedding works in all these scenarios.",
+	Embeds: []ipc.EmbedDesc{
+		{"Trigonometry", "v.io/v23/vdl/testdata/arith", "// Trigonometry is an interface that specifies a couple trigonometric functions."},
+		{"Exp", "v.io/v23/vdl/testdata/arith/exp", ``},
+	},
+}
diff --git a/lib/vdl/testdata/arith/arith.vdl b/lib/vdl/testdata/arith/arith.vdl
new file mode 100644
index 0000000..79978f7
--- /dev/null
+++ b/lib/vdl/testdata/arith/arith.vdl
@@ -0,0 +1,89 @@
+// Package arith is an example of an IDL definition in veyron.  The syntax for
+// IDL files is similar to, but not identical to, Go.  Here are the main
+// concepts:
+//   * PACKAGES - Just like in Go you must define the package at the beginning
+//     of an IDL file, and everything defined in the file is part of this
+//     package.  By convention all files in the same dir should be in the same
+//     package.
+//   * IMPORTS - Just like in Go you can import other idl packages, and you may
+//     assign a local package name, or if unspecified the basename of the import
+//     path is used as the import package name.
+//   * DATA TYPES - Just like in Go you can define data types.  You get most of
+//     the primitives (int32, float64, string, etc), the "error" built-in, and a
+//     special "any" built-in described below.  In addition you can create
+//     composite types like arrays, structs, etc.
+//   * CONSTS - Just like in Go you can define constants, and numerics are
+//     "infinite precision" within expressions.  Unlike Go numerics must be
+//     typed to be used as const definitions or tags.
+//   * INTERFACES - Just like in Go you can define interface types, which are
+//     just a set of methods.  Interfaces can embed other interfaces.  Unlike
+//     Go, you cannot use an interface as a data type; interfaces are purely
+//     method sets.
+//   * ERRORS - Errors may be defined in IDL files, and unlike Go they work
+//     across separate address spaces.
+package arith
+
+// Test the import mechanism.
+import (
+	"v.io/v23/vdl/testdata/base"
+)
+
+// Constants.
+const (
+	// Yes shows that bools may be untyped.
+	Yes      = true // yes trailing doc
+	// No shows explicit boolean typing.
+	No       = bool(false)
+	Hello    = "hello"
+	// Int32Const shows explicit integer typing.
+	Int32Const    = int32(123)
+	// Int64Const shows explicit integer conversion from another type, and referencing
+	// a constant from another package.
+	Int64Const    = int64(Int32Const + base.Five)
+	// FloatConst shows arithmetic expressions may be used.
+	FloatConst    = float64(3.0 / 2 + 0.5)
+	// Mask shows bitwise operations.
+	Mask     = uint64(0x1 << 8)
+)
+
+// Arith is an example of an interface definition for an arithmetic service.
+// Things to note:
+//   * There must be at least 1 out-arg, and the last out-arg must be error.
+type Arith interface {
+	// Add is a typical method with multiple input and output arguments.
+	Add(a int32, b int32) (int32 | error)
+
+	// DivMod shows that runs of args with the same type can use the short form,
+	// just like Go.
+	DivMod(a, b int32) (quot, rem int32 | error)
+
+	// Sub shows that you can use data types defined in other packages.
+	Sub(args base.Args) (int32 | error)
+
+	// Mul tries another data type defined in another package.
+	Mul(nested base.NestedArgs) (int32 | error)
+
+	// GenError shows that it's fine to have no in args, and no out args other
+	// than "error".  In addition GenError shows the usage of tags.  Tags are a
+	// sequence of constants.  There's no requirement on uniqueness of types or
+	// values, and regular const expressions may also be used.
+	GenError() error {"foo", "bar" + "z", Hello, int32(Int64Const + 1), base.SixSquared}
+
+	// Count shows using only an int32 out-stream type, with no in-stream type.
+	Count(start int32) stream<_, int32> error
+
+	// StreamingAdd shows a bidirectional stream.
+	StreamingAdd() stream<int32, int32> (total int32 | error)
+
+	// QuoteAny shows the any built-in type, representing a value of any type.
+	QuoteAny(a any) (any | error)
+}
+
+type Calculator interface {
+	// A calculator can do basic arithmetic.
+	Arith  // Arith provides the interface to basic arithmetic operations.
+	// A calculator has basic advanced function support.
+	AdvancedMath
+	On() error             // On turns the calculator on.
+	Off() error {"offtag"} // Off turns the calculator off.
+}
diff --git a/lib/vdl/testdata/arith/arith.vdl.go b/lib/vdl/testdata/arith/arith.vdl.go
new file mode 100644
index 0000000..43d2f1d
--- /dev/null
+++ b/lib/vdl/testdata/arith/arith.vdl.go
@@ -0,0 +1,868 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: arith.vdl
+
+// Package arith is an example of an IDL definition in veyron.  The syntax for
+// IDL files is similar to, but not identical to, Go.  Here are the main
+// concepts:
+//   * PACKAGES - Just like in Go you must define the package at the beginning
+//     of an IDL file, and everything defined in the file is part of this
+//     package.  By convention all files in the same dir should be in the same
+//     package.
+//   * IMPORTS - Just like in Go you can import other idl packages, and you may
+//     assign a local package name, or if unspecified the basename of the import
+//     path is used as the import package name.
+//   * DATA TYPES - Just like in Go you can define data types.  You get most of
+//     the primitives (int32, float64, string, etc), the "error" built-in, and a
+//     special "any" built-in described below.  In addition you can create
+//     composite types like arrays, structs, etc.
+//   * CONSTS - Just like in Go you can define constants, and numerics are
+//     "infinite precision" within expressions.  Unlike Go numerics must be
+//     typed to be used as const definitions or tags.
+//   * INTERFACES - Just like in Go you can define interface types, which are
+//     just a set of methods.  Interfaces can embed other interfaces.  Unlike
+//     Go, you cannot use an interface as a data type; interfaces are purely
+//     method sets.
+//   * ERRORS - Errors may be defined in IDL files, and unlike Go they work
+//     across separate address spaces.
+package arith
+
+import (
+	// VDL system imports
+	"io"
+	"v.io/v23"
+	"v.io/v23/context"
+	"v.io/v23/ipc"
+	"v.io/v23/vdl"
+
+	// VDL user imports
+	"v.io/v23/vdl/testdata/arith/exp"
+	"v.io/v23/vdl/testdata/base"
+)
+
+// Yes shows that bools may be untyped.
+const Yes = true // yes trailing doc
+
+// No shows explicit boolean typing.
+const No = false
+
+const Hello = "hello"
+
+// Int32Const shows explicit integer typing.
+const Int32Const = int32(123)
+
+// Int64Const shows explicit integer conversion from another type, and referencing
+// a constant from another package.
+const Int64Const = int64(128)
+
+// FloatConst shows arithmetic expressions may be used.
+const FloatConst = float64(2)
+
+// Mask shows bitwise operations.
+const Mask = uint64(256)
+
+// ArithClientMethods is the client interface
+// containing Arith methods.
+//
+// Arith is an example of an interface definition for an arithmetic service.
+// Things to note:
+//   * There must be at least 1 out-arg, and the last out-arg must be error.
+type ArithClientMethods interface {
+	// Add is a typical method with multiple input and output arguments.
+	Add(ctx *context.T, a int32, b int32, opts ...ipc.CallOpt) (int32, error)
+	// DivMod shows that runs of args with the same type can use the short form,
+	// just like Go.
+	DivMod(ctx *context.T, a int32, b int32, opts ...ipc.CallOpt) (quot int32, rem int32, err error)
+	// Sub shows that you can use data types defined in other packages.
+	Sub(ctx *context.T, args base.Args, opts ...ipc.CallOpt) (int32, error)
+	// Mul tries another data type defined in another package.
+	Mul(ctx *context.T, nested base.NestedArgs, opts ...ipc.CallOpt) (int32, error)
+	// GenError shows that it's fine to have no in args, and no out args other
+	// than "error".  In addition GenError shows the usage of tags.  Tags are a
+	// sequence of constants.  There's no requirement on uniqueness of types or
+	// values, and regular const expressions may also be used.
+	GenError(*context.T, ...ipc.CallOpt) error
+	// Count shows using only an int32 out-stream type, with no in-stream type.
+	Count(ctx *context.T, start int32, opts ...ipc.CallOpt) (ArithCountCall, error)
+	// StreamingAdd shows a bidirectional stream.
+	StreamingAdd(*context.T, ...ipc.CallOpt) (ArithStreamingAddCall, error)
+	// QuoteAny shows the any built-in type, representing a value of any type.
+	QuoteAny(ctx *context.T, a *vdl.Value, opts ...ipc.CallOpt) (*vdl.Value, error)
+}
+
+// ArithClientStub adds universal methods to ArithClientMethods.
+type ArithClientStub interface {
+	ArithClientMethods
+	ipc.UniversalServiceMethods
+}
+
+// ArithClient returns a client stub for Arith.
+func ArithClient(name string, opts ...ipc.BindOpt) ArithClientStub {
+	var client ipc.Client
+	for _, opt := range opts {
+		if clientOpt, ok := opt.(ipc.Client); ok {
+			client = clientOpt
+		}
+	}
+	return implArithClientStub{name, client}
+}
+
+type implArithClientStub struct {
+	name   string
+	client ipc.Client
+}
+
+func (c implArithClientStub) c(ctx *context.T) ipc.Client {
+	if c.client != nil {
+		return c.client
+	}
+	return v23.GetClient(ctx)
+}
+
+func (c implArithClientStub) Add(ctx *context.T, i0 int32, i1 int32, opts ...ipc.CallOpt) (o0 int32, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Add", []interface{}{i0, i1}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+func (c implArithClientStub) DivMod(ctx *context.T, i0 int32, i1 int32, opts ...ipc.CallOpt) (o0 int32, o1 int32, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "DivMod", []interface{}{i0, i1}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0, &o1)
+	return
+}
+
+func (c implArithClientStub) Sub(ctx *context.T, i0 base.Args, opts ...ipc.CallOpt) (o0 int32, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Sub", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+func (c implArithClientStub) Mul(ctx *context.T, i0 base.NestedArgs, opts ...ipc.CallOpt) (o0 int32, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Mul", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+func (c implArithClientStub) GenError(ctx *context.T, opts ...ipc.CallOpt) (err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "GenError", nil, opts...); err != nil {
+		return
+	}
+	err = call.Finish()
+	return
+}
+
+func (c implArithClientStub) Count(ctx *context.T, i0 int32, opts ...ipc.CallOpt) (ocall ArithCountCall, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Count", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	ocall = &implArithCountCall{Call: call}
+	return
+}
+
+func (c implArithClientStub) StreamingAdd(ctx *context.T, opts ...ipc.CallOpt) (ocall ArithStreamingAddCall, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "StreamingAdd", nil, opts...); err != nil {
+		return
+	}
+	ocall = &implArithStreamingAddCall{Call: call}
+	return
+}
+
+func (c implArithClientStub) QuoteAny(ctx *context.T, i0 *vdl.Value, opts ...ipc.CallOpt) (o0 *vdl.Value, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "QuoteAny", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+// ArithCountClientStream is the client stream for Arith.Count.
+type ArithCountClientStream interface {
+	// RecvStream returns the receiver side of the Arith.Count client stream.
+	RecvStream() interface {
+		// Advance stages an item so that it may be retrieved via Value.  Returns
+		// true iff there is an item to retrieve.  Advance must be called before
+		// Value is called.  May block if an item is not available.
+		Advance() bool
+		// Value returns the item that was staged by Advance.  May panic if Advance
+		// returned false or was not called.  Never blocks.
+		Value() int32
+		// Err returns any error encountered by Advance.  Never blocks.
+		Err() error
+	}
+}
+
+// ArithCountCall represents the call returned from Arith.Count.
+type ArithCountCall interface {
+	ArithCountClientStream
+	// Finish blocks until the server is done, and returns the positional return
+	// values for call.
+	//
+	// Finish returns immediately if the call has been canceled; depending on the
+	// timing the output could either be an error signaling cancelation, or the
+	// valid positional return values from the server.
+	//
+	// Calling Finish is mandatory for releasing stream resources, unless the call
+	// has been canceled or any of the other methods return an error.  Finish should
+	// be called at most once.
+	Finish() error
+}
+
+type implArithCountCall struct {
+	ipc.Call
+	valRecv int32
+	errRecv error
+}
+
+func (c *implArithCountCall) RecvStream() interface {
+	Advance() bool
+	Value() int32
+	Err() error
+} {
+	return implArithCountCallRecv{c}
+}
+
+type implArithCountCallRecv struct {
+	c *implArithCountCall
+}
+
+func (c implArithCountCallRecv) Advance() bool {
+	c.c.errRecv = c.c.Recv(&c.c.valRecv)
+	return c.c.errRecv == nil
+}
+func (c implArithCountCallRecv) Value() int32 {
+	return c.c.valRecv
+}
+func (c implArithCountCallRecv) Err() error {
+	if c.c.errRecv == io.EOF {
+		return nil
+	}
+	return c.c.errRecv
+}
+func (c *implArithCountCall) Finish() (err error) {
+	err = c.Call.Finish()
+	return
+}
+
+// ArithStreamingAddClientStream is the client stream for Arith.StreamingAdd.
+type ArithStreamingAddClientStream interface {
+	// RecvStream returns the receiver side of the Arith.StreamingAdd client stream.
+	RecvStream() interface {
+		// Advance stages an item so that it may be retrieved via Value.  Returns
+		// true iff there is an item to retrieve.  Advance must be called before
+		// Value is called.  May block if an item is not available.
+		Advance() bool
+		// Value returns the item that was staged by Advance.  May panic if Advance
+		// returned false or was not called.  Never blocks.
+		Value() int32
+		// Err returns any error encountered by Advance.  Never blocks.
+		Err() error
+	}
+	// SendStream returns the send side of the Arith.StreamingAdd client stream.
+	SendStream() interface {
+		// Send places the item onto the output stream.  Returns errors
+		// encountered while sending, or if Send is called after Close or
+		// the stream has been canceled.  Blocks if there is no buffer
+		// space; will unblock when buffer space is available or after
+		// the stream has been canceled.
+		Send(item int32) error
+		// Close indicates to the server that no more items will be sent;
+		// server Recv calls will receive io.EOF after all sent items.
+		// This is an optional call - e.g. a client might call Close if it
+		// needs to continue receiving items from the server after it's
+		// done sending.  Returns errors encountered while closing, or if
+		// Close is called after the stream has been canceled.  Like Send,
+		// blocks if there is no buffer space available.
+		Close() error
+	}
+}
+
+// ArithStreamingAddCall represents the call returned from Arith.StreamingAdd.
+type ArithStreamingAddCall interface {
+	ArithStreamingAddClientStream
+	// Finish performs the equivalent of SendStream().Close, then blocks until
+	// the server is done, and returns the positional return values for the call.
+	//
+	// Finish returns immediately if the call has been canceled; depending on the
+	// timing the output could either be an error signaling cancelation, or the
+	// valid positional return values from the server.
+	//
+	// Calling Finish is mandatory for releasing stream resources, unless the call
+	// has been canceled or any of the other methods return an error.  Finish should
+	// be called at most once.
+	Finish() (total int32, err error)
+}
+
+type implArithStreamingAddCall struct {
+	ipc.Call
+	valRecv int32
+	errRecv error
+}
+
+func (c *implArithStreamingAddCall) RecvStream() interface {
+	Advance() bool
+	Value() int32
+	Err() error
+} {
+	return implArithStreamingAddCallRecv{c}
+}
+
+type implArithStreamingAddCallRecv struct {
+	c *implArithStreamingAddCall
+}
+
+func (c implArithStreamingAddCallRecv) Advance() bool {
+	c.c.errRecv = c.c.Recv(&c.c.valRecv)
+	return c.c.errRecv == nil
+}
+func (c implArithStreamingAddCallRecv) Value() int32 {
+	return c.c.valRecv
+}
+func (c implArithStreamingAddCallRecv) Err() error {
+	if c.c.errRecv == io.EOF {
+		return nil
+	}
+	return c.c.errRecv
+}
+func (c *implArithStreamingAddCall) SendStream() interface {
+	Send(item int32) error
+	Close() error
+} {
+	return implArithStreamingAddCallSend{c}
+}
+
+type implArithStreamingAddCallSend struct {
+	c *implArithStreamingAddCall
+}
+
+func (c implArithStreamingAddCallSend) Send(item int32) error {
+	return c.c.Send(item)
+}
+func (c implArithStreamingAddCallSend) Close() error {
+	return c.c.CloseSend()
+}
+func (c *implArithStreamingAddCall) Finish() (o0 int32, err error) {
+	err = c.Call.Finish(&o0)
+	return
+}
+
+// ArithServerMethods is the interface a server writer
+// implements for Arith.
+//
+// Arith is an example of an interface definition for an arithmetic service.
+// Things to note:
+//   * There must be at least 1 out-arg, and the last out-arg must be error.
+type ArithServerMethods interface {
+	// Add is a typical method with multiple input and output arguments.
+	Add(ctx ipc.ServerContext, a int32, b int32) (int32, error)
+	// DivMod shows that runs of args with the same type can use the short form,
+	// just like Go.
+	DivMod(ctx ipc.ServerContext, a int32, b int32) (quot int32, rem int32, err error)
+	// Sub shows that you can use data types defined in other packages.
+	Sub(ctx ipc.ServerContext, args base.Args) (int32, error)
+	// Mul tries another data type defined in another package.
+	Mul(ctx ipc.ServerContext, nested base.NestedArgs) (int32, error)
+	// GenError shows that it's fine to have no in args, and no out args other
+	// than "error".  In addition GenError shows the usage of tags.  Tags are a
+	// sequence of constants.  There's no requirement on uniqueness of types or
+	// values, and regular const expressions may also be used.
+	GenError(ipc.ServerContext) error
+	// Count shows using only an int32 out-stream type, with no in-stream type.
+	Count(ctx ArithCountContext, start int32) error
+	// StreamingAdd shows a bidirectional stream.
+	StreamingAdd(ArithStreamingAddContext) (total int32, err error)
+	// QuoteAny shows the any built-in type, representing a value of any type.
+	QuoteAny(ctx ipc.ServerContext, a *vdl.Value) (*vdl.Value, error)
+}
+
+// ArithServerStubMethods is the server interface containing
+// Arith methods, as expected by ipc.Server.
+// The only difference between this interface and ArithServerMethods
+// is the streaming methods.
+type ArithServerStubMethods interface {
+	// Add is a typical method with multiple input and output arguments.
+	Add(ctx ipc.ServerContext, a int32, b int32) (int32, error)
+	// DivMod shows that runs of args with the same type can use the short form,
+	// just like Go.
+	DivMod(ctx ipc.ServerContext, a int32, b int32) (quot int32, rem int32, err error)
+	// Sub shows that you can use data types defined in other packages.
+	Sub(ctx ipc.ServerContext, args base.Args) (int32, error)
+	// Mul tries another data type defined in another package.
+	Mul(ctx ipc.ServerContext, nested base.NestedArgs) (int32, error)
+	// GenError shows that it's fine to have no in args, and no out args other
+	// than "error".  In addition GenError shows the usage of tags.  Tags are a
+	// sequence of constants.  There's no requirement on uniqueness of types or
+	// values, and regular const expressions may also be used.
+	GenError(ipc.ServerContext) error
+	// Count shows using only an int32 out-stream type, with no in-stream type.
+	Count(ctx *ArithCountContextStub, start int32) error
+	// StreamingAdd shows a bidirectional stream.
+	StreamingAdd(*ArithStreamingAddContextStub) (total int32, err error)
+	// QuoteAny shows the any built-in type, representing a value of any type.
+	QuoteAny(ctx ipc.ServerContext, a *vdl.Value) (*vdl.Value, error)
+}
+
+// ArithServerStub adds universal methods to ArithServerStubMethods.
+type ArithServerStub interface {
+	ArithServerStubMethods
+	// Describe the Arith interfaces.
+	Describe__() []ipc.InterfaceDesc
+}
+
+// ArithServer returns a server stub for Arith.
+// It converts an implementation of ArithServerMethods into
+// an object that may be used by ipc.Server.
+func ArithServer(impl ArithServerMethods) ArithServerStub {
+	stub := implArithServerStub{
+		impl: impl,
+	}
+	// Initialize GlobState; always check the stub itself first, to handle the
+	// case where the user has the Glob method defined in their VDL source.
+	if gs := ipc.NewGlobState(stub); gs != nil {
+		stub.gs = gs
+	} else if gs := ipc.NewGlobState(impl); gs != nil {
+		stub.gs = gs
+	}
+	return stub
+}
+
+type implArithServerStub struct {
+	impl ArithServerMethods
+	gs   *ipc.GlobState
+}
+
+func (s implArithServerStub) Add(ctx ipc.ServerContext, i0 int32, i1 int32) (int32, error) {
+	return s.impl.Add(ctx, i0, i1)
+}
+
+func (s implArithServerStub) DivMod(ctx ipc.ServerContext, i0 int32, i1 int32) (int32, int32, error) {
+	return s.impl.DivMod(ctx, i0, i1)
+}
+
+func (s implArithServerStub) Sub(ctx ipc.ServerContext, i0 base.Args) (int32, error) {
+	return s.impl.Sub(ctx, i0)
+}
+
+func (s implArithServerStub) Mul(ctx ipc.ServerContext, i0 base.NestedArgs) (int32, error) {
+	return s.impl.Mul(ctx, i0)
+}
+
+func (s implArithServerStub) GenError(ctx ipc.ServerContext) error {
+	return s.impl.GenError(ctx)
+}
+
+func (s implArithServerStub) Count(ctx *ArithCountContextStub, i0 int32) error {
+	return s.impl.Count(ctx, i0)
+}
+
+func (s implArithServerStub) StreamingAdd(ctx *ArithStreamingAddContextStub) (int32, error) {
+	return s.impl.StreamingAdd(ctx)
+}
+
+func (s implArithServerStub) QuoteAny(ctx ipc.ServerContext, i0 *vdl.Value) (*vdl.Value, error) {
+	return s.impl.QuoteAny(ctx, i0)
+}
+
+func (s implArithServerStub) Globber() *ipc.GlobState {
+	return s.gs
+}
+
+func (s implArithServerStub) Describe__() []ipc.InterfaceDesc {
+	return []ipc.InterfaceDesc{ArithDesc}
+}
+
+// ArithDesc describes the Arith interface.
+var ArithDesc ipc.InterfaceDesc = descArith
+
+// descArith hides the desc to keep godoc clean.
+var descArith = ipc.InterfaceDesc{
+	Name:    "Arith",
+	PkgPath: "v.io/v23/vdl/testdata/arith",
+	Doc:     "// Arith is an example of an interface definition for an arithmetic service.\n// Things to note:\n//   * There must be at least 1 out-arg, and the last out-arg must be error.",
+	Methods: []ipc.MethodDesc{
+		{
+			Name: "Add",
+			Doc:  "// Add is a typical method with multiple input and output arguments.",
+			InArgs: []ipc.ArgDesc{
+				{"a", ``}, // int32
+				{"b", ``}, // int32
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"", ``}, // int32
+			},
+		},
+		{
+			Name: "DivMod",
+			Doc:  "// DivMod shows that runs of args with the same type can use the short form,\n// just like Go.",
+			InArgs: []ipc.ArgDesc{
+				{"a", ``}, // int32
+				{"b", ``}, // int32
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"quot", ``}, // int32
+				{"rem", ``},  // int32
+			},
+		},
+		{
+			Name: "Sub",
+			Doc:  "// Sub shows that you can use data types defined in other packages.",
+			InArgs: []ipc.ArgDesc{
+				{"args", ``}, // base.Args
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"", ``}, // int32
+			},
+		},
+		{
+			Name: "Mul",
+			Doc:  "// Mul tries another data type defined in another package.",
+			InArgs: []ipc.ArgDesc{
+				{"nested", ``}, // base.NestedArgs
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"", ``}, // int32
+			},
+		},
+		{
+			Name: "GenError",
+			Doc:  "// GenError shows that it's fine to have no in args, and no out args other\n// than \"error\".  In addition GenError shows the usage of tags.  Tags are a\n// sequence of constants.  There's no requirement on uniqueness of types or\n// values, and regular const expressions may also be used.",
+			Tags: []*vdl.Value{vdl.ValueOf("foo"), vdl.ValueOf("barz"), vdl.ValueOf("hello"), vdl.ValueOf(int32(129)), vdl.ValueOf(uint64(36))},
+		},
+		{
+			Name: "Count",
+			Doc:  "// Count shows using only an int32 out-stream type, with no in-stream type.",
+			InArgs: []ipc.ArgDesc{
+				{"start", ``}, // int32
+			},
+		},
+		{
+			Name: "StreamingAdd",
+			Doc:  "// StreamingAdd shows a bidirectional stream.",
+			OutArgs: []ipc.ArgDesc{
+				{"total", ``}, // int32
+			},
+		},
+		{
+			Name: "QuoteAny",
+			Doc:  "// QuoteAny shows the any built-in type, representing a value of any type.",
+			InArgs: []ipc.ArgDesc{
+				{"a", ``}, // *vdl.Value
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"", ``}, // *vdl.Value
+			},
+		},
+	},
+}
+
+// ArithCountServerStream is the server stream for Arith.Count.
+type ArithCountServerStream interface {
+	// SendStream returns the send side of the Arith.Count server stream.
+	SendStream() interface {
+		// Send places the item onto the output stream.  Returns errors encountered
+		// while sending.  Blocks if there is no buffer space; will unblock when
+		// buffer space is available.
+		Send(item int32) error
+	}
+}
+
+// ArithCountContext represents the context passed to Arith.Count.
+type ArithCountContext interface {
+	ipc.ServerContext
+	ArithCountServerStream
+}
+
+// ArithCountContextStub is a wrapper that converts ipc.ServerCall into
+// a typesafe stub that implements ArithCountContext.
+type ArithCountContextStub struct {
+	ipc.ServerCall
+}
+
+// Init initializes ArithCountContextStub from ipc.ServerCall.
+func (s *ArithCountContextStub) Init(call ipc.ServerCall) {
+	s.ServerCall = call
+}
+
+// SendStream returns the send side of the Arith.Count server stream.
+func (s *ArithCountContextStub) SendStream() interface {
+	Send(item int32) error
+} {
+	return implArithCountContextSend{s}
+}
+
+type implArithCountContextSend struct {
+	s *ArithCountContextStub
+}
+
+func (s implArithCountContextSend) Send(item int32) error {
+	return s.s.Send(item)
+}
+
+// ArithStreamingAddServerStream is the server stream for Arith.StreamingAdd.
+type ArithStreamingAddServerStream interface {
+	// RecvStream returns the receiver side of the Arith.StreamingAdd server stream.
+	RecvStream() interface {
+		// Advance stages an item so that it may be retrieved via Value.  Returns
+		// true iff there is an item to retrieve.  Advance must be called before
+		// Value is called.  May block if an item is not available.
+		Advance() bool
+		// Value returns the item that was staged by Advance.  May panic if Advance
+		// returned false or was not called.  Never blocks.
+		Value() int32
+		// Err returns any error encountered by Advance.  Never blocks.
+		Err() error
+	}
+	// SendStream returns the send side of the Arith.StreamingAdd server stream.
+	SendStream() interface {
+		// Send places the item onto the output stream.  Returns errors encountered
+		// while sending.  Blocks if there is no buffer space; will unblock when
+		// buffer space is available.
+		Send(item int32) error
+	}
+}
+
+// ArithStreamingAddContext represents the context passed to Arith.StreamingAdd.
+type ArithStreamingAddContext interface {
+	ipc.ServerContext
+	ArithStreamingAddServerStream
+}
+
+// ArithStreamingAddContextStub is a wrapper that converts ipc.ServerCall into
+// a typesafe stub that implements ArithStreamingAddContext.
+type ArithStreamingAddContextStub struct {
+	ipc.ServerCall
+	valRecv int32
+	errRecv error
+}
+
+// Init initializes ArithStreamingAddContextStub from ipc.ServerCall.
+func (s *ArithStreamingAddContextStub) Init(call ipc.ServerCall) {
+	s.ServerCall = call
+}
+
+// RecvStream returns the receiver side of the Arith.StreamingAdd server stream.
+func (s *ArithStreamingAddContextStub) RecvStream() interface {
+	Advance() bool
+	Value() int32
+	Err() error
+} {
+	return implArithStreamingAddContextRecv{s}
+}
+
+type implArithStreamingAddContextRecv struct {
+	s *ArithStreamingAddContextStub
+}
+
+func (s implArithStreamingAddContextRecv) Advance() bool {
+	s.s.errRecv = s.s.Recv(&s.s.valRecv)
+	return s.s.errRecv == nil
+}
+func (s implArithStreamingAddContextRecv) Value() int32 {
+	return s.s.valRecv
+}
+func (s implArithStreamingAddContextRecv) Err() error {
+	if s.s.errRecv == io.EOF {
+		return nil
+	}
+	return s.s.errRecv
+}
+
+// SendStream returns the send side of the Arith.StreamingAdd server stream.
+func (s *ArithStreamingAddContextStub) SendStream() interface {
+	Send(item int32) error
+} {
+	return implArithStreamingAddContextSend{s}
+}
+
+type implArithStreamingAddContextSend struct {
+	s *ArithStreamingAddContextStub
+}
+
+func (s implArithStreamingAddContextSend) Send(item int32) error {
+	return s.s.Send(item)
+}
+
+// CalculatorClientMethods is the client interface
+// containing Calculator methods.
+type CalculatorClientMethods interface {
+	// Arith is an example of an interface definition for an arithmetic service.
+	// Things to note:
+	//   * There must be at least 1 out-arg, and the last out-arg must be error.
+	ArithClientMethods
+	// AdvancedMath is an interface for more advanced math than arith.  It embeds
+	// interfaces defined both in the same file and in an external package; and in
+	// turn it is embedded by arith.Calculator (which is in the same package but
+	// different file) to verify that embedding works in all these scenarios.
+	AdvancedMathClientMethods
+	On(*context.T, ...ipc.CallOpt) error  // On turns the calculator on.
+	Off(*context.T, ...ipc.CallOpt) error // Off turns the calculator off.
+}
+
+// CalculatorClientStub adds universal methods to CalculatorClientMethods.
+type CalculatorClientStub interface {
+	CalculatorClientMethods
+	ipc.UniversalServiceMethods
+}
+
+// CalculatorClient returns a client stub for Calculator.
+func CalculatorClient(name string, opts ...ipc.BindOpt) CalculatorClientStub {
+	var client ipc.Client
+	for _, opt := range opts {
+		if clientOpt, ok := opt.(ipc.Client); ok {
+			client = clientOpt
+		}
+	}
+	return implCalculatorClientStub{name, client, ArithClient(name, client), AdvancedMathClient(name, client)}
+}
+
+type implCalculatorClientStub struct {
+	name   string
+	client ipc.Client
+
+	ArithClientStub
+	AdvancedMathClientStub
+}
+
+func (c implCalculatorClientStub) c(ctx *context.T) ipc.Client {
+	if c.client != nil {
+		return c.client
+	}
+	return v23.GetClient(ctx)
+}
+
+func (c implCalculatorClientStub) On(ctx *context.T, opts ...ipc.CallOpt) (err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "On", nil, opts...); err != nil {
+		return
+	}
+	err = call.Finish()
+	return
+}
+
+func (c implCalculatorClientStub) Off(ctx *context.T, opts ...ipc.CallOpt) (err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Off", nil, opts...); err != nil {
+		return
+	}
+	err = call.Finish()
+	return
+}
+
+// CalculatorServerMethods is the interface a server writer
+// implements for Calculator.
+type CalculatorServerMethods interface {
+	// Arith is an example of an interface definition for an arithmetic service.
+	// Things to note:
+	//   * There must be at least 1 out-arg, and the last out-arg must be error.
+	ArithServerMethods
+	// AdvancedMath is an interface for more advanced math than arith.  It embeds
+	// interfaces defined both in the same file and in an external package; and in
+	// turn it is embedded by arith.Calculator (which is in the same package but
+	// different file) to verify that embedding works in all these scenarios.
+	AdvancedMathServerMethods
+	On(ipc.ServerContext) error  // On turns the calculator on.
+	Off(ipc.ServerContext) error // Off turns the calculator off.
+}
+
+// CalculatorServerStubMethods is the server interface containing
+// Calculator methods, as expected by ipc.Server.
+// The only difference between this interface and CalculatorServerMethods
+// is the streaming methods.
+type CalculatorServerStubMethods interface {
+	// Arith is an example of an interface definition for an arithmetic service.
+	// Things to note:
+	//   * There must be at least 1 out-arg, and the last out-arg must be error.
+	ArithServerStubMethods
+	// AdvancedMath is an interface for more advanced math than arith.  It embeds
+	// interfaces defined both in the same file and in an external package; and in
+	// turn it is embedded by arith.Calculator (which is in the same package but
+	// different file) to verify that embedding works in all these scenarios.
+	AdvancedMathServerStubMethods
+	On(ipc.ServerContext) error  // On turns the calculator on.
+	Off(ipc.ServerContext) error // Off turns the calculator off.
+}
+
+// CalculatorServerStub adds universal methods to CalculatorServerStubMethods.
+type CalculatorServerStub interface {
+	CalculatorServerStubMethods
+	// Describe the Calculator interfaces.
+	Describe__() []ipc.InterfaceDesc
+}
+
+// CalculatorServer returns a server stub for Calculator.
+// It converts an implementation of CalculatorServerMethods into
+// an object that may be used by ipc.Server.
+func CalculatorServer(impl CalculatorServerMethods) CalculatorServerStub {
+	stub := implCalculatorServerStub{
+		impl:                   impl,
+		ArithServerStub:        ArithServer(impl),
+		AdvancedMathServerStub: AdvancedMathServer(impl),
+	}
+	// Initialize GlobState; always check the stub itself first, to handle the
+	// case where the user has the Glob method defined in their VDL source.
+	if gs := ipc.NewGlobState(stub); gs != nil {
+		stub.gs = gs
+	} else if gs := ipc.NewGlobState(impl); gs != nil {
+		stub.gs = gs
+	}
+	return stub
+}
+
+type implCalculatorServerStub struct {
+	impl CalculatorServerMethods
+	ArithServerStub
+	AdvancedMathServerStub
+	gs *ipc.GlobState
+}
+
+func (s implCalculatorServerStub) On(ctx ipc.ServerContext) error {
+	return s.impl.On(ctx)
+}
+
+func (s implCalculatorServerStub) Off(ctx ipc.ServerContext) error {
+	return s.impl.Off(ctx)
+}
+
+func (s implCalculatorServerStub) Globber() *ipc.GlobState {
+	return s.gs
+}
+
+func (s implCalculatorServerStub) Describe__() []ipc.InterfaceDesc {
+	return []ipc.InterfaceDesc{CalculatorDesc, ArithDesc, AdvancedMathDesc, TrigonometryDesc, exp.ExpDesc}
+}
+
+// CalculatorDesc describes the Calculator interface.
+var CalculatorDesc ipc.InterfaceDesc = descCalculator
+
+// descCalculator hides the desc to keep godoc clean.
+var descCalculator = ipc.InterfaceDesc{
+	Name:    "Calculator",
+	PkgPath: "v.io/v23/vdl/testdata/arith",
+	Embeds: []ipc.EmbedDesc{
+		{"Arith", "v.io/v23/vdl/testdata/arith", "// Arith is an example of an interface definition for an arithmetic service.\n// Things to note:\n//   * There must be at least 1 out-arg, and the last out-arg must be error."},
+		{"AdvancedMath", "v.io/v23/vdl/testdata/arith", "// AdvancedMath is an interface for more advanced math than arith.  It embeds\n// interfaces defined both in the same file and in an external package; and in\n// turn it is embedded by arith.Calculator (which is in the same package but\n// different file) to verify that embedding works in all these scenarios."},
+	},
+	Methods: []ipc.MethodDesc{
+		{
+			Name: "On",
+		},
+		{
+			Name: "Off",
+			Tags: []*vdl.Value{vdl.ValueOf("offtag")},
+		},
+	},
+}
diff --git a/lib/vdl/testdata/arith/exp/exp.vdl b/lib/vdl/testdata/arith/exp/exp.vdl
new file mode 100644
index 0000000..b9dff3b
--- /dev/null
+++ b/lib/vdl/testdata/arith/exp/exp.vdl
@@ -0,0 +1,7 @@
+// Package exp is used to test that embedding interfaces works across packages.
+// The arith.Calculator vdl interface embeds the Exp interface.
+package exp
+
+type Exp interface {
+	Exp(x float64) (float64 | error)
+}
diff --git a/lib/vdl/testdata/arith/exp/exp.vdl.go b/lib/vdl/testdata/arith/exp/exp.vdl.go
new file mode 100644
index 0000000..a4faa3d
--- /dev/null
+++ b/lib/vdl/testdata/arith/exp/exp.vdl.go
@@ -0,0 +1,130 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: exp.vdl
+
+// Package exp is used to test that embedding interfaces works across packages.
+// The arith.Calculator vdl interface embeds the Exp interface.
+package exp
+
+import (
+	// VDL system imports
+	"v.io/v23"
+	"v.io/v23/context"
+	"v.io/v23/ipc"
+)
+
+// ExpClientMethods is the client interface
+// containing Exp methods.
+type ExpClientMethods interface {
+	Exp(ctx *context.T, x float64, opts ...ipc.CallOpt) (float64, error)
+}
+
+// ExpClientStub adds universal methods to ExpClientMethods.
+type ExpClientStub interface {
+	ExpClientMethods
+	ipc.UniversalServiceMethods
+}
+
+// ExpClient returns a client stub for Exp.
+// An ipc.Client passed via opts overrides the runtime client; all other
+// BindOpts are ignored here.
+func ExpClient(name string, opts ...ipc.BindOpt) ExpClientStub {
+	var client ipc.Client
+	for _, opt := range opts {
+		if clientOpt, ok := opt.(ipc.Client); ok {
+			client = clientOpt
+		}
+	}
+	return implExpClientStub{name, client}
+}
+
+type implExpClientStub struct {
+	name   string
+	client ipc.Client
+}
+
+// c returns the explicitly bound client if one was supplied, otherwise the
+// runtime client attached to ctx.
+func (c implExpClientStub) c(ctx *context.T) ipc.Client {
+	if c.client != nil {
+		return c.client
+	}
+	return v23.GetClient(ctx)
+}
+
+// Exp starts the RPC and blocks in Finish for the single float64 result.
+func (c implExpClientStub) Exp(ctx *context.T, i0 float64, opts ...ipc.CallOpt) (o0 float64, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "Exp", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+// ExpServerMethods is the interface a server writer
+// implements for Exp.
+type ExpServerMethods interface {
+	Exp(ctx ipc.ServerContext, x float64) (float64, error)
+}
+
+// ExpServerStubMethods is the server interface containing
+// Exp methods, as expected by ipc.Server.
+// There is no difference between this interface and ExpServerMethods
+// since there are no streaming methods.
+type ExpServerStubMethods ExpServerMethods
+
+// ExpServerStub adds universal methods to ExpServerStubMethods.
+type ExpServerStub interface {
+	ExpServerStubMethods
+	// Describe the Exp interfaces.
+	Describe__() []ipc.InterfaceDesc
+}
+
+// ExpServer returns a server stub for Exp.
+// It converts an implementation of ExpServerMethods into
+// an object that may be used by ipc.Server.
+func ExpServer(impl ExpServerMethods) ExpServerStub {
+	stub := implExpServerStub{
+		impl: impl,
+	}
+	// Initialize GlobState; always check the stub itself first, to handle the
+	// case where the user has the Glob method defined in their VDL source.
+	if gs := ipc.NewGlobState(stub); gs != nil {
+		stub.gs = gs
+	} else if gs := ipc.NewGlobState(impl); gs != nil {
+		stub.gs = gs
+	}
+	return stub
+}
+
+type implExpServerStub struct {
+	impl ExpServerMethods
+	gs   *ipc.GlobState
+}
+
+// Exp delegates to the user implementation.
+func (s implExpServerStub) Exp(ctx ipc.ServerContext, i0 float64) (float64, error) {
+	return s.impl.Exp(ctx, i0)
+}
+
+// Globber exposes the GlobState chosen in ExpServer.
+func (s implExpServerStub) Globber() *ipc.GlobState {
+	return s.gs
+}
+
+// Describe__ returns only ExpDesc: Exp embeds nothing.
+func (s implExpServerStub) Describe__() []ipc.InterfaceDesc {
+	return []ipc.InterfaceDesc{ExpDesc}
+}
+
+// ExpDesc describes the Exp interface.
+var ExpDesc ipc.InterfaceDesc = descExp
+
+// descExp hides the desc to keep godoc clean.
+var descExp = ipc.InterfaceDesc{
+	Name:    "Exp",
+	PkgPath: "v.io/v23/vdl/testdata/arith/exp",
+	Methods: []ipc.MethodDesc{
+		{
+			Name: "Exp",
+			InArgs: []ipc.ArgDesc{
+				{"x", ``}, // float64
+			},
+			// The trailing error out-arg is implicit and not listed here.
+			OutArgs: []ipc.ArgDesc{
+				{"", ``}, // float64
+			},
+		},
+	},
+}
diff --git a/lib/vdl/testdata/base/base.vdl b/lib/vdl/testdata/base/base.vdl
new file mode 100644
index 0000000..1a6976a
--- /dev/null
+++ b/lib/vdl/testdata/base/base.vdl
@@ -0,0 +1,235 @@
+// Package base is a simple single-file test of vdl functionality.
+package base
+
+// NOTE(review): one named type per VDL base kind — enum/set/union have no
+// direct Go analogue and exercise the generator's special lowering.
+
+type (
+	NamedBool       bool
+	NamedByte       byte
+	NamedUint16     uint16
+	NamedUint32     uint32
+	NamedUint64     uint64
+	NamedInt16      int16
+	NamedInt32      int32
+	NamedInt64      int64
+	NamedFloat32    float32
+	NamedFloat64    float64
+	NamedComplex64  complex64
+	NamedComplex128 complex128
+	NamedString     string
+	NamedEnum       enum{A;B;C}
+	NamedArray      [2]bool
+	NamedList       []uint32
+	NamedSet        set[string]
+	NamedMap        map[string]float32
+	NamedStruct     struct{A bool;B string;C int32}
+	NamedUnion      union{A bool;B string;C int32}
+)
+
+// NOTE(review): A13-A15 (error/any/typeobject) deliberately have no B-series
+// named counterpart; compare KeyScalars, which omits them entirely.
+
+type Scalars struct {
+	A0  bool
+	A1  byte
+	A2  uint16
+	A3  uint32
+	A4  uint64
+	A5  int16
+	A6  int32
+	A7  int64
+	A8  float32
+	A9  float64
+	A10 complex64
+	A11 complex128
+	A12 string
+	A13 error
+	A14 any
+	A15 typeobject
+
+	B0  NamedBool
+	B1  NamedByte
+	B2  NamedUint16
+	B3  NamedUint32
+	B4  NamedUint64
+	B5  NamedInt16
+	B6  NamedInt32
+	B7  NamedInt64
+	B8  NamedFloat32
+	B9  NamedFloat64
+	B10 NamedComplex64
+	B11 NamedComplex128
+	B12 NamedString
+	B13 NamedEnum
+	B14 NamedUnion
+}
+
+// NOTE(review): error/any/typeobject are absent here because they are not
+// valid VDL map/set key types.
+
+// These are all scalars that may be used as map or set keys.
+type KeyScalars struct {
+	A0  bool
+	A1  byte
+	A2  uint16
+	A3  uint32
+	A4  uint64
+	A5  int16
+	A6  int32
+	A7  int64
+	A8  float32
+	A9  float64
+	A10 complex64
+	A11 complex128
+	A12 string
+
+	B0  NamedBool
+	B1  NamedByte
+	B2  NamedUint16
+	B3  NamedUint32
+	B4  NamedUint64
+	B5  NamedInt16
+	B6  NamedInt32
+	B7  NamedInt64
+	B8  NamedFloat32
+	B9  NamedFloat64
+	B10 NamedComplex64
+	B11 NamedComplex128
+	B12 NamedString
+}
+
+// NOTE(review): A3/A5 below key maps and sets by a struct type, so KeyScalars
+// must stay key-compatible (see comment on KeyScalars).
+
+type ScalarsArray [2]Scalars
+
+type Composites struct {
+	A0 Scalars
+	A1 ScalarsArray
+	A2 []Scalars
+	A3 set[KeyScalars]
+	A4 map[string]Scalars
+	A5 map[KeyScalars][]map[string]complex128
+}
+
+type CompositesArray [2]Composites
+
+type CompComp struct {
+	A0 Composites
+	A1 CompositesArray
+	A2 []Composites
+	A3 map[string]Composites
+	A4 map[KeyScalars][]map[string]Composites
+}
+
+// NestedArgs is defined before Args; that's allowed in regular Go, and also
+// allowed in our vdl files.  The compiler will re-order dependent types to ease
+// code generation in other languages.
+type NestedArgs struct {
+	Args Args
+}
+
+// Args will be reordered to show up before NestedArgs in the generated output.
+type Args struct {
+	A int32
+	B int32
+}
+
+// NOTE(review): this const block exercises typed and composite constants, plus
+// out-of-order references — SixSquared reads Six before Six is defined.
+
+const (
+	Cbool = true
+	Cbyte = byte(1)
+	Cint32 = int32(2)
+	Cint64 = int64(3)
+	Cuint32 = uint32(4)
+	Cuint64 = uint64(5)
+	Cfloat32 = float32(6)
+	Cfloat64 = float64(7)
+	CNamedBool = NamedBool(true)
+	CNamedStruct = NamedStruct{A:true, B: "test",}
+	Ccomplex64 = complex64(8+9i)
+	Ccomplex128 = complex128(10+11i)
+	Cstring = "foo"
+	Cenum  = NamedEnum.A
+	Cunion = NamedUnion{A: true}
+	Carray = NamedArray{true, false}
+	Clist  = []int32{1, 2, 3}
+	Cset   = set[int32]{1, 2, 3}
+	cmap   = map[int32]string{1: "A", 2: "B", 3: "C"} // deliberately unexported, like notExported below
+	Cargs  = Args{1, 2}
+
+	CScalars = Scalars{
+		A0: true,
+		A1: 1,
+		A2: 2,
+		A3: 3,
+		A4: 4,
+		A5: 5,
+		A6: 6,
+		A7: 7,
+		A8: 8,
+		A9: 9,
+		A10: 10,
+		A11: 11,
+		A12: "abc",
+		A14: false,
+		A15: typeobject(bool),
+
+		B0: true,
+		B1: 1,
+		B2: 2,
+		B3: 3,
+		B4: 4,
+		B5: 5,
+		B6: 6,
+		B7: 7,
+		B8: 8,
+		B9: 9,
+		B10: 10,
+		B11: 11,
+		B12: "abc",
+		B13: NamedEnum.B,
+		B14: NamedUnion{C: 123},
+	}
+
+	True = true
+	Foo = "foo"
+	Five = int32(5)
+	SixSquared = Six*Six
+	FiveSquared = Five*Five
+	Six = uint64(6) // defined after first use; vdl consts may be referenced before definition
+
+	CTObool       = typeobject(bool)
+	CTOstring     = typeobject(string)
+	CTObytes      = typeobject([]byte)
+	CTObyte       = typeobject(byte)
+	CTOuint16     = typeobject(uint16)
+	CTOint16      = typeobject(int16)
+	CTOfloat32    = typeobject(float32)
+	CTOcomplex64  = typeobject(complex64)
+	CTOenum       = typeobject(NamedEnum)
+	CTOArray      = typeobject(NamedArray)
+	CTOList       = typeobject([]string)
+	CTOSet        = typeobject(set[string])
+	CTOMap        = typeobject(map[string]int64)
+	CTOStruct     = typeobject(Scalars)
+	CTOUnion      = typeobject(NamedUnion)
+	CTOTypeObject = typeobject(typeobject)
+	CTOAny        = typeobject(any)
+)
+
+// NOTE(review): MethodA3's trailing {"tag", Six} are method tags that flow into
+// the generated MethodDesc; stream<_, Scalars> is recv-only on the client side,
+// stream<int32, string> is bidirectional (see the generated Call types).
+
+type ServiceA interface {
+	MethodA1() error
+	MethodA2(a int32, b string) (s string | error)
+	MethodA3(a int32) stream<_, Scalars> (s string | error) {"tag", Six}
+	MethodA4(a int32) stream<int32, string> error
+}
+
+type ServiceB interface {
+	ServiceA
+	MethodB1(a Scalars, b Composites) (c CompComp | error)
+}
+
+// Error definitions, which allow stable error-checking across different address
+// spaces.
+error (
+	NoParams1() {"en":"en msg"}
+	NoParams2() {RetryRefetch, "en":"en msg", "fr":"fr msg"} // RetryRefetch becomes the verror retry action
+
+	WithParams1(x string, y int32) {"en":"en x={x} y={y}"}
+	WithParams2(x string, y int32) {
+		RetryRefetch,
+		"en":"en x={x} y={y}",
+		"fr":"fr y={y} x={x}",
+	}
+
+	notExported(x string, y int32) {"en":"en x={x} y={y}"} // unexported: generator emits errNotExported/newErrNotExported
+)
diff --git a/lib/vdl/testdata/base/base.vdl.go b/lib/vdl/testdata/base/base.vdl.go
new file mode 100644
index 0000000..9ee20cb
--- /dev/null
+++ b/lib/vdl/testdata/base/base.vdl.go
@@ -0,0 +1,1215 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: base.vdl
+
+// Package base is a simple single-file test of vdl functionality.
+package base
+
+import (
+	// VDL system imports
+	"fmt"
+	"io"
+	"v.io/v23"
+	"v.io/v23/context"
+	"v.io/v23/i18n"
+	"v.io/v23/ipc"
+	"v.io/v23/vdl"
+	"v.io/v23/verror"
+)
+
+// Each named scalar below carries a __VDLReflect method with no behavior; its
+// struct-tag argument records the type's VDL name, which the vdl package reads
+// via reflection when the type is registered in init.
+
+type NamedBool bool
+
+func (NamedBool) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedBool"
+}) {
+}
+
+type NamedByte byte
+
+func (NamedByte) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedByte"
+}) {
+}
+
+type NamedUint16 uint16
+
+func (NamedUint16) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedUint16"
+}) {
+}
+
+type NamedUint32 uint32
+
+func (NamedUint32) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedUint32"
+}) {
+}
+
+type NamedUint64 uint64
+
+func (NamedUint64) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedUint64"
+}) {
+}
+
+type NamedInt16 int16
+
+func (NamedInt16) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedInt16"
+}) {
+}
+
+type NamedInt32 int32
+
+func (NamedInt32) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedInt32"
+}) {
+}
+
+type NamedInt64 int64
+
+func (NamedInt64) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedInt64"
+}) {
+}
+
+type NamedFloat32 float32
+
+func (NamedFloat32) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedFloat32"
+}) {
+}
+
+type NamedFloat64 float64
+
+func (NamedFloat64) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedFloat64"
+}) {
+}
+
+type NamedComplex64 complex64
+
+func (NamedComplex64) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedComplex64"
+}) {
+}
+
+type NamedComplex128 complex128
+
+func (NamedComplex128) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedComplex128"
+}) {
+}
+
+type NamedString string
+
+func (NamedString) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedString"
+}) {
+}
+
+type NamedEnum int
+
+const (
+	NamedEnumA NamedEnum = iota
+	NamedEnumB
+	NamedEnumC
+)
+
+// NamedEnumAll holds all labels for NamedEnum.
+var NamedEnumAll = []NamedEnum{NamedEnumA, NamedEnumB, NamedEnumC}
+
+// NamedEnumFromString creates a NamedEnum from a string label.
+func NamedEnumFromString(label string) (x NamedEnum, err error) {
+	err = x.Set(label)
+	return
+}
+
+// Set assigns label to x.
+// Both the canonical upper-case label and its lower-case form are accepted.
+func (x *NamedEnum) Set(label string) error {
+	switch label {
+	case "A", "a":
+		*x = NamedEnumA
+		return nil
+	case "B", "b":
+		*x = NamedEnumB
+		return nil
+	case "C", "c":
+		*x = NamedEnumC
+		return nil
+	}
+	// Leave x at an out-of-range value so a failed Set is detectable.
+	*x = -1
+	return fmt.Errorf("unknown label %q in base.NamedEnum", label)
+}
+
+// String returns the string label of x.
+// An out-of-range value yields the empty string.
+func (x NamedEnum) String() string {
+	switch x {
+	case NamedEnumA:
+		return "A"
+	case NamedEnumB:
+		return "B"
+	case NamedEnumC:
+		return "C"
+	}
+	return ""
+}
+
+func (NamedEnum) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedEnum"
+	Enum struct{ A, B, C string }
+}) {
+}
+
+type NamedArray [2]bool
+
+func (NamedArray) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedArray"
+}) {
+}
+
+type NamedList []uint32
+
+func (NamedList) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedList"
+}) {
+}
+
+// VDL set[string] lowers to a Go map with empty-struct values.
+
+type NamedSet map[string]struct{}
+
+func (NamedSet) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedSet"
+}) {
+}
+
+type NamedMap map[string]float32
+
+func (NamedMap) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedMap"
+}) {
+}
+
+type NamedStruct struct {
+	A bool
+	B string
+	C int32
+}
+
+func (NamedStruct) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NamedStruct"
+}) {
+}
+
+// A VDL union lowers to an interface plus one single-field wrapper struct per
+// union field; Index/Name/Interface identify and unwrap the active field.
+
+type (
+	// NamedUnion represents any single field of the NamedUnion union type.
+	NamedUnion interface {
+		// Index returns the field index.
+		Index() int
+		// Interface returns the field value as an interface.
+		Interface() interface{}
+		// Name returns the field name.
+		Name() string
+		// __VDLReflect describes the NamedUnion union type.
+		__VDLReflect(__NamedUnionReflect)
+	}
+	// NamedUnionA represents field A of the NamedUnion union type.
+	NamedUnionA struct{ Value bool }
+	// NamedUnionB represents field B of the NamedUnion union type.
+	NamedUnionB struct{ Value string }
+	// NamedUnionC represents field C of the NamedUnion union type.
+	NamedUnionC struct{ Value int32 }
+	// __NamedUnionReflect describes the NamedUnion union type.
+	__NamedUnionReflect struct {
+		Name  string "v.io/v23/vdl/testdata/base.NamedUnion"
+		Type  NamedUnion
+		Union struct {
+			A NamedUnionA
+			B NamedUnionB
+			C NamedUnionC
+		}
+	}
+)
+
+func (x NamedUnionA) Index() int                       { return 0 }
+func (x NamedUnionA) Interface() interface{}           { return x.Value }
+func (x NamedUnionA) Name() string                     { return "A" }
+func (x NamedUnionA) __VDLReflect(__NamedUnionReflect) {}
+
+func (x NamedUnionB) Index() int                       { return 1 }
+func (x NamedUnionB) Interface() interface{}           { return x.Value }
+func (x NamedUnionB) Name() string                     { return "B" }
+func (x NamedUnionB) __VDLReflect(__NamedUnionReflect) {}
+
+func (x NamedUnionC) Index() int                       { return 2 }
+func (x NamedUnionC) Interface() interface{}           { return x.Value }
+func (x NamedUnionC) Name() string                     { return "C" }
+func (x NamedUnionC) __VDLReflect(__NamedUnionReflect) {}
+
+// VDL any/typeobject fields arrive in Go as *vdl.Value and *vdl.Type (A14/A15).
+
+type Scalars struct {
+	A0  bool
+	A1  byte
+	A2  uint16
+	A3  uint32
+	A4  uint64
+	A5  int16
+	A6  int32
+	A7  int64
+	A8  float32
+	A9  float64
+	A10 complex64
+	A11 complex128
+	A12 string
+	A13 error
+	A14 *vdl.Value
+	A15 *vdl.Type
+	B0  NamedBool
+	B1  NamedByte
+	B2  NamedUint16
+	B3  NamedUint32
+	B4  NamedUint64
+	B5  NamedInt16
+	B6  NamedInt32
+	B7  NamedInt64
+	B8  NamedFloat32
+	B9  NamedFloat64
+	B10 NamedComplex64
+	B11 NamedComplex128
+	B12 NamedString
+	B13 NamedEnum
+	B14 NamedUnion
+}
+
+func (Scalars) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.Scalars"
+}) {
+}
+
+// These are all scalars that may be used as map or set keys.
+type KeyScalars struct {
+	A0  bool
+	A1  byte
+	A2  uint16
+	A3  uint32
+	A4  uint64
+	A5  int16
+	A6  int32
+	A7  int64
+	A8  float32
+	A9  float64
+	A10 complex64
+	A11 complex128
+	A12 string
+	B0  NamedBool
+	B1  NamedByte
+	B2  NamedUint16
+	B3  NamedUint32
+	B4  NamedUint64
+	B5  NamedInt16
+	B6  NamedInt32
+	B7  NamedInt64
+	B8  NamedFloat32
+	B9  NamedFloat64
+	B10 NamedComplex64
+	B11 NamedComplex128
+	B12 NamedString
+}
+
+func (KeyScalars) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.KeyScalars"
+}) {
+}
+
+type ScalarsArray [2]Scalars
+
+func (ScalarsArray) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.ScalarsArray"
+}) {
+}
+
+type Composites struct {
+	A0 Scalars
+	A1 ScalarsArray
+	A2 []Scalars
+	A3 map[KeyScalars]struct{}
+	A4 map[string]Scalars
+	A5 map[KeyScalars][]map[string]complex128
+}
+
+func (Composites) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.Composites"
+}) {
+}
+
+type CompositesArray [2]Composites
+
+func (CompositesArray) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.CompositesArray"
+}) {
+}
+
+type CompComp struct {
+	A0 Composites
+	A1 CompositesArray
+	A2 []Composites
+	A3 map[string]Composites
+	A4 map[KeyScalars][]map[string]Composites
+}
+
+func (CompComp) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.CompComp"
+}) {
+}
+
+// NestedArgs is defined before Args; that's allowed in regular Go, and also
+// allowed in our vdl files.  The compiler will re-order dependent types to ease
+// code generation in other languages.
+type NestedArgs struct {
+	Args Args
+}
+
+func (NestedArgs) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.NestedArgs"
+}) {
+}
+
+// Args will be reordered to show up before NestedArgs in the generated output.
+type Args struct {
+	A int32
+	B int32
+}
+
+func (Args) __VDLReflect(struct {
+	Name string "v.io/v23/vdl/testdata/base.Args"
+}) {
+}
+
+// init registers every named type with the vdl runtime using nil exemplar
+// pointers; the runtime reads each type's __VDLReflect metadata via reflection.
+func init() {
+	vdl.Register((*NamedBool)(nil))
+	vdl.Register((*NamedByte)(nil))
+	vdl.Register((*NamedUint16)(nil))
+	vdl.Register((*NamedUint32)(nil))
+	vdl.Register((*NamedUint64)(nil))
+	vdl.Register((*NamedInt16)(nil))
+	vdl.Register((*NamedInt32)(nil))
+	vdl.Register((*NamedInt64)(nil))
+	vdl.Register((*NamedFloat32)(nil))
+	vdl.Register((*NamedFloat64)(nil))
+	vdl.Register((*NamedComplex64)(nil))
+	vdl.Register((*NamedComplex128)(nil))
+	vdl.Register((*NamedString)(nil))
+	vdl.Register((*NamedEnum)(nil))
+	vdl.Register((*NamedArray)(nil))
+	vdl.Register((*NamedList)(nil))
+	vdl.Register((*NamedSet)(nil))
+	vdl.Register((*NamedMap)(nil))
+	vdl.Register((*NamedStruct)(nil))
+	vdl.Register((*NamedUnion)(nil))
+	vdl.Register((*Scalars)(nil))
+	vdl.Register((*KeyScalars)(nil))
+	vdl.Register((*ScalarsArray)(nil))
+	vdl.Register((*Composites)(nil))
+	vdl.Register((*CompositesArray)(nil))
+	vdl.Register((*CompComp)(nil))
+	vdl.Register((*NestedArgs)(nil))
+	vdl.Register((*Args)(nil))
+}
+
+// VDL constants that fit Go's const model stay consts; composite, union, and
+// typeobject constants (not const-able in Go) are emitted as package vars.
+
+const Cbool = true
+
+const Cbyte = byte(1)
+
+const Cint32 = int32(2)
+
+const Cint64 = int64(3)
+
+const Cuint32 = uint32(4)
+
+const Cuint64 = uint64(5)
+
+const Cfloat32 = float32(6)
+
+const Cfloat64 = float64(7)
+
+const CNamedBool = NamedBool(true)
+
+var CNamedStruct = NamedStruct{
+	A: true,
+	B: "test",
+}
+
+const Ccomplex64 = complex64(8 + 9i)
+
+const Ccomplex128 = complex128(10 + 11i)
+
+const Cstring = "foo"
+
+const Cenum = NamedEnumA
+
+var Cunion = NamedUnion(NamedUnionA{true})
+
+var Carray = NamedArray{
+	true,
+	false,
+}
+
+var Clist = []int32{
+	1,
+	2,
+	3,
+}
+
+var Cset = map[int32]struct{}{
+	1: struct{}{},
+	2: struct{}{},
+	3: struct{}{},
+}
+
+var cmap = map[int32]string{
+	1: "A",
+	2: "B",
+	3: "C",
+}
+
+var Cargs = Args{
+	A: 1,
+	B: 2,
+}
+
+var CScalars = Scalars{
+	A0:  true,
+	A1:  1,
+	A2:  2,
+	A3:  3,
+	A4:  4,
+	A5:  5,
+	A6:  6,
+	A7:  7,
+	A8:  8,
+	A9:  9,
+	A10: 10,
+	A11: 11,
+	A12: "abc",
+	A14: vdl.ValueOf(false),
+	A15: vdl.TypeOf(false),
+	B0:  true,
+	B1:  1,
+	B2:  2,
+	B3:  3,
+	B4:  4,
+	B5:  5,
+	B6:  6,
+	B7:  7,
+	B8:  8,
+	B9:  9,
+	B10: 10,
+	B11: 11,
+	B12: "abc",
+	B13: NamedEnumB,
+	B14: NamedUnionC{int32(123)},
+}
+
+const True = true
+
+const Foo = "foo"
+
+const Five = int32(5)
+
+const Six = uint64(6)
+
+// SixSquared/FiveSquared were folded to literals by the vdl const evaluator.
+const SixSquared = uint64(36)
+
+const FiveSquared = int32(25)
+
+var CTObool = vdl.TypeOf(false)
+
+var CTOstring = vdl.TypeOf("")
+
+var CTObytes = vdl.TypeOf([]byte(nil))
+
+var CTObyte = vdl.TypeOf(byte(0))
+
+var CTOuint16 = vdl.TypeOf(uint16(0))
+
+var CTOint16 = vdl.TypeOf(int16(0))
+
+var CTOfloat32 = vdl.TypeOf(float32(0))
+
+var CTOcomplex64 = vdl.TypeOf(complex64(0))
+
+var CTOenum = vdl.TypeOf(NamedEnumA)
+
+var CTOArray = vdl.TypeOf(NamedArray{})
+
+var CTOList = vdl.TypeOf([]string(nil))
+
+var CTOSet = vdl.TypeOf(map[string]struct{}(nil))
+
+var CTOMap = vdl.TypeOf(map[string]int64(nil))
+
+// NOTE(review): the exemplar fills A15/B14 with non-nil values, presumably so
+// vdl.TypeOf can derive the full struct type — confirm against vdl.TypeOf docs.
+var CTOStruct = vdl.TypeOf(Scalars{
+	A15: vdl.AnyType,
+	B14: NamedUnionA{false},
+})
+
+var CTOUnion = vdl.TypeOf(NamedUnion(NamedUnionA{false}))
+
+var CTOTypeObject = vdl.TypeObjectType
+
+var CTOAny = vdl.AnyType
+
+// Stable error IDs generated from the error() block in base.vdl; parameters
+// {3}/{4} in the format strings correspond to the x/y error parameters.
+var (
+	ErrNoParams1   = verror.Register("v.io/v23/vdl/testdata/base.NoParams1", verror.NoRetry, "{1:}{2:} en msg")
+	ErrNoParams2   = verror.Register("v.io/v23/vdl/testdata/base.NoParams2", verror.RetryRefetch, "{1:}{2:} en msg")
+	ErrWithParams1 = verror.Register("v.io/v23/vdl/testdata/base.WithParams1", verror.NoRetry, "{1:}{2:} en x={3} y={4}")
+	ErrWithParams2 = verror.Register("v.io/v23/vdl/testdata/base.WithParams2", verror.RetryRefetch, "{1:}{2:} en x={3} y={4}")
+	errNotExported = verror.Register("v.io/v23/vdl/testdata/base.notExported", verror.NoRetry, "{1:}{2:} en x={3} y={4}")
+)
+
+// init seeds the i18n message catalog with the per-language ("en"/"fr") format
+// strings declared for each error in base.vdl.
+func init() {
+	i18n.Cat().SetWithBase(i18n.LangID("en"), i18n.MsgID(ErrNoParams1.ID), "{1:}{2:} en msg")
+	i18n.Cat().SetWithBase(i18n.LangID("en"), i18n.MsgID(ErrNoParams2.ID), "{1:}{2:} en msg")
+	i18n.Cat().SetWithBase(i18n.LangID("fr"), i18n.MsgID(ErrNoParams2.ID), "{1:}{2:} fr msg")
+	i18n.Cat().SetWithBase(i18n.LangID("en"), i18n.MsgID(ErrWithParams1.ID), "{1:}{2:} en x={3} y={4}")
+	i18n.Cat().SetWithBase(i18n.LangID("en"), i18n.MsgID(ErrWithParams2.ID), "{1:}{2:} en x={3} y={4}")
+	i18n.Cat().SetWithBase(i18n.LangID("fr"), i18n.MsgID(ErrWithParams2.ID), "{1:}{2:} fr y={4} x={3}")
+	i18n.Cat().SetWithBase(i18n.LangID("en"), i18n.MsgID(errNotExported.ID), "{1:}{2:} en x={3} y={4}")
+}
+
+// Each constructor passes its parameters positionally to verror.New; they fill
+// the {3}, {4}, ... slots of the registered format strings.
+
+// NewErrNoParams1 returns an error with the ErrNoParams1 ID.
+func NewErrNoParams1(ctx *context.T) error {
+	return verror.New(ErrNoParams1, ctx)
+}
+
+// NewErrNoParams2 returns an error with the ErrNoParams2 ID.
+func NewErrNoParams2(ctx *context.T) error {
+	return verror.New(ErrNoParams2, ctx)
+}
+
+// NewErrWithParams1 returns an error with the ErrWithParams1 ID.
+func NewErrWithParams1(ctx *context.T, x string, y int32) error {
+	return verror.New(ErrWithParams1, ctx, x, y)
+}
+
+// NewErrWithParams2 returns an error with the ErrWithParams2 ID.
+func NewErrWithParams2(ctx *context.T, x string, y int32) error {
+	return verror.New(ErrWithParams2, ctx, x, y)
+}
+
+// newErrNotExported returns an error with the errNotExported ID.
+func newErrNotExported(ctx *context.T, x string, y int32) error {
+	return verror.New(errNotExported, ctx, x, y)
+}
+
+// ServiceAClientMethods is the client interface
+// containing ServiceA methods.
+type ServiceAClientMethods interface {
+	MethodA1(*context.T, ...ipc.CallOpt) error
+	MethodA2(ctx *context.T, a int32, b string, opts ...ipc.CallOpt) (s string, err error)
+	MethodA3(ctx *context.T, a int32, opts ...ipc.CallOpt) (ServiceAMethodA3Call, error)
+	MethodA4(ctx *context.T, a int32, opts ...ipc.CallOpt) (ServiceAMethodA4Call, error)
+}
+
+// ServiceAClientStub adds universal methods to ServiceAClientMethods.
+type ServiceAClientStub interface {
+	ServiceAClientMethods
+	ipc.UniversalServiceMethods
+}
+
+// ServiceAClient returns a client stub for ServiceA.
+// An ipc.Client passed via opts overrides the runtime client; other BindOpts
+// are ignored here.
+func ServiceAClient(name string, opts ...ipc.BindOpt) ServiceAClientStub {
+	var client ipc.Client
+	for _, opt := range opts {
+		if clientOpt, ok := opt.(ipc.Client); ok {
+			client = clientOpt
+		}
+	}
+	return implServiceAClientStub{name, client}
+}
+
+type implServiceAClientStub struct {
+	name   string
+	client ipc.Client
+}
+
+// c returns the explicitly bound client if set, else the runtime client on ctx.
+func (c implServiceAClientStub) c(ctx *context.T) ipc.Client {
+	if c.client != nil {
+		return c.client
+	}
+	return v23.GetClient(ctx)
+}
+
+func (c implServiceAClientStub) MethodA1(ctx *context.T, opts ...ipc.CallOpt) (err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "MethodA1", nil, opts...); err != nil {
+		return
+	}
+	err = call.Finish()
+	return
+}
+
+func (c implServiceAClientStub) MethodA2(ctx *context.T, i0 int32, i1 string, opts ...ipc.CallOpt) (o0 string, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "MethodA2", []interface{}{i0, i1}, opts...); err != nil {
+		return
+	}
+	err = call.Finish(&o0)
+	return
+}
+
+// MethodA3 and MethodA4 are streaming: they return immediately after StartCall,
+// handing the caller a Call object to drive the stream and Finish.
+func (c implServiceAClientStub) MethodA3(ctx *context.T, i0 int32, opts ...ipc.CallOpt) (ocall ServiceAMethodA3Call, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "MethodA3", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	ocall = &implServiceAMethodA3Call{Call: call}
+	return
+}
+
+func (c implServiceAClientStub) MethodA4(ctx *context.T, i0 int32, opts ...ipc.CallOpt) (ocall ServiceAMethodA4Call, err error) {
+	var call ipc.Call
+	if call, err = c.c(ctx).StartCall(ctx, c.name, "MethodA4", []interface{}{i0}, opts...); err != nil {
+		return
+	}
+	ocall = &implServiceAMethodA4Call{Call: call}
+	return
+}
+
+// ServiceAMethodA3ClientStream is the client stream for ServiceA.MethodA3.
+type ServiceAMethodA3ClientStream interface {
+	// RecvStream returns the receiver side of the ServiceA.MethodA3 client stream.
+	RecvStream() interface {
+		// Advance stages an item so that it may be retrieved via Value.  Returns
+		// true iff there is an item to retrieve.  Advance must be called before
+		// Value is called.  May block if an item is not available.
+		Advance() bool
+		// Value returns the item that was staged by Advance.  May panic if Advance
+		// returned false or was not called.  Never blocks.
+		Value() Scalars
+		// Err returns any error encountered by Advance.  Never blocks.
+		Err() error
+	}
+}
+
+// ServiceAMethodA3Call represents the call returned from ServiceA.MethodA3.
+type ServiceAMethodA3Call interface {
+	ServiceAMethodA3ClientStream
+	// Finish blocks until the server is done, and returns the positional return
+	// values for call.
+	//
+	// Finish returns immediately if the call has been canceled; depending on the
+	// timing the output could either be an error signaling cancelation, or the
+	// valid positional return values from the server.
+	//
+	// Calling Finish is mandatory for releasing stream resources, unless the call
+	// has been canceled or any of the other methods return an error.  Finish should
+	// be called at most once.
+	Finish() (s string, err error)
+}
+
+type implServiceAMethodA3Call struct {
+	ipc.Call
+	valRecv Scalars
+	errRecv error
+}
+
+func (c *implServiceAMethodA3Call) RecvStream() interface {
+	Advance() bool
+	Value() Scalars
+	Err() error
+} {
+	return implServiceAMethodA3CallRecv{c}
+}
+
+type implServiceAMethodA3CallRecv struct {
+	c *implServiceAMethodA3Call
+}
+
+// Advance resets the staging slot to the zero value before each Recv so stale
+// fields from the previous item cannot leak into the next.
+func (c implServiceAMethodA3CallRecv) Advance() bool {
+	c.c.valRecv = Scalars{}
+	c.c.errRecv = c.c.Recv(&c.c.valRecv)
+	return c.c.errRecv == nil
+}
+func (c implServiceAMethodA3CallRecv) Value() Scalars {
+	return c.c.valRecv
+}
+
+// Err maps io.EOF (normal end of stream) to nil; any other Recv error is real.
+func (c implServiceAMethodA3CallRecv) Err() error {
+	if c.c.errRecv == io.EOF {
+		return nil
+	}
+	return c.c.errRecv
+}
+func (c *implServiceAMethodA3Call) Finish() (o0 string, err error) {
+	err = c.Call.Finish(&o0)
+	return
+}
+
+// ServiceAMethodA4ClientStream is the client stream for ServiceA.MethodA4.
+type ServiceAMethodA4ClientStream interface {
+	// RecvStream returns the receiver side of the ServiceA.MethodA4 client stream.
+	RecvStream() interface {
+		// Advance stages an item so that it may be retrieved via Value.  Returns
+		// true iff there is an item to retrieve.  Advance must be called before
+		// Value is called.  May block if an item is not available.
+		Advance() bool
+		// Value returns the item that was staged by Advance.  May panic if Advance
+		// returned false or was not called.  Never blocks.
+		Value() string
+		// Err returns any error encountered by Advance.  Never blocks.
+		Err() error
+	}
+	// SendStream returns the send side of the ServiceA.MethodA4 client stream.
+	SendStream() interface {
+		// Send places the item onto the output stream.  Returns errors
+		// encountered while sending, or if Send is called after Close or
+		// the stream has been canceled.  Blocks if there is no buffer
+		// space; will unblock when buffer space is available or after
+		// the stream has been canceled.
+		Send(item int32) error
+		// Close indicates to the server that no more items will be sent;
+		// server Recv calls will receive io.EOF after all sent items.
+		// This is an optional call - e.g. a client might call Close if it
+		// needs to continue receiving items from the server after it's
+		// done sending.  Returns errors encountered while closing, or if
+		// Close is called after the stream has been canceled.  Like Send,
+		// blocks if there is no buffer space available.
+		Close() error
+	}
+}
+
+// ServiceAMethodA4Call represents the call returned from ServiceA.MethodA4.
+type ServiceAMethodA4Call interface {
+	ServiceAMethodA4ClientStream
+	// Finish performs the equivalent of SendStream().Close, then blocks until
+	// the server is done, and returns the positional return values for the call.
+	//
+	// Finish returns immediately if the call has been canceled; depending on the
+	// timing the output could either be an error signaling cancelation, or the
+	// valid positional return values from the server.
+	//
+	// Calling Finish is mandatory for releasing stream resources, unless the call
+	// has been canceled or any of the other methods return an error.  Finish should
+	// be called at most once.
+	Finish() error
+}
+
+type implServiceAMethodA4Call struct {
+	ipc.Call
+	valRecv string
+	errRecv error
+}
+
+func (c *implServiceAMethodA4Call) RecvStream() interface {
+	Advance() bool
+	Value() string
+	Err() error
+} {
+	return implServiceAMethodA4CallRecv{c}
+}
+
+type implServiceAMethodA4CallRecv struct {
+	c *implServiceAMethodA4Call
+}
+
+func (c implServiceAMethodA4CallRecv) Advance() bool {
+	c.c.errRecv = c.c.Recv(&c.c.valRecv)
+	return c.c.errRecv == nil
+}
+func (c implServiceAMethodA4CallRecv) Value() string {
+	return c.c.valRecv
+}
+
+// Err maps io.EOF (normal end of stream) to nil; any other Recv error is real.
+func (c implServiceAMethodA4CallRecv) Err() error {
+	if c.c.errRecv == io.EOF {
+		return nil
+	}
+	return c.c.errRecv
+}
+func (c *implServiceAMethodA4Call) SendStream() interface {
+	Send(item int32) error
+	Close() error
+} {
+	return implServiceAMethodA4CallSend{c}
+}
+
+type implServiceAMethodA4CallSend struct {
+	c *implServiceAMethodA4Call
+}
+
+func (c implServiceAMethodA4CallSend) Send(item int32) error {
+	return c.c.Send(item)
+}
+func (c implServiceAMethodA4CallSend) Close() error {
+	return c.c.CloseSend()
+}
+func (c *implServiceAMethodA4Call) Finish() (err error) {
+	err = c.Call.Finish()
+	return
+}
+
+// ServiceAServerMethods is the interface a server writer
+// implements for ServiceA.
+type ServiceAServerMethods interface {
+	MethodA1(ipc.ServerContext) error
+	MethodA2(ctx ipc.ServerContext, a int32, b string) (s string, err error)
+	MethodA3(ctx ServiceAMethodA3Context, a int32) (s string, err error)
+	MethodA4(ctx ServiceAMethodA4Context, a int32) error
+}
+
+// ServiceAServerStubMethods is the server interface containing
+// ServiceA methods, as expected by ipc.Server.
+// The only difference between this interface and ServiceAServerMethods
+// is the streaming methods.
+type ServiceAServerStubMethods interface {
+	MethodA1(ipc.ServerContext) error
+	MethodA2(ctx ipc.ServerContext, a int32, b string) (s string, err error)
+	MethodA3(ctx *ServiceAMethodA3ContextStub, a int32) (s string, err error)
+	MethodA4(ctx *ServiceAMethodA4ContextStub, a int32) error
+}
+
+// ServiceAServerStub adds universal methods to ServiceAServerStubMethods.
+type ServiceAServerStub interface {
+	ServiceAServerStubMethods
+	// Describe the ServiceA interfaces.
+	Describe__() []ipc.InterfaceDesc
+}
+
+// ServiceAServer returns a server stub for ServiceA.
+// It converts an implementation of ServiceAServerMethods into
+// an object that may be used by ipc.Server.
+func ServiceAServer(impl ServiceAServerMethods) ServiceAServerStub {
+	stub := implServiceAServerStub{
+		impl: impl,
+	}
+	// Initialize GlobState; always check the stub itself first, to handle the
+	// case where the user has the Glob method defined in their VDL source.
+	if gs := ipc.NewGlobState(stub); gs != nil {
+		stub.gs = gs
+	} else if gs := ipc.NewGlobState(impl); gs != nil {
+		stub.gs = gs
+	}
+	return stub
+}
+
+type implServiceAServerStub struct {
+	impl ServiceAServerMethods
+	gs   *ipc.GlobState
+}
+
+// The stub methods below delegate straight to the user implementation; the
+// streaming ones pass the concrete *ContextStub, which satisfies the
+// corresponding ServiceAMethodA*Context interface.
+func (s implServiceAServerStub) MethodA1(ctx ipc.ServerContext) error {
+	return s.impl.MethodA1(ctx)
+}
+
+func (s implServiceAServerStub) MethodA2(ctx ipc.ServerContext, i0 int32, i1 string) (string, error) {
+	return s.impl.MethodA2(ctx, i0, i1)
+}
+
+func (s implServiceAServerStub) MethodA3(ctx *ServiceAMethodA3ContextStub, i0 int32) (string, error) {
+	return s.impl.MethodA3(ctx, i0)
+}
+
+func (s implServiceAServerStub) MethodA4(ctx *ServiceAMethodA4ContextStub, i0 int32) error {
+	return s.impl.MethodA4(ctx, i0)
+}
+
+func (s implServiceAServerStub) Globber() *ipc.GlobState {
+	return s.gs
+}
+
+func (s implServiceAServerStub) Describe__() []ipc.InterfaceDesc {
+	return []ipc.InterfaceDesc{ServiceADesc}
+}
+
+// ServiceADesc describes the ServiceA interface.
+var ServiceADesc ipc.InterfaceDesc = descServiceA
+
+// descServiceA hides the desc to keep godoc clean.
+var descServiceA = ipc.InterfaceDesc{
+	Name:    "ServiceA",
+	PkgPath: "v.io/v23/vdl/testdata/base",
+	Methods: []ipc.MethodDesc{
+		{
+			Name: "MethodA1",
+		},
+		{
+			Name: "MethodA2",
+			InArgs: []ipc.ArgDesc{
+				{"a", ``}, // int32
+				{"b", ``}, // string
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"s", ``}, // string
+			},
+		},
+		{
+			Name: "MethodA3",
+			InArgs: []ipc.ArgDesc{
+				{"a", ``}, // int32
+			},
+			OutArgs: []ipc.ArgDesc{
+				{"s", ``}, // string
+			},
+			Tags: []*vdl.Value{vdl.ValueOf("tag"), vdl.ValueOf(uint64(6))},
+		},
+		{
+			Name: "MethodA4",
+			InArgs: []ipc.ArgDesc{
+				{"a", ``}, // int32
+			},
+		},
+	},
+}
+
// ServiceAMethodA3ServerStream is the server stream for ServiceA.MethodA3.
type ServiceAMethodA3ServerStream interface {
	// SendStream returns the send side of the ServiceA.MethodA3 server stream.
	SendStream() interface {
		// Send places the item onto the output stream.  Returns errors encountered
		// while sending.  Blocks if there is no buffer space; will unblock when
		// buffer space is available.
		Send(item Scalars) error
	}
}

// ServiceAMethodA3Context represents the context passed to ServiceA.MethodA3.
type ServiceAMethodA3Context interface {
	ipc.ServerContext
	ServiceAMethodA3ServerStream
}

// ServiceAMethodA3ContextStub is a wrapper that converts ipc.ServerCall into
// a typesafe stub that implements ServiceAMethodA3Context.
type ServiceAMethodA3ContextStub struct {
	ipc.ServerCall
}

// Init initializes ServiceAMethodA3ContextStub from ipc.ServerCall.
func (s *ServiceAMethodA3ContextStub) Init(call ipc.ServerCall) {
	s.ServerCall = call
}

// SendStream returns the send side of the ServiceA.MethodA3 server stream.
func (s *ServiceAMethodA3ContextStub) SendStream() interface {
	Send(item Scalars) error
} {
	return implServiceAMethodA3ContextSend{s}
}

// implServiceAMethodA3ContextSend adapts the embedded ServerCall's untyped
// Send onto the typesafe stream interface.
type implServiceAMethodA3ContextSend struct {
	s *ServiceAMethodA3ContextStub
}

func (s implServiceAMethodA3ContextSend) Send(item Scalars) error {
	return s.s.Send(item)
}

// ServiceAMethodA4ServerStream is the server stream for ServiceA.MethodA4.
type ServiceAMethodA4ServerStream interface {
	// RecvStream returns the receiver side of the ServiceA.MethodA4 server stream.
	RecvStream() interface {
		// Advance stages an item so that it may be retrieved via Value.  Returns
		// true iff there is an item to retrieve.  Advance must be called before
		// Value is called.  May block if an item is not available.
		Advance() bool
		// Value returns the item that was staged by Advance.  May panic if Advance
		// returned false or was not called.  Never blocks.
		Value() int32
		// Err returns any error encountered by Advance.  Never blocks.
		Err() error
	}
	// SendStream returns the send side of the ServiceA.MethodA4 server stream.
	SendStream() interface {
		// Send places the item onto the output stream.  Returns errors encountered
		// while sending.  Blocks if there is no buffer space; will unblock when
		// buffer space is available.
		Send(item string) error
	}
}

// ServiceAMethodA4Context represents the context passed to ServiceA.MethodA4.
type ServiceAMethodA4Context interface {
	ipc.ServerContext
	ServiceAMethodA4ServerStream
}

// ServiceAMethodA4ContextStub is a wrapper that converts ipc.ServerCall into
// a typesafe stub that implements ServiceAMethodA4Context.
type ServiceAMethodA4ContextStub struct {
	ipc.ServerCall
	// valRecv holds the item staged by the latest Advance; errRecv holds the
	// Recv error that Err reports.
	valRecv int32
	errRecv error
}

// Init initializes ServiceAMethodA4ContextStub from ipc.ServerCall.
func (s *ServiceAMethodA4ContextStub) Init(call ipc.ServerCall) {
	s.ServerCall = call
}

// RecvStream returns the receiver side of the ServiceA.MethodA4 server stream.
func (s *ServiceAMethodA4ContextStub) RecvStream() interface {
	Advance() bool
	Value() int32
	Err() error
} {
	return implServiceAMethodA4ContextRecv{s}
}

type implServiceAMethodA4ContextRecv struct {
	s *ServiceAMethodA4ContextStub
}

// Advance performs the (possibly blocking) Recv, staging the value and
// remembering the error for Err.
func (s implServiceAMethodA4ContextRecv) Advance() bool {
	s.s.errRecv = s.s.Recv(&s.s.valRecv)
	return s.s.errRecv == nil
}
func (s implServiceAMethodA4ContextRecv) Value() int32 {
	return s.s.valRecv
}
// Err reports the last Recv error, treating io.EOF as a clean end-of-stream.
func (s implServiceAMethodA4ContextRecv) Err() error {
	if s.s.errRecv == io.EOF {
		return nil
	}
	return s.s.errRecv
}

// SendStream returns the send side of the ServiceA.MethodA4 server stream.
func (s *ServiceAMethodA4ContextStub) SendStream() interface {
	Send(item string) error
} {
	return implServiceAMethodA4ContextSend{s}
}

type implServiceAMethodA4ContextSend struct {
	s *ServiceAMethodA4ContextStub
}

func (s implServiceAMethodA4ContextSend) Send(item string) error {
	return s.s.Send(item)
}
+
// ServiceBClientMethods is the client interface
// containing ServiceB methods.
type ServiceBClientMethods interface {
	ServiceAClientMethods
	MethodB1(ctx *context.T, a Scalars, b Composites, opts ...ipc.CallOpt) (c CompComp, err error)
}

// ServiceBClientStub adds universal methods to ServiceBClientMethods.
type ServiceBClientStub interface {
	ServiceBClientMethods
	ipc.UniversalServiceMethods
}

// ServiceBClient returns a client stub for ServiceB.
// An ipc.Client may be supplied via opts (the last one wins); otherwise the
// client is taken from the context at call time.
func ServiceBClient(name string, opts ...ipc.BindOpt) ServiceBClientStub {
	var client ipc.Client
	for _, opt := range opts {
		if clientOpt, ok := opt.(ipc.Client); ok {
			client = clientOpt
		}
	}
	return implServiceBClientStub{name, client, ServiceAClient(name, client)}
}

// implServiceBClientStub embeds the ServiceA client stub so the embedded
// interface's methods are inherited.
type implServiceBClientStub struct {
	name   string
	client ipc.Client

	ServiceAClientStub
}

// c returns the explicitly bound client if one was supplied to
// ServiceBClient, else the client from the context.
func (c implServiceBClientStub) c(ctx *context.T) ipc.Client {
	if c.client != nil {
		return c.client
	}
	return v23.GetClient(ctx)
}

func (c implServiceBClientStub) MethodB1(ctx *context.T, i0 Scalars, i1 Composites, opts ...ipc.CallOpt) (o0 CompComp, err error) {
	var call ipc.Call
	if call, err = c.c(ctx).StartCall(ctx, c.name, "MethodB1", []interface{}{i0, i1}, opts...); err != nil {
		return
	}
	err = call.Finish(&o0)
	return
}

// ServiceBServerMethods is the interface a server writer
// implements for ServiceB.
type ServiceBServerMethods interface {
	ServiceAServerMethods
	MethodB1(ctx ipc.ServerContext, a Scalars, b Composites) (c CompComp, err error)
}

// ServiceBServerStubMethods is the server interface containing
// ServiceB methods, as expected by ipc.Server.
// The only difference between this interface and ServiceBServerMethods
// is the streaming methods.
type ServiceBServerStubMethods interface {
	ServiceAServerStubMethods
	MethodB1(ctx ipc.ServerContext, a Scalars, b Composites) (c CompComp, err error)
}

// ServiceBServerStub adds universal methods to ServiceBServerStubMethods.
type ServiceBServerStub interface {
	ServiceBServerStubMethods
	// Describe the ServiceB interfaces.
	Describe__() []ipc.InterfaceDesc
}

// ServiceBServer returns a server stub for ServiceB.
// It converts an implementation of ServiceBServerMethods into
// an object that may be used by ipc.Server.
func ServiceBServer(impl ServiceBServerMethods) ServiceBServerStub {
	stub := implServiceBServerStub{
		impl:               impl,
		ServiceAServerStub: ServiceAServer(impl),
	}
	// Initialize GlobState; always check the stub itself first, to handle the
	// case where the user has the Glob method defined in their VDL source.
	if gs := ipc.NewGlobState(stub); gs != nil {
		stub.gs = gs
	} else if gs := ipc.NewGlobState(impl); gs != nil {
		stub.gs = gs
	}
	return stub
}

// implServiceBServerStub embeds the ServiceA server stub, inheriting the
// embedded interface's method wrappers.
type implServiceBServerStub struct {
	impl ServiceBServerMethods
	ServiceAServerStub
	gs *ipc.GlobState
}

func (s implServiceBServerStub) MethodB1(ctx ipc.ServerContext, i0 Scalars, i1 Composites) (CompComp, error) {
	return s.impl.MethodB1(ctx, i0, i1)
}

func (s implServiceBServerStub) Globber() *ipc.GlobState {
	return s.gs
}

// Describe__ lists ServiceB first, then the embedded ServiceA.
func (s implServiceBServerStub) Describe__() []ipc.InterfaceDesc {
	return []ipc.InterfaceDesc{ServiceBDesc, ServiceADesc}
}

// ServiceBDesc describes the ServiceB interface.
var ServiceBDesc ipc.InterfaceDesc = descServiceB

// descServiceB hides the desc to keep godoc clean.
var descServiceB = ipc.InterfaceDesc{
	Name:    "ServiceB",
	PkgPath: "v.io/v23/vdl/testdata/base",
	Embeds: []ipc.EmbedDesc{
		{"ServiceA", "v.io/v23/vdl/testdata/base", ``},
	},
	Methods: []ipc.MethodDesc{
		{
			Name: "MethodB1",
			InArgs: []ipc.ArgDesc{
				{"a", ``}, // Scalars
				{"b", ``}, // Composites
			},
			OutArgs: []ipc.ArgDesc{
				{"c", ``}, // CompComp
			},
		},
	},
}
diff --git a/lib/vdl/testdata/nativedep/nativedep.vdl b/lib/vdl/testdata/nativedep/nativedep.vdl
new file mode 100644
index 0000000..c1bc881
--- /dev/null
+++ b/lib/vdl/testdata/nativedep/nativedep.vdl
@@ -0,0 +1,11 @@
+package nativedep
+
+import "v.io/v23/vdl/testdata/nativetest"
+
// All aggregates every nativetest wire type that has a native Go mapping,
// to exercise import handling in the generated code.
type All struct {
	A nativetest.WireString
	B nativetest.WireMapStringInt
	C nativetest.WireTime
	D nativetest.WireSamePkg
	E nativetest.WireMultiImport
}
diff --git a/lib/vdl/testdata/nativedep/nativedep.vdl.go b/lib/vdl/testdata/nativedep/nativedep.vdl.go
new file mode 100644
index 0000000..0b8fee4
--- /dev/null
+++ b/lib/vdl/testdata/nativedep/nativedep.vdl.go
@@ -0,0 +1,30 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: nativedep.vdl
+
+package nativedep
+
+import (
+	// VDL system imports
+	"v.io/v23/vdl"
+
+	// VDL user imports
+	"time"
+	"v.io/v23/vdl/testdata/nativetest"
+)
+
// All is the generated Go form of the VDL All struct; each field uses the
// native type registered for its wire type in the nativetest package.
type All struct {
	A string
	B map[string]int
	C time.Time
	D nativetest.NativeSamePkg
	E map[nativetest.NativeSamePkg]time.Time
}

func (All) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativedep.All"
}) {
}

// init registers All so the vdl runtime can resolve it by name.
func init() {
	vdl.Register((*All)(nil))
}
diff --git a/lib/vdl/testdata/nativedep2/nativedep2.vdl b/lib/vdl/testdata/nativedep2/nativedep2.vdl
new file mode 100644
index 0000000..44f9f24
--- /dev/null
+++ b/lib/vdl/testdata/nativedep2/nativedep2.vdl
@@ -0,0 +1,14 @@
+package nativedep2
+
+// The purpose of this test is to ensure that the generated file gets the
+// imports right.  In particular, the generated file has no code dependencies on
+// nativetest, but should have two imports:
+//     "time"
+//   _ "v.io/v23/vdl/testdata/nativetest"
+//
+// The underscore dependency is added to ensure that nativetest.WireTime is
+// registered whenever this package is used, so that the WireTime<->time.Time
+// mapping is known by the vdl package.
+import "v.io/v23/vdl/testdata/nativetest"
+
+type MyTime nativetest.WireTime
diff --git a/lib/vdl/testdata/nativedep2/nativedep2.vdl.go b/lib/vdl/testdata/nativedep2/nativedep2.vdl.go
new file mode 100644
index 0000000..aaa59df
--- /dev/null
+++ b/lib/vdl/testdata/nativedep2/nativedep2.vdl.go
@@ -0,0 +1,24 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: nativedep2.vdl
+
+package nativedep2
+
+import (
+	// VDL system imports
+	"v.io/v23/vdl"
+
+	// VDL user imports
+	"time"
+	_ "v.io/v23/vdl/testdata/nativetest"
+)
+
// MyTime is defined in VDL in terms of nativetest.WireTime, whose registered
// native representation is time.Time; hence the generated native type here.
type MyTime time.Time

func (MyTime) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativedep2.MyTime"
}) {
}

func init() {
	vdl.Register((*MyTime)(nil))
}
diff --git a/lib/vdl/testdata/nativetest/nativetest.go b/lib/vdl/testdata/nativetest/nativetest.go
new file mode 100644
index 0000000..9968269
--- /dev/null
+++ b/lib/vdl/testdata/nativetest/nativetest.go
@@ -0,0 +1,30 @@
+package nativetest
+
+import (
+	"strconv"
+	"time"
+)
+
+func wireStringToNative(x WireString, native *string) error {
+	*native = strconv.Itoa(int(x))
+	return nil
+}
+func wireStringFromNative(x *WireString, native string) error {
+	v, err := strconv.Atoi(native)
+	*x = WireString(v)
+	return err
+}
+
+func wireMapStringIntToNative(WireMapStringInt, *map[string]int) error   { return nil }
+func wireMapStringIntFromNative(*WireMapStringInt, map[string]int) error { return nil }
+
+func wireTimeToNative(WireTime, *time.Time) error   { return nil }
+func wireTimeFromNative(*WireTime, time.Time) error { return nil }
+
+func wireSamePkgToNative(WireSamePkg, native *NativeSamePkg) error { return nil }
+func wireSamePkgFromNative(*WireSamePkg, NativeSamePkg) error      { return nil }
+
+func wireMultiImportToNative(WireMultiImport, *map[NativeSamePkg]time.Time) error   { return nil }
+func wireMultiImportFromNative(*WireMultiImport, map[NativeSamePkg]time.Time) error { return nil }
+
+type NativeSamePkg string
diff --git a/lib/vdl/testdata/nativetest/nativetest.vdl b/lib/vdl/testdata/nativetest/nativetest.vdl
new file mode 100644
index 0000000..1b396a9
--- /dev/null
+++ b/lib/vdl/testdata/nativetest/nativetest.vdl
@@ -0,0 +1,16 @@
+// Package nativetest tests a package with native type conversions.
+package nativetest
+
// Each Wire* type below gets a native mapping in this package's vdl.config;
// the wire representation is an int32 in every case.
type WireString int32
type WireMapStringInt int32
type WireTime int32
type WireSamePkg int32
type WireMultiImport int32

// WireAll aggregates all the wire types, to check struct field conversion.
type WireAll struct {
	A WireString
	B WireMapStringInt
	C WireTime
	D WireSamePkg
	E WireMultiImport
}
diff --git a/lib/vdl/testdata/nativetest/nativetest.vdl.go b/lib/vdl/testdata/nativetest/nativetest.vdl.go
new file mode 100644
index 0000000..1ed133d
--- /dev/null
+++ b/lib/vdl/testdata/nativetest/nativetest.vdl.go
@@ -0,0 +1,95 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: nativetest.vdl
+
+// Package nativetest tests a package with native type conversions.
+package nativetest
+
+import (
+	// VDL system imports
+	"v.io/v23/vdl"
+
+	// VDL user imports
+	"time"
+)
+
// The Wire* types below are the wire forms; init registers their native
// conversions (defined in nativetest.go) and the types themselves.
type WireString int32

func (WireString) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativetest.WireString"
}) {
}

type WireMapStringInt int32

func (WireMapStringInt) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativetest.WireMapStringInt"
}) {
}

type WireTime int32

func (WireTime) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativetest.WireTime"
}) {
}

type WireSamePkg int32

func (WireSamePkg) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativetest.WireSamePkg"
}) {
}

type WireMultiImport int32

func (WireMultiImport) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativetest.WireMultiImport"
}) {
}

// WireAll's fields use the native forms configured in vdl.config, not the
// wire forms declared in nativetest.vdl.
type WireAll struct {
	A string
	B map[string]int
	C time.Time
	D NativeSamePkg
	E map[NativeSamePkg]time.Time
}

func (WireAll) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativetest.WireAll"
}) {
}

func init() {
	vdl.RegisterNative(wireMapStringIntToNative, wireMapStringIntFromNative)
	vdl.RegisterNative(wireMultiImportToNative, wireMultiImportFromNative)
	vdl.RegisterNative(wireSamePkgToNative, wireSamePkgFromNative)
	vdl.RegisterNative(wireStringToNative, wireStringFromNative)
	vdl.RegisterNative(wireTimeToNative, wireTimeFromNative)
	vdl.Register((*WireString)(nil))
	vdl.Register((*WireMapStringInt)(nil))
	vdl.Register((*WireTime)(nil))
	vdl.Register((*WireSamePkg)(nil))
	vdl.Register((*WireMultiImport)(nil))
	vdl.Register((*WireAll)(nil))
}

// Type-check WireMapStringInt conversion functions.
var _ func(WireMapStringInt, *map[string]int) error = wireMapStringIntToNative
var _ func(*WireMapStringInt, map[string]int) error = wireMapStringIntFromNative

// Type-check WireMultiImport conversion functions.
var _ func(WireMultiImport, *map[NativeSamePkg]time.Time) error = wireMultiImportToNative
var _ func(*WireMultiImport, map[NativeSamePkg]time.Time) error = wireMultiImportFromNative

// Type-check WireSamePkg conversion functions.
var _ func(WireSamePkg, *NativeSamePkg) error = wireSamePkgToNative
var _ func(*WireSamePkg, NativeSamePkg) error = wireSamePkgFromNative

// Type-check WireString conversion functions.
var _ func(WireString, *string) error = wireStringToNative
var _ func(*WireString, string) error = wireStringFromNative

// Type-check WireTime conversion functions.
var _ func(WireTime, *time.Time) error = wireTimeToNative
var _ func(*WireTime, time.Time) error = wireTimeFromNative
diff --git a/lib/vdl/testdata/nativetest/otherfile.vdl b/lib/vdl/testdata/nativetest/otherfile.vdl
new file mode 100644
index 0000000..7489881
--- /dev/null
+++ b/lib/vdl/testdata/nativetest/otherfile.vdl
@@ -0,0 +1,6 @@
+package nativetest
+
+// The only purpose of this file is to help ensure that the vdl tool works well
+// with multiple .vdl files in a package.
+
+type ignoreme string
diff --git a/lib/vdl/testdata/nativetest/otherfile.vdl.go b/lib/vdl/testdata/nativetest/otherfile.vdl.go
new file mode 100644
index 0000000..e11ba68
--- /dev/null
+++ b/lib/vdl/testdata/nativetest/otherfile.vdl.go
@@ -0,0 +1,23 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: otherfile.vdl
+
+package nativetest
+
+import (
+	// VDL system imports
+	"v.io/v23/vdl"
+
+	// VDL user imports
+	"time"
+)
+
// ignoreme exists only to verify that the vdl tool handles multiple .vdl
// files in one package; see otherfile.vdl.
type ignoreme string

func (ignoreme) __VDLReflect(struct {
	Name string "v.io/v23/vdl/testdata/nativetest.ignoreme"
}) {
}

func init() {
	vdl.Register((*ignoreme)(nil))
}
diff --git a/lib/vdl/testdata/nativetest/vdl.config b/lib/vdl/testdata/nativetest/vdl.config
new file mode 100644
index 0000000..babfd9e
--- /dev/null
+++ b/lib/vdl/testdata/nativetest/vdl.config
@@ -0,0 +1,32 @@
// vdl.config for the nativetest package: for each wire type, declares the
// native type that generated code should expose, per target language, along
// with any imports that native type requires.
config = vdltool.Config{
	Go: {
		WireToNativeTypes: {
			"WireString":       {Type: "string"},
			"WireMapStringInt": {Type: "map[string]int"},
			"WireTime": {
				Type:    "time.Time",
				Imports: {{Path: "time", Name: "time"}},
			},
			// WireSamePkg maps to a native type declared in this very package.
			"WireSamePkg": {
				Type:    "nativetest.NativeSamePkg",
				Imports: {{Path: "v.io/v23/vdl/testdata/nativetest", Name: "nativetest"}},
			},
			// WireMultiImport needs two imports, exercising multi-import output.
			"WireMultiImport": {
				Type: "map[nativetest.NativeSamePkg]time.Time",
				Imports: {
					{Path: "v.io/v23/vdl/testdata/nativetest", Name: "nativetest"},
					{Path: "time", Name: "time"},
				},
			},
		},
	},
	Java: {
		WireToNativeTypes: {
			"WireString": "java.lang.String",
			"WireMapStringInt": "java.util.Map<java.lang.String, java.lang.Integer>",
			"WireTime": "org.joda.time.DateTime",
			"WireSamePkg": "io.v.v23.vdl.testdata.nativetest.NativeSamePkg",
			"WireMultiImport": "java.util.Map<io.v.v23.vdl.testdata.nativetest.NativeSamePkg, org.joda.time.DateTime>",
		},
	},
}
diff --git a/lib/vdl/testdata/testconfig/testconfig.vdl b/lib/vdl/testdata/testconfig/testconfig.vdl
new file mode 100644
index 0000000..a7939fe
--- /dev/null
+++ b/lib/vdl/testdata/testconfig/testconfig.vdl
@@ -0,0 +1,4 @@
+// Package testconfig is a simple test of vdl.config files.  We don't care about
+// the actual VDL file contents; we really only want to make sure that the
+// vdl.config file in this package is read in successfully.
+package testconfig
diff --git a/lib/vdl/testdata/testconfig/testconfig.vdl.go b/lib/vdl/testdata/testconfig/testconfig.vdl.go
new file mode 100644
index 0000000..cf70809
--- /dev/null
+++ b/lib/vdl/testdata/testconfig/testconfig.vdl.go
@@ -0,0 +1,7 @@
+// This file was auto-generated by the veyron vdl tool.
+// Source: testconfig.vdl
+
+// Package testconfig is a simple test of vdl.config files.  We don't care about
+// the actual VDL file contents; we really only want to make sure that the
+// vdl.config file in this package is read in successfully.
+package testconfig
diff --git a/lib/vdl/testdata/testconfig/vdl.config b/lib/vdl/testdata/testconfig/vdl.config
new file mode 100644
index 0000000..fe2db9f
--- /dev/null
+++ b/lib/vdl/testdata/testconfig/vdl.config
@@ -0,0 +1,4 @@
+// Example of a vdl.config file for testing.
+config = vdltool.Config{
+	GenLanguages: {vdltool.GenLanguage.Go},
+}
diff --git a/lib/vdl/vdltest/vdltest.go b/lib/vdl/vdltest/vdltest.go
new file mode 100644
index 0000000..79daa85
--- /dev/null
+++ b/lib/vdl/vdltest/vdltest.go
@@ -0,0 +1,80 @@
+// Package vdltest provides testing utilities for veyron2/vdl/...
+package vdltest
+
+import (
+	"io"
+	"io/ioutil"
+	"regexp"
+	"strings"
+	"testing"
+
+	"v.io/v23/vdl/build"
+	"v.io/v23/vdl/vdlutil"
+)
+
+// ExpectPass makes sure errs has no errors.
+func ExpectPass(t *testing.T, errs *vdlutil.Errors, testName string) {
+	if !errs.IsEmpty() {
+		t.Errorf("%v expected no errors but saw: %v", testName, errs.ToError())
+		errs.Reset()
+	}
+}
+
+// ExpectFail makes sure errs has an error that matches all the re regexps.
+func ExpectFail(t *testing.T, errs *vdlutil.Errors, testName string, re ...string) {
+	if errs.IsEmpty() {
+		t.Errorf("%v expected errors but didn't see any", testName)
+		return
+	}
+	actual := errs.ToError().Error()
+	errs.Reset()
+	for index, errRe := range re {
+		matched, err := regexp.Match(errRe, []byte(actual))
+		if err != nil {
+			t.Errorf("%v bad regexp pattern [%v] %q", testName, index, errRe)
+			return
+		}
+		if !matched {
+			t.Errorf("%v couldn't match pattern [%v] %q against %q", testName, index, errRe, actual)
+		}
+	}
+}
+
+// ExpectResult ensures errs has an error that matches all the re regexps, or
+// that errs has no errors if no regexps were provided, or only one was provided
+// with the empty string.
+func ExpectResult(t *testing.T, errs *vdlutil.Errors, testName string, re ...string) {
+	if len(re) == 0 || len(re) == 1 && re[0] == "" {
+		ExpectPass(t, errs, testName)
+	} else {
+		ExpectFail(t, errs, testName, re...)
+	}
+}
+
+// FakeBuildPackage constructs a fake build package for testing, with files
+// mapping from file names to file contents.
+func FakeBuildPackage(name, path string, files map[string]string) *build.Package {
+	var fnames []string
+	for fname, _ := range files {
+		fnames = append(fnames, fname)
+	}
+	return &build.Package{
+		Dir:           "",
+		Name:          name,
+		Path:          path,
+		BaseFileNames: fnames,
+		OpenFilesFunc: FakeOpenFiles(files),
+	}
+}
+
// FakeOpenFiles returns a function with the build.Package.OpenFilesFunc
// signature, which serves each requested file name straight out of the given
// files map.  A name missing from the map yields a reader over the empty
// string (the map's zero value).
func FakeOpenFiles(files map[string]string) func(fnames []string) (map[string]io.ReadCloser, error) {
	return func(fnames []string) (map[string]io.ReadCloser, error) {
		opened := make(map[string]io.ReadCloser, len(fnames))
		for _, name := range fnames {
			opened[name] = ioutil.NopCloser(strings.NewReader(files[name]))
		}
		return opened, nil
	}
}
diff --git a/lib/vdl/vdlutil/doc.go b/lib/vdl/vdlutil/doc.go
new file mode 100644
index 0000000..438b3f8
--- /dev/null
+++ b/lib/vdl/vdlutil/doc.go
@@ -0,0 +1,4 @@
+// Package vdlutil provides core utilities for vdl files.  It's used by the
+// auto-generated Go code, as well as the vdl parser, compiler and code
+// generators; it should have a small set of dependencies.
+package vdlutil
diff --git a/lib/vdl/vdlutil/errors.go b/lib/vdl/vdlutil/errors.go
new file mode 100644
index 0000000..5f0abb6
--- /dev/null
+++ b/lib/vdl/vdlutil/errors.go
@@ -0,0 +1,82 @@
+package vdlutil
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"strconv"
+)
+
// Errors accumulates error messages in a buffer, so that as many problems as
// possible can be reported to the user in a single pass.  Recording stops
// once MaxErrors messages have been added, to avoid unbounded spew; set
// MaxErrors to -1 to effectively never stop.  The zero Errors struct stops
// at the first error encountered.
type Errors struct {
	MaxErrors int
	buf       bytes.Buffer
	num       int
}

// NewErrors returns a new Errors object, holding up to max errors.
func NewErrors(max int) *Errors {
	return &Errors{MaxErrors: max}
}

// Error records msg.  It reports whether the MaxErrors cutoff has not yet
// been reached; once the cutoff is hit, a trailer line is appended and every
// subsequent message is silently dropped.
func (e *Errors) Error(msg string) bool {
	if e.num == e.MaxErrors {
		return false
	}
	fmt.Fprintf(&e.buf, "#%d %s\n", e.num, msg)
	e.num++
	if e.num != e.MaxErrors {
		return true
	}
	fmt.Fprintf(&e.buf, "...stopping after %d error(s)...\n", e.num)
	return false
}

// Errorf is like Error, and takes the same args as fmt.Printf.
func (e *Errors) Errorf(format string, v ...interface{}) bool {
	return e.Error(fmt.Sprintf(format, v...))
}

// String returns every recorded message as one human-readable string.
func (e *Errors) String() string {
	return e.buf.String()
}

// ToError returns the recorded messages as a single error, or nil if nothing
// was recorded.
func (e *Errors) ToError() error {
	if e.IsEmpty() {
		return nil
	}
	return errors.New(e.buf.String())
}

// IsEmpty returns true iff there weren't any errors.
func (e *Errors) IsEmpty() bool {
	return e.num == 0
}

// IsFull returns true iff we hit the MaxErrors cutoff.
func (e *Errors) IsFull() bool {
	return e.num == e.MaxErrors
}

// NumErrors returns the number of errors we've seen.
func (e *Errors) NumErrors() int {
	return e.num
}

// Reset discards all recorded errors.  MaxErrors remains the same; to change
// it, create a new Errors struct.
func (e *Errors) Reset() {
	e.buf.Reset()
	e.num = 0
}
diff --git a/lib/vdl/vdlutil/util.go b/lib/vdl/vdlutil/util.go
new file mode 100644
index 0000000..64eb424
--- /dev/null
+++ b/lib/vdl/vdlutil/util.go
@@ -0,0 +1,69 @@
+package vdlutil
+
+import (
+	"bytes"
+	"unicode"
+	"unicode/utf8"
+)
+
// ToCamelCase converts ThisString to thisString.
// TODO(toddw): Remove this function, replace calls with FirstRuneToLower.
func ToCamelCase(s string) string {
	return FirstRuneToLower(s)
}

// FirstRuneToLower returns s with its first rune lowercased; the rest of the
// string is untouched.
func FirstRuneToLower(s string) string {
	first, width := utf8.DecodeRuneInString(s)
	if width == 0 {
		return s
	}
	return string(unicode.ToLower(first)) + s[width:]
}
+
// FirstRuneToUpper returns s with its first rune uppercased; the rest of the
// string is untouched.
func FirstRuneToUpper(s string) string {
	first, width := utf8.DecodeRuneInString(s)
	if width == 0 {
		return s
	}
	return string(unicode.ToUpper(first)) + s[width:]
}
+
+// FirstRuneToExportCase returns s with its first rune in uppercase if export is
+// true, otherwise in lowercase.
+func FirstRuneToExportCase(s string, export bool) string {
+	if export {
+		return FirstRuneToUpper(s)
+	}
+	return FirstRuneToLower(s)
+}
+
// ToConstCase converts ThisString to THIS_STRING. For adding '_', we follow the
// following algorithm.  For any sequence of three characters, c[n-1], c[n],
// c[n+1], we add an underscore before c[n] if:
//     1) c[n-1] is a digit and c[n] is a letter, or
//     2) c[n-1] is a letter and c[n] is a digit, or
//     3) c[n-1] is lowercase, and c[n] is uppercase, or
//     4) c[n-1] is uppercase, c[n] is uppercase, and c[n+1] is lowercase.
func ToConstCase(s string) string {
	var buf bytes.Buffer
	var size int
	// A three-rune sliding window: each iteration shifts (prev, cur, next)
	// one rune to the right and decides whether to emit '_' before cur.
	var prev, cur, next rune
	next, size = utf8.DecodeRuneInString(s)
	// NOTE(review): DecodeRuneInString yields RuneError both at the end of
	// the string and on invalid UTF-8, so conversion stops at the first
	// invalid byte — presumably fine for identifier input; confirm if this is
	// ever fed arbitrary text.
	for next != utf8.RuneError {
		s = s[size:]
		prev, cur = cur, next
		next, size = utf8.DecodeRuneInString(s)
		// We avoid checking boundary conditions because, for a rune r that is zero or utf8.RuneError:
		// unicode.Is{Letter,Digit,Lower,Upper}(r) == false
		if unicode.IsDigit(prev) && unicode.IsLetter(cur) || // Rule (1)
			unicode.IsLetter(prev) && unicode.IsDigit(cur) || // Rule (2)
			unicode.IsLower(prev) && unicode.IsUpper(cur) || // Rule (3)
			unicode.IsUpper(prev) && unicode.IsUpper(cur) && unicode.IsLower(next) { // Rule (4)
			buf.WriteRune('_')
		}
		buf.WriteRune(unicode.ToUpper(cur))
	}
	return buf.String()
}
diff --git a/lib/vdl/vdlutil/util_test.go b/lib/vdl/vdlutil/util_test.go
new file mode 100644
index 0000000..bddb14b
--- /dev/null
+++ b/lib/vdl/vdlutil/util_test.go
@@ -0,0 +1,64 @@
+package vdlutil
+
+import (
+	"testing"
+)
+
+func TestFirstRuneToLower(t *testing.T) {
+	tests := []struct {
+		arg, want string
+	}{
+		{"foo", "foo"},
+		{"Foo", "foo"},
+		{"FOO", "fOO"},
+		{"foobar", "foobar"},
+		{"fooBar", "fooBar"},
+		{"FooBar", "fooBar"},
+		{"FOOBAR", "fOOBAR"},
+	}
+	for _, test := range tests {
+		if got, want := FirstRuneToLower(test.arg), test.want; got != want {
+			t.Errorf("FirstRuneToLower(%s) got %s, want %s", test.arg, got, want)
+		}
+	}
+}
+
+func TestFirstRuneToUpper(t *testing.T) {
+	tests := []struct {
+		arg, want string
+	}{
+		{"foo", "Foo"},
+		{"Foo", "Foo"},
+		{"FOO", "FOO"},
+		{"foobar", "Foobar"},
+		{"fooBar", "FooBar"},
+		{"FooBar", "FooBar"},
+		{"FOOBAR", "FOOBAR"},
+	}
+	for _, test := range tests {
+		if got, want := FirstRuneToUpper(test.arg), test.want; got != want {
+			t.Errorf("FirstRuneToUpper(%s) got %s, want %s", test.arg, got, want)
+		}
+	}
+}
+
+func TestConstCase(t *testing.T) {
+	testcases := []struct {
+		name, want string
+	}{
+		{"TestFunction", "TEST_FUNCTION"},
+		{"BIGNumber", "BIG_NUMBER"},
+		{"SHA256Hash", "SHA_256_HASH"},
+		{"Sha256Hash", "SHA_256_HASH"},
+		{"Sha256hash", "SHA_256_HASH"},
+		{"THISIsAHugeVarname", "THIS_IS_A_HUGE_VARNAME"},
+		{"Sha256MD5Function", "SHA_256_MD_5_FUNCTION"},
+		{"IfIHadADollar4EachTest", "IF_I_HAD_A_DOLLAR_4_EACH_TEST"},
+	}
+
+	for _, testcase := range testcases {
+		if want, got := testcase.want, ToConstCase(testcase.name); want != got {
+			t.Errorf("toConstCase(%q) error, want %q, got %q", testcase.name, want, got)
+		}
+	}
+}
diff --git a/lib/vdl/vdlutil/vlog.go b/lib/vdl/vdlutil/vlog.go
new file mode 100644
index 0000000..06c86da
--- /dev/null
+++ b/lib/vdl/vdlutil/vlog.go
@@ -0,0 +1,23 @@
+package vdlutil
+
+import (
+	"io/ioutil"
+	"log"
+	"os"
+)
+
// Shared settings for both the discard and stderr loggers, so enabling
// verbosity changes only the destination.
const (
	logPrefix = ""
	logFlags  = log.Lshortfile | log.Ltime | log.Lmicroseconds
)

var (
	// Vlog is a logger that discards output by default, and only outputs real
	// logs when SetVerbose is called.
	Vlog = log.New(ioutil.Discard, logPrefix, logFlags)
)

// SetVerbose tells the vdl package (and subpackages) to enable verbose logging.
// NOTE(review): this swaps the package-level Vlog without synchronization, so
// it is presumably meant to be called once during startup, before any logging
// — confirm callers.
func SetVerbose() {
	Vlog = log.New(os.Stderr, logPrefix, logFlags)
}