Description: use gcimporter15 again
Author: Thorsten Alteholz <debian@alteholz.de> and upstream
diff --git a/compiler/vendor/golang.org/x/tools/go/gcimporter15/bexport.go b/compiler/vendor/golang.org/x/tools/go/gcimporter15/bexport.go
new file mode 100644
index 0000000..cbf8bc0
--- /dev/null
+++ b/compiler/vendor/golang.org/x/tools/go/gcimporter15/bexport.go
@@ -0,0 +1,828 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Binary package export.
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
+// see that file for specification of the format.
+
+package gcimporter
+
+import (
+	"bytes"
+	"encoding/binary"
+	"fmt"
+	"go/ast"
+	"go/constant"
+	"go/token"
+	"go/types"
+	"log"
+	"math"
+	"math/big"
+	"sort"
+	"strings"
+)
+
+// If debugFormat is set, each integer and string value is preceded by a marker
+// and position information in the encoding. This mechanism permits an importer
+// to recognize immediately when it is out of sync. The importer recognizes this
+// mode automatically (i.e., it can import export data produced with debugging
+// support even if debugFormat is not set at the time of import). This mode will
+// lead to massively larger export data (by a factor of 2 to 3) and should only
+// be enabled during development and debugging.
+//
+// NOTE: This flag is the first flag to enable if importing dies because of
+// (suspected) format errors, and whenever a change is made to the format.
+const debugFormat = false // default: false
+
+// If trace is set, debugging output is printed to stdout.
+const trace = false // default: false
+
+// Current export format version. Increase with each format change.
+// 4: type name objects support type aliases, uses aliasTag
+// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
+// 2: removed unused bool in ODCL export (compiler only)
+// 1: header format change (more regular), export package for _ struct fields
+// 0: Go1.7 encoding
+const exportVersion = 4
+
+// trackAllTypes enables cycle tracking for all types, not just named
+// types. The existing compiler invariants assume that unnamed types
+// that are not completely set up are not used, or else there are spurious
+// errors.
+// If disabled, only named types are tracked, possibly leading to slightly
+// less efficient encoding in rare cases. It also prevents the export of
+// some corner-case type declarations (but those are not handled correctly
+// with the textual export format either).
+// TODO(gri) enable and remove once issues caused by it are fixed
+const trackAllTypes = false
+
+type exporter struct {
+	fset *token.FileSet
+	out  bytes.Buffer
+
+	// object -> index maps, indexed in order of serialization
+	strIndex map[string]int
+	pkgIndex map[*types.Package]int
+	typIndex map[types.Type]int
+
+	// position encoding
+	posInfoFormat bool
+	prevFile      string
+	prevLine      int
+
+	// debugging support
+	written int // bytes written
+	indent  int // for trace
+}
+
+// BExportData returns binary export data for pkg.
+// If no file set is provided, position info will be missing.
+func BExportData(fset *token.FileSet, pkg *types.Package) []byte {
+	p := exporter{
+		fset:          fset,
+		strIndex:      map[string]int{"": 0}, // empty string is mapped to 0
+		pkgIndex:      make(map[*types.Package]int),
+		typIndex:      make(map[types.Type]int),
+		posInfoFormat: true, // TODO(gri) might become a flag, eventually
+	}
+
+	// write version info
+	// The version string must start with "version %d" where %d is the version
+	// number. Additional debugging information may follow after a blank; that
+	// text is ignored by the importer.
+	p.rawStringln(fmt.Sprintf("version %d", exportVersion))
+	var debug string
+	if debugFormat {
+		debug = "debug"
+	}
+	p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
+	p.bool(trackAllTypes)
+	p.bool(p.posInfoFormat)
+
+	// --- generic export data ---
+
+	// populate type map with predeclared "known" types
+	for index, typ := range predeclared {
+		p.typIndex[typ] = index
+	}
+	if len(p.typIndex) != len(predeclared) {
+		log.Fatalf("gcimporter: duplicate entries in type map?")
+	}
+
+	// write package data
+	p.pkg(pkg, true)
+	if trace {
+		p.tracef("\n")
+	}
+
+	// write objects
+	objcount := 0
+	scope := pkg.Scope()
+	for _, name := range scope.Names() {
+		if !ast.IsExported(name) {
+			continue
+		}
+		if trace {
+			p.tracef("\n")
+		}
+		p.obj(scope.Lookup(name))
+		objcount++
+	}
+
+	// indicate end of list
+	if trace {
+		p.tracef("\n")
+	}
+	p.tag(endTag)
+
+	// for self-verification only (redundant)
+	p.int(objcount)
+
+	if trace {
+		p.tracef("\n")
+	}
+
+	// --- end of export data ---
+
+	return p.out.Bytes()
+}
+
+func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
+	if pkg == nil {
+		log.Fatalf("gcimporter: unexpected nil pkg")
+	}
+
+	// if we saw the package before, write its index (>= 0)
+	if i, ok := p.pkgIndex[pkg]; ok {
+		p.index('P', i)
+		return
+	}
+
+	// otherwise, remember the package, write the package tag (< 0) and package data
+	if trace {
+		p.tracef("P%d = { ", len(p.pkgIndex))
+		defer p.tracef("} ")
+	}
+	p.pkgIndex[pkg] = len(p.pkgIndex)
+
+	p.tag(packageTag)
+	p.string(pkg.Name())
+	if emptypath {
+		p.string("")
+	} else {
+		p.string(pkg.Path())
+	}
+}
+
+func (p *exporter) obj(obj types.Object) {
+	switch obj := obj.(type) {
+	case *types.Const:
+		p.tag(constTag)
+		p.pos(obj)
+		p.qualifiedName(obj)
+		p.typ(obj.Type())
+		p.value(obj.Val())
+
+	case *types.TypeName:
+		if isAlias(obj) {
+			p.tag(aliasTag)
+			p.pos(obj)
+			p.qualifiedName(obj)
+		} else {
+			p.tag(typeTag)
+		}
+		p.typ(obj.Type())
+
+	case *types.Var:
+		p.tag(varTag)
+		p.pos(obj)
+		p.qualifiedName(obj)
+		p.typ(obj.Type())
+
+	case *types.Func:
+		p.tag(funcTag)
+		p.pos(obj)
+		p.qualifiedName(obj)
+		sig := obj.Type().(*types.Signature)
+		p.paramList(sig.Params(), sig.Variadic())
+		p.paramList(sig.Results(), false)
+
+	default:
+		log.Fatalf("gcimporter: unexpected object %v (%T)", obj, obj)
+	}
+}
+
+func (p *exporter) pos(obj types.Object) {
+	if !p.posInfoFormat {
+		return
+	}
+
+	file, line := p.fileLine(obj)
+	if file == p.prevFile {
+		// common case: write line delta
+		// delta == 0 means different file or no line change
+		delta := line - p.prevLine
+		p.int(delta)
+		if delta == 0 {
+			p.int(-1) // -1 means no file change
+		}
+	} else {
+		// different file
+		p.int(0)
+		// Encode filename as length of common prefix with previous
+		// filename, followed by (possibly empty) suffix. Filenames
+		// frequently share path prefixes, so this can save a lot
+		// of space and make export data size less dependent on file
+		// path length. The suffix is unlikely to be empty because
+		// file names tend to end in ".go".
+		n := commonPrefixLen(p.prevFile, file)
+		p.int(n)           // n >= 0
+		p.string(file[n:]) // write suffix only
+		p.prevFile = file
+		p.int(line)
+	}
+	p.prevLine = line
+}
+
+func (p *exporter) fileLine(obj types.Object) (file string, line int) {
+	if p.fset != nil {
+		pos := p.fset.Position(obj.Pos())
+		file = pos.Filename
+		line = pos.Line
+	}
+	return
+}
+
+func commonPrefixLen(a, b string) int {
+	if len(a) > len(b) {
+		a, b = b, a
+	}
+	// len(a) <= len(b)
+	i := 0
+	for i < len(a) && a[i] == b[i] {
+		i++
+	}
+	return i
+}
+
+func (p *exporter) qualifiedName(obj types.Object) {
+	p.string(obj.Name())
+	p.pkg(obj.Pkg(), false)
+}
+
+func (p *exporter) typ(t types.Type) {
+	if t == nil {
+		log.Fatalf("gcimporter: nil type")
+	}
+
+	// Possible optimization: Anonymous pointer types *T where
+	// T is a named type are common. We could canonicalize all
+	// such types *T to a single type PT = *T. This would lead
+	// to at most one *T entry in typIndex, and all future *T's
+	// would be encoded as the respective index directly. Would
+	// save 1 byte (pointerTag) per *T and reduce the typIndex
+	// size (at the cost of a canonicalization map). We can do
+	// this later, without encoding format change.
+
+	// if we saw the type before, write its index (>= 0)
+	if i, ok := p.typIndex[t]; ok {
+		p.index('T', i)
+		return
+	}
+
+	// otherwise, remember the type, write the type tag (< 0) and type data
+	if trackAllTypes {
+		if trace {
+			p.tracef("T%d = {>\n", len(p.typIndex))
+			defer p.tracef("<\n} ")
+		}
+		p.typIndex[t] = len(p.typIndex)
+	}
+
+	switch t := t.(type) {
+	case *types.Named:
+		if !trackAllTypes {
+			// if we don't track all types, track named types now
+			p.typIndex[t] = len(p.typIndex)
+		}
+
+		p.tag(namedTag)
+		p.pos(t.Obj())
+		p.qualifiedName(t.Obj())
+		p.typ(t.Underlying())
+		if !types.IsInterface(t) {
+			p.assocMethods(t)
+		}
+
+	case *types.Array:
+		p.tag(arrayTag)
+		p.int64(t.Len())
+		p.typ(t.Elem())
+
+	case *types.Slice:
+		p.tag(sliceTag)
+		p.typ(t.Elem())
+
+	case *dddSlice:
+		p.tag(dddTag)
+		p.typ(t.elem)
+
+	case *types.Struct:
+		p.tag(structTag)
+		p.fieldList(t)
+
+	case *types.Pointer:
+		p.tag(pointerTag)
+		p.typ(t.Elem())
+
+	case *types.Signature:
+		p.tag(signatureTag)
+		p.paramList(t.Params(), t.Variadic())
+		p.paramList(t.Results(), false)
+
+	case *types.Interface:
+		p.tag(interfaceTag)
+		p.iface(t)
+
+	case *types.Map:
+		p.tag(mapTag)
+		p.typ(t.Key())
+		p.typ(t.Elem())
+
+	case *types.Chan:
+		p.tag(chanTag)
+		p.int(int(3 - t.Dir())) // hack
+		p.typ(t.Elem())
+
+	default:
+		log.Fatalf("gcimporter: unexpected type %T: %s", t, t)
+	}
+}
+
+func (p *exporter) assocMethods(named *types.Named) {
+	// Sort methods (for determinism).
+	var methods []*types.Func
+	for i := 0; i < named.NumMethods(); i++ {
+		methods = append(methods, named.Method(i))
+	}
+	sort.Sort(methodsByName(methods))
+
+	p.int(len(methods))
+
+	if trace && methods != nil {
+		p.tracef("associated methods {>\n")
+	}
+
+	for i, m := range methods {
+		if trace && i > 0 {
+			p.tracef("\n")
+		}
+
+		p.pos(m)
+		name := m.Name()
+		p.string(name)
+		if !exported(name) {
+			p.pkg(m.Pkg(), false)
+		}
+
+		sig := m.Type().(*types.Signature)
+		p.paramList(types.NewTuple(sig.Recv()), false)
+		p.paramList(sig.Params(), sig.Variadic())
+		p.paramList(sig.Results(), false)
+		p.int(0) // dummy value for go:nointerface pragma - ignored by importer
+	}
+
+	if trace && methods != nil {
+		p.tracef("<\n} ")
+	}
+}
+
+type methodsByName []*types.Func
+
+func (x methodsByName) Len() int           { return len(x) }
+func (x methodsByName) Swap(i, j int)      { x[i], x[j] = x[j], x[i] }
+func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
+
+func (p *exporter) fieldList(t *types.Struct) {
+	if trace && t.NumFields() > 0 {
+		p.tracef("fields {>\n")
+		defer p.tracef("<\n} ")
+	}
+
+	p.int(t.NumFields())
+	for i := 0; i < t.NumFields(); i++ {
+		if trace && i > 0 {
+			p.tracef("\n")
+		}
+		p.field(t.Field(i))
+		p.string(t.Tag(i))
+	}
+}
+
+func (p *exporter) field(f *types.Var) {
+	if !f.IsField() {
+		log.Fatalf("gcimporter: field expected")
+	}
+
+	p.pos(f)
+	p.fieldName(f)
+	p.typ(f.Type())
+}
+
+func (p *exporter) iface(t *types.Interface) {
+	// TODO(gri): enable importer to load embedded interfaces,
+	// then emit Embeddeds and ExplicitMethods separately here.
+	p.int(0)
+
+	n := t.NumMethods()
+	if trace && n > 0 {
+		p.tracef("methods {>\n")
+		defer p.tracef("<\n} ")
+	}
+	p.int(n)
+	for i := 0; i < n; i++ {
+		if trace && i > 0 {
+			p.tracef("\n")
+		}
+		p.method(t.Method(i))
+	}
+}
+
+func (p *exporter) method(m *types.Func) {
+	sig := m.Type().(*types.Signature)
+	if sig.Recv() == nil {
+		log.Fatalf("gcimporter: method expected")
+	}
+
+	p.pos(m)
+	p.string(m.Name())
+	if m.Name() != "_" && !ast.IsExported(m.Name()) {
+		p.pkg(m.Pkg(), false)
+	}
+
+	// interface method; no need to encode receiver.
+	p.paramList(sig.Params(), sig.Variadic())
+	p.paramList(sig.Results(), false)
+}
+
+func (p *exporter) fieldName(f *types.Var) {
+	name := f.Name()
+
+	if f.Anonymous() {
+		// anonymous field - we distinguish between 3 cases:
+		// 1) field name matches base type name and is exported
+		// 2) field name matches base type name and is not exported
+		// 3) field name doesn't match base type name (alias name)
+		bname := basetypeName(f.Type())
+		if name == bname {
+			if ast.IsExported(name) {
+				name = "" // 1) we don't need to know the field name or package
+			} else {
+				name = "?" // 2) use unexported name "?" to force package export
+			}
+		} else {
+			// 3) indicate alias and export name as is
+			// (this requires an extra "@" but this is a rare case)
+			p.string("@")
+		}
+	}
+
+	p.string(name)
+	if name != "" && !ast.IsExported(name) {
+		p.pkg(f.Pkg(), false)
+	}
+}
+
+func basetypeName(typ types.Type) string {
+	switch typ := deref(typ).(type) {
+	case *types.Basic:
+		return typ.Name()
+	case *types.Named:
+		return typ.Obj().Name()
+	default:
+		return "" // unnamed type
+	}
+}
+
+func (p *exporter) paramList(params *types.Tuple, variadic bool) {
+	// use negative length to indicate unnamed parameters
+	// (look at the first parameter only since either all
+	// names are present or all are absent)
+	n := params.Len()
+	if n > 0 && params.At(0).Name() == "" {
+		n = -n
+	}
+	p.int(n)
+	for i := 0; i < params.Len(); i++ {
+		q := params.At(i)
+		t := q.Type()
+		if variadic && i == params.Len()-1 {
+			t = &dddSlice{t.(*types.Slice).Elem()}
+		}
+		p.typ(t)
+		if n > 0 {
+			name := q.Name()
+			p.string(name)
+			if name != "_" {
+				p.pkg(q.Pkg(), false)
+			}
+		}
+		p.string("") // no compiler-specific info
+	}
+}
+
+func (p *exporter) value(x constant.Value) {
+	if trace {
+		p.tracef("= ")
+	}
+
+	switch x.Kind() {
+	case constant.Bool:
+		tag := falseTag
+		if constant.BoolVal(x) {
+			tag = trueTag
+		}
+		p.tag(tag)
+
+	case constant.Int:
+		if v, exact := constant.Int64Val(x); exact {
+			// common case: x fits into an int64 - use compact encoding
+			p.tag(int64Tag)
+			p.int64(v)
+			return
+		}
+		// uncommon case: large x - use float encoding
+		// (powers of 2 will be encoded efficiently with exponent)
+		p.tag(floatTag)
+		p.float(constant.ToFloat(x))
+
+	case constant.Float:
+		p.tag(floatTag)
+		p.float(x)
+
+	case constant.Complex:
+		p.tag(complexTag)
+		p.float(constant.Real(x))
+		p.float(constant.Imag(x))
+
+	case constant.String:
+		p.tag(stringTag)
+		p.string(constant.StringVal(x))
+
+	case constant.Unknown:
+		// package contains type errors
+		p.tag(unknownTag)
+
+	default:
+		log.Fatalf("gcimporter: unexpected value %v (%T)", x, x)
+	}
+}
+
+func (p *exporter) float(x constant.Value) {
+	if x.Kind() != constant.Float {
+		log.Fatalf("gcimporter: unexpected constant %v, want float", x)
+	}
+	// extract sign (there is no -0)
+	sign := constant.Sign(x)
+	if sign == 0 {
+		// x == 0
+		p.int(0)
+		return
+	}
+	// x != 0
+
+	var f big.Float
+	if v, exact := constant.Float64Val(x); exact {
+		// float64
+		f.SetFloat64(v)
+	} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+		// TODO(gri): add big.Rat accessor to constant.Value.
+		r := valueToRat(num)
+		f.SetRat(r.Quo(r, valueToRat(denom)))
+	} else {
+		// Value too large to represent as a fraction => inaccessible.
+		// TODO(gri): add big.Float accessor to constant.Value.
+		f.SetFloat64(math.MaxFloat64) // FIXME
+	}
+
+	// extract exponent such that 0.5 <= m < 1.0
+	var m big.Float
+	exp := f.MantExp(&m)
+
+	// extract mantissa as *big.Int
+	// - set exponent large enough so mant satisfies mant.IsInt()
+	// - get *big.Int from mant
+	m.SetMantExp(&m, int(m.MinPrec()))
+	mant, acc := m.Int(nil)
+	if acc != big.Exact {
+		log.Fatalf("gcimporter: internal error")
+	}
+
+	p.int(sign)
+	p.int(exp)
+	p.string(string(mant.Bytes()))
+}
+
+func valueToRat(x constant.Value) *big.Rat {
+	// Convert little-endian to big-endian.
+	// I can't believe this is necessary.
+	bytes := constant.Bytes(x)
+	for i := 0; i < len(bytes)/2; i++ {
+		bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
+	}
+	return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
+}
+
+func (p *exporter) bool(b bool) bool {
+	if trace {
+		p.tracef("[")
+		defer p.tracef("= %v] ", b)
+	}
+
+	x := 0
+	if b {
+		x = 1
+	}
+	p.int(x)
+	return b
+}
+
+// ----------------------------------------------------------------------------
+// Low-level encoders
+
+func (p *exporter) index(marker byte, index int) {
+	if index < 0 {
+		log.Fatalf("gcimporter: invalid index < 0")
+	}
+	if debugFormat {
+		p.marker('t')
+	}
+	if trace {
+		p.tracef("%c%d ", marker, index)
+	}
+	p.rawInt64(int64(index))
+}
+
+func (p *exporter) tag(tag int) {
+	if tag >= 0 {
+		log.Fatalf("gcimporter: invalid tag >= 0")
+	}
+	if debugFormat {
+		p.marker('t')
+	}
+	if trace {
+		p.tracef("%s ", tagString[-tag])
+	}
+	p.rawInt64(int64(tag))
+}
+
+func (p *exporter) int(x int) {
+	p.int64(int64(x))
+}
+
+func (p *exporter) int64(x int64) {
+	if debugFormat {
+		p.marker('i')
+	}
+	if trace {
+		p.tracef("%d ", x)
+	}
+	p.rawInt64(x)
+}
+
+func (p *exporter) string(s string) {
+	if debugFormat {
+		p.marker('s')
+	}
+	if trace {
+		p.tracef("%q ", s)
+	}
+	// if we saw the string before, write its index (>= 0)
+	// (the empty string is mapped to 0)
+	if i, ok := p.strIndex[s]; ok {
+		p.rawInt64(int64(i))
+		return
+	}
+	// otherwise, remember string and write its negative length and bytes
+	p.strIndex[s] = len(p.strIndex)
+	p.rawInt64(-int64(len(s)))
+	for i := 0; i < len(s); i++ {
+		p.rawByte(s[i])
+	}
+}
+
+// marker emits a marker byte and position information which makes
+// it easy for a reader to detect if it is "out of sync". Used in
+// debugFormat mode only.
+func (p *exporter) marker(m byte) {
+	p.rawByte(m)
+	// Enable this for help tracking down the location
+	// of an incorrect marker when running in debugFormat.
+	if false && trace {
+		p.tracef("#%d ", p.written)
+	}
+	p.rawInt64(int64(p.written))
+}
+
+// rawInt64 should only be used by low-level encoders.
+func (p *exporter) rawInt64(x int64) {
+	var tmp [binary.MaxVarintLen64]byte
+	n := binary.PutVarint(tmp[:], x)
+	for i := 0; i < n; i++ {
+		p.rawByte(tmp[i])
+	}
+}
+
+// rawStringln should only be used to emit the initial version string.
+func (p *exporter) rawStringln(s string) {
+	for i := 0; i < len(s); i++ {
+		p.rawByte(s[i])
+	}
+	p.rawByte('\n')
+}
+
+// rawByte is the bottleneck interface to write to p.out.
+// rawByte escapes b as follows (any encoding that
+// hides '$' would do):
+//
+//	'$'  => '|' 'S'
+//	'|'  => '|' '|'
+//
+// Necessary so other tools can find the end of the
+// export data by searching for "$$".
+// rawByte should only be used by low-level encoders.
+func (p *exporter) rawByte(b byte) {
+	switch b {
+	case '$':
+		// write '$' as '|' 'S'
+		b = 'S'
+		fallthrough
+	case '|':
+		// write '|' as '|' '|'
+		p.out.WriteByte('|')
+		p.written++
+	}
+	p.out.WriteByte(b)
+	p.written++
+}
+
+// tracef is like fmt.Printf but it rewrites the format string
+// to take care of indentation.
+func (p *exporter) tracef(format string, args ...interface{}) {
+	if strings.ContainsAny(format, "<>\n") {
+		var buf bytes.Buffer
+		for i := 0; i < len(format); i++ {
+			// no need to deal with runes
+			ch := format[i]
+			switch ch {
+			case '>':
+				p.indent++
+				continue
+			case '<':
+				p.indent--
+				continue
+			}
+			buf.WriteByte(ch)
+			if ch == '\n' {
+				for j := p.indent; j > 0; j-- {
+					buf.WriteString(".  ")
+				}
+			}
+		}
+		format = buf.String()
+	}
+	fmt.Printf(format, args...)
+}
+
+// Debugging support.
+// (tagString is only used when tracing is enabled)
+var tagString = [...]string{
+	// Packages
+	-packageTag: "package",
+
+	// Types
+	-namedTag:     "named type",
+	-arrayTag:     "array",
+	-sliceTag:     "slice",
+	-dddTag:       "ddd",
+	-structTag:    "struct",
+	-pointerTag:   "pointer",
+	-signatureTag: "signature",
+	-interfaceTag: "interface",
+	-mapTag:       "map",
+	-chanTag:      "chan",
+
+	// Values
+	-falseTag:    "false",
+	-trueTag:     "true",
+	-int64Tag:    "int64",
+	-floatTag:    "float",
+	-fractionTag: "fraction",
+	-complexTag:  "complex",
+	-stringTag:   "string",
+	-unknownTag:  "unknown",
+
+	// Type aliases
+	-aliasTag: "alias",
+}
diff --git a/compiler/vendor/golang.org/x/tools/go/gcimporter15/bimport.go b/compiler/vendor/golang.org/x/tools/go/gcimporter15/bimport.go
new file mode 100644
index 0000000..6709e43
--- /dev/null
+++ b/compiler/vendor/golang.org/x/tools/go/gcimporter15/bimport.go
@@ -0,0 +1,996 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
+
+package gcimporter
+
+import (
+	"encoding/binary"
+	"fmt"
+	"go/constant"
+	"go/token"
+	"go/types"
+	"sort"
+	"strconv"
+	"strings"
+	"sync"
+	"unicode"
+	"unicode/utf8"
+)
+
+type importer struct {
+	imports    map[string]*types.Package
+	data       []byte
+	importpath string
+	buf        []byte // for reading strings
+	version    int    // export format version
+
+	// object lists
+	strList       []string           // in order of appearance
+	pathList      []string           // in order of appearance
+	pkgList       []*types.Package   // in order of appearance
+	typList       []types.Type       // in order of appearance
+	interfaceList []*types.Interface // for delayed completion only
+	trackAllTypes bool
+
+	// position encoding
+	posInfoFormat bool
+	prevFile      string
+	prevLine      int
+	fset          *token.FileSet
+	files         map[string]*token.File
+
+	// debugging support
+	debugFormat bool
+	read        int // bytes read
+}
+
+// BImportData imports a package from the serialized package data
+// and returns the number of bytes consumed and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+	// catch panics and return them as errors
+	defer func() {
+		if e := recover(); e != nil {
+			// The package (filename) causing the problem is added to this
+			// error by a wrapper in the caller (Import in gcimporter.go).
+			// Return a (possibly nil or incomplete) package unchanged (see #16088).
+			err = fmt.Errorf("cannot import, possibly version skew (%v) - reinstall package", e)
+		}
+	}()
+
+	p := importer{
+		imports:    imports,
+		data:       data,
+		importpath: path,
+		version:    -1,           // unknown version
+		strList:    []string{""}, // empty string is mapped to 0
+		pathList:   []string{""}, // empty string is mapped to 0
+		fset:       fset,
+		files:      make(map[string]*token.File),
+	}
+
+	// read version info
+	var versionstr string
+	if b := p.rawByte(); b == 'c' || b == 'd' {
+		// Go1.7 encoding; first byte encodes low-level
+		// encoding format (compact vs debug).
+		// For backward-compatibility only (avoid problems with
+		// old installed packages). Newly compiled packages use
+		// the extensible format string.
+		// TODO(gri) Remove this support eventually; after Go1.8.
+		if b == 'd' {
+			p.debugFormat = true
+		}
+		p.trackAllTypes = p.rawByte() == 'a'
+		p.posInfoFormat = p.int() != 0
+		versionstr = p.string()
+		if versionstr == "v1" {
+			p.version = 0
+		}
+	} else {
+		// Go1.8 extensible encoding
+		// read version string and extract version number (ignore anything after the version number)
+		versionstr = p.rawStringln(b)
+		if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
+			if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
+				p.version = v
+			}
+		}
+	}
+
+	// read version specific flags - extend as necessary
+	switch p.version {
+	// case 7:
+	// 	...
+	//	fallthrough
+	case 6, 5, 4, 3, 2, 1:
+		p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
+		p.trackAllTypes = p.int() != 0
+		p.posInfoFormat = p.int() != 0
+	case 0:
+		// Go1.7 encoding format - nothing to do here
+	default:
+		errorf("unknown export format version %d (%q)", p.version, versionstr)
+	}
+
+	// --- generic export data ---
+
+	// populate typList with predeclared "known" types
+	p.typList = append(p.typList, predeclared...)
+
+	// read package data
+	pkg = p.pkg()
+
+	// read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go)
+	objcount := 0
+	for {
+		tag := p.tagOrIndex()
+		if tag == endTag {
+			break
+		}
+		p.obj(tag)
+		objcount++
+	}
+
+	// self-verification
+	if count := p.int(); count != objcount {
+		errorf("got %d objects; want %d", objcount, count)
+	}
+
+	// ignore compiler-specific import data
+
+	// complete interfaces
+	// TODO(gri) re-investigate if we still need to do this in a delayed fashion
+	for _, typ := range p.interfaceList {
+		typ.Complete()
+	}
+
+	// record all referenced packages as imports
+	list := append(([]*types.Package)(nil), p.pkgList[1:]...)
+	sort.Sort(byPath(list))
+	pkg.SetImports(list)
+
+	// package was imported completely and without errors
+	pkg.MarkComplete()
+
+	return p.read, pkg, nil
+}
+
+func errorf(format string, args ...interface{}) {
+	panic(fmt.Sprintf(format, args...))
+}
+
+func (p *importer) pkg() *types.Package {
+	// if the package was seen before, i is its index (>= 0)
+	i := p.tagOrIndex()
+	if i >= 0 {
+		return p.pkgList[i]
+	}
+
+	// otherwise, i is the package tag (< 0)
+	if i != packageTag {
+		errorf("unexpected package tag %d version %d", i, p.version)
+	}
+
+	// read package data
+	name := p.string()
+	var path string
+	if p.version >= 5 {
+		path = p.path()
+	} else {
+		path = p.string()
+	}
+	if p.version >= 6 {
+		p.int() // package height; unused by go/types
+	}
+
+	// we should never see an empty package name
+	if name == "" {
+		errorf("empty package name in import")
+	}
+
+	// an empty path denotes the package we are currently importing;
+	// it must be the first package we see
+	if (path == "") != (len(p.pkgList) == 0) {
+		errorf("package path %q for pkg index %d", path, len(p.pkgList))
+	}
+
+	// if the package was imported before, use that one; otherwise create a new one
+	if path == "" {
+		path = p.importpath
+	}
+	pkg := p.imports[path]
+	if pkg == nil {
+		pkg = types.NewPackage(path, name)
+		p.imports[path] = pkg
+	} else if pkg.Name() != name {
+		errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path)
+	}
+	p.pkgList = append(p.pkgList, pkg)
+
+	return pkg
+}
+
+// objTag returns the tag value for each object kind.
+func objTag(obj types.Object) int {
+	switch obj.(type) {
+	case *types.Const:
+		return constTag
+	case *types.TypeName:
+		return typeTag
+	case *types.Var:
+		return varTag
+	case *types.Func:
+		return funcTag
+	default:
+		errorf("unexpected object: %v (%T)", obj, obj) // panics
+		panic("unreachable")
+	}
+}
+
+func sameObj(a, b types.Object) bool {
+	// Because unnamed types are not canonicalized, we cannot simply compare types for
+	// (pointer) identity.
+	// Ideally we'd check equality of constant values as well, but this is good enough.
+	return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
+}
+
+func (p *importer) declare(obj types.Object) {
+	pkg := obj.Pkg()
+	if alt := pkg.Scope().Insert(obj); alt != nil {
+		// This can only trigger if we import a (non-type) object a second time.
+		// Excluding type aliases, this cannot happen because 1) we only import a package
+		// once; and 2) we ignore compiler-specific export data which may contain
+		// functions whose inlined function bodies refer to other functions that
+		// were already imported.
+		// However, type aliases require reexporting the original type, so we need
+		// to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
+		// method importer.obj, switch case importing functions).
+		// TODO(gri) review/update this comment once the gc compiler handles type aliases.
+		if !sameObj(obj, alt) {
+			errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt)
+		}
+	}
+}
+
+func (p *importer) obj(tag int) {
+	switch tag {
+	case constTag:
+		pos := p.pos()
+		pkg, name := p.qualifiedName()
+		typ := p.typ(nil)
+		val := p.value()
+		p.declare(types.NewConst(pos, pkg, name, typ, val))
+
+	case aliasTag:
+		// TODO(gri) verify type alias hookup is correct
+		pos := p.pos()
+		pkg, name := p.qualifiedName()
+		typ := p.typ(nil)
+		p.declare(types.NewTypeName(pos, pkg, name, typ))
+
+	case typeTag:
+		p.typ(nil)
+
+	case varTag:
+		pos := p.pos()
+		pkg, name := p.qualifiedName()
+		typ := p.typ(nil)
+		p.declare(types.NewVar(pos, pkg, name, typ))
+
+	case funcTag:
+		pos := p.pos()
+		pkg, name := p.qualifiedName()
+		params, isddd := p.paramList()
+		result, _ := p.paramList()
+		sig := types.NewSignature(nil, params, result, isddd)
+		p.declare(types.NewFunc(pos, pkg, name, sig))
+
+	default:
+		errorf("unexpected object tag %d", tag)
+	}
+}
+
+const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
+
+func (p *importer) pos() token.Pos {
+	if !p.posInfoFormat {
+		return token.NoPos
+	}
+
+	file := p.prevFile
+	line := p.prevLine
+	delta := p.int()
+	line += delta
+	if p.version >= 5 {
+		if delta == deltaNewFile {
+			if n := p.int(); n >= 0 {
+				// file changed
+				file = p.path()
+				line = n
+			}
+		}
+	} else {
+		if delta == 0 {
+			if n := p.int(); n >= 0 {
+				// file changed
+				file = p.prevFile[:n] + p.string()
+				line = p.int()
+			}
+		}
+	}
+	p.prevFile = file
+	p.prevLine = line
+
+	// Synthesize a token.Pos
+
+	// Since we don't know the set of needed file positions, we
+	// reserve maxlines positions per file.
+	const maxlines = 64 * 1024
+	f := p.files[file]
+	if f == nil {
+		f = p.fset.AddFile(file, -1, maxlines)
+		p.files[file] = f
+		// Allocate the fake linebreak indices on first use.
+		// TODO(adonovan): opt: save ~512KB using a more complex scheme?
+		fakeLinesOnce.Do(func() {
+			fakeLines = make([]int, maxlines)
+			for i := range fakeLines {
+				fakeLines[i] = i
+			}
+		})
+		f.SetLines(fakeLines)
+	}
+
+	if line > maxlines {
+		line = 1
+	}
+
+	// Treat the file as if it contained only newlines
+	// and column=1: use the line number as the offset.
+	return f.Pos(line - 1)
+}
+
+var (
+	fakeLines     []int
+	fakeLinesOnce sync.Once
+)
+
+func (p *importer) qualifiedName() (pkg *types.Package, name string) {
+	name = p.string()
+	pkg = p.pkg()
+	return
+}
+
+func (p *importer) record(t types.Type) {
+	p.typList = append(p.typList, t)
+}
+
+// A dddSlice is a types.Type representing ...T parameters.
+// It only appears for parameter types and does not escape
+// the importer.
+type dddSlice struct {
+	elem types.Type
+}
+
+func (t *dddSlice) Underlying() types.Type { return t }
+func (t *dddSlice) String() string         { return "..." + t.elem.String() }
+
+// parent is the package which declared the type; parent == nil means
+// the package currently imported. The parent package is needed for
+// exported struct fields and interface methods which don't contain
+// explicit package information in the export data.
+func (p *importer) typ(parent *types.Package) types.Type {
+	// if the type was seen before, i is its index (>= 0)
+	i := p.tagOrIndex()
+	if i >= 0 {
+		return p.typList[i]
+	}
+
+	// otherwise, i is the type tag (< 0)
+	switch i {
+	case namedTag:
+		// read type object
+		pos := p.pos()
+		parent, name := p.qualifiedName()
+		scope := parent.Scope()
+		obj := scope.Lookup(name)
+
+		// if the object doesn't exist yet, create and insert it
+		if obj == nil {
+			obj = types.NewTypeName(pos, parent, name, nil)
+			scope.Insert(obj)
+		}
+
+		if _, ok := obj.(*types.TypeName); !ok {
+			errorf("pkg = %s, name = %s => %s", parent, name, obj)
+		}
+
+		// associate new named type with obj if it doesn't exist yet
+		t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
+
+		// but record the existing type, if any
+		t := obj.Type().(*types.Named)
+		p.record(t)
+
+		// read underlying type
+		t0.SetUnderlying(p.typ(parent))
+
+		// interfaces don't have associated methods
+		if types.IsInterface(t0) {
+			return t
+		}
+
+		// read associated methods
+		for i := p.int(); i > 0; i-- {
+			// TODO(gri) replace this with something closer to fieldName
+			pos := p.pos()
+			name := p.string()
+			if !exported(name) {
+				p.pkg()
+			}
+
+			recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver?
+			params, isddd := p.paramList()
+			result, _ := p.paramList()
+			p.int() // go:nointerface pragma - discarded
+
+			sig := types.NewSignature(recv.At(0), params, result, isddd)
+			t0.AddMethod(types.NewFunc(pos, parent, name, sig))
+		}
+
+		return t
+
+	case arrayTag:
+		t := new(types.Array)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		n := p.int64()
+		*t = *types.NewArray(p.typ(parent), n)
+		return t
+
+	case sliceTag:
+		t := new(types.Slice)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		*t = *types.NewSlice(p.typ(parent))
+		return t
+
+	case dddTag:
+		t := new(dddSlice)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		t.elem = p.typ(parent)
+		return t
+
+	case structTag:
+		t := new(types.Struct)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		*t = *types.NewStruct(p.fieldList(parent))
+		return t
+
+	case pointerTag:
+		t := new(types.Pointer)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		*t = *types.NewPointer(p.typ(parent))
+		return t
+
+	case signatureTag:
+		t := new(types.Signature)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		params, isddd := p.paramList()
+		result, _ := p.paramList()
+		*t = *types.NewSignature(nil, params, result, isddd)
+		return t
+
+	case interfaceTag:
+		// Create a dummy entry in the type list. This is safe because we
+		// cannot expect the interface type to appear in a cycle, as any
+		// such cycle must contain a named type which would have been
+		// first defined earlier.
+		n := len(p.typList)
+		if p.trackAllTypes {
+			p.record(nil)
+		}
+
+		var embeddeds []*types.Named
+		for n := p.int(); n > 0; n-- {
+			p.pos()
+			embeddeds = append(embeddeds, p.typ(parent).(*types.Named))
+		}
+
+		t := types.NewInterface(p.methodList(parent), embeddeds)
+		p.interfaceList = append(p.interfaceList, t)
+		if p.trackAllTypes {
+			p.typList[n] = t
+		}
+		return t
+
+	case mapTag:
+		t := new(types.Map)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		key := p.typ(parent)
+		val := p.typ(parent)
+		*t = *types.NewMap(key, val)
+		return t
+
+	case chanTag:
+		t := new(types.Chan)
+		if p.trackAllTypes {
+			p.record(t)
+		}
+
+		var dir types.ChanDir
+		// tag values must match the constants in cmd/compile/internal/gc/go.go
+		switch d := p.int(); d {
+		case 1 /* Crecv */ :
+			dir = types.RecvOnly
+		case 2 /* Csend */ :
+			dir = types.SendOnly
+		case 3 /* Cboth */ :
+			dir = types.SendRecv
+		default:
+			errorf("unexpected channel dir %d", d)
+		}
+		val := p.typ(parent)
+		*t = *types.NewChan(dir, val)
+		return t
+
+	default:
+		errorf("unexpected type tag %d", i) // panics
+		panic("unreachable")
+	}
+}
+
+func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) {
+	if n := p.int(); n > 0 {
+		fields = make([]*types.Var, n)
+		tags = make([]string, n)
+		for i := range fields {
+			fields[i], tags[i] = p.field(parent)
+		}
+	}
+	return
+}
+
+func (p *importer) field(parent *types.Package) (*types.Var, string) {
+	pos := p.pos()
+	pkg, name, alias := p.fieldName(parent)
+	typ := p.typ(parent)
+	tag := p.string()
+
+	anonymous := false
+	if name == "" {
+		// anonymous field - typ must be T or *T and T must be a type name
+		switch typ := deref(typ).(type) {
+		case *types.Basic: // basic types are named types
+			pkg = nil // objects defined in Universe scope have no package
+			name = typ.Name()
+		case *types.Named:
+			name = typ.Obj().Name()
+		default:
+			errorf("named base type expected")
+		}
+		anonymous = true
+	} else if alias {
+		// anonymous field: we have an explicit name because it's an alias
+		anonymous = true
+	}
+
+	return types.NewField(pos, pkg, name, typ, anonymous), tag
+}
+
+func (p *importer) methodList(parent *types.Package) (methods []*types.Func) {
+	if n := p.int(); n > 0 {
+		methods = make([]*types.Func, n)
+		for i := range methods {
+			methods[i] = p.method(parent)
+		}
+	}
+	return
+}
+
+func (p *importer) method(parent *types.Package) *types.Func {
+	pos := p.pos()
+	pkg, name, _ := p.fieldName(parent)
+	params, isddd := p.paramList()
+	result, _ := p.paramList()
+	sig := types.NewSignature(nil, params, result, isddd)
+	return types.NewFunc(pos, pkg, name, sig)
+}
+
+func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) {
+	name = p.string()
+	pkg = parent
+	if pkg == nil {
+		// use the imported package instead
+		pkg = p.pkgList[0]
+	}
+	if p.version == 0 && name == "_" {
+		// version 0 didn't export a package for _ fields
+		return
+	}
+	switch name {
+	case "":
+		// 1) field name matches base type name and is exported: nothing to do
+	case "?":
+		// 2) field name matches base type name and is not exported: need package
+		name = ""
+		pkg = p.pkg()
+	case "@":
+		// 3) field name doesn't match type name (alias)
+		name = p.string()
+		alias = true
+		fallthrough
+	default:
+		if !exported(name) {
+			pkg = p.pkg()
+		}
+	}
+	return
+}
+
+func (p *importer) paramList() (*types.Tuple, bool) {
+	n := p.int()
+	if n == 0 {
+		return nil, false
+	}
+	// negative length indicates unnamed parameters
+	named := true
+	if n < 0 {
+		n = -n
+		named = false
+	}
+	// n > 0
+	params := make([]*types.Var, n)
+	isddd := false
+	for i := range params {
+		params[i], isddd = p.param(named)
+	}
+	return types.NewTuple(params...), isddd
+}
+
+func (p *importer) param(named bool) (*types.Var, bool) {
+	t := p.typ(nil)
+	td, isddd := t.(*dddSlice)
+	if isddd {
+		t = types.NewSlice(td.elem)
+	}
+
+	var pkg *types.Package
+	var name string
+	if named {
+		name = p.string()
+		if name == "" {
+			errorf("expected named parameter")
+		}
+		if name != "_" {
+			pkg = p.pkg()
+		}
+		if i := strings.Index(name, "·"); i > 0 {
+			name = name[:i] // cut off gc-specific parameter numbering
+		}
+	}
+
+	// read and discard compiler-specific info
+	p.string()
+
+	return types.NewVar(token.NoPos, pkg, name, t), isddd
+}
+
+func exported(name string) bool {
+	ch, _ := utf8.DecodeRuneInString(name)
+	return unicode.IsUpper(ch)
+}
+
+func (p *importer) value() constant.Value {
+	switch tag := p.tagOrIndex(); tag {
+	case falseTag:
+		return constant.MakeBool(false)
+	case trueTag:
+		return constant.MakeBool(true)
+	case int64Tag:
+		return constant.MakeInt64(p.int64())
+	case floatTag:
+		return p.float()
+	case complexTag:
+		re := p.float()
+		im := p.float()
+		return constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+	case stringTag:
+		return constant.MakeString(p.string())
+	case unknownTag:
+		return constant.MakeUnknown()
+	default:
+		errorf("unexpected value tag %d", tag) // panics
+		panic("unreachable")
+	}
+}
+
+func (p *importer) float() constant.Value {
+	sign := p.int()
+	if sign == 0 {
+		return constant.MakeInt64(0)
+	}
+
+	exp := p.int()
+	mant := []byte(p.string()) // big endian
+
+	// remove leading 0's if any
+	for len(mant) > 0 && mant[0] == 0 {
+		mant = mant[1:]
+	}
+
+	// convert to little endian
+	// TODO(gri) go/constant should have a more direct conversion function
+	//           (e.g., once it supports a big.Float based implementation)
+	for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 {
+		mant[i], mant[j] = mant[j], mant[i]
+	}
+
+	// adjust exponent (constant.MakeFromBytes creates an integer value,
+	// but mant represents the mantissa bits such that 0.5 <= mant < 1.0)
+	exp -= len(mant) << 3
+	if len(mant) > 0 {
+		for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 {
+			exp++
+		}
+	}
+
+	x := constant.MakeFromBytes(mant)
+	switch {
+	case exp < 0:
+		d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
+		x = constant.BinaryOp(x, token.QUO, d)
+	case exp > 0:
+		x = constant.Shift(x, token.SHL, uint(exp))
+	}
+
+	if sign < 0 {
+		x = constant.UnaryOp(token.SUB, x, 0)
+	}
+	return x
+}
+
+// ----------------------------------------------------------------------------
+// Low-level decoders
+
+func (p *importer) tagOrIndex() int {
+	if p.debugFormat {
+		p.marker('t')
+	}
+
+	return int(p.rawInt64())
+}
+
+func (p *importer) int() int {
+	x := p.int64()
+	if int64(int(x)) != x {
+		errorf("exported integer too large")
+	}
+	return int(x)
+}
+
+func (p *importer) int64() int64 {
+	if p.debugFormat {
+		p.marker('i')
+	}
+
+	return p.rawInt64()
+}
+
+func (p *importer) path() string {
+	if p.debugFormat {
+		p.marker('p')
+	}
+	// if the path was seen before, i is its index (>= 0)
+	// (the empty string is at index 0)
+	i := p.rawInt64()
+	if i >= 0 {
+		return p.pathList[i]
+	}
+	// otherwise, i is the negative path length (< 0)
+	a := make([]string, -i)
+	for n := range a {
+		a[n] = p.string()
+	}
+	s := strings.Join(a, "/")
+	p.pathList = append(p.pathList, s)
+	return s
+}
+
+func (p *importer) string() string {
+	if p.debugFormat {
+		p.marker('s')
+	}
+	// if the string was seen before, i is its index (>= 0)
+	// (the empty string is at index 0)
+	i := p.rawInt64()
+	if i >= 0 {
+		return p.strList[i]
+	}
+	// otherwise, i is the negative string length (< 0)
+	if n := int(-i); n <= cap(p.buf) {
+		p.buf = p.buf[:n]
+	} else {
+		p.buf = make([]byte, n)
+	}
+	for i := range p.buf {
+		p.buf[i] = p.rawByte()
+	}
+	s := string(p.buf)
+	p.strList = append(p.strList, s)
+	return s
+}
+
+func (p *importer) marker(want byte) {
+	if got := p.rawByte(); got != want {
+		errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)
+	}
+
+	pos := p.read
+	if n := int(p.rawInt64()); n != pos {
+		errorf("incorrect position: got %d; want %d", n, pos)
+	}
+}
+
+// rawInt64 should only be used by low-level decoders.
+func (p *importer) rawInt64() int64 {
+	i, err := binary.ReadVarint(p)
+	if err != nil {
+		errorf("read error: %v", err)
+	}
+	return i
+}
+
+// rawStringln should only be used to read the initial version string.
+func (p *importer) rawStringln(b byte) string {
+	p.buf = p.buf[:0]
+	for b != '\n' {
+		p.buf = append(p.buf, b)
+		b = p.rawByte()
+	}
+	return string(p.buf)
+}
+
+// needed for binary.ReadVarint in rawInt64
+func (p *importer) ReadByte() (byte, error) {
+	return p.rawByte(), nil
+}
+
+// rawByte is the bottleneck interface for reading p.data.
+// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
+// rawByte should only be used by low-level decoders.
+func (p *importer) rawByte() byte {
+	b := p.data[0]
+	r := 1
+	if b == '|' {
+		b = p.data[1]
+		r = 2
+		switch b {
+		case 'S':
+			b = '$'
+		case '|':
+			// nothing to do
+		default:
+			errorf("unexpected escape sequence in export data")
+		}
+	}
+	p.data = p.data[r:]
+	p.read += r
+	return b
+
+}
+
+// ----------------------------------------------------------------------------
+// Export format
+
+// Tags. Must be < 0.
+const (
+	// Objects
+	packageTag = -(iota + 1)
+	constTag
+	typeTag
+	varTag
+	funcTag
+	endTag
+
+	// Types
+	namedTag
+	arrayTag
+	sliceTag
+	dddTag
+	structTag
+	pointerTag
+	signatureTag
+	interfaceTag
+	mapTag
+	chanTag
+
+	// Values
+	falseTag
+	trueTag
+	int64Tag
+	floatTag
+	fractionTag // not used by gc
+	complexTag
+	stringTag
+	nilTag     // only used by gc (appears in exported inlined function bodies)
+	unknownTag // not used by gc (only appears in packages with errors)
+
+	// Type aliases
+	aliasTag
+)
+
+var predeclared = []types.Type{
+	// basic types
+	types.Typ[types.Bool],
+	types.Typ[types.Int],
+	types.Typ[types.Int8],
+	types.Typ[types.Int16],
+	types.Typ[types.Int32],
+	types.Typ[types.Int64],
+	types.Typ[types.Uint],
+	types.Typ[types.Uint8],
+	types.Typ[types.Uint16],
+	types.Typ[types.Uint32],
+	types.Typ[types.Uint64],
+	types.Typ[types.Uintptr],
+	types.Typ[types.Float32],
+	types.Typ[types.Float64],
+	types.Typ[types.Complex64],
+	types.Typ[types.Complex128],
+	types.Typ[types.String],
+
+	// basic type aliases
+	types.Universe.Lookup("byte").Type(),
+	types.Universe.Lookup("rune").Type(),
+
+	// error
+	types.Universe.Lookup("error").Type(),
+
+	// untyped types
+	types.Typ[types.UntypedBool],
+	types.Typ[types.UntypedInt],
+	types.Typ[types.UntypedRune],
+	types.Typ[types.UntypedFloat],
+	types.Typ[types.UntypedComplex],
+	types.Typ[types.UntypedString],
+	types.Typ[types.UntypedNil],
+
+	// package unsafe
+	types.Typ[types.UnsafePointer],
+
+	// invalid type
+	types.Typ[types.Invalid], // only appears in packages with errors
+
+	// used internally by gc; never used by this package or in .a files
+	anyType{},
+}
+
+type anyType struct{}
+
+func (t anyType) Underlying() types.Type { return t }
+func (t anyType) String() string         { return "any" }
diff --git a/compiler/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go b/compiler/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go
new file mode 100644
index 0000000..f33dc56
--- /dev/null
+++ b/compiler/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go
@@ -0,0 +1,93 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
+
+// This file implements FindExportData.
+
+package gcimporter
+
+import (
+	"bufio"
+	"fmt"
+	"io"
+	"strconv"
+	"strings"
+)
+
+func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
+	// See $GOROOT/include/ar.h.
+	hdr := make([]byte, 16+12+6+6+8+10+2)
+	_, err = io.ReadFull(r, hdr)
+	if err != nil {
+		return
+	}
+	// leave for debugging
+	if false {
+		fmt.Printf("header: %s", hdr)
+	}
+	s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
+	size, err = strconv.Atoi(s)
+	if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
+		err = fmt.Errorf("invalid archive header")
+		return
+	}
+	name = strings.TrimSpace(string(hdr[:16]))
+	return
+}
+
+// FindExportData positions the reader r at the beginning of the
+// export data section of an underlying GC-created object/archive
+// file by reading from it. The reader must be positioned at the
+// start of the file before calling this function. The hdr result
+// is the string before the export data, either "$$" or "$$B".
+//
+func FindExportData(r *bufio.Reader) (hdr string, err error) {
+	// Read first line to make sure this is an object file.
+	line, err := r.ReadSlice('\n')
+	if err != nil {
+		err = fmt.Errorf("can't find export data (%v)", err)
+		return
+	}
+
+	if string(line) == "!<arch>\n" {
+		// Archive file. Scan to __.PKGDEF.
+		var name string
+		if name, _, err = readGopackHeader(r); err != nil {
+			return
+		}
+
+		// First entry should be __.PKGDEF.
+		if name != "__.PKGDEF" {
+			err = fmt.Errorf("go archive is missing __.PKGDEF")
+			return
+		}
+
+		// Read first line of __.PKGDEF data, so that line
+		// is once again the first line of the input.
+		if line, err = r.ReadSlice('\n'); err != nil {
+			err = fmt.Errorf("can't find export data (%v)", err)
+			return
+		}
+	}
+
+	// Now at __.PKGDEF in archive or still at beginning of file.
+	// Either way, line should begin with "go object ".
+	if !strings.HasPrefix(string(line), "go object ") {
+		err = fmt.Errorf("not a Go object file")
+		return
+	}
+
+	// Skip over object header to export data.
+	// Begins after first line starting with $$.
+	for line[0] != '$' {
+		if line, err = r.ReadSlice('\n'); err != nil {
+			err = fmt.Errorf("can't find export data (%v)", err)
+			return
+		}
+	}
+	hdr = string(line)
+
+	return
+}
diff --git a/compiler/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go b/compiler/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go
new file mode 100644
index 0000000..6fbc9d7
--- /dev/null
+++ b/compiler/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go
@@ -0,0 +1,1041 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go,
+// but it also contains the original source-based importer code for Go1.6.
+// Once we stop supporting 1.6, we can remove that code.
+
+// Package gcimporter15 provides various functions for reading
+// gc-generated object files that can be used to implement the
+// Importer interface defined by the Go 1.5 standard library package.
+//
+// Deprecated: this package will be deleted in October 2017.
+// New code should use golang.org/x/tools/go/gcexportdata.
+//
+package gcimporter // import "golang.org/x/tools/go/gcimporter15"
+
+import (
+	"bufio"
+	"errors"
+	"fmt"
+	"go/build"
+	exact "go/constant"
+	"go/token"
+	"go/types"
+	"io"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"sort"
+	"strconv"
+	"strings"
+	"text/scanner"
+)
+
+// debugging/development support
+const debug = false
+
+var pkgExts = [...]string{".a", ".o"}
+
+// FindPkg returns the filename and unique package id for an import
+// path based on package information provided by build.Import (using
+// the build.Default build.Context). A relative srcDir is interpreted
+// relative to the current working directory.
+// If no file was found, an empty filename is returned.
+//
+func FindPkg(path, srcDir string) (filename, id string) {
+	if path == "" {
+		return
+	}
+
+	var noext string
+	switch {
+	default:
+		// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
+		// Don't require the source files to be present.
+		if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
+			srcDir = abs
+		}
+		bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
+		if bp.PkgObj == "" {
+			return
+		}
+		noext = strings.TrimSuffix(bp.PkgObj, ".a")
+		id = bp.ImportPath
+
+	case build.IsLocalImport(path):
+		// "./x" -> "/this/directory/x.ext", "/this/directory/x"
+		noext = filepath.Join(srcDir, path)
+		id = noext
+
+	case filepath.IsAbs(path):
+		// for completeness only - go/build.Import
+		// does not support absolute imports
+		// "/x" -> "/x.ext", "/x"
+		noext = path
+		id = path
+	}
+
+	if false { // for debugging
+		if path != id {
+			fmt.Printf("%s -> %s\n", path, id)
+		}
+	}
+
+	// try extensions
+	for _, ext := range pkgExts {
+		filename = noext + ext
+		if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+			return
+		}
+	}
+
+	filename = "" // not found
+	return
+}
+
+// ImportData imports a package by reading the gc-generated export data,
+// adds the corresponding package object to the packages map indexed by id,
+// and returns the object.
+//
+// The packages map must contain all packages already imported. The data
+// reader position must be the beginning of the export data section. The
+// filename is only used in error messages.
+//
+// If packages[id] contains the completely imported package, that package
+// can be used directly, and there is no need to call this function (doing
+// so anyway is harmless apart from the extra time spent).
+//
+func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
+	// support for parser error handling
+	defer func() {
+		switch r := recover().(type) {
+		case nil:
+			// nothing to do
+		case importError:
+			err = r
+		default:
+			panic(r) // internal error
+		}
+	}()
+
+	var p parser
+	p.init(filename, id, data, packages)
+	pkg = p.parseExport()
+
+	return
+}
+
+// Import imports a gc-generated package given its import path and srcDir, adds
+// the corresponding package object to the packages map, and returns the object.
+// The packages map must contain all packages already imported.
+//
+func Import(packages map[string]*types.Package, path, srcDir string) (pkg *types.Package, err error) {
+	filename, id := FindPkg(path, srcDir)
+	if filename == "" {
+		if path == "unsafe" {
+			return types.Unsafe, nil
+		}
+		err = fmt.Errorf("can't find import: %s", id)
+		return
+	}
+
+	// no need to re-import if the package was imported completely before
+	if pkg = packages[id]; pkg != nil && pkg.Complete() {
+		return
+	}
+
+	// open file
+	f, err := os.Open(filename)
+	if err != nil {
+		return
+	}
+	defer func() {
+		f.Close()
+		if err != nil {
+			// add file name to error
+			err = fmt.Errorf("reading export data: %s: %v", filename, err)
+		}
+	}()
+
+	var hdr string
+	buf := bufio.NewReader(f)
+	if hdr, err = FindExportData(buf); err != nil {
+		return
+	}
+
+	switch hdr {
+	case "$$\n":
+		return ImportData(packages, filename, id, buf)
+	case "$$B\n":
+		var data []byte
+		data, err = ioutil.ReadAll(buf)
+		if err == nil {
+			fset := token.NewFileSet()
+			_, pkg, err = BImportData(fset, packages, data, id)
+			return
+		}
+	default:
+		err = fmt.Errorf("unknown export data header: %q", hdr)
+	}
+
+	return
+}
+
+// ----------------------------------------------------------------------------
+// Parser
+
+// TODO(gri) Imported objects don't have position information.
+//           Ideally use the debug table line info; alternatively
+//           create some fake position (or the position of the
+//           import). That way error messages referring to imported
+//           objects can print meaningful information.
+
+// parser parses the exports inside a gc compiler-produced
+// object/archive file and populates its scope with the results.
+type parser struct {
+	scanner    scanner.Scanner
+	tok        rune                      // current token
+	lit        string                    // literal string; only valid for Ident, Int, String tokens
+	id         string                    // package id of imported package
+	sharedPkgs map[string]*types.Package // package id -> package object (across importer)
+	localPkgs  map[string]*types.Package // package id -> package object (just this package)
+}
+
+func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
+	p.scanner.Init(src)
+	p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
+	p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
+	p.scanner.Whitespace = 1<<'\t' | 1<<' '
+	p.scanner.Filename = filename // for good error messages
+	p.next()
+	p.id = id
+	p.sharedPkgs = packages
+	if debug {
+		// check consistency of packages map
+		for _, pkg := range packages {
+			if pkg.Name() == "" {
+				fmt.Printf("no package name for %s\n", pkg.Path())
+			}
+		}
+	}
+}
+
+func (p *parser) next() {
+	p.tok = p.scanner.Scan()
+	switch p.tok {
+	case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
+		p.lit = p.scanner.TokenText()
+	default:
+		p.lit = ""
+	}
+	if debug {
+		fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
+	}
+}
+
+func declTypeName(pkg *types.Package, name string) *types.TypeName {
+	scope := pkg.Scope()
+	if obj := scope.Lookup(name); obj != nil {
+		return obj.(*types.TypeName)
+	}
+	obj := types.NewTypeName(token.NoPos, pkg, name, nil)
+	// a named type may be referred to before the underlying type
+	// is known - set it up
+	types.NewNamed(obj, nil, nil)
+	scope.Insert(obj)
+	return obj
+}
+
+// ----------------------------------------------------------------------------
+// Error handling
+
+// Internal errors are boxed as importErrors.
+type importError struct {
+	pos scanner.Position
+	err error
+}
+
+func (e importError) Error() string {
+	return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
+}
+
+func (p *parser) error(err interface{}) {
+	if s, ok := err.(string); ok {
+		err = errors.New(s)
+	}
+	// panic with a runtime.Error if err is not an error
+	panic(importError{p.scanner.Pos(), err.(error)})
+}
+
+func (p *parser) errorf(format string, args ...interface{}) {
+	p.error(fmt.Sprintf(format, args...))
+}
+
+func (p *parser) expect(tok rune) string {
+	lit := p.lit
+	if p.tok != tok {
+		p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
+	}
+	p.next()
+	return lit
+}
+
+func (p *parser) expectSpecial(tok string) {
+	sep := 'x' // not white space
+	i := 0
+	for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
+		sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+		p.next()
+		i++
+	}
+	if i < len(tok) {
+		p.errorf("expected %q, got %q", tok, tok[0:i])
+	}
+}
+
+func (p *parser) expectKeyword(keyword string) {
+	lit := p.expect(scanner.Ident)
+	if lit != keyword {
+		p.errorf("expected keyword %s, got %q", keyword, lit)
+	}
+}
+
+// ----------------------------------------------------------------------------
+// Qualified and unqualified names
+
+// PackageId = string_lit .
+//
+func (p *parser) parsePackageId() string {
+	id, err := strconv.Unquote(p.expect(scanner.String))
+	if err != nil {
+		p.error(err)
+	}
+	// id == "" stands for the imported package id
+	// (only known at time of package installation)
+	if id == "" {
+		id = p.id
+	}
+	return id
+}
+
+// PackageName = ident .
+//
+func (p *parser) parsePackageName() string {
+	return p.expect(scanner.Ident)
+}
+
+// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
+func (p *parser) parseDotIdent() string {
+	ident := ""
+	if p.tok != scanner.Int {
+		sep := 'x' // not white space
+		for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
+			ident += p.lit
+			sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+			p.next()
+		}
+	}
+	if ident == "" {
+		p.expect(scanner.Ident) // use expect() for error handling
+	}
+	return ident
+}
+
+// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
+//
+func (p *parser) parseQualifiedName() (id, name string) {
+	p.expect('@')
+	id = p.parsePackageId()
+	p.expect('.')
+	// Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
+	if p.tok == '?' {
+		p.next()
+	} else {
+		name = p.parseDotIdent()
+	}
+	return
+}
+
+// getPkg returns the package for a given id. If the package is
+// not found, create the package and add it to the p.localPkgs
+// and p.sharedPkgs maps. name is the (expected) name of the
+// package. If name == "", the package name is expected to be
+// set later via an import clause in the export data.
+//
+// id identifies a package, usually by a canonical package path like
+// "encoding/json" but possibly by a non-canonical import path like
+// "./json".
+//
+func (p *parser) getPkg(id, name string) *types.Package {
+	// package unsafe is not in the packages maps - handle explicitly
+	if id == "unsafe" {
+		return types.Unsafe
+	}
+
+	pkg := p.localPkgs[id]
+	if pkg == nil {
+		// first import of id from this package
+		pkg = p.sharedPkgs[id]
+		if pkg == nil {
+			// first import of id by this importer;
+			// add (possibly unnamed) pkg to shared packages
+			pkg = types.NewPackage(id, name)
+			p.sharedPkgs[id] = pkg
+		}
+		// add (possibly unnamed) pkg to local packages
+		if p.localPkgs == nil {
+			p.localPkgs = make(map[string]*types.Package)
+		}
+		p.localPkgs[id] = pkg
+	} else if name != "" {
+		// package exists already and we have an expected package name;
+		// make sure names match or set package name if necessary
+		if pname := pkg.Name(); pname == "" {
+			pkg.SetName(name)
+		} else if pname != name {
+			p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name)
+		}
+	}
+	return pkg
+}
+
+// parseExportedName is like parseQualifiedName, but
+// the package id is resolved to an imported *types.Package.
+//
+func (p *parser) parseExportedName() (pkg *types.Package, name string) {
+	id, name := p.parseQualifiedName()
+	pkg = p.getPkg(id, "")
+	return
+}
+
+// ----------------------------------------------------------------------------
+// Types
+
+// BasicType = identifier .
+//
+func (p *parser) parseBasicType() types.Type {
+	id := p.expect(scanner.Ident)
+	obj := types.Universe.Lookup(id)
+	if obj, ok := obj.(*types.TypeName); ok {
+		return obj.Type()
+	}
+	p.errorf("not a basic type: %s", id)
+	return nil
+}
+
+// ArrayType = "[" int_lit "]" Type .
+//
+func (p *parser) parseArrayType(parent *types.Package) types.Type {
+	// "[" already consumed and lookahead known not to be "]"
+	lit := p.expect(scanner.Int)
+	p.expect(']')
+	elem := p.parseType(parent)
+	n, err := strconv.ParseInt(lit, 10, 64)
+	if err != nil {
+		p.error(err)
+	}
+	return types.NewArray(elem, n)
+}
+
+// MapType = "map" "[" Type "]" Type .
+//
+func (p *parser) parseMapType(parent *types.Package) types.Type {
+	p.expectKeyword("map")
+	p.expect('[')
+	key := p.parseType(parent)
+	p.expect(']')
+	elem := p.parseType(parent)
+	return types.NewMap(key, elem)
+}
+
+// Name = identifier | "?" | QualifiedName .
+//
+// For unqualified and anonymous names, the returned package is the parent
+// package unless parent == nil, in which case the returned package is the
+// package being imported. (The parent package is not nil if the name
+// is an unqualified struct field or interface method name belonging to a
+// type declared in another package.)
+//
+// For qualified names, the returned package is nil (and not created if
+// it doesn't exist yet) unless materializePkg is set (which creates an
+// unnamed package with valid package path). In the latter case, a
+// subsequent import clause is expected to provide a name for the package.
+//
+func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
+	pkg = parent
+	if pkg == nil {
+		pkg = p.sharedPkgs[p.id]
+	}
+	switch p.tok {
+	case scanner.Ident:
+		name = p.lit
+		p.next()
+	case '?':
+		// anonymous
+		p.next()
+	case '@':
+		// exported name prefixed with package path
+		pkg = nil
+		var id string
+		id, name = p.parseQualifiedName()
+		if materializePkg {
+			pkg = p.getPkg(id, "")
+		}
+	default:
+		p.error("name expected")
+	}
+	return
+}
+
+func deref(typ types.Type) types.Type {
+	if p, _ := typ.(*types.Pointer); p != nil {
+		return p.Elem()
+	}
+	return typ
+}
+
+// Field = Name Type [ string_lit ] .
+//
+func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
+	pkg, name := p.parseName(parent, true)
+
+	if name == "_" {
+		// Blank fields should be package-qualified because they
+		// are unexported identifiers, but gc does not qualify them.
+		// Assuming that the ident belongs to the current package
+		// causes types to change during re-exporting, leading
+		// to spurious "can't assign A to B" errors from go/types.
+		// As a workaround, pretend all blank fields belong
+		// to the same unique dummy package.
+		const blankpkg = "<_>"
+		pkg = p.getPkg(blankpkg, blankpkg)
+	}
+
+	typ := p.parseType(parent)
+	anonymous := false
+	if name == "" {
+		// anonymous field - typ must be T or *T and T must be a type name
+		switch typ := deref(typ).(type) {
+		case *types.Basic: // basic types are named types
+			pkg = nil // objects defined in Universe scope have no package
+			name = typ.Name()
+		case *types.Named:
+			name = typ.Obj().Name()
+		default:
+			p.errorf("anonymous field expected")
+		}
+		anonymous = true
+	}
+	tag := ""
+	if p.tok == scanner.String {
+		s := p.expect(scanner.String)
+		var err error
+		tag, err = strconv.Unquote(s)
+		if err != nil {
+			p.errorf("invalid struct tag %s: %s", s, err)
+		}
+	}
+	return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
+}
+
+// StructType = "struct" "{" [ FieldList ] "}" .
+// FieldList  = Field { ";" Field } .
+//
+func (p *parser) parseStructType(parent *types.Package) types.Type {
+	var fields []*types.Var
+	var tags []string
+
+	p.expectKeyword("struct")
+	p.expect('{')
+	for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+		if i > 0 {
+			p.expect(';')
+		}
+		fld, tag := p.parseField(parent)
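+		// The tags slice is allocated lazily: it stays nil unless some field
+		// carries a tag; once one does, earlier fields are padded with empty strings.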
+		if tag != "" && tags == nil {
+			tags = make([]string, i)
+		}
+		if tags != nil {
+			tags = append(tags, tag)
+		}
+		fields = append(fields, fld)
+	}
+	p.expect('}')
+
+	return types.NewStruct(fields, tags)
+}
+
+// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
+//
+func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
+	_, name := p.parseName(nil, false)
+	// remove gc-specific parameter numbering
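+	// (e.g. a name such as "a·2" is shortened to "a")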
+	if i := strings.Index(name, "·"); i >= 0 {
+		name = name[:i]
+	}
+	if p.tok == '.' {
+		p.expectSpecial("...")
+		isVariadic = true
+	}
+	typ := p.parseType(nil)
+	if isVariadic {
+		typ = types.NewSlice(typ)
+	}
+	// ignore argument tag (e.g. "noescape")
+	if p.tok == scanner.String {
+		p.next()
+	}
+	// TODO(gri) should we provide a package?
+	par = types.NewVar(token.NoPos, nil, name, typ)
+	return
+}
+
+// Parameters    = "(" [ ParameterList ] ")" .
+// ParameterList = { Parameter "," } Parameter .
+//
+func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
+	p.expect('(')
+	for p.tok != ')' && p.tok != scanner.EOF {
+		if len(list) > 0 {
+			p.expect(',')
+		}
+		par, variadic := p.parseParameter()
+		list = append(list, par)
+		if variadic {
+			if isVariadic {
+				p.error("... not on final argument")
+			}
+			isVariadic = true
+		}
+	}
+	p.expect(')')
+
+	return
+}
+
+// Signature = Parameters [ Result ] .
+// Result    = Type | Parameters .
+//
+func (p *parser) parseSignature(recv *types.Var) *types.Signature {
+	params, isVariadic := p.parseParameters()
+
+	// optional result type
+	var results []*types.Var
+	if p.tok == '(' {
+		var variadic bool
+		results, variadic = p.parseParameters()
+		if variadic {
+			p.error("... not permitted on result type")
+		}
+	}
+
+	return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
+}
+
+// InterfaceType = "interface" "{" [ MethodList ] "}" .
+// MethodList    = Method { ";" Method } .
+// Method        = Name Signature .
+//
+// The methods of embedded interfaces are always "inlined"
+// by the compiler and thus embedded interfaces are never
+// visible in the export data.
+//
+func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
+	var methods []*types.Func
+
+	p.expectKeyword("interface")
+	p.expect('{')
+	for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+		if i > 0 {
+			p.expect(';')
+		}
+		pkg, name := p.parseName(parent, true)
+		sig := p.parseSignature(nil)
+		methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
+	}
+	p.expect('}')
+
+	// Complete requires the type's embedded interfaces to be fully defined,
+	// but we do not define any
+	return types.NewInterface(methods, nil).Complete()
+}
+
+// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
+//
+func (p *parser) parseChanType(parent *types.Package) types.Type {
+	dir := types.SendRecv
+	if p.tok == scanner.Ident {
+		p.expectKeyword("chan")
+		if p.tok == '<' {
+			p.expectSpecial("<-")
+			dir = types.SendOnly
+		}
+	} else {
+		p.expectSpecial("<-")
+		p.expectKeyword("chan")
+		dir = types.RecvOnly
+	}
+	elem := p.parseType(parent)
+	return types.NewChan(dir, elem)
+}
+
+// Type =
+//	BasicType | TypeName | ArrayType | SliceType | StructType |
+//      PointerType | FuncType | InterfaceType | MapType | ChanType |
+//      "(" Type ")" .
+//
+// BasicType   = ident .
+// TypeName    = ExportedName .
+// SliceType   = "[" "]" Type .
+// PointerType = "*" Type .
+// FuncType    = "func" Signature .
+//
+func (p *parser) parseType(parent *types.Package) types.Type {
+	switch p.tok {
+	case scanner.Ident:
+		switch p.lit {
+		default:
+			return p.parseBasicType()
+		case "struct":
+			return p.parseStructType(parent)
+		case "func":
+			// FuncType
+			p.next()
+			return p.parseSignature(nil)
+		case "interface":
+			return p.parseInterfaceType(parent)
+		case "map":
+			return p.parseMapType(parent)
+		case "chan":
+			return p.parseChanType(parent)
+		}
+	case '@':
+		// TypeName
+		pkg, name := p.parseExportedName()
+		return declTypeName(pkg, name).Type()
+	case '[':
+		p.next() // look ahead
+		if p.tok == ']' {
+			// SliceType
+			p.next()
+			return types.NewSlice(p.parseType(parent))
+		}
+		return p.parseArrayType(parent)
+	case '*':
+		// PointerType
+		p.next()
+		return types.NewPointer(p.parseType(parent))
+	case '<':
+		return p.parseChanType(parent)
+	case '(':
+		// "(" Type ")"
+		p.next()
+		typ := p.parseType(parent)
+		p.expect(')')
+		return typ
+	}
+	p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
+	return nil
+}
+
+// ----------------------------------------------------------------------------
+// Declarations
+
+// ImportDecl = "import" PackageName PackageId .
+//
+func (p *parser) parseImportDecl() {
+	p.expectKeyword("import")
+	name := p.parsePackageName()
+	p.getPkg(p.parsePackageId(), name)
+}
+
+// int_lit = [ "+" | "-" ] { "0" ... "9" } .
+//
+func (p *parser) parseInt() string {
+	s := ""
+	switch p.tok {
+	case '-':
+		s = "-"
+		p.next()
+	case '+':
+		p.next()
+	}
+	return s + p.expect(scanner.Int)
+}
+
+// number = int_lit [ "p" int_lit ] .
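+// The optional exponent is to base 2; for example, 3p-2 denotes 3 * 2^-2 = 0.75.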
+//
+func (p *parser) parseNumber() (typ *types.Basic, val exact.Value) {
+	// mantissa
+	mant := exact.MakeFromLiteral(p.parseInt(), token.INT, 0)
+	if mant == nil {
+		panic("invalid mantissa")
+	}
+
+	if p.lit == "p" {
+		// exponent (base 2)
+		p.next()
+		exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
+		if err != nil {
+			p.error(err)
+		}
+		if exp < 0 {
+			denom := exact.MakeInt64(1)
+			denom = exact.Shift(denom, token.SHL, uint(-exp))
+			typ = types.Typ[types.UntypedFloat]
+			val = exact.BinaryOp(mant, token.QUO, denom)
+			return
+		}
+		if exp > 0 {
+			mant = exact.Shift(mant, token.SHL, uint(exp))
+		}
+		typ = types.Typ[types.UntypedFloat]
+		val = mant
+		return
+	}
+
+	typ = types.Typ[types.UntypedInt]
+	val = mant
+	return
+}
+
+// ConstDecl   = "const" ExportedName [ Type ] "=" Literal .
+// Literal     = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
+// bool_lit    = "true" | "false" .
+// complex_lit = "(" float_lit "+" float_lit "i" ")" .
+// rune_lit    = "(" int_lit "+" int_lit ")" .
+// string_lit  = `"` { unicode_char } `"` .
+//
+func (p *parser) parseConstDecl() {
+	p.expectKeyword("const")
+	pkg, name := p.parseExportedName()
+
+	var typ0 types.Type
+	if p.tok != '=' {
+		// constant types are never structured - no need for parent type
+		typ0 = p.parseType(nil)
+	}
+
+	p.expect('=')
+	var typ types.Type
+	var val exact.Value
+	switch p.tok {
+	case scanner.Ident:
+		// bool_lit
+		if p.lit != "true" && p.lit != "false" {
+			p.error("expected true or false")
+		}
+		typ = types.Typ[types.UntypedBool]
+		val = exact.MakeBool(p.lit == "true")
+		p.next()
+
+	case '-', scanner.Int:
+		// int_lit
+		typ, val = p.parseNumber()
+
+	case '(':
+		// complex_lit or rune_lit
+		p.next()
+		if p.tok == scanner.Char {
+			p.next()
+			p.expect('+')
+			typ = types.Typ[types.UntypedRune]
+			_, val = p.parseNumber()
+			p.expect(')')
+			break
+		}
+		_, re := p.parseNumber()
+		p.expect('+')
+		_, im := p.parseNumber()
+		p.expectKeyword("i")
+		p.expect(')')
+		typ = types.Typ[types.UntypedComplex]
+		val = exact.BinaryOp(re, token.ADD, exact.MakeImag(im))
+
+	case scanner.Char:
+		// rune_lit
+		typ = types.Typ[types.UntypedRune]
+		val = exact.MakeFromLiteral(p.lit, token.CHAR, 0)
+		p.next()
+
+	case scanner.String:
+		// string_lit
+		typ = types.Typ[types.UntypedString]
+		val = exact.MakeFromLiteral(p.lit, token.STRING, 0)
+		p.next()
+
+	default:
+		p.errorf("expected literal got %s", scanner.TokenString(p.tok))
+	}
+
+	if typ0 == nil {
+		typ0 = typ
+	}
+
+	pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
+}
+
+// TypeDecl = "type" ExportedName Type .
+//
+func (p *parser) parseTypeDecl() {
+	p.expectKeyword("type")
+	pkg, name := p.parseExportedName()
+	obj := declTypeName(pkg, name)
+
+	// The type object may have been imported before and thus already
+	// have a type associated with it. We still need to parse the type
+	// structure, but throw it away if the object already has a type.
+	// This ensures that all imports refer to the same type object for
+	// a given type declaration.
+	typ := p.parseType(pkg)
+
+	if name := obj.Type().(*types.Named); name.Underlying() == nil {
+		name.SetUnderlying(typ)
+	}
+}
+
+// VarDecl = "var" ExportedName Type .
+//
+func (p *parser) parseVarDecl() {
+	p.expectKeyword("var")
+	pkg, name := p.parseExportedName()
+	typ := p.parseType(pkg)
+	pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
+}
+
+// Func = Signature [ Body ] .
+// Body = "{" ... "}" .
+//
+func (p *parser) parseFunc(recv *types.Var) *types.Signature {
+	sig := p.parseSignature(recv)
+	if p.tok == '{' {
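+		// skip the (possibly inlined) function body by tracking brace nesting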
+		p.next()
+		for i := 1; i > 0; p.next() {
+			switch p.tok {
+			case '{':
+				i++
+			case '}':
+				i--
+			}
+		}
+	}
+	return sig
+}
+
+// MethodDecl = "func" Receiver Name Func .
+// Receiver   = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
+//
+func (p *parser) parseMethodDecl() {
+	// "func" already consumed
+	p.expect('(')
+	recv, _ := p.parseParameter() // receiver
+	p.expect(')')
+
+	// determine receiver base type object
+	base := deref(recv.Type()).(*types.Named)
+
+	// parse method name, signature, and possibly inlined body
+	_, name := p.parseName(nil, false)
+	sig := p.parseFunc(recv)
+
+	// methods always belong to the same package as the base type object
+	pkg := base.Obj().Pkg()
+
+	// add method to type unless type was imported before
+	// and method exists already
+	// TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
+	base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+}
+
+// FuncDecl = "func" ExportedName Func .
+//
+func (p *parser) parseFuncDecl() {
+	// "func" already consumed
+	pkg, name := p.parseExportedName()
+	typ := p.parseFunc(nil)
+	pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
+}
+
+// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
+//
+func (p *parser) parseDecl() {
+	if p.tok == scanner.Ident {
+		switch p.lit {
+		case "import":
+			p.parseImportDecl()
+		case "const":
+			p.parseConstDecl()
+		case "type":
+			p.parseTypeDecl()
+		case "var":
+			p.parseVarDecl()
+		case "func":
+			p.next() // look ahead
+			if p.tok == '(' {
+				p.parseMethodDecl()
+			} else {
+				p.parseFuncDecl()
+			}
+		}
+	}
+	p.expect('\n')
+}
+
+// ----------------------------------------------------------------------------
+// Export
+
+// Export        = PackageClause { Decl } "$$" .
+// PackageClause = "package" PackageName [ "safe" ] "\n" .
+//
+func (p *parser) parseExport() *types.Package {
+	p.expectKeyword("package")
+	name := p.parsePackageName()
+	if p.tok == scanner.Ident && p.lit == "safe" {
+		// package was compiled with -u option - ignore
+		p.next()
+	}
+	p.expect('\n')
+
+	pkg := p.getPkg(p.id, name)
+
+	for p.tok != '$' && p.tok != scanner.EOF {
+		p.parseDecl()
+	}
+
+	if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
+		// don't call next()/expect() since reading past the
+		// export data may cause scanner errors (e.g. NUL chars)
+		p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
+	}
+
+	if n := p.scanner.ErrorCount; n != 0 {
+		p.errorf("expected no scanner errors, got %d", n)
+	}
+
+	// Record all locally referenced packages as imports.
+	var imports []*types.Package
+	for id, pkg2 := range p.localPkgs {
+		if pkg2.Name() == "" {
+			p.errorf("%s package has no name", id)
+		}
+		if id == p.id {
+			continue // avoid self-edge
+		}
+		imports = append(imports, pkg2)
+	}
+	sort.Sort(byPath(imports))
+	pkg.SetImports(imports)
+
+	// package was imported completely and without errors
+	pkg.MarkComplete()
+
+	return pkg
+}
+
+type byPath []*types.Package
+
+func (a byPath) Len() int           { return len(a) }
+func (a byPath) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
+func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
diff --git a/compiler/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go b/compiler/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go
new file mode 100644
index 0000000..225ffee
--- /dev/null
+++ b/compiler/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go
@@ -0,0 +1,13 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !go1.9
+
+package gcimporter
+
+import "go/types"
+
+func isAlias(obj *types.TypeName) bool {
+	return false // there are no type aliases before Go 1.9
+}
diff --git a/compiler/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go b/compiler/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go
new file mode 100644
index 0000000..c2025d8
--- /dev/null
+++ b/compiler/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go
@@ -0,0 +1,13 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.9
+
+package gcimporter
+
+import "go/types"
+
+func isAlias(obj *types.TypeName) bool {
+	return obj.IsAlias()
+}
