Commit c9492649 by Ian Lance Taylor

libgo: update to Go 1.10.3 release

    
    Reviewed-on: https://go-review.googlesource.com/118495

From-SVN: r261549
parent 4dea3bff
bfe3a9b26c8b2e1b9ef34a7232a2d1529e639bbf 6743db0ed81e313acf66c00a4ed0e2dcaaca2c9f
The first line of this file holds the git revision number of the last
merge done from the gofrontend repository.
71bdbf431b79dff61944f22c25c7e085ccfc25d5 fe8a0d12b14108cbe2408b417afcaab722b0727c
The first line of this file holds the git revision number of the
last merge done from the master library sources.
...@@ -55,6 +55,13 @@ func parseMetaGoImports(r io.Reader) (imports []metaImport, err error) { ...@@ -55,6 +55,13 @@ func parseMetaGoImports(r io.Reader) (imports []metaImport, err error) {
continue continue
} }
if f := strings.Fields(attrValue(e.Attr, "content")); len(f) == 3 { if f := strings.Fields(attrValue(e.Attr, "content")); len(f) == 3 {
// Ignore VCS type "mod", which is new Go modules.
// This code is for old go get and must ignore the new mod lines.
// Otherwise matchGoImport will complain about two
// different metaImport lines for the same Prefix.
if f[1] == "mod" {
continue
}
imports = append(imports, metaImport{ imports = append(imports, metaImport{
Prefix: f[0], Prefix: f[0],
VCS: f[1], VCS: f[1],
......
...@@ -209,7 +209,7 @@ var downloadRootCache = map[string]bool{} ...@@ -209,7 +209,7 @@ var downloadRootCache = map[string]bool{}
// download runs the download half of the get command // download runs the download half of the get command
// for the package named by the argument. // for the package named by the argument.
func download(arg string, parent *load.Package, stk *load.ImportStack, mode int) { func download(arg string, parent *load.Package, stk *load.ImportStack, mode int) {
if mode&load.UseVendor != 0 { if mode&load.ResolveImport != 0 {
// Caller is responsible for expanding vendor paths. // Caller is responsible for expanding vendor paths.
panic("internal error: download mode has useVendor set") panic("internal error: download mode has useVendor set")
} }
...@@ -217,7 +217,7 @@ func download(arg string, parent *load.Package, stk *load.ImportStack, mode int) ...@@ -217,7 +217,7 @@ func download(arg string, parent *load.Package, stk *load.ImportStack, mode int)
if parent == nil { if parent == nil {
return load.LoadPackage(path, stk) return load.LoadPackage(path, stk)
} }
return load.LoadImport(path, parent.Dir, parent, stk, nil, mode) return load.LoadImport(path, parent.Dir, parent, stk, nil, mode|load.ResolveModule)
} }
p := load1(arg, mode) p := load1(arg, mode)
...@@ -346,12 +346,12 @@ func download(arg string, parent *load.Package, stk *load.ImportStack, mode int) ...@@ -346,12 +346,12 @@ func download(arg string, parent *load.Package, stk *load.ImportStack, mode int)
base.Errorf("%s", err) base.Errorf("%s", err)
continue continue
} }
// If this is a test import, apply vendor lookup now. // If this is a test import, apply module and vendor lookup now.
// We cannot pass useVendor to download, because // We cannot pass ResolveImport to download, because
// download does caching based on the value of path, // download does caching based on the value of path,
// so it must be the fully qualified path already. // so it must be the fully qualified path already.
if i >= len(p.Imports) { if i >= len(p.Imports) {
path = load.VendoredImportPath(p, path) path = load.ResolveImportPath(p, path)
} }
download(path, p, stk, 0) download(path, p, stk, 0)
} }
......
...@@ -48,6 +48,20 @@ var parseMetaGoImportsTests = []struct { ...@@ -48,6 +48,20 @@ var parseMetaGoImportsTests = []struct {
}, },
}, },
{ {
`<meta name="go-import" content="foo/bar git https://github.com/rsc/foo/bar">
<meta name="go-import" content="foo/bar mod http://github.com/rsc/baz/quux">`,
[]metaImport{
{"foo/bar", "git", "https://github.com/rsc/foo/bar"},
},
},
{
`<meta name="go-import" content="foo/bar mod http://github.com/rsc/baz/quux">
<meta name="go-import" content="foo/bar git https://github.com/rsc/foo/bar">`,
[]metaImport{
{"foo/bar", "git", "https://github.com/rsc/foo/bar"},
},
},
{
`<head> `<head>
<meta name="go-import" content="foo/bar git https://github.com/rsc/foo/bar"> <meta name="go-import" content="foo/bar git https://github.com/rsc/foo/bar">
</head>`, </head>`,
......
...@@ -218,8 +218,8 @@ func runList(cmd *base.Command, args []string) { ...@@ -218,8 +218,8 @@ func runList(cmd *base.Command, args []string) {
for _, pkg := range pkgs { for _, pkg := range pkgs {
// Show vendor-expanded paths in listing // Show vendor-expanded paths in listing
pkg.TestImports = pkg.Vendored(pkg.TestImports) pkg.TestImports = pkg.Resolve(pkg.TestImports)
pkg.XTestImports = pkg.Vendored(pkg.XTestImports) pkg.XTestImports = pkg.Resolve(pkg.XTestImports)
do(&pkg.PackagePublic) do(&pkg.PackagePublic)
} }
......
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package load
import (
"bytes"
"encoding/json"
"errors"
"io/ioutil"
)
// DebugDeprecatedImportcfg is installed as the undocumented -debug-deprecated-importcfg build flag.
// It is useful for debugging subtle problems in the go command logic but not something
// we want users to depend on. The hope is that the "deprecated" will make that clear.
// We intend to remove this flag in Go 1.11.
// The flag value is the name of a configuration file; see Set for the format.
var DebugDeprecatedImportcfg debugDeprecatedImportcfgFlag
// debugDeprecatedImportcfgFlag holds the parsed contents of a
// -debug-deprecated-importcfg file: a global import-path rewrite map
// plus per-package directory and rewrite overrides.
type debugDeprecatedImportcfgFlag struct {
	enabled bool                                    // set once a config file has been loaded successfully
	Import  map[string]string                       // global rewrite: import path -> replacement path
	Pkg     map[string]*debugDeprecatedImportcfgPkg // per-package info, keyed by import path
}
// debugDeprecatedImportcfgPkg describes one package in the
// -debug-deprecated-importcfg file: the directory holding its source
// and rewrites that apply only to imports made from that package.
type debugDeprecatedImportcfgPkg struct {
	Dir    string            // directory containing the package source
	Import map[string]string // local rewrite: import path -> replacement path
}
var (
	// debugDeprecatedImportcfgMagic is the required first line of a
	// -debug-deprecated-importcfg file; everything after it is JSON.
	debugDeprecatedImportcfgMagic = []byte("# debug-deprecated-importcfg\n")
	// errImportcfgSyntax reports a file that is missing the magic line
	// or whose JSON payload does not parse.
	errImportcfgSyntax = errors.New("malformed syntax")
)
func (f *debugDeprecatedImportcfgFlag) String() string { return "" }
// Set implements flag.Value. An empty argument resets the flag to its
// disabled state; otherwise the argument names a file whose first line
// must be the magic comment and whose remainder is JSON describing the
// import rewrites.
func (f *debugDeprecatedImportcfgFlag) Set(file string) error {
	if file == "" {
		// Clear everything, including the enabled bit.
		*f = debugDeprecatedImportcfgFlag{}
		return nil
	}

	raw, err := ioutil.ReadFile(file)
	if err != nil {
		return err
	}
	if !bytes.HasPrefix(raw, debugDeprecatedImportcfgMagic) {
		return errImportcfgSyntax
	}

	// Drop any previously loaded configuration before decoding.
	f.Import, f.Pkg = nil, nil
	if err := json.Unmarshal(raw[len(debugDeprecatedImportcfgMagic):], &f); err != nil {
		return errImportcfgSyntax
	}
	f.enabled = true
	return nil
}
// lookup resolves path as imported from parent under the debug importcfg.
// It returns the package directory and the rewritten import path, or
// "", "" when the configuration has no entry for the (possibly rewritten)
// path.
func (f *debugDeprecatedImportcfgFlag) lookup(parent *Package, path string) (dir, newPath string) {
	newPath = path

	// Apply the global rewrite first, then any per-package override,
	// which takes precedence.
	if repl := f.Import[path]; repl != "" {
		newPath = repl
	}
	if parent != nil {
		if pkgCfg := f.Pkg[parent.ImportPath]; pkgCfg != nil {
			if repl := pkgCfg.Import[path]; repl != "" {
				newPath = repl
			}
		}
	}

	if target := f.Pkg[newPath]; target != nil {
		return target.Dir, newPath
	}
	return "", ""
}
...@@ -6,6 +6,7 @@ ...@@ -6,6 +6,7 @@
package load package load
import ( import (
"bytes"
"fmt" "fmt"
"go/build" "go/build"
"go/token" "go/token"
...@@ -14,6 +15,7 @@ import ( ...@@ -14,6 +15,7 @@ import (
pathpkg "path" pathpkg "path"
"path/filepath" "path/filepath"
"sort" "sort"
"strconv"
"strings" "strings"
"unicode" "unicode"
"unicode/utf8" "unicode/utf8"
...@@ -168,7 +170,7 @@ func (e *NoGoError) Error() string { ...@@ -168,7 +170,7 @@ func (e *NoGoError) Error() string {
return "no Go files in " + e.Package.Dir return "no Go files in " + e.Package.Dir
} }
// Vendored returns the vendor-resolved version of imports, // Resolve returns the resolved version of imports,
// which should be p.TestImports or p.XTestImports, NOT p.Imports. // which should be p.TestImports or p.XTestImports, NOT p.Imports.
// The imports in p.TestImports and p.XTestImports are not recursively // The imports in p.TestImports and p.XTestImports are not recursively
// loaded during the initial load of p, so they list the imports found in // loaded during the initial load of p, so they list the imports found in
...@@ -178,14 +180,14 @@ func (e *NoGoError) Error() string { ...@@ -178,14 +180,14 @@ func (e *NoGoError) Error() string {
// can produce better error messages if it starts with the original paths. // can produce better error messages if it starts with the original paths.
// The initial load of p loads all the non-test imports and rewrites // The initial load of p loads all the non-test imports and rewrites
// the vendored paths, so nothing should ever call p.vendored(p.Imports). // the vendored paths, so nothing should ever call p.vendored(p.Imports).
func (p *Package) Vendored(imports []string) []string { func (p *Package) Resolve(imports []string) []string {
if len(imports) > 0 && len(p.Imports) > 0 && &imports[0] == &p.Imports[0] { if len(imports) > 0 && len(p.Imports) > 0 && &imports[0] == &p.Imports[0] {
panic("internal error: p.vendored(p.Imports) called") panic("internal error: p.Resolve(p.Imports) called")
} }
seen := make(map[string]bool) seen := make(map[string]bool)
var all []string var all []string
for _, path := range imports { for _, path := range imports {
path = VendoredImportPath(p, path) path = ResolveImportPath(p, path)
if !seen[path] { if !seen[path] {
seen[path] = true seen[path] = true
all = append(all, path) all = append(all, path)
...@@ -380,16 +382,20 @@ func makeImportValid(r rune) rune { ...@@ -380,16 +382,20 @@ func makeImportValid(r rune) rune {
// Mode flags for loadImport and download (in get.go). // Mode flags for loadImport and download (in get.go).
const ( const (
// UseVendor means that loadImport should do vendor expansion // ResolveImport means that loadImport should do import path expansion.
// (provided the vendoring experiment is enabled). // That is, ResolveImport means that the import path came from
// That is, useVendor means that the import path came from // a source file and has not been expanded yet to account for
// a source file and has not been vendor-expanded yet. // vendoring or possible module adjustment.
// Every import path should be loaded initially with useVendor, // Every import path should be loaded initially with ResolveImport,
// and then the expanded version (with the /vendor/ in it) gets // and then the expanded version (for example with the /vendor/ in it)
// recorded as the canonical import path. At that point, future loads // gets recorded as the canonical import path. At that point, future loads
// of that package must not pass useVendor, because // of that package must not pass ResolveImport, because
// disallowVendor will reject direct use of paths containing /vendor/. // disallowVendor will reject direct use of paths containing /vendor/.
UseVendor = 1 << iota ResolveImport = 1 << iota
// ResolveModule is for download (part of "go get") and indicates
// that the module adjustment should be done, but not vendor adjustment.
ResolveModule
// GetTestDeps is for download (part of "go get") and indicates // GetTestDeps is for download (part of "go get") and indicates
// that test dependencies should be fetched too. // that test dependencies should be fetched too.
...@@ -412,20 +418,17 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo ...@@ -412,20 +418,17 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo
importPath := path importPath := path
origPath := path origPath := path
isLocal := build.IsLocalImport(path) isLocal := build.IsLocalImport(path)
var debugDeprecatedImportcfgDir string
if isLocal { if isLocal {
importPath = dirToImportPath(filepath.Join(srcDir, path)) importPath = dirToImportPath(filepath.Join(srcDir, path))
} else if DebugDeprecatedImportcfg.enabled { } else if mode&ResolveImport != 0 {
if d, i := DebugDeprecatedImportcfg.lookup(parent, path); d != "" { // We do our own path resolution, because we want to
debugDeprecatedImportcfgDir = d
importPath = i
}
} else if mode&UseVendor != 0 {
// We do our own vendor resolution, because we want to
// find out the key to use in packageCache without the // find out the key to use in packageCache without the
// overhead of repeated calls to buildContext.Import. // overhead of repeated calls to buildContext.Import.
// The code is also needed in a few other places anyway. // The code is also needed in a few other places anyway.
path = VendoredImportPath(parent, path) path = ResolveImportPath(parent, path)
importPath = path
} else if mode&ResolveModule != 0 {
path = ModuleImportPath(parent, path)
importPath = path importPath = path
} }
...@@ -441,26 +444,17 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo ...@@ -441,26 +444,17 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo
// Load package. // Load package.
// Import always returns bp != nil, even if an error occurs, // Import always returns bp != nil, even if an error occurs,
// in order to return partial information. // in order to return partial information.
var bp *build.Package buildMode := build.ImportComment
var err error if mode&ResolveImport == 0 || path != origPath {
if debugDeprecatedImportcfgDir != "" { // Not vendoring, or we already found the vendored path.
bp, err = cfg.BuildContext.ImportDir(debugDeprecatedImportcfgDir, 0) buildMode |= build.IgnoreVendor
} else if DebugDeprecatedImportcfg.enabled {
bp = new(build.Package)
err = fmt.Errorf("unknown import path %q: not in import cfg", importPath)
} else {
buildMode := build.ImportComment
if mode&UseVendor == 0 || path != origPath {
// Not vendoring, or we already found the vendored path.
buildMode |= build.IgnoreVendor
}
bp, err = cfg.BuildContext.Import(path, srcDir, buildMode)
} }
bp, err := cfg.BuildContext.Import(path, srcDir, buildMode)
bp.ImportPath = importPath bp.ImportPath = importPath
if cfg.GOBIN != "" { if cfg.GOBIN != "" {
bp.BinDir = cfg.GOBIN bp.BinDir = cfg.GOBIN
} }
if debugDeprecatedImportcfgDir == "" && err == nil && !isLocal && bp.ImportComment != "" && bp.ImportComment != path && if err == nil && !isLocal && bp.ImportComment != "" && bp.ImportComment != path &&
!strings.Contains(path, "/vendor/") && !strings.HasPrefix(path, "vendor/") { !strings.Contains(path, "/vendor/") && !strings.HasPrefix(path, "vendor/") {
err = fmt.Errorf("code in directory %s expects import %q", bp.Dir, bp.ImportComment) err = fmt.Errorf("code in directory %s expects import %q", bp.Dir, bp.ImportComment)
} }
...@@ -469,7 +463,7 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo ...@@ -469,7 +463,7 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo
p = setErrorPos(p, importPos) p = setErrorPos(p, importPos)
} }
if debugDeprecatedImportcfgDir == "" && origPath != cleanImport(origPath) { if origPath != cleanImport(origPath) {
p.Error = &PackageError{ p.Error = &PackageError{
ImportStack: stk.Copy(), ImportStack: stk.Copy(),
Err: fmt.Sprintf("non-canonical import path: %q should be %q", origPath, pathpkg.Clean(origPath)), Err: fmt.Sprintf("non-canonical import path: %q should be %q", origPath, pathpkg.Clean(origPath)),
...@@ -482,7 +476,7 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo ...@@ -482,7 +476,7 @@ func LoadImport(path, srcDir string, parent *Package, stk *ImportStack, importPo
if perr := disallowInternal(srcDir, p, stk); perr != p { if perr := disallowInternal(srcDir, p, stk); perr != p {
return setErrorPos(perr, importPos) return setErrorPos(perr, importPos)
} }
if mode&UseVendor != 0 { if mode&ResolveImport != 0 {
if perr := disallowVendor(srcDir, origPath, p, stk); perr != p { if perr := disallowVendor(srcDir, origPath, p, stk); perr != p {
return setErrorPos(perr, importPos) return setErrorPos(perr, importPos)
} }
...@@ -541,31 +535,31 @@ func isDir(path string) bool { ...@@ -541,31 +535,31 @@ func isDir(path string) bool {
return result return result
} }
// VendoredImportPath returns the expansion of path when it appears in parent. // ResolveImportPath returns the true meaning of path when it appears in parent.
// If parent is x/y/z, then path might expand to x/y/z/vendor/path, x/y/vendor/path, // There are two different resolutions applied.
// x/vendor/path, vendor/path, or else stay path if none of those exist. // First, there is Go 1.5 vendoring (golang.org/s/go15vendor).
// VendoredImportPath returns the expanded path or, if no expansion is found, the original. // If vendor expansion doesn't trigger, then the path is also subject to
func VendoredImportPath(parent *Package, path string) (found string) { // Go 1.11 vgo legacy conversion (golang.org/issue/25069).
if DebugDeprecatedImportcfg.enabled { func ResolveImportPath(parent *Package, path string) (found string) {
if d, i := DebugDeprecatedImportcfg.lookup(parent, path); d != "" { found = VendoredImportPath(parent, path)
return i if found != path {
} return found
return path }
} return ModuleImportPath(parent, path)
}
if parent == nil || parent.Root == "" {
return path
}
dir := filepath.Clean(parent.Dir) // dirAndRoot returns the source directory and workspace root
root := filepath.Join(parent.Root, "src") // for the package p, guaranteeing that root is a path prefix of dir.
if !str.HasFilePathPrefix(dir, root) || parent.ImportPath != "command-line-arguments" && filepath.Join(root, parent.ImportPath) != dir { func dirAndRoot(p *Package) (dir, root string) {
dir = filepath.Clean(p.Dir)
root = filepath.Join(p.Root, "src")
if !str.HasFilePathPrefix(dir, root) || p.ImportPath != "command-line-arguments" && filepath.Join(root, p.ImportPath) != dir {
// Look for symlinks before reporting error. // Look for symlinks before reporting error.
dir = expandPath(dir) dir = expandPath(dir)
root = expandPath(root) root = expandPath(root)
} }
if !str.HasFilePathPrefix(dir, root) || len(dir) <= len(root) || dir[len(root)] != filepath.Separator || parent.ImportPath != "command-line-arguments" && !parent.Internal.Local && filepath.Join(root, parent.ImportPath) != dir { if !str.HasFilePathPrefix(dir, root) || len(dir) <= len(root) || dir[len(root)] != filepath.Separator || p.ImportPath != "command-line-arguments" && !p.Internal.Local && filepath.Join(root, p.ImportPath) != dir {
base.Fatalf("unexpected directory layout:\n"+ base.Fatalf("unexpected directory layout:\n"+
" import path: %s\n"+ " import path: %s\n"+
" root: %s\n"+ " root: %s\n"+
...@@ -573,14 +567,28 @@ func VendoredImportPath(parent *Package, path string) (found string) { ...@@ -573,14 +567,28 @@ func VendoredImportPath(parent *Package, path string) (found string) {
" expand root: %s\n"+ " expand root: %s\n"+
" expand dir: %s\n"+ " expand dir: %s\n"+
" separator: %s", " separator: %s",
parent.ImportPath, p.ImportPath,
filepath.Join(parent.Root, "src"), filepath.Join(p.Root, "src"),
filepath.Clean(parent.Dir), filepath.Clean(p.Dir),
root, root,
dir, dir,
string(filepath.Separator)) string(filepath.Separator))
} }
return dir, root
}
// VendoredImportPath returns the vendor-expansion of path when it appears in parent.
// If parent is x/y/z, then path might expand to x/y/z/vendor/path, x/y/vendor/path,
// x/vendor/path, vendor/path, or else stay path if none of those exist.
// VendoredImportPath returns the expanded path or, if no expansion is found, the original.
func VendoredImportPath(parent *Package, path string) (found string) {
if parent == nil || parent.Root == "" {
return path
}
dir, root := dirAndRoot(parent)
vpath := "vendor/" + path vpath := "vendor/" + path
for i := len(dir); i >= len(root); i-- { for i := len(dir); i >= len(root); i-- {
if i < len(dir) && dir[i] != filepath.Separator { if i < len(dir) && dir[i] != filepath.Separator {
...@@ -623,6 +631,164 @@ func VendoredImportPath(parent *Package, path string) (found string) { ...@@ -623,6 +631,164 @@ func VendoredImportPath(parent *Package, path string) (found string) {
return path return path
} }
var (
	// modulePrefix matches a "module " directive following a newline;
	// the leading \n is trimmed when checking the first line of go.mod.
	modulePrefix = []byte("\nmodule ")
	// goModPathCache memoizes goModPath results (including failures,
	// stored as "") per directory.
	goModPathCache = make(map[string]string)
)
// goModPath returns the module path declared by the go.mod in dir,
// or "" if dir has no go.mod or the file has no module line.
// Results, including failures, are cached per directory.
func goModPath(dir string) (path string) {
	if cached, ok := goModPathCache[dir]; ok {
		return cached
	}
	defer func() {
		goModPathCache[dir] = path
	}()

	data, err := ioutil.ReadFile(filepath.Join(dir, "go.mod"))
	if err != nil {
		return ""
	}

	// Find a "module " directive: either on the very first line or
	// preceded by a newline anywhere in the file.
	var start int
	if bytes.HasPrefix(data, modulePrefix[1:]) {
		start = 0
	} else {
		idx := bytes.Index(data, modulePrefix)
		if idx < 0 {
			return ""
		}
		start = idx + 1
	}

	// Cut the directive line at \n and drop a trailing \r if present.
	line := data[start:]
	if nl := bytes.IndexByte(line, '\n'); nl >= 0 {
		line = line[:nl]
	}
	if line[len(line)-1] == '\r' {
		line = line[:len(line)-1]
	}
	line = line[len("module "):]

	// The module path may be written as a quoted string.
	path = strings.TrimSpace(string(line))
	if path != "" && path[0] == '"' {
		unquoted, err := strconv.Unquote(path)
		if err != nil {
			return ""
		}
		path = unquoted
	}
	return path
}
// findVersionElement returns the slice indices [i, j) of the final
// version element "/vN" in path. If there is no such element, it
// returns -1, -1.
func findVersionElement(path string) (i, j int) {
	end := len(path)
	for start := len(path) - 1; start >= 0; start-- {
		if path[start] != '/' {
			continue
		}
		if isVersionElement(path[start:end]) {
			return start, end
		}
		// Not a version element; the next candidate ends here.
		end = start
	}
	return -1, -1
}
// isVersionElement reports whether s is a well-formed path version
// element, including its leading slash: "/v2", "/v3", "/v10", etc.
// "/v0" and "/v1" are not version elements, nor are numbers with a
// leading zero such as "/v05".
func isVersionElement(s string) bool {
	if len(s) < 3 || s[0] != '/' || s[1] != 'v' {
		return false
	}
	if s[2] == '0' || (s[2] == '1' && len(s) == 3) {
		return false
	}
	for i := 2; i < len(s); i++ {
		if s[i] < '0' || s[i] > '9' {
			return false
		}
	}
	return true
}
// ModuleImportPath translates import paths found in go modules
// back down to paths that can be resolved in ordinary builds.
//
// Define “new” code as code with a go.mod file in the same directory
// or a parent directory. If an import in new code says x/y/v2/z but
// x/y/v2/z does not exist and x/y/go.mod says “module x/y/v2”,
// then go build will read the import as x/y/z instead.
// See golang.org/issue/25069.
func ModuleImportPath(parent *Package, path string) (found string) {
	if parent == nil || parent.Root == "" {
		return path
	}

	// Fast path: with no /vN element in the path there is nothing to
	// translate, and we can skip the file system accesses below.
	if first, _ := findVersionElement(path); first < 0 {
		return path
	}

	dir, root := dirAndRoot(parent)

	// Walk from the importing package's directory up to (and including)
	// the workspace root, looking for a go.mod that marks this as new code.
	inGoModTree := false
	for cut := len(dir); cut >= len(root); cut-- {
		if cut < len(dir) && dir[cut] != filepath.Separator {
			continue
		}
		if goModPath(dir[:cut]) != "" {
			inGoModTree = true
			break
		}
	}
	if !inGoModTree {
		// Old code: apply no changes to the path.
		return path
	}

	// This import is in a tree with a go.mod.
	// Allow it to refer to code in GOPATH/src/x/y/z as x/y/v2/z
	// if GOPATH/src/x/y/go.mod says module "x/y/v2".
	// If x/y/v2/z exists as-is, use it unmodified.
	if bp, _ := cfg.BuildContext.Import(path, "", build.IgnoreVendor); bp.Dir != "" {
		return path
	}

	// Otherwise scan version-like elements from right to left looking
	// for a go.mod that supplies one. Some version-like elements may be
	// ordinary package names (the module m/v2 might have a package
	// m/v2/api/v1/foo), so keep searching until a go.mod decides.
	searchEnd := len(path)
	for searchEnd > 0 {
		start, end := findVersionElement(path[:searchEnd])
		if start < 0 {
			return path
		}
		if bp, _ := cfg.BuildContext.Import(path[:start], "", build.IgnoreVendor); bp.Dir != "" {
			if mpath := goModPath(bp.Dir); mpath != "" {
				// A valid go.mod ends the search. If the path is m/v2/p
				// and m/go.mod says "module m/v2", rewrite to m/p.
				if mpath == path[:end] {
					return path[:start] + path[end:]
				}
				// Otherwise there is nothing worth rewriting, and the
				// go.mod tells us not to consider parent directories.
				return path
			}
		}
		searchEnd = start
	}
	return path
}
// hasGoFiles reports whether dir contains any files with names ending in .go. // hasGoFiles reports whether dir contains any files with names ending in .go.
// For a vendor check we must exclude directories that contain no .go files. // For a vendor check we must exclude directories that contain no .go files.
// Otherwise it is not possible to vendor just a/b/c and still import the // Otherwise it is not possible to vendor just a/b/c and still import the
...@@ -1087,7 +1253,7 @@ func (p *Package) load(stk *ImportStack, bp *build.Package, err error) { ...@@ -1087,7 +1253,7 @@ func (p *Package) load(stk *ImportStack, bp *build.Package, err error) {
if path == "C" { if path == "C" {
continue continue
} }
p1 := LoadImport(path, p.Dir, p, stk, p.Internal.Build.ImportPos[path], UseVendor) p1 := LoadImport(path, p.Dir, p, stk, p.Internal.Build.ImportPos[path], ResolveImport)
if p.Standard && p.Error == nil && !p1.Standard && p1.Error == nil { if p.Standard && p.Error == nil && !p1.Standard && p1.Error == nil {
p.Error = &PackageError{ p.Error = &PackageError{
ImportStack: stk.Copy(), ImportStack: stk.Copy(),
...@@ -1598,7 +1764,7 @@ func GetTestPackagesFor(p *Package, forceTest bool) (ptest, pxtest *Package, err ...@@ -1598,7 +1764,7 @@ func GetTestPackagesFor(p *Package, forceTest bool) (ptest, pxtest *Package, err
stk.Push(p.ImportPath + " (test)") stk.Push(p.ImportPath + " (test)")
rawTestImports := str.StringList(p.TestImports) rawTestImports := str.StringList(p.TestImports)
for i, path := range p.TestImports { for i, path := range p.TestImports {
p1 := LoadImport(path, p.Dir, p, &stk, p.Internal.Build.TestImportPos[path], UseVendor) p1 := LoadImport(path, p.Dir, p, &stk, p.Internal.Build.TestImportPos[path], ResolveImport)
if p1.Error != nil { if p1.Error != nil {
return nil, nil, p1.Error return nil, nil, p1.Error
} }
...@@ -1626,7 +1792,7 @@ func GetTestPackagesFor(p *Package, forceTest bool) (ptest, pxtest *Package, err ...@@ -1626,7 +1792,7 @@ func GetTestPackagesFor(p *Package, forceTest bool) (ptest, pxtest *Package, err
pxtestNeedsPtest := false pxtestNeedsPtest := false
rawXTestImports := str.StringList(p.XTestImports) rawXTestImports := str.StringList(p.XTestImports)
for i, path := range p.XTestImports { for i, path := range p.XTestImports {
p1 := LoadImport(path, p.Dir, p, &stk, p.Internal.Build.XTestImportPos[path], UseVendor) p1 := LoadImport(path, p.Dir, p, &stk, p.Internal.Build.XTestImportPos[path], ResolveImport)
if p1.Error != nil { if p1.Error != nil {
return nil, nil, p1.Error return nil, nil, p1.Error
} }
......
...@@ -606,10 +606,10 @@ func runTest(cmd *base.Command, args []string) { ...@@ -606,10 +606,10 @@ func runTest(cmd *base.Command, args []string) {
for _, path := range p.Imports { for _, path := range p.Imports {
deps[path] = true deps[path] = true
} }
for _, path := range p.Vendored(p.TestImports) { for _, path := range p.Resolve(p.TestImports) {
deps[path] = true deps[path] = true
} }
for _, path := range p.Vendored(p.XTestImports) { for _, path := range p.Resolve(p.XTestImports) {
deps[path] = true deps[path] = true
} }
} }
......
...@@ -229,7 +229,6 @@ func AddBuildFlags(cmd *base.Command) { ...@@ -229,7 +229,6 @@ func AddBuildFlags(cmd *base.Command) {
// Undocumented, unstable debugging flags. // Undocumented, unstable debugging flags.
cmd.Flag.StringVar(&cfg.DebugActiongraph, "debug-actiongraph", "", "") cmd.Flag.StringVar(&cfg.DebugActiongraph, "debug-actiongraph", "", "")
cmd.Flag.Var(&load.DebugDeprecatedImportcfg, "debug-deprecated-importcfg", "")
} }
// fileExtSplit expects a filename and returns the name // fileExtSplit expects a filename and returns the name
......
...@@ -41,43 +41,57 @@ var re = regexp.MustCompile ...@@ -41,43 +41,57 @@ var re = regexp.MustCompile
var validCompilerFlags = []*regexp.Regexp{ var validCompilerFlags = []*regexp.Regexp{
re(`-D([A-Za-z_].*)`), re(`-D([A-Za-z_].*)`),
re(`-F([^@\-].*)`),
re(`-I([^@\-].*)`), re(`-I([^@\-].*)`),
re(`-O`), re(`-O`),
re(`-O([^@\-].*)`), re(`-O([^@\-].*)`),
re(`-W`), re(`-W`),
re(`-W([^@,]+)`), // -Wall but not -Wa,-foo. re(`-W([^@,]+)`), // -Wall but not -Wa,-foo.
re(`-Wa,-mbig-obj`), re(`-Wa,-mbig-obj`),
re(`-Wp,-D([A-Za-z_].*)`),
re(`-ansi`), re(`-ansi`),
re(`-f(no-)?asynchronous-unwind-tables`),
re(`-f(no-)?blocks`), re(`-f(no-)?blocks`),
re(`-f(no-)builtin-[a-zA-Z0-9_]*`),
re(`-f(no-)?common`), re(`-f(no-)?common`),
re(`-f(no-)?constant-cfstrings`), re(`-f(no-)?constant-cfstrings`),
re(`-fdiagnostics-show-note-include-stack`), re(`-fdiagnostics-show-note-include-stack`),
re(`-f(no-)?eliminate-unused-debug-types`),
re(`-f(no-)?exceptions`), re(`-f(no-)?exceptions`),
re(`-f(no-)?fast-math`),
re(`-f(no-)?inline-functions`), re(`-f(no-)?inline-functions`),
re(`-finput-charset=([^@\-].*)`), re(`-finput-charset=([^@\-].*)`),
re(`-f(no-)?fat-lto-objects`), re(`-f(no-)?fat-lto-objects`),
re(`-f(no-)?keep-inline-dllexport`),
re(`-f(no-)?lto`), re(`-f(no-)?lto`),
re(`-fmacro-backtrace-limit=(.+)`), re(`-fmacro-backtrace-limit=(.+)`),
re(`-fmessage-length=(.+)`), re(`-fmessage-length=(.+)`),
re(`-f(no-)?modules`), re(`-f(no-)?modules`),
re(`-f(no-)?objc-arc`), re(`-f(no-)?objc-arc`),
re(`-f(no-)?objc-nonfragile-abi`),
re(`-f(no-)?objc-legacy-dispatch`),
re(`-f(no-)?omit-frame-pointer`), re(`-f(no-)?omit-frame-pointer`),
re(`-f(no-)?openmp(-simd)?`), re(`-f(no-)?openmp(-simd)?`),
re(`-f(no-)?permissive`), re(`-f(no-)?permissive`),
re(`-f(no-)?(pic|PIC|pie|PIE)`), re(`-f(no-)?(pic|PIC|pie|PIE)`),
re(`-f(no-)?plt`),
re(`-f(no-)?rtti`), re(`-f(no-)?rtti`),
re(`-f(no-)?split-stack`), re(`-f(no-)?split-stack`),
re(`-f(no-)?stack-(.+)`), re(`-f(no-)?stack-(.+)`),
re(`-f(no-)?strict-aliasing`), re(`-f(no-)?strict-aliasing`),
re(`-f(un)signed-char`), re(`-f(un)signed-char`),
re(`-f(no-)?use-linker-plugin`), // safe if -B is not used; we don't permit -B re(`-f(no-)?use-linker-plugin`), // safe if -B is not used; we don't permit -B
re(`-f(no-)?visibility-inlines-hidden`),
re(`-fsanitize=(.+)`), re(`-fsanitize=(.+)`),
re(`-ftemplate-depth-(.+)`), re(`-ftemplate-depth-(.+)`),
re(`-fvisibility=(.+)`), re(`-fvisibility=(.+)`),
re(`-g([^@\-].*)?`), re(`-g([^@\-].*)?`),
re(`-m32`), re(`-m32`),
re(`-m64`), re(`-m64`),
re(`-m(arch|cpu|fpu|tune)=([^@\-].*)`), re(`-m(abi|arch|cpu|fpu|tune)=([^@\-].*)`),
re(`-marm`),
re(`-mfloat-abi=([^@\-].*)`),
re(`-mfpmath=[0-9a-z,+]*`),
re(`-m(no-)?avx[0-9a-z.]*`), re(`-m(no-)?avx[0-9a-z.]*`),
re(`-m(no-)?ms-bitfields`), re(`-m(no-)?ms-bitfields`),
re(`-m(no-)?stack-(.+)`), re(`-m(no-)?stack-(.+)`),
...@@ -86,12 +100,16 @@ var validCompilerFlags = []*regexp.Regexp{ ...@@ -86,12 +100,16 @@ var validCompilerFlags = []*regexp.Regexp{
re(`-miphoneos-version-min=(.+)`), re(`-miphoneos-version-min=(.+)`),
re(`-mnop-fun-dllimport`), re(`-mnop-fun-dllimport`),
re(`-m(no-)?sse[0-9.]*`), re(`-m(no-)?sse[0-9.]*`),
re(`-mthumb(-interwork)?`),
re(`-mthreads`),
re(`-mwindows`), re(`-mwindows`),
re(`--param=ssp-buffer-size=[0-9]*`),
re(`-pedantic(-errors)?`), re(`-pedantic(-errors)?`),
re(`-pipe`), re(`-pipe`),
re(`-pthread`), re(`-pthread`),
re(`-?-std=([^@\-].*)`), re(`-?-std=([^@\-].*)`),
re(`-?-stdlib=([^@\-].*)`), re(`-?-stdlib=([^@\-].*)`),
re(`--sysroot=([^@\-].*)`),
re(`-w`), re(`-w`),
re(`-x([^@\-].*)`), re(`-x([^@\-].*)`),
} }
...@@ -115,15 +133,20 @@ var validLinkerFlags = []*regexp.Regexp{ ...@@ -115,15 +133,20 @@ var validLinkerFlags = []*regexp.Regexp{
re(`-O`), re(`-O`),
re(`-O([^@\-].*)`), re(`-O([^@\-].*)`),
re(`-f(no-)?(pic|PIC|pie|PIE)`), re(`-f(no-)?(pic|PIC|pie|PIE)`),
re(`-f(no-)?openmp(-simd)?`),
re(`-fsanitize=([^@\-].*)`), re(`-fsanitize=([^@\-].*)`),
re(`-g([^@\-].*)?`), re(`-g([^@\-].*)?`),
re(`-m(arch|cpu|fpu|tune)=([^@\-].*)`), re(`-headerpad_max_install_names`),
re(`-m(abi|arch|cpu|fpu|tune)=([^@\-].*)`),
re(`-mfloat-abi=([^@\-].*)`),
re(`-mmacosx-(.+)`), re(`-mmacosx-(.+)`),
re(`-mios-simulator-version-min=(.+)`), re(`-mios-simulator-version-min=(.+)`),
re(`-miphoneos-version-min=(.+)`), re(`-miphoneos-version-min=(.+)`),
re(`-mthreads`),
re(`-mwindows`), re(`-mwindows`),
re(`-(pic|PIC|pie|PIE)`), re(`-(pic|PIC|pie|PIE)`),
re(`-pthread`), re(`-pthread`),
re(`-rdynamic`),
re(`-shared`), re(`-shared`),
re(`-?-static([-a-z0-9+]*)`), re(`-?-static([-a-z0-9+]*)`),
re(`-?-stdlib=([^@\-].*)`), re(`-?-stdlib=([^@\-].*)`),
...@@ -134,22 +157,27 @@ var validLinkerFlags = []*regexp.Regexp{ ...@@ -134,22 +157,27 @@ var validLinkerFlags = []*regexp.Regexp{
// in a wildcard would allow tunnelling arbitrary additional // in a wildcard would allow tunnelling arbitrary additional
// linker arguments through one of these. // linker arguments through one of these.
re(`-Wl,--(no-)?allow-multiple-definition`), re(`-Wl,--(no-)?allow-multiple-definition`),
re(`-Wl,--(no-)?allow-shlib-undefined`),
re(`-Wl,--(no-)?as-needed`), re(`-Wl,--(no-)?as-needed`),
re(`-Wl,-Bdynamic`), re(`-Wl,-Bdynamic`),
re(`-Wl,-Bstatic`), re(`-Wl,-Bstatic`),
re(`-WL,-O([^@,\-][^,]*)?`),
re(`-Wl,-d[ny]`), re(`-Wl,-d[ny]`),
re(`-Wl,--disable-new-dtags`), re(`-Wl,--disable-new-dtags`),
re(`-Wl,-e[=,][a-zA-Z0-9]*`),
re(`-Wl,--enable-new-dtags`), re(`-Wl,--enable-new-dtags`),
re(`-Wl,--end-group`), re(`-Wl,--end-group`),
re(`-Wl,-framework,[^,@\-][^,]+`), re(`-Wl,-framework,[^,@\-][^,]+`),
re(`-Wl,-headerpad_max_install_names`), re(`-Wl,-headerpad_max_install_names`),
re(`-Wl,--no-undefined`), re(`-Wl,--no-undefined`),
re(`-Wl,-rpath[=,]([^,@\-][^,]+)`), re(`-Wl,-rpath(-link)?[=,]([^,@\-][^,]+)`),
re(`-Wl,-s`),
re(`-Wl,-search_paths_first`), re(`-Wl,-search_paths_first`),
re(`-Wl,-sectcreate,([^,@\-][^,]+),([^,@\-][^,]+),([^,@\-][^,]+)`), re(`-Wl,-sectcreate,([^,@\-][^,]+),([^,@\-][^,]+),([^,@\-][^,]+)`),
re(`-Wl,--start-group`), re(`-Wl,--start-group`),
re(`-Wl,-?-static`), re(`-Wl,-?-static`),
re(`-Wl,--subsystem,(native|windows|console|posix|xbox)`), re(`-Wl,-?-subsystem,(native|windows|console|posix|xbox)`),
re(`-Wl,-syslibroot[=,]([^,@\-][^,]+)`),
re(`-Wl,-undefined[=,]([^,@\-][^,]+)`), re(`-Wl,-undefined[=,]([^,@\-][^,]+)`),
re(`-Wl,-?-unresolved-symbols=[^,]+`), re(`-Wl,-?-unresolved-symbols=[^,]+`),
re(`-Wl,--(no-)?warn-([^,]+)`), re(`-Wl,--(no-)?warn-([^,]+)`),
...@@ -157,6 +185,7 @@ var validLinkerFlags = []*regexp.Regexp{ ...@@ -157,6 +185,7 @@ var validLinkerFlags = []*regexp.Regexp{
re(`-Wl,-z,relro`), re(`-Wl,-z,relro`),
re(`[a-zA-Z0-9_/].*\.(a|o|obj|dll|dylib|so)`), // direct linker inputs: x.o or libfoo.so (but not -foo.o or @foo.o) re(`[a-zA-Z0-9_/].*\.(a|o|obj|dll|dylib|so)`), // direct linker inputs: x.o or libfoo.so (but not -foo.o or @foo.o)
re(`\./.*\.(a|o|obj|dll|dylib|so)`),
} }
var validLinkerFlagsWithNextArg = []string{ var validLinkerFlagsWithNextArg = []string{
......
...@@ -12,6 +12,7 @@ import ( ...@@ -12,6 +12,7 @@ import (
var goodCompilerFlags = [][]string{ var goodCompilerFlags = [][]string{
{"-DFOO"}, {"-DFOO"},
{"-Dfoo=bar"}, {"-Dfoo=bar"},
{"-F/Qt"},
{"-I/"}, {"-I/"},
{"-I/etc/passwd"}, {"-I/etc/passwd"},
{"-I."}, {"-I."},
...@@ -62,6 +63,8 @@ var goodCompilerFlags = [][]string{ ...@@ -62,6 +63,8 @@ var goodCompilerFlags = [][]string{
var badCompilerFlags = [][]string{ var badCompilerFlags = [][]string{
{"-D@X"}, {"-D@X"},
{"-D-X"}, {"-D-X"},
{"-F@dir"},
{"-F-dir"},
{"-I@dir"}, {"-I@dir"},
{"-I-dir"}, {"-I-dir"},
{"-O@1"}, {"-O@1"},
...@@ -125,6 +128,7 @@ var goodLinkerFlags = [][]string{ ...@@ -125,6 +128,7 @@ var goodLinkerFlags = [][]string{
{"-Wl,--no-warn-error"}, {"-Wl,--no-warn-error"},
{"foo.so"}, {"foo.so"},
{"_世界.dll"}, {"_世界.dll"},
{"./x.o"},
{"libcgosotest.dylib"}, {"libcgosotest.dylib"},
{"-F", "framework"}, {"-F", "framework"},
{"-l", "."}, {"-l", "."},
...@@ -191,6 +195,7 @@ var badLinkerFlags = [][]string{ ...@@ -191,6 +195,7 @@ var badLinkerFlags = [][]string{
{"-x", "--c"}, {"-x", "--c"},
{"-x", "@obj"}, {"-x", "@obj"},
{"-Wl,-rpath,@foo"}, {"-Wl,-rpath,@foo"},
{"../x.o"},
} }
func TestCheckLinkerFlags(t *testing.T) { func TestCheckLinkerFlags(t *testing.T) {
......
package p1
import _ "old/p2"
import _ "new/v2"
import _ "new/v2/p2"
import _ "new/sub/v2/x/v1/y" // v2 is module, v1 is directory in module
import _ "new/sub/inner/x" // new/sub/inner/go.mod overrides new/sub/go.mod
package p1
import _ "old/p2"
import _ "new/p1"
import _ "new"
...@@ -10,6 +10,7 @@ import ( ...@@ -10,6 +10,7 @@ import (
"bytes" "bytes"
"fmt" "fmt"
"internal/testenv" "internal/testenv"
"os"
"path/filepath" "path/filepath"
"regexp" "regexp"
"strings" "strings"
...@@ -328,3 +329,75 @@ func TestVendor12156(t *testing.T) { ...@@ -328,3 +329,75 @@ func TestVendor12156(t *testing.T) {
tg.grepStderrNot("panic", "panicked") tg.grepStderrNot("panic", "panicked")
tg.grepStderr(`cannot find package "x"`, "wrong error") tg.grepStderr(`cannot find package "x"`, "wrong error")
} }
// Module legacy support does path rewriting very similar to vendoring.
//
// TestModLegacy checks that packages under a module-versioned path
// (new/v2, new/sub/v2) are resolved to their unversioned on-disk
// locations when imported from old-style GOPATH packages, using the
// fixture tree in testdata/modlegacy.
func TestModLegacy(t *testing.T) {
tg := testgo(t)
defer tg.cleanup()
// Point GOPATH at the fixture tree so "old/p1" and "new/p1" resolve there.
tg.setenv("GOPATH", filepath.Join(tg.pwd(), "testdata/modlegacy"))
tg.run("list", "-f", "{{.Imports}}", "old/p1")
tg.grepStdout("new/p1", "old/p1 should import new/p1")
tg.run("list", "-f", "{{.Imports}}", "new/p1")
// Imports of new/v2 and new/v2/p2 must be rewritten to the unversioned paths.
tg.grepStdout("new/p2", "new/p1 should import new/p2 (not new/v2/p2)")
tg.grepStdoutNot("new/v2", "new/p1 should NOT import new/v2*")
// A v2 path component inside the module strips, but a v1 directory is kept.
tg.grepStdout("new/sub/x/v1/y", "new/p1 should import new/sub/x/v1/y (not new/sub/v2/x/v1/y)")
tg.grepStdoutNot("new/sub/v2", "new/p1 should NOT import new/sub/v2*")
// A nested go.mod (new/sub/inner/go.mod) shields its subtree from rewriting.
tg.grepStdout("new/sub/inner/x", "new/p1 should import new/sub/inner/x (no rewrites)")
// Finally make sure both trees actually build after path rewriting.
tg.run("build", "old/p1", "new/p1")
}
// TestModLegacyGet exercises "go get" of repositories that use
// module-style (vN) import paths from non-module-aware go get.
// It requires external network access (vcs-test.golang.org, github.com).
func TestModLegacyGet(t *testing.T) {
testenv.MustHaveExternalNetwork(t)
tg := testgo(t)
defer tg.cleanup()
tg.makeTempdir()
// First GOPATH: fetch the old repo and check its deps were rewritten
// to the unversioned paths in the new repo.
tg.setenv("GOPATH", tg.path("d1"))
tg.run("get", "vcs-test.golang.org/git/modlegacy1-old.git/p1")
tg.run("list", "-f", "{{.Deps}}", "vcs-test.golang.org/git/modlegacy1-old.git/p1")
tg.grepStdout("new.git/p2", "old/p1 should depend on new/p2")
tg.grepStdoutNot("new.git/v2/p2", "old/p1 should NOT depend on new/v2/p2")
tg.run("build", "vcs-test.golang.org/git/modlegacy1-old.git/p1", "vcs-test.golang.org/git/modlegacy1-new.git/p1")
// Second GOPATH: repeatedly wipe it and fetch the three interrelated
// github test repos in different orders, since download order has
// historically affected path resolution.
tg.setenv("GOPATH", tg.path("d2"))
tg.must(os.RemoveAll(tg.path("d2")))
tg.run("get", "github.com/rsc/vgotest5")
tg.run("get", "github.com/rsc/vgotest4")
tg.run("get", "github.com/myitcv/vgo_example_compat")
if testing.Short() {
return
}
tg.must(os.RemoveAll(tg.path("d2")))
tg.run("get", "github.com/rsc/vgotest4")
tg.run("get", "github.com/rsc/vgotest5")
tg.run("get", "github.com/myitcv/vgo_example_compat")
tg.must(os.RemoveAll(tg.path("d2")))
tg.run("get", "github.com/rsc/vgotest4", "github.com/rsc/vgotest5")
tg.run("get", "github.com/myitcv/vgo_example_compat")
tg.must(os.RemoveAll(tg.path("d2")))
tg.run("get", "github.com/rsc/vgotest5", "github.com/rsc/vgotest4")
tg.run("get", "github.com/myitcv/vgo_example_compat")
tg.must(os.RemoveAll(tg.path("d2")))
tg.run("get", "github.com/myitcv/vgo_example_compat")
tg.run("get", "github.com/rsc/vgotest4", "github.com/rsc/vgotest5")
// Exhaustively try every ordering of the three packages fetched in a
// single "go get" invocation into a fresh GOPATH.
pkgs := []string{"github.com/myitcv/vgo_example_compat", "github.com/rsc/vgotest4", "github.com/rsc/vgotest5"}
for i := 0; i < 3; i++ {
for j := 0; j < 3; j++ {
for k := 0; k < 3; k++ {
if i == j || i == k || k == j {
continue
}
tg.must(os.RemoveAll(tg.path("d2")))
tg.run("get", pkgs[i], pkgs[j], pkgs[k])
}
}
}
}
...@@ -1222,8 +1222,9 @@ var nameConstraintsTests = []nameConstraintsTest{ ...@@ -1222,8 +1222,9 @@ var nameConstraintsTests = []nameConstraintsTest{
}, },
}, },
// #63: A specified key usage in an intermediate forbids other usages // #63: An intermediate with enumerated EKUs causes a failure if we
// in the leaf. // test for an EKU not in that set. (ServerAuth is required by
// default.)
nameConstraintsTest{ nameConstraintsTest{
roots: []constraintsSpec{ roots: []constraintsSpec{
constraintsSpec{}, constraintsSpec{},
...@@ -1239,11 +1240,11 @@ var nameConstraintsTests = []nameConstraintsTest{ ...@@ -1239,11 +1240,11 @@ var nameConstraintsTests = []nameConstraintsTest{
sans: []string{"dns:example.com"}, sans: []string{"dns:example.com"},
ekus: []string{"serverAuth"}, ekus: []string{"serverAuth"},
}, },
expectedError: "EKU not permitted", expectedError: "incompatible key usage",
}, },
// #64: A specified key usage in an intermediate forbids other usages // #64: an unknown EKU in the leaf doesn't break anything, even if it's not
// in the leaf, even if we don't recognise them. // correctly nested.
nameConstraintsTest{ nameConstraintsTest{
roots: []constraintsSpec{ roots: []constraintsSpec{
constraintsSpec{}, constraintsSpec{},
...@@ -1259,7 +1260,7 @@ var nameConstraintsTests = []nameConstraintsTest{ ...@@ -1259,7 +1260,7 @@ var nameConstraintsTests = []nameConstraintsTest{
sans: []string{"dns:example.com"}, sans: []string{"dns:example.com"},
ekus: []string{"other"}, ekus: []string{"other"},
}, },
expectedError: "EKU not permitted", requestedEKUs: []ExtKeyUsage{ExtKeyUsageAny},
}, },
// #65: trying to add extra permitted key usages in an intermediate // #65: trying to add extra permitted key usages in an intermediate
...@@ -1284,24 +1285,25 @@ var nameConstraintsTests = []nameConstraintsTest{ ...@@ -1284,24 +1285,25 @@ var nameConstraintsTests = []nameConstraintsTest{
}, },
}, },
// #66: EKUs in roots are ignored. // #66: EKUs in roots are not ignored.
nameConstraintsTest{ nameConstraintsTest{
roots: []constraintsSpec{ roots: []constraintsSpec{
constraintsSpec{ constraintsSpec{
ekus: []string{"serverAuth"}, ekus: []string{"email"},
}, },
}, },
intermediates: [][]constraintsSpec{ intermediates: [][]constraintsSpec{
[]constraintsSpec{ []constraintsSpec{
constraintsSpec{ constraintsSpec{
ekus: []string{"serverAuth", "email"}, ekus: []string{"serverAuth"},
}, },
}, },
}, },
leaf: leafSpec{ leaf: leafSpec{
sans: []string{"dns:example.com"}, sans: []string{"dns:example.com"},
ekus: []string{"serverAuth", "email"}, ekus: []string{"serverAuth"},
}, },
expectedError: "incompatible key usage",
}, },
// #67: in order to support COMODO chains, SGC key usages permit // #67: in order to support COMODO chains, SGC key usages permit
...@@ -1447,8 +1449,7 @@ var nameConstraintsTests = []nameConstraintsTest{ ...@@ -1447,8 +1449,7 @@ var nameConstraintsTests = []nameConstraintsTest{
expectedError: "\"https://example.com/test\" is excluded", expectedError: "\"https://example.com/test\" is excluded",
}, },
// #75: While serverAuth in a CA certificate permits clientAuth in a leaf, // #75: serverAuth in a leaf shouldn't permit clientAuth when requested in
// serverAuth in a leaf shouldn't permit clientAuth when requested in
// VerifyOptions. // VerifyOptions.
nameConstraintsTest{ nameConstraintsTest{
roots: []constraintsSpec{ roots: []constraintsSpec{
...@@ -1558,6 +1559,27 @@ var nameConstraintsTests = []nameConstraintsTest{ ...@@ -1558,6 +1559,27 @@ var nameConstraintsTests = []nameConstraintsTest{
}, },
requestedEKUs: []ExtKeyUsage{ExtKeyUsageClientAuth, ExtKeyUsageEmailProtection}, requestedEKUs: []ExtKeyUsage{ExtKeyUsageClientAuth, ExtKeyUsageEmailProtection},
}, },
// #81: EKUs that are not asserted in VerifyOpts are not required to be
// nested.
nameConstraintsTest{
roots: []constraintsSpec{
constraintsSpec{},
},
intermediates: [][]constraintsSpec{
[]constraintsSpec{
constraintsSpec{
ekus: []string{"serverAuth"},
},
},
},
leaf: leafSpec{
sans: []string{"dns:example.com"},
// There's no email EKU in the intermediate. This would be rejected if
// full nesting was required.
ekus: []string{"email", "serverAuth"},
},
},
} }
func makeConstraintsCACert(constraints constraintsSpec, name string, key *ecdsa.PrivateKey, parent *Certificate, parentKey *ecdsa.PrivateKey) (*Certificate, error) { func makeConstraintsCACert(constraints constraintsSpec, name string, key *ecdsa.PrivateKey, parent *Certificate, parentKey *ecdsa.PrivateKey) (*Certificate, error) {
......
...@@ -95,6 +95,12 @@ func checkChainTrustStatus(c *Certificate, chainCtx *syscall.CertChainContext) e ...@@ -95,6 +95,12 @@ func checkChainTrustStatus(c *Certificate, chainCtx *syscall.CertChainContext) e
return nil return nil
} }
// _CertChainPolicyPara is a local replacement for
// syscall.CertChainPolicyPara in which ExtraPolicyPara is declared as
// unsafe.Pointer rather than uintptr. Callers below store a live Go
// pointer in ExtraPolicyPara and then convert the whole struct back to
// *syscall.CertChainPolicyPara at the CertVerifyCertificateChainPolicy
// call site — presumably so the pointer stays visible to the garbage
// collector while the struct is alive (TODO confirm against the
// syscall package's declaration). The field layout must therefore stay
// byte-for-byte compatible with syscall.CertChainPolicyPara.
type _CertChainPolicyPara struct {
Size uint32
Flags uint32
ExtraPolicyPara unsafe.Pointer
}
// checkChainSSLServerPolicy checks that the certificate chain in chainCtx is valid for // checkChainSSLServerPolicy checks that the certificate chain in chainCtx is valid for
// use as a certificate chain for a SSL/TLS server. // use as a certificate chain for a SSL/TLS server.
func checkChainSSLServerPolicy(c *Certificate, chainCtx *syscall.CertChainContext, opts *VerifyOptions) error { func checkChainSSLServerPolicy(c *Certificate, chainCtx *syscall.CertChainContext, opts *VerifyOptions) error {
...@@ -108,13 +114,13 @@ func checkChainSSLServerPolicy(c *Certificate, chainCtx *syscall.CertChainContex ...@@ -108,13 +114,13 @@ func checkChainSSLServerPolicy(c *Certificate, chainCtx *syscall.CertChainContex
} }
sslPara.Size = uint32(unsafe.Sizeof(*sslPara)) sslPara.Size = uint32(unsafe.Sizeof(*sslPara))
para := &syscall.CertChainPolicyPara{ para := &_CertChainPolicyPara{
ExtraPolicyPara: uintptr(unsafe.Pointer(sslPara)), ExtraPolicyPara: unsafe.Pointer(sslPara),
} }
para.Size = uint32(unsafe.Sizeof(*para)) para.Size = uint32(unsafe.Sizeof(*para))
status := syscall.CertChainPolicyStatus{} status := syscall.CertChainPolicyStatus{}
err = syscall.CertVerifyCertificateChainPolicy(syscall.CERT_CHAIN_POLICY_SSL, chainCtx, para, &status) err = syscall.CertVerifyCertificateChainPolicy(syscall.CERT_CHAIN_POLICY_SSL, chainCtx, (*syscall.CertChainPolicyPara)(unsafe.Pointer(para)), &status)
if err != nil { if err != nil {
return err return err
} }
......
...@@ -56,8 +56,7 @@ const ( ...@@ -56,8 +56,7 @@ const (
// CPU time to verify. // CPU time to verify.
TooManyConstraints TooManyConstraints
// CANotAuthorizedForExtKeyUsage results when an intermediate or root // CANotAuthorizedForExtKeyUsage results when an intermediate or root
// certificate does not permit an extended key usage that is claimed by // certificate does not permit a requested extended key usage.
// the leaf certificate.
CANotAuthorizedForExtKeyUsage CANotAuthorizedForExtKeyUsage
) )
...@@ -82,7 +81,7 @@ func (e CertificateInvalidError) Error() string { ...@@ -82,7 +81,7 @@ func (e CertificateInvalidError) Error() string {
case TooManyIntermediates: case TooManyIntermediates:
return "x509: too many intermediates for path length constraint" return "x509: too many intermediates for path length constraint"
case IncompatibleUsage: case IncompatibleUsage:
return "x509: certificate specifies an incompatible key usage: " + e.Detail return "x509: certificate specifies an incompatible key usage"
case NameMismatch: case NameMismatch:
return "x509: issuer name does not match subject from issuing certificate" return "x509: issuer name does not match subject from issuing certificate"
case NameConstraintsWithoutSANs: case NameConstraintsWithoutSANs:
...@@ -185,9 +184,8 @@ type VerifyOptions struct { ...@@ -185,9 +184,8 @@ type VerifyOptions struct {
// list means ExtKeyUsageServerAuth. To accept any key usage, include // list means ExtKeyUsageServerAuth. To accept any key usage, include
// ExtKeyUsageAny. // ExtKeyUsageAny.
// //
// Certificate chains are required to nest extended key usage values, // Certificate chains are required to nest these extended key usage values.
// irrespective of this value. This matches the Windows CryptoAPI behavior, // (This matches the Windows CryptoAPI behavior, but not the spec.)
// but not the spec.
KeyUsages []ExtKeyUsage KeyUsages []ExtKeyUsage
// MaxConstraintComparisions is the maximum number of comparisons to // MaxConstraintComparisions is the maximum number of comparisons to
// perform when checking a given certificate's name constraints. If // perform when checking a given certificate's name constraints. If
...@@ -549,51 +547,6 @@ func (c *Certificate) checkNameConstraints(count *int, ...@@ -549,51 +547,6 @@ func (c *Certificate) checkNameConstraints(count *int,
return nil return nil
} }
const (
checkingAgainstIssuerCert = iota
checkingAgainstLeafCert
)
// ekuPermittedBy returns true iff the given extended key usage is permitted by
// the given EKU from a certificate. Normally, this would be a simple
// comparison plus a special case for the “any” EKU. But, in order to support
// existing certificates, some exceptions are made.
func ekuPermittedBy(eku, certEKU ExtKeyUsage, context int) bool {
if certEKU == ExtKeyUsageAny || eku == certEKU {
return true
}
// Some exceptions are made to support existing certificates. Firstly,
// the ServerAuth and SGC EKUs are treated as a group.
mapServerAuthEKUs := func(eku ExtKeyUsage) ExtKeyUsage {
if eku == ExtKeyUsageNetscapeServerGatedCrypto || eku == ExtKeyUsageMicrosoftServerGatedCrypto {
return ExtKeyUsageServerAuth
}
return eku
}
eku = mapServerAuthEKUs(eku)
certEKU = mapServerAuthEKUs(certEKU)
if eku == certEKU {
return true
}
// If checking a requested EKU against the list in a leaf certificate there
// are fewer exceptions.
if context == checkingAgainstLeafCert {
return false
}
// ServerAuth in a CA permits ClientAuth in the leaf.
return (eku == ExtKeyUsageClientAuth && certEKU == ExtKeyUsageServerAuth) ||
// Any CA may issue an OCSP responder certificate.
eku == ExtKeyUsageOCSPSigning ||
// Code-signing CAs can use Microsoft's commercial and
// kernel-mode EKUs.
(eku == ExtKeyUsageMicrosoftCommercialCodeSigning || eku == ExtKeyUsageMicrosoftKernelCodeSigning) && certEKU == ExtKeyUsageCodeSigning
}
// isValid performs validity checks on c given that it is a candidate to append // isValid performs validity checks on c given that it is a candidate to append
// to the chain in currentChain. // to the chain in currentChain.
func (c *Certificate) isValid(certType int, currentChain []*Certificate, opts *VerifyOptions) error { func (c *Certificate) isValid(certType int, currentChain []*Certificate, opts *VerifyOptions) error {
...@@ -708,59 +661,6 @@ func (c *Certificate) isValid(certType int, currentChain []*Certificate, opts *V ...@@ -708,59 +661,6 @@ func (c *Certificate) isValid(certType int, currentChain []*Certificate, opts *V
} }
} }
checkEKUs := certType == intermediateCertificate
// If no extended key usages are specified, then all are acceptable.
if checkEKUs && (len(c.ExtKeyUsage) == 0 && len(c.UnknownExtKeyUsage) == 0) {
checkEKUs = false
}
// If the “any” key usage is permitted, then no more checks are needed.
if checkEKUs {
for _, caEKU := range c.ExtKeyUsage {
comparisonCount++
if caEKU == ExtKeyUsageAny {
checkEKUs = false
break
}
}
}
if checkEKUs {
NextEKU:
for _, eku := range leaf.ExtKeyUsage {
if comparisonCount > maxConstraintComparisons {
return CertificateInvalidError{c, TooManyConstraints, ""}
}
for _, caEKU := range c.ExtKeyUsage {
comparisonCount++
if ekuPermittedBy(eku, caEKU, checkingAgainstIssuerCert) {
continue NextEKU
}
}
oid, _ := oidFromExtKeyUsage(eku)
return CertificateInvalidError{c, CANotAuthorizedForExtKeyUsage, fmt.Sprintf("EKU not permitted: %#v", oid)}
}
NextUnknownEKU:
for _, eku := range leaf.UnknownExtKeyUsage {
if comparisonCount > maxConstraintComparisons {
return CertificateInvalidError{c, TooManyConstraints, ""}
}
for _, caEKU := range c.UnknownExtKeyUsage {
comparisonCount++
if caEKU.Equal(eku) {
continue NextUnknownEKU
}
}
return CertificateInvalidError{c, CANotAuthorizedForExtKeyUsage, fmt.Sprintf("EKU not permitted: %#v", eku)}
}
}
// KeyUsage status flags are ignored. From Engineering Security, Peter // KeyUsage status flags are ignored. From Engineering Security, Peter
// Gutmann: A European government CA marked its signing certificates as // Gutmann: A European government CA marked its signing certificates as
// being valid for encryption only, but no-one noticed. Another // being valid for encryption only, but no-one noticed. Another
...@@ -861,63 +761,38 @@ func (c *Certificate) Verify(opts VerifyOptions) (chains [][]*Certificate, err e ...@@ -861,63 +761,38 @@ func (c *Certificate) Verify(opts VerifyOptions) (chains [][]*Certificate, err e
} }
} }
requestedKeyUsages := make([]ExtKeyUsage, len(opts.KeyUsages)) var candidateChains [][]*Certificate
copy(requestedKeyUsages, opts.KeyUsages) if opts.Roots.contains(c) {
if len(requestedKeyUsages) == 0 { candidateChains = append(candidateChains, []*Certificate{c})
requestedKeyUsages = append(requestedKeyUsages, ExtKeyUsageServerAuth) } else {
if candidateChains, err = c.buildChains(make(map[int][][]*Certificate), []*Certificate{c}, &opts); err != nil {
return nil, err
}
} }
// If no key usages are specified, then any are acceptable. keyUsages := opts.KeyUsages
checkEKU := len(c.ExtKeyUsage) > 0 if len(keyUsages) == 0 {
keyUsages = []ExtKeyUsage{ExtKeyUsageServerAuth}
for _, eku := range requestedKeyUsages {
if eku == ExtKeyUsageAny {
checkEKU = false
break
}
} }
if checkEKU { // If any key usage is acceptable then we're done.
foundMatch := false for _, usage := range keyUsages {
NextUsage: if usage == ExtKeyUsageAny {
for _, eku := range requestedKeyUsages { return candidateChains, nil
for _, leafEKU := range c.ExtKeyUsage {
if ekuPermittedBy(eku, leafEKU, checkingAgainstLeafCert) {
foundMatch = true
break NextUsage
}
}
} }
}
if !foundMatch { for _, candidate := range candidateChains {
msg := "leaf contains the following, recognized EKUs: " if checkChainForKeyUsage(candidate, keyUsages) {
chains = append(chains, candidate)
for i, leafEKU := range c.ExtKeyUsage {
oid, ok := oidFromExtKeyUsage(leafEKU)
if !ok {
continue
}
if i > 0 {
msg += ", "
}
msg += formatOID(oid)
}
return nil, CertificateInvalidError{c, IncompatibleUsage, msg}
} }
} }
var candidateChains [][]*Certificate if len(chains) == 0 {
if opts.Roots.contains(c) { return nil, CertificateInvalidError{c, IncompatibleUsage, ""}
candidateChains = append(candidateChains, []*Certificate{c})
} else {
if candidateChains, err = c.buildChains(make(map[int][][]*Certificate), []*Certificate{c}, &opts); err != nil {
return nil, err
}
} }
return candidateChains, nil return chains, nil
} }
func appendToFreshChain(chain []*Certificate, cert *Certificate) []*Certificate { func appendToFreshChain(chain []*Certificate, cert *Certificate) []*Certificate {
...@@ -1078,3 +953,65 @@ func (c *Certificate) VerifyHostname(h string) error { ...@@ -1078,3 +953,65 @@ func (c *Certificate) VerifyHostname(h string) error {
return HostnameError{c, h} return HostnameError{c, h}
} }
// checkChainForKeyUsage reports whether the given certificate chain is
// acceptable for at least one of the requested extended key usages.
// It walks the chain from the root (end of the slice) toward the leaf,
// crossing out requested usages that some certificate in the chain does
// not permit; the chain is rejected only when every requested usage has
// been crossed out. Certificates that list no EKUs at all, or that list
// ExtKeyUsageAny, do not restrict anything.
func checkChainForKeyUsage(chain []*Certificate, keyUsages []ExtKeyUsage) bool {
// Work on a private copy so the caller's slice is never mutated.
usages := make([]ExtKeyUsage, len(keyUsages))
copy(usages, keyUsages)
if len(chain) == 0 {
return false
}
usagesRemaining := len(usages)
// We walk down the list and cross out any usages that aren't supported
// by each certificate. If we cross out all the usages, then the chain
// is unacceptable.
NextCert:
for i := len(chain) - 1; i >= 0; i-- {
cert := chain[i]
if len(cert.ExtKeyUsage) == 0 && len(cert.UnknownExtKeyUsage) == 0 {
// The certificate doesn't have any extended key usage specified.
continue
}
for _, usage := range cert.ExtKeyUsage {
if usage == ExtKeyUsageAny {
// The certificate is explicitly good for any usage.
continue NextCert
}
}
// Sentinel marking a requested usage as already crossed out.
const invalidUsage ExtKeyUsage = -1
NextRequestedUsage:
for i, requestedUsage := range usages {
if requestedUsage == invalidUsage {
continue
}
for _, usage := range cert.ExtKeyUsage {
if requestedUsage == usage {
continue NextRequestedUsage
} else if requestedUsage == ExtKeyUsageServerAuth &&
(usage == ExtKeyUsageNetscapeServerGatedCrypto ||
usage == ExtKeyUsageMicrosoftServerGatedCrypto) {
// In order to support COMODO
// certificate chains, we have to
// accept Netscape or Microsoft SGC
// usages as equal to ServerAuth.
continue NextRequestedUsage
}
}
// This certificate does not permit requestedUsage: cross it out.
usages[i] = invalidUsage
usagesRemaining--
if usagesRemaining == 0 {
// Every requested usage is forbidden somewhere in the chain.
return false
}
}
}
return true
}
...@@ -474,7 +474,7 @@ func Map(mapping func(rune) rune, s string) string { ...@@ -474,7 +474,7 @@ func Map(mapping func(rune) rune, s string) string {
b = make([]byte, len(s)+utf8.UTFMax) b = make([]byte, len(s)+utf8.UTFMax)
nbytes = copy(b, s[:i]) nbytes = copy(b, s[:i])
if r >= 0 { if r >= 0 {
if r <= utf8.RuneSelf { if r < utf8.RuneSelf {
b[nbytes] = byte(r) b[nbytes] = byte(r)
nbytes++ nbytes++
} else { } else {
...@@ -504,7 +504,7 @@ func Map(mapping func(rune) rune, s string) string { ...@@ -504,7 +504,7 @@ func Map(mapping func(rune) rune, s string) string {
r := mapping(c) r := mapping(c)
// common case // common case
if (0 <= r && r <= utf8.RuneSelf) && nbytes < len(b) { if (0 <= r && r < utf8.RuneSelf) && nbytes < len(b) {
b[nbytes] = byte(r) b[nbytes] = byte(r)
nbytes++ nbytes++
continue continue
......
...@@ -532,6 +532,7 @@ var upperTests = []StringTest{ ...@@ -532,6 +532,7 @@ var upperTests = []StringTest{
{"longStrinGwitHmixofsmaLLandcAps", "LONGSTRINGWITHMIXOFSMALLANDCAPS"}, {"longStrinGwitHmixofsmaLLandcAps", "LONGSTRINGWITHMIXOFSMALLANDCAPS"},
{"long\u0250string\u0250with\u0250nonascii\u2C6Fchars", "LONG\u2C6FSTRING\u2C6FWITH\u2C6FNONASCII\u2C6FCHARS"}, {"long\u0250string\u0250with\u0250nonascii\u2C6Fchars", "LONG\u2C6FSTRING\u2C6FWITH\u2C6FNONASCII\u2C6FCHARS"},
{"\u0250\u0250\u0250\u0250\u0250", "\u2C6F\u2C6F\u2C6F\u2C6F\u2C6F"}, // grows one byte per char {"\u0250\u0250\u0250\u0250\u0250", "\u2C6F\u2C6F\u2C6F\u2C6F\u2C6F"}, // grows one byte per char
{"a\u0080\U0010FFFF", "A\u0080\U0010FFFF"}, // test utf8.RuneSelf and utf8.MaxRune
} }
var lowerTests = []StringTest{ var lowerTests = []StringTest{
...@@ -542,6 +543,7 @@ var lowerTests = []StringTest{ ...@@ -542,6 +543,7 @@ var lowerTests = []StringTest{
{"longStrinGwitHmixofsmaLLandcAps", "longstringwithmixofsmallandcaps"}, {"longStrinGwitHmixofsmaLLandcAps", "longstringwithmixofsmallandcaps"},
{"LONG\u2C6FSTRING\u2C6FWITH\u2C6FNONASCII\u2C6FCHARS", "long\u0250string\u0250with\u0250nonascii\u0250chars"}, {"LONG\u2C6FSTRING\u2C6FWITH\u2C6FNONASCII\u2C6FCHARS", "long\u0250string\u0250with\u0250nonascii\u0250chars"},
{"\u2C6D\u2C6D\u2C6D\u2C6D\u2C6D", "\u0251\u0251\u0251\u0251\u0251"}, // shrinks one byte per char {"\u2C6D\u2C6D\u2C6D\u2C6D\u2C6D", "\u0251\u0251\u0251\u0251\u0251"}, // shrinks one byte per char
{"A\u0080\U0010FFFF", "a\u0080\U0010FFFF"}, // test utf8.RuneSelf and utf8.MaxRune
} }
const space = "\t\v\r\f\n\u0085\u00a0\u2000\u3000" const space = "\t\v\r\f\n\u0085\u00a0\u2000\u3000"
...@@ -654,6 +656,27 @@ func TestMap(t *testing.T) { ...@@ -654,6 +656,27 @@ func TestMap(t *testing.T) {
if m != expect { if m != expect {
t.Errorf("replace invalid sequence: expected %q got %q", expect, m) t.Errorf("replace invalid sequence: expected %q got %q", expect, m)
} }
// 8. Check utf8.RuneSelf and utf8.MaxRune encoding
encode := func(r rune) rune {
switch r {
case utf8.RuneSelf:
return unicode.MaxRune
case unicode.MaxRune:
return utf8.RuneSelf
}
return r
}
s := string(utf8.RuneSelf) + string(utf8.MaxRune)
r := string(utf8.MaxRune) + string(utf8.RuneSelf) // reverse of s
m = Map(encode, s)
if m != r {
t.Errorf("encoding not handled correctly: expected %q got %q", r, m)
}
m = Map(encode, r)
if m != s {
t.Errorf("encoding not handled correctly: expected %q got %q", s, m)
}
} }
func TestToUpper(t *testing.T) { runStringTests(t, ToUpper, "ToUpper", upperTests) } func TestToUpper(t *testing.T) { runStringTests(t, ToUpper, "ToUpper", upperTests) }
......
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
#include <time.h> #include <time.h>
#include <unistd.h> #include <unistd.h>
#define fd (100) #define fd (30)
// Tests libgo2.so, which does not export any functions. // Tests libgo2.so, which does not export any functions.
// Read a string from the file descriptor and print it. // Read a string from the file descriptor and print it.
......
...@@ -21,7 +21,7 @@ import ( ...@@ -21,7 +21,7 @@ import (
// that the C code can also use. // that the C code can also use.
const ( const (
fd = 100 fd = 30
) )
func init() { func init() {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment