From c7e220da4ee7265520ecee157462310d9e1e370c Mon Sep 17 00:00:00 2001 From: sam boyer Date: Wed, 13 Jun 2018 22:20:54 -0400 Subject: [PATCH 01/25] Remove InputsDigest, add InputImports First steps towards a better in-sync checking system that does not rely on a merge-conflict-prone explicit hash digest. --- gps/selection.go | 25 ++++++++++++++++++++++++- gps/solution_test.go | 9 --------- gps/solver.go | 8 +++----- lock.go | 31 ++++++++++++------------------- lock_test.go | 12 ------------ txn_writer.go | 18 ++++++++++-------- 6 files changed, 49 insertions(+), 54 deletions(-) diff --git a/gps/selection.go b/gps/selection.go index 8c0b7d3a62..727b5206ae 100644 --- a/gps/selection.go +++ b/gps/selection.go @@ -119,7 +119,7 @@ func (s *selection) getRequiredPackagesIn(id ProjectIdentifier) map[string]int { return uniq } -// Suppress unused warning. +// Suppress unused linting warning. var _ = (*selection)(nil).getSelectedPackagesIn // Compute a list of the unique packages within the given ProjectIdentifier that @@ -141,6 +141,29 @@ func (s *selection) getSelectedPackagesIn(id ProjectIdentifier) map[string]int { return uniq } +// getProjectImportMap extracts the set of package imports from the used +// packages in each selected project. +func (s *selection) getProjectImportMap() map[ProjectRoot]map[string]struct{} { + importMap := make(map[ProjectRoot]map[string]struct{}) + for _, edges := range s.deps { + for _, edge := range edges { + var curmap map[string]struct{} + if imap, has := importMap[edge.depender.id.ProjectRoot]; !has { + curmap = make(map[string]struct{}) + } else { + curmap = imap + } + + for _, pl := range edge.dep.pl { + curmap[pl] = struct{}{} + } + importMap[edge.depender.id.ProjectRoot] = curmap + } + } + + return importMap +} + func (s *selection) getConstraint(id ProjectIdentifier) Constraint { deps, exists := s.deps[id.ProjectRoot] if !exists || len(deps) == 0 { diff --git a/gps/solution_test.go b/gps/solution_test.go index 299b9222b1..6a6706f4d4 100644 --- a/gps/solution_test.go +++ b/gps/solution_test.go @@ -39,15 +39,6 @@ func init() { }, } basicResult.analyzerInfo = (naiveAnalyzer{}).Info() - - // Just in case something needs punishing, kubernetes offers a complex, - // real-world set of dependencies, and this revision is known to work. - /* - _ = atom{ - id: pi("github.com/kubernetes/kubernetes"), - v: NewVersion("1.0.0").Pair(Revision("528f879e7d3790ea4287687ef0ab3f2a01cc2718")), - } - */ } func testWriteDepTree(t *testing.T) { diff --git a/gps/solver.go b/gps/solver.go index 4061f8e9fe..c059b60f86 100644 --- a/gps/solver.go +++ b/gps/solver.go @@ -1363,18 +1363,16 @@ func pa2lp(pa atom, pkgs map[string]struct{}) LockedProject { panic("unreachable") } - lp.pkgs = make([]string, len(pkgs)) - k := 0 + lp.pkgs = make([]string, 0, len(pkgs)) pr := string(pa.id.ProjectRoot) trim := pr + "/" for pkg := range pkgs { if pkg == string(pa.id.ProjectRoot) { - lp.pkgs[k] = "." + lp.pkgs = append(lp.pkgs, ".") } else { - lp.pkgs[k] = strings.TrimPrefix(pkg, trim) + lp.pkgs = append(lp.pkgs, strings.TrimPrefix(pkg, trim)) } - k++ } sort.Strings(lp.pkgs) diff --git a/lock.go b/lock.go index 3f3f563c42..5aff3ab771 100644 --- a/lock.go +++ b/lock.go @@ -6,7 +6,6 @@ package dep import ( "bytes" - "encoding/hex" "io" "sort" @@ -26,7 +25,6 @@ type Lock struct { // SolveMeta holds solver meta data. 
type SolveMeta struct { - InputsDigest []byte AnalyzerName string AnalyzerVersion int SolverName string @@ -39,11 +37,11 @@ type rawLock struct { } type solveMeta struct { - InputsDigest string `toml:"inputs-digest"` - AnalyzerName string `toml:"analyzer-name"` - AnalyzerVersion int `toml:"analyzer-version"` - SolverName string `toml:"solver-name"` - SolverVersion int `toml:"solver-version"` + AnalyzerName string `toml:"analyzer-name"` + AnalyzerVersion int `toml:"analyzer-version"` + SolverName string `toml:"solver-name"` + SolverVersion int `toml:"solver-version"` + InputImports []string `toml:"input-imports"` } type rawLockedProject struct { @@ -77,15 +75,11 @@ func fromRawLock(raw rawLock) (*Lock, error) { P: make([]gps.LockedProject, len(raw.Projects)), } - l.SolveMeta.InputsDigest, err = hex.DecodeString(raw.SolveMeta.InputsDigest) - if err != nil { - return nil, errors.Errorf("invalid hash digest in lock's memo field") - } - l.SolveMeta.AnalyzerName = raw.SolveMeta.AnalyzerName l.SolveMeta.AnalyzerVersion = raw.SolveMeta.AnalyzerVersion l.SolveMeta.SolverName = raw.SolveMeta.SolverName l.SolveMeta.SolverVersion = raw.SolveMeta.SolverVersion + l.SolveMeta.InputImports = raw.SolveMeta.InputImports for i, ld := range raw.Projects { r := gps.Revision(ld.Revision) @@ -106,15 +100,17 @@ func fromRawLock(raw rawLock) (*Lock, error) { ProjectRoot: gps.ProjectRoot(ld.Name), Source: ld.Source, } - l.P[i] = gps.NewLockedProject(id, v, ld.Packages) + l.P[i] = gps.NewLockedProject(id, v, ld.Packages, ld.Imports) } return l, nil } // InputsDigest returns the hash of inputs which produced this lock data. +// +// TODO(sdboyer) remove, this is now deprecated func (l *Lock) InputsDigest() []byte { - return l.SolveMeta.InputsDigest + return nil } // Projects returns the list of LockedProjects contained in the lock data. @@ -140,7 +136,6 @@ func (l *Lock) HasProjectWithRoot(root gps.ProjectRoot) bool { func (l *Lock) toRaw() rawLock { raw := rawLock{ SolveMeta: solveMeta{ - InputsDigest: hex.EncodeToString(l.SolveMeta.InputsDigest), AnalyzerName: l.SolveMeta.AnalyzerName, AnalyzerVersion: l.SolveMeta.AnalyzerVersion, SolverName: l.SolveMeta.SolverName, @@ -182,13 +177,12 @@ func (l *Lock) MarshalTOML() ([]byte, error) { // LockFromSolution converts a gps.Solution to dep's representation of a lock. // // Data is defensively copied wherever necessary to ensure the resulting *lock -// shares no memory with the original lock. +// shares no memory with the input solution. 
func LockFromSolution(in gps.Solution) *Lock { - h, p := in.InputsDigest(), in.Projects() + p := in.Projects() l := &Lock{ SolveMeta: SolveMeta{ - InputsDigest: make([]byte, len(h)), AnalyzerName: in.AnalyzerName(), AnalyzerVersion: in.AnalyzerVersion(), SolverName: in.SolverName(), @@ -197,7 +191,6 @@ func LockFromSolution(in gps.Solution) *Lock { P: make([]gps.LockedProject, len(p)), } - copy(l.SolveMeta.InputsDigest, h) copy(l.P, p) return l } diff --git a/lock_test.go b/lock_test.go index 8f55c91678..a5399f9f30 100644 --- a/lock_test.go +++ b/lock_test.go @@ -28,9 +28,6 @@ func TestReadLock(t *testing.T) { b, _ := hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") want := &Lock{ - SolveMeta: SolveMeta{ - InputsDigest: b, - }, P: []gps.LockedProject{ gps.NewLockedProject( gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, @@ -54,9 +51,6 @@ func TestReadLock(t *testing.T) { b, _ = hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") want = &Lock{ - SolveMeta: SolveMeta{ - InputsDigest: b, - }, P: []gps.LockedProject{ gps.NewLockedProject( gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, @@ -79,9 +73,6 @@ func TestWriteLock(t *testing.T) { want := h.GetTestFileString(golden) memo, _ := hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") l := &Lock{ - SolveMeta: SolveMeta{ - InputsDigest: memo, - }, P: []gps.LockedProject{ gps.NewLockedProject( gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, @@ -110,9 +101,6 @@ func TestWriteLock(t *testing.T) { want = h.GetTestFileString(golden) memo, _ = hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") l = &Lock{ - SolveMeta: SolveMeta{ - InputsDigest: memo, - }, P: []gps.LockedProject{ gps.NewLockedProject( gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, diff --git a/txn_writer.go b/txn_writer.go index c7b53fff9b..5bae47df3f 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -233,7 +233,9 @@ func formatLockDiff(diff gps.LockDiff) (string, error) { type VendorBehavior int const ( - // VendorOnChanged indicates that the vendor directory should be written when the lock is new or changed. + // VendorOnChanged indicates that the vendor directory should be written + // when the lock is new or changed, or a project in vendor differs from its + // intended state. VendorOnChanged VendorBehavior = iota // VendorAlways forces the vendor directory to always be written. VendorAlways @@ -259,14 +261,14 @@ func (sw SafeWriter) validate(root string, sm gps.SourceManager) error { return nil } -// Write saves some combination of config yaml, lock, and a vendor tree. -// root is the absolute path of root dir in which to write. -// sm is only required if vendor is being written. +// Write saves some combination of manifest, lock, and a vendor tree. root is +// the absolute path of root dir in which to write. sm is only required if +// vendor is being written. // -// It first writes to a temp dir, then moves them in place if and only if all the write -// operations succeeded. It also does its best to roll back if any moves fail. -// This mostly guarantees that dep cannot exit with a partial write that would -// leave an undefined state on disk. +// It first writes to a temp dir, then moves them in place if and only if all +// the write operations succeeded. It also does its best to roll back if any +// moves fail. 
This mostly guarantees that dep cannot exit with a partial write +// that would leave an undefined state on disk. // // If logger is not nil, progress will be logged after each project write. func (sw *SafeWriter) Write(root string, sm gps.SourceManager, examples bool, logger *log.Logger) error { From db8b66b3078a5ab11fa7020c8dacba54121f777e Mon Sep 17 00:00:00 2001 From: sam boyer Date: Fri, 15 Jun 2018 01:11:52 -0400 Subject: [PATCH 02/25] dep: Introduce lock verification logic This mostly supplants the hash comparison-based checking, though it's still in rough form. --- cmd/dep/ensure.go | 32 +++-------- lock.go | 3 +- project.go | 142 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 153 insertions(+), 24 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 675a3b6791..7bc066cc31 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -256,13 +256,9 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return err } - solver, err := gps.Prepare(params, sm) - if err != nil { - return errors.Wrap(err, "prepare solver") - } - - if p.Lock != nil && bytes.Equal(p.Lock.InputsDigest(), solver.HashInputs()) { - // Memo matches, so there's probably nothing to do. + if lsat, err := p.LockSatisfiesInputs(sm); err != nil { + return err + } else if !lsat.Passes() { if ctx.Verbose { ctx.Out.Printf("%s was already in sync with imports and %s\n", dep.LockName, dep.ManifestName) } @@ -272,13 +268,7 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return nil } - // TODO(sdboyer) The desired behavior at this point is to determine - // whether it's necessary to write out vendor, or if it's already - // consistent with the lock. However, we haven't yet determined what - // that "verification" is supposed to look like (#121); in the meantime, - // we unconditionally write out vendor/ so that `dep ensure`'s behavior - // is maximally compatible with what it will eventually become. - sw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorAlways, p.Manifest.PruneOptions) + sw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorOnChanged, p.Manifest.PruneOptions) if err != nil { return err } @@ -294,6 +284,11 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return errors.WithMessage(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } + solver, err := gps.Prepare(params, sm) + if err != nil { + return errors.Wrap(err, "prepare solver") + } + if cmd.noVendor && cmd.dryRun { return errors.New("Gopkg.lock was not up to date") } @@ -361,15 +356,6 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return errors.Wrap(err, "fastpath solver prepare") } - // Compare the hashes. If they're not equal, bail out and ask the user to - // run a straight `dep ensure` before updating. This is handholding the - // user a bit, but the extra effort required is minimal, and it ensures the - // user is isolating variables in the event of solve problems (was it the - // "pending" changes, or the -update that caused the problem?). - if !bytes.Equal(p.Lock.InputsDigest(), solver.HashInputs()) { - ctx.Out.Printf("Warning: %s is out of sync with %s or the project's imports.", dep.LockName, dep.ManifestName) - } - // When -update is specified without args, allow every dependency to change // versions, regardless of the lock file. 
if len(args) == 0 { diff --git a/lock.go b/lock.go index 5aff3ab771..50c1cc87c6 100644 --- a/lock.go +++ b/lock.go @@ -29,6 +29,7 @@ type SolveMeta struct { AnalyzerVersion int SolverName string SolverVersion int + InputImports []string } type rawLock struct { @@ -100,7 +101,7 @@ func fromRawLock(raw rawLock) (*Lock, error) { ProjectRoot: gps.ProjectRoot(ld.Name), Source: ld.Source, } - l.P[i] = gps.NewLockedProject(id, v, ld.Packages, ld.Imports) + l.P[i] = gps.NewLockedProject(id, v, ld.Packages) } return l, nil diff --git a/project.go b/project.go index d2677e8866..5321014f26 100644 --- a/project.go +++ b/project.go @@ -219,6 +219,148 @@ func (p *Project) GetDirectDependencyNames(sm gps.SourceManager) (pkgtree.Packag return ptree, directDeps, nil } +type lockUnsatisfy uint8 + +const ( + missingFromLock lockUnsatisfy = iota + inAdditionToLock +) + +type constraintMismatch struct { + c gps.Constraint + v gps.Version +} + +type constraintMismatches map[gps.ProjectRoot]constraintMismatch + +type LockSatisfaction struct { + nolock bool + missingPkgs, excessPkgs []string + pkgs map[string]lockUnsatisfy + badovr, badconstraint constraintMismatches +} + +// Passed is a shortcut method to check if any problems with the evaluted lock +// were identified. +func (ls LockSatisfaction) Passed() bool { + if ls.nolock { + return false + } + + if len(ls.pkgs) > 0 { + return false + } + + if len(ls.badovr) > 0 { + return false + } + + if len(ls.badconstraint) > 0 { + return false + } + + return true +} + +func (ls LockSatisfaction) MissingPackages() []string { + return ls.missingPkgs +} + +func (ls LockSatisfaction) ExcessPackages() []string { + return ls.excessPkgs +} + +func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]constraintMismatch { + return ls.badovr +} + +func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]constraintMismatch { + return ls.badconstraint +} + +// LockSatisfiesInputs determines whether the Project's lock satisfies all the +// requirements indicated by the inputs (Manifest and RootPackageTree). 
+func (p *Project) LockSatisfiesInputs(sm gps.SourceManager) (LockSatisfaction, error) { + if p.Lock == nil { + return LockSatisfaction{nolock: true}, nil + } + + ptree, err := p.ParseRootPackageTree() + if err != nil { + return LockSatisfaction{}, err + } + + var ig *pkgtree.IgnoredRuleset + var req map[string]bool + if p.Manifest != nil { + ig = p.Manifest.IgnoredPackages() + req = p.Manifest.RequiredPackages() + } + + rm, _ := ptree.ToReachMap(true, true, false, ig) + reach := rm.FlattenFn(paths.IsStandardImportPath) + + inlock := make(map[string]bool, len(p.Lock.SolveMeta.InputImports)) + ininputs := make(map[string]bool, len(reach)+len(req)) + + for _, imp := range reach { + ininputs[imp] = true + } + + for imp := range req { + ininputs[imp] = true + } + + for _, imp := range p.Lock.SolveMeta.InputImports { + inlock[imp] = true + } + + lsat := LockSatisfaction{ + badovr: make(constraintMismatches), + badconstraint: make(constraintMismatches), + } + + for ip := range ininputs { + if !inlock[ip] { + lsat.pkgs[ip] = missingFromLock + } else { + // So we don't have to revisit it below + delete(inlock, ip) + } + } + + for ip := range inlock { + if !ininputs[ip] { + lsat.pkgs[ip] = inAdditionToLock + } + } + + ineff := make(map[string]bool) + for _, pr := range p.FindIneffectualConstraints(sm) { + ineff[string(pr)] = true + } + + for _, lp := range p.Lock.Projects() { + pr := lp.Ident().ProjectRoot + + if pp, has := p.Manifest.Ovr[pr]; has && !pp.Constraint.Matches(lp.Version()) { + lsat.badovr[pr] = constraintMismatch{ + c: pp.Constraint, + v: lp.Version(), + } + } + + if pp, has := p.Manifest.Constraints[pr]; has && !ineff[string(pr)] && !pp.Constraint.Matches(lp.Version()) { + lsat.badconstraint[pr] = constraintMismatch{ + c: pp.Constraint, + v: lp.Version(), + } + } + } + + return lsat, nil +} + // FindIneffectualConstraints looks for constraint rules expressed in the // manifest that will have no effect during solving, as they are specified for // projects that are not direct dependencies of the Project. From f23ef5161a63f982be97b93fc1e40c768ea24ee6 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Thu, 21 Jun 2018 00:56:10 -0400 Subject: [PATCH 03/25] gps: Convert LockedProject to an interface This is the first step towards being able to a more expansive type - one that carries the pruning and digest information - directly within the existing Lock interface. 
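Concretely, once LockedProject is an interface, a richer implementation can satisfy it simply by embedding the default one and adding fields alongside — the shape the next patch in this series introduces in gps/verify:

    // A LockedProject that also records how its tree was pruned and what
    // digest the pruned tree should hash to.
    type VerifiableProject struct {
        gps.LockedProject
        PruneOpts gps.PruneOptions
        Digest    pkgtree.VersionedDigest
    }

Existing call sites only need to switch from direct field access (lp.pi, lp.pkgs) to the accessor methods (lp.Ident(), lp.Packages()), as the bridge, lockdiff, and trace changes below do.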
--- gps/bridge.go | 4 +- gps/lock.go | 74 ++++++++++++++++++++++----------- gps/lock_test.go | 18 ++++---- gps/lockdiff.go | 18 ++++---- gps/lockdiff_test.go | 52 +++++++++++------------ gps/prune_test.go | 8 ++-- gps/solution_test.go | 2 +- gps/solve_basic_test.go | 2 +- gps/solve_test.go | 13 +++--- gps/solver.go | 2 +- gps/source_cache_bolt_encode.go | 38 +++++++++++++++-- gps/trace.go | 2 +- lock.go | 1 - lock_test.go | 5 --- 14 files changed, 145 insertions(+), 94 deletions(-) diff --git a/gps/bridge.go b/gps/bridge.go index dc6812b87c..6f5c38a540 100644 --- a/gps/bridge.go +++ b/gps/bridge.go @@ -195,8 +195,8 @@ func (b *bridge) breakLock() { } for _, lp := range b.s.rd.rl.Projects() { - if _, is := b.s.sel.selected(lp.pi); !is { - pi, v := lp.pi, lp.Version() + if _, is := b.s.sel.selected(lp.Ident()); !is { + pi, v := lp.Ident(), lp.Version() go func() { // Sync first b.sm.SyncSourceFor(pi) diff --git a/gps/lock.go b/gps/lock.go index d2f9d59119..d382fd03ff 100644 --- a/gps/lock.go +++ b/gps/lock.go @@ -39,8 +39,7 @@ func LocksAreEq(l1, l2 Lock, checkHash bool) bool { return false } - p1 = sortedLockedProjects(p1) - p2 = sortedLockedProjects(p2) + p1, p2 = sortLockedProjects(p1), sortLockedProjects(p2) for k, lp := range p1 { if !lp.Eq(p2[k]) { @@ -50,8 +49,8 @@ func LocksAreEq(l1, l2 Lock, checkHash bool) bool { return true } -// sortedLockedProjects returns a sorted copy of lps, or itself if already sorted. -func sortedLockedProjects(lps []LockedProject) []LockedProject { +// sortLockedProjects returns a sorted copy of lps, or itself if already sorted. +func sortLockedProjects(lps []LockedProject) []LockedProject { if len(lps) <= 1 || sort.SliceIsSorted(lps, func(i, j int) bool { return lps[i].Ident().Less(lps[j].Ident()) }) { @@ -69,7 +68,16 @@ func sortedLockedProjects(lps []LockedProject) []LockedProject { // project's name, one or both of version and underlying revision, the network // URI for accessing it, the path at which it should be placed within a vendor // directory, and the packages that are used in it. -type LockedProject struct { +type LockedProject interface { + Ident() ProjectIdentifier + Version() Version + Packages() []string + Eq(LockedProject) bool + String() string +} + +// lockedProject is the default implementation of LockedProject. +type lockedProject struct { pi ProjectIdentifier v UnpairedVersion r Revision @@ -109,7 +117,7 @@ func NewLockedProject(id ProjectIdentifier, v Version, pkgs []string) LockedProj panic("must provide a non-nil version to create a LockedProject") } - lp := LockedProject{ + lp := lockedProject{ pi: id, pkgs: pkgs, } @@ -134,13 +142,13 @@ func NewLockedProject(id ProjectIdentifier, v Version, pkgs []string) LockedProj // Ident returns the identifier describing the project. This includes both the // local name (the root name by which the project is referenced in import paths) // and the network name, where the upstream source lives. -func (lp LockedProject) Ident() ProjectIdentifier { +func (lp lockedProject) Ident() ProjectIdentifier { return lp.pi } // Version assembles together whatever version and/or revision data is // available into a single Version. -func (lp LockedProject) Version() Version { +func (lp lockedProject) Version() Version { if lp.r == "" { return lp.v } @@ -152,37 +160,53 @@ func (lp LockedProject) Version() Version { return lp.v.Pair(lp.r) } -// Eq checks if two LockedProject instances are equal. 
-func (lp LockedProject) Eq(lp2 LockedProject) bool { - if lp.pi != lp2.pi { - return false - } - - if lp.r != lp2.r { +// Eq checks if two LockedProject instances are equal. The implementation +// assumes both Packages lists are already sorted lexicographically. +func (lp lockedProject) Eq(lp2 LockedProject) bool { + if lp.pi != lp2.Ident() { return false } - if len(lp.pkgs) != len(lp2.pkgs) { - return false - } - - for k, v := range lp.pkgs { - if lp2.pkgs[k] != v { + var uv UnpairedVersion + switch tv := lp2.Version().(type) { + case Revision: + if lp.r != tv { + return false + } + case versionPair: + if lp.r != tv.r { return false } + uv = tv.v + case branchVersion, semVersion, plainVersion: + // For now, we're going to say that revisions must be present in order + // to indicate equality. We may need to change this later, as it may be + // more appropriate to enforce elsewhere. + return false } v1n := lp.v == nil - v2n := lp2.v == nil + v2n := uv == nil if v1n != v2n { return false } - if !v1n && !lp.v.Matches(lp2.v) { + if !v1n && !lp.v.Matches(uv) { + return false + } + + opkgs := lp2.Packages() + if len(lp.pkgs) != len(opkgs) { return false } + for k, v := range lp.pkgs { + if opkgs[k] != v { + return false + } + } + return true } @@ -195,11 +219,11 @@ func (lp LockedProject) Eq(lp2 LockedProject) bool { // safe to remove - it could contain C files, or other assets, that can't be // safely removed. // * The slice is not a copy. If you need to modify it, copy it first. -func (lp LockedProject) Packages() []string { +func (lp lockedProject) Packages() []string { return lp.pkgs } -func (lp LockedProject) String() string { +func (lp lockedProject) String() string { return fmt.Sprintf("%s@%s with packages: %v", lp.Ident(), lp.Version(), lp.pkgs) } diff --git a/gps/lock_test.go b/gps/lock_test.go index 3f9ca6ff2c..2c58942c24 100644 --- a/gps/lock_test.go +++ b/gps/lock_test.go @@ -34,14 +34,14 @@ func TestLockedProjectSorting(t *testing.T) { func TestLockedProjectsEq(t *testing.T) { lps := []LockedProject{ - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"gps"}), - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), nil), - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"gps", "flugle"}), - NewLockedProject(mkPI("foo"), NewVersion("nada"), []string{"foo"}), - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"flugle", "gps"}), - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("278a227dfc3d595a33a77ff3f841fd8ca1bc8cd0"), []string{"gps"}), - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.11.0"), []string{"gps"}), - NewLockedProject(mkPI("github.com/sdboyer/gps"), Revision("278a227dfc3d595a33a77ff3f841fd8ca1bc8cd0"), []string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("REV"), []string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("REV"), nil), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("REV"), []string{"gps", "flugle"}), + NewLockedProject(mkPI("foo"), NewVersion("nada").Pair("OTHERREV"), []string{"foo"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("REV"), []string{"flugle", "gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("REV2"), []string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.11.0").Pair("REV"), 
[]string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), Revision("REV2"), []string{"gps"}), } fix := map[string]struct { @@ -85,7 +85,7 @@ func TestLockedProjectsEq(t *testing.T) { func TestLocksAreEq(t *testing.T) { gpl := NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("278a227dfc3d595a33a77ff3f841fd8ca1bc8cd0"), []string{"gps"}) - svpl := NewLockedProject(mkPI("github.com/Masterminds/semver"), NewVersion("v2.0.0"), []string{"semver"}) + svpl := NewLockedProject(mkPI("github.com/Masterminds/semver"), NewVersion("v2.0.0").Pair("foo"), []string{"semver"}) bbbt := NewLockedProject(mkPI("github.com/beeblebrox/browntown"), NewBranch("master").Pair("63fc17eb7966a6f4cc0b742bf42731c52c4ac740"), []string{"browntown", "smoochies"}) l1 := solution{ diff --git a/gps/lockdiff.go b/gps/lockdiff.go index d3d136f4ff..35720b67e2 100644 --- a/gps/lockdiff.go +++ b/gps/lockdiff.go @@ -74,8 +74,8 @@ func DiffLocks(l1 Lock, l2 Lock) *LockDiff { p1, p2 := l1.Projects(), l2.Projects() - p1 = sortedLockedProjects(p1) - p2 = sortedLockedProjects(p2) + p1 = sortLockedProjects(p1) + p2 = sortLockedProjects(p2) diff := LockDiff{} @@ -88,12 +88,12 @@ func DiffLocks(l1 Lock, l2 Lock) *LockDiff { var i2next int for i1 := 0; i1 < len(p1); i1++ { lp1 := p1[i1] - pr1 := lp1.pi.ProjectRoot + pr1 := lp1.Ident().ProjectRoot var matched bool for i2 := i2next; i2 < len(p2); i2++ { lp2 := p2[i2] - pr2 := lp2.pi.ProjectRoot + pr2 := lp2.Ident().ProjectRoot switch strings.Compare(string(pr1), string(pr2)) { case 0: // Found a matching project @@ -135,7 +135,7 @@ func DiffLocks(l1 Lock, l2 Lock) *LockDiff { } func buildLockedProjectDiff(lp LockedProject) LockedProjectDiff { - s2 := lp.pi.Source + s2 := lp.Ident().Source r2, b2, v2 := VersionComponentStrings(lp.Version()) var rev, version, branch, source *StringDiff @@ -153,7 +153,7 @@ func buildLockedProjectDiff(lp LockedProject) LockedProjectDiff { } add := LockedProjectDiff{ - Name: lp.pi.ProjectRoot, + Name: lp.Ident().ProjectRoot, Source: source, Revision: rev, Version: version, @@ -169,10 +169,10 @@ func buildLockedProjectDiff(lp LockedProject) LockedProjectDiff { // DiffProjects compares two projects and identifies the differences between them. // Returns nil if there are no differences. 
func DiffProjects(lp1 LockedProject, lp2 LockedProject) *LockedProjectDiff { - diff := LockedProjectDiff{Name: lp1.pi.ProjectRoot} + diff := LockedProjectDiff{Name: lp1.Ident().ProjectRoot} - s1 := lp1.pi.Source - s2 := lp2.pi.Source + s1 := lp1.Ident().Source + s2 := lp2.Ident().Source if s1 != s2 { diff.Source = &StringDiff{Previous: s1, Current: s2} } diff --git a/gps/lockdiff_test.go b/gps/lockdiff_test.go index 4647628174..5e9d33697f 100644 --- a/gps/lockdiff_test.go +++ b/gps/lockdiff_test.go @@ -56,14 +56,14 @@ func TestDiffProjects_NoChange(t *testing.T) { } func TestDiffProjects_Modify(t *testing.T) { - p1 := LockedProject{ + p1 := lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewBranch("master"), r: "abc123", pkgs: []string{"baz", "qux"}, } - p2 := LockedProject{ + p2 := lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, v: NewVersion("v1.0.0"), r: "def456", @@ -117,14 +117,14 @@ func TestDiffProjects_Modify(t *testing.T) { } func TestDiffProjects_AddPackages(t *testing.T) { - p1 := LockedProject{ + p1 := lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewBranch("master"), r: "abc123", pkgs: []string{"foobar"}, } - p2 := LockedProject{ + p2 := lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, v: NewVersion("v1.0.0"), r: "def456", @@ -154,14 +154,14 @@ func TestDiffProjects_AddPackages(t *testing.T) { } func TestDiffProjects_RemovePackages(t *testing.T) { - p1 := LockedProject{ + p1 := lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewBranch("master"), r: "abc123", pkgs: []string{"athing", "foobar"}, } - p2 := LockedProject{ + p2 := lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, v: NewVersion("v1.0.0"), r: "def456", @@ -196,13 +196,13 @@ func TestDiffLocks_NoChange(t *testing.T) { l1 := safeLock{ h: []byte("abc123"), p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } l2 := safeLock{ h: []byte("abc123"), p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } @@ -216,20 +216,20 @@ func TestDiffLocks_AddProjects(t *testing.T) { l1 := safeLock{ h: []byte("abc123"), p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } l2 := safeLock{ h: []byte("abc123"), p: []LockedProject{ - { + lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux", Source: "https://github.com/mcfork/bazqux.git"}, v: NewVersion("v0.5.0"), r: "def456", pkgs: []string{"p1", "p2"}, }, - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - {pi: ProjectIdentifier{ProjectRoot: "github.com/zug/zug"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zug/zug"}, v: NewVersion("v1.0.0")}, }, } @@ -300,19 +300,19 @@ func TestDiffLocks_RemoveProjects(t *testing.T) { l1 := safeLock{ h: 
[]byte("abc123"), p: []LockedProject{ - { + lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/a/thing", Source: "https://github.com/mcfork/athing.git"}, v: NewBranch("master"), r: "def456", pkgs: []string{"p1", "p2"}, }, - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } l2 := safeLock{ h: []byte("abc123"), p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, }, } @@ -383,18 +383,18 @@ func TestDiffLocks_ModifyProjects(t *testing.T) { l1 := safeLock{ h: []byte("abc123"), p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bu"}, v: NewVersion("v1.0.0")}, - {pi: ProjectIdentifier{ProjectRoot: "github.com/zig/zag"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bu"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zig/zag"}, v: NewVersion("v1.0.0")}, }, } l2 := safeLock{ h: []byte("abc123"), p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v2.0.0")}, - {pi: ProjectIdentifier{ProjectRoot: "github.com/zig/zag"}, v: NewVersion("v2.0.0")}, - {pi: ProjectIdentifier{ProjectRoot: "github.com/zug/zug"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v2.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zig/zag"}, v: NewVersion("v2.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zug/zug"}, v: NewVersion("v1.0.0")}, }, } @@ -425,7 +425,7 @@ func TestDiffLocks_ModifyHash(t *testing.T) { l1 := safeLock{ h: h1, p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } @@ -433,7 +433,7 @@ func TestDiffLocks_ModifyHash(t *testing.T) { l2 := safeLock{ h: h2, p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } @@ -454,7 +454,7 @@ func TestDiffLocks_EmptyInitialLock(t *testing.T) { l2 := safeLock{ h: h2, p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } @@ -476,7 +476,7 @@ func TestDiffLocks_EmptyFinalLock(t *testing.T) { l1 := safeLock{ h: h1, p: []LockedProject{ - {pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, + lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } diff --git a/gps/prune_test.go b/gps/prune_test.go index 39d3dc0b48..27f54dc3b2 100644 --- a/gps/prune_test.go +++ b/gps/prune_test.go @@ -111,7 +111,7 @@ func 
TestPruneProject(t *testing.T) { h.TempDir(pr) baseDir := h.Path(".") - lp := LockedProject{ + lp := lockedProject{ pi: ProjectIdentifier{ ProjectRoot: ProjectRoot(pr), }, @@ -143,7 +143,7 @@ func TestPruneUnusedPackages(t *testing.T) { }{ { "one-package", - LockedProject{ + lockedProject{ pi: pi, pkgs: []string{ ".", @@ -165,7 +165,7 @@ func TestPruneUnusedPackages(t *testing.T) { }, { "nested-package", - LockedProject{ + lockedProject{ pi: pi, pkgs: []string{ "pkg", @@ -194,7 +194,7 @@ func TestPruneUnusedPackages(t *testing.T) { }, { "complex-project", - LockedProject{ + lockedProject{ pi: pi, pkgs: []string{ "pkg", diff --git a/gps/solution_test.go b/gps/solution_test.go index 6a6706f4d4..b18395e9e2 100644 --- a/gps/solution_test.go +++ b/gps/solution_test.go @@ -92,7 +92,7 @@ func testWriteDepTree(t *testing.T) { for _, p := range r.p { go func(pi ProjectIdentifier) { sm.SyncSourceFor(pi) - }(p.pi) + }(p.Ident()) } // nil lock/result should err immediately diff --git a/gps/solve_basic_test.go b/gps/solve_basic_test.go index 0ee4558328..092c9ca709 100644 --- a/gps/solve_basic_test.go +++ b/gps/solve_basic_test.go @@ -308,7 +308,7 @@ func mksolution(inputs ...interface{}) map[ProjectIdentifier]LockedProject { a := mkAtom(t) m[a.id] = NewLockedProject(a.id, a.v, []string{"."}) case LockedProject: - m[t.pi] = t + m[t.Ident()] = t default: panic(fmt.Sprintf("unexpected input to mksolution: %T %s", in, in)) } diff --git a/gps/solve_test.go b/gps/solve_test.go index 607bcd7eb6..92eabcbb45 100644 --- a/gps/solve_test.go +++ b/gps/solve_test.go @@ -171,7 +171,7 @@ func fixtureSolveSimpleChecks(fix specfix, soln Solution, err error, t *testing. // Dump result projects into a map for easier interrogation rp := make(map[ProjectIdentifier]LockedProject) for _, lp := range r.p { - rp[lp.pi] = lp + rp[lp.Ident()] = lp } fixlen, rlen := len(fix.solution()), len(rp) @@ -192,8 +192,8 @@ func fixtureSolveSimpleChecks(fix specfix, soln Solution, err error, t *testing. t.Errorf("Expected version %q of project %q, but actual version was %q", pv(flp.Version()), ppi(id), pv(lp.Version())) } - if !reflect.DeepEqual(lp.pkgs, flp.pkgs) { - t.Errorf("Package list was not not as expected for project %s@%s:\n\t(GOT) %s\n\t(WNT) %s", ppi(id), pv(lp.Version()), lp.pkgs, flp.pkgs) + if !reflect.DeepEqual(lp.Packages(), flp.Packages()) { + t.Errorf("Package list was not not as expected for project %s@%s:\n\t(GOT) %s\n\t(WNT) %s", ppi(id), pv(lp.Version()), lp.Packages(), flp.Packages()) } } } @@ -201,7 +201,7 @@ func fixtureSolveSimpleChecks(fix specfix, soln Solution, err error, t *testing. // Now walk through remaining actual results for id, lp := range rp { if _, exists := fix.solution()[id]; !exists { - t.Errorf("Unexpected project %s@%s present in results, with pkgs:\n\t%s", ppi(id), pv(lp.Version()), lp.pkgs) + t.Errorf("Unexpected project %s@%s present in results, with pkgs:\n\t%s", ppi(id), pv(lp.Version()), lp.Packages()) } } } @@ -243,7 +243,10 @@ func TestRootLockNoVersionPairMatching(t *testing.T) { l2 := make(fixLock, 1) copy(l2, fix.l) - l2[0].v = nil + + l2lp := l2[0].(lockedProject) + l2lp.v = nil + l2[0] = l2lp params := SolveParameters{ RootDir: string(fix.ds[0].n), diff --git a/gps/solver.go b/gps/solver.go index c059b60f86..d42f451490 100644 --- a/gps/solver.go +++ b/gps/solver.go @@ -1347,7 +1347,7 @@ func (s *solver) unselectLast() (atomWithPackages, bool, error) { // simple (temporary?) 
helper just to convert atoms into locked projects func pa2lp(pa atom, pkgs map[string]struct{}) LockedProject { - lp := LockedProject{ + lp := lockedProject{ pi: pa.id, } diff --git a/gps/source_cache_bolt_encode.go b/gps/source_cache_bolt_encode.go index 61d062c5cb..e1273aee2b 100644 --- a/gps/source_cache_bolt_encode.go +++ b/gps/source_cache_bolt_encode.go @@ -240,19 +240,49 @@ func cacheGetManifest(b *bolt.Bucket) (RootManifest, error) { } // copyTo returns a serializable representation of lp. -func (lp LockedProject) copyTo(msg *pb.LockedProject, c *pb.Constraint) { +func (lp lockedProject) copyTo(msg *pb.LockedProject, c *pb.Constraint) { if lp.v == nil { msg.UnpairedVersion = nil } else { lp.v.copyTo(c) msg.UnpairedVersion = c } + msg.Root = string(lp.pi.ProjectRoot) msg.Source = lp.pi.Source msg.Revision = string(lp.r) msg.Packages = lp.pkgs } +// copyLockedProjectTo hydrates pointers to serializable representations of a +// LockedProject with the appropriate data. +func copyLockedProjectTo(lp LockedProject, msg *pb.LockedProject, c *pb.Constraint) { + if nlp, ok := lp.(lockedProject); ok { + nlp.copyTo(msg, c) + return + } + + v := lp.Version() + if v == nil { + msg.UnpairedVersion = nil + } else { + v.copyTo(c) + msg.UnpairedVersion = c + + switch tv := v.(type) { + case Revision: + msg.Revision = string(tv) + case versionPair: + msg.Revision = string(tv.r) + } + } + + pi := lp.Ident() + msg.Root = string(pi.ProjectRoot) + msg.Source = pi.Source + msg.Packages = lp.Packages() +} + // lockedProjectFromCache returns a new LockedProject with fields from m. func lockedProjectFromCache(m *pb.LockedProject) (LockedProject, error) { var uv UnpairedVersion @@ -260,10 +290,10 @@ func lockedProjectFromCache(m *pb.LockedProject) (LockedProject, error) { if m.UnpairedVersion != nil { uv, err = unpairedVersionFromCache(m.UnpairedVersion) if err != nil { - return LockedProject{}, err + return lockedProject{}, err } } - return LockedProject{ + return lockedProject{ pi: ProjectIdentifier{ ProjectRoot: ProjectRoot(m.Root), Source: m.Source, @@ -293,7 +323,7 @@ func cachePutLock(b *bolt.Bucket, l Lock) error { var msg pb.LockedProject var cMsg pb.Constraint for i, lp := range projects { - lp.copyTo(&msg, &cMsg) + copyLockedProjectTo(lp, &msg, &cMsg) v, err := proto.Marshal(&msg) if err != nil { return err diff --git a/gps/trace.go b/gps/trace.go index 4c579d30aa..d4dd24a136 100644 --- a/gps/trace.go +++ b/gps/trace.go @@ -101,7 +101,7 @@ func (s *solver) traceFinish(sol solution, err error) { if err == nil { var pkgcount int for _, lp := range sol.Projects() { - pkgcount += len(lp.pkgs) + pkgcount += len(lp.Packages()) } s.tl.Printf("%s%s found solution with %v packages from %v projects", innerIndent, successChar, pkgcount, len(sol.Projects())) } else { diff --git a/lock.go b/lock.go index 50c1cc87c6..644621077b 100644 --- a/lock.go +++ b/lock.go @@ -71,7 +71,6 @@ func readLock(r io.Reader) (*Lock, error) { } func fromRawLock(raw rawLock) (*Lock, error) { - var err error l := &Lock{ P: make([]gps.LockedProject, len(raw.Projects)), } diff --git a/lock_test.go b/lock_test.go index a5399f9f30..b7a7ac4238 100644 --- a/lock_test.go +++ b/lock_test.go @@ -5,7 +5,6 @@ package dep import ( - "encoding/hex" "reflect" "strings" "testing" @@ -26,7 +25,6 @@ func TestReadLock(t *testing.T) { t.Fatalf("Should have read Lock correctly, but got err %q", err) } - b, _ := hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") want := &Lock{ P: []gps.LockedProject{ gps.NewLockedProject( 
@@ -49,7 +47,6 @@ func TestReadLock(t *testing.T) { t.Fatalf("Should have read Lock correctly, but got err %q", err) } - b, _ = hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") want = &Lock{ P: []gps.LockedProject{ gps.NewLockedProject( @@ -71,7 +68,6 @@ func TestWriteLock(t *testing.T) { golden := "lock/golden0.toml" want := h.GetTestFileString(golden) - memo, _ := hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") l := &Lock{ P: []gps.LockedProject{ gps.NewLockedProject( @@ -99,7 +95,6 @@ func TestWriteLock(t *testing.T) { golden = "lock/golden1.toml" want = h.GetTestFileString(golden) - memo, _ = hex.DecodeString("2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e") l = &Lock{ P: []gps.LockedProject{ gps.NewLockedProject( From 81b3a3d3d13c89bbca8f379a3c59434d7a294373 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Sun, 24 Jun 2018 21:25:30 -0400 Subject: [PATCH 04/25] gps: Introduce verify subpackage This is a start at isolating verification logic into a discrete package. Not sure how far we'll be able to make this go without creating some import loops. --- gps/verify/lock.go | 190 +++++++++++++++++++++++++++++++++++++++++++++ project.go | 142 --------------------------------- 2 files changed, 190 insertions(+), 142 deletions(-) create mode 100644 gps/verify/lock.go diff --git a/gps/verify/lock.go b/gps/verify/lock.go new file mode 100644 index 0000000000..87932d63c9 --- /dev/null +++ b/gps/verify/lock.go @@ -0,0 +1,190 @@ +package verify + +import ( + "github.com/armon/go-radix" + "github.com/golang/dep/gps" + "github.com/golang/dep/gps/paths" + "github.com/golang/dep/gps/pkgtree" +) + +// VerifiableProject composes a LockedProject to indicate what the hash digest +// of a file tree for that LockedProject should be, given the PruneOptions and +// the list of packages. +type VerifiableProject struct { + gps.LockedProject + PruneOpts gps.PruneOptions + Digest pkgtree.VersionedDigest +} + +type LockDiff struct{} + +type lockUnsatisfy uint8 + +const ( + missingFromLock lockUnsatisfy = iota + inAdditionToLock +) + +type constraintMismatch struct { + c gps.Constraint + v gps.Version +} + +type constraintMismatches map[gps.ProjectRoot]constraintMismatch + +type LockSatisfaction struct { + nolock bool + missingPkgs, excessPkgs []string + badovr, badconstraint constraintMismatches +} + +// Passed is a shortcut method to check if any problems with the evaluted lock +// were identified. 
+func (ls LockSatisfaction) Passed() bool { + if ls.nolock { + return false + } + + if len(ls.missingPkgs) > 0 { + return false + } + + if len(ls.excessPkgs) > 0 { + return false + } + + if len(ls.badovr) > 0 { + return false + } + + if len(ls.badconstraint) > 0 { + return false + } + + return true +} + +func (ls LockSatisfaction) MissingPackages() []string { + return ls.missingPkgs +} + +func (ls LockSatisfaction) ExcessPackages() []string { + return ls.excessPkgs +} + +func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]constraintMismatch { + return ls.badovr +} + +func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]constraintMismatch { + return ls.badconstraint +} + +func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[string]bool { + eff := make(map[string]bool) + xt := radix.New() + + for pr, _ := range m.DependencyConstraints() { + xt.Insert(string(pr), nil) + } + + for imp := range imports { + if root, _, has := xt.LongestPrefix(imp); has { + eff[root] = true + } + } + + return eff +} + +// LockSatisfiesInputs determines whether the provided Lock satisfies all the +// requirements indicated by the inputs (RootManifest and PackageTree). +// +// The second parameter is expected to be the list of imports that were used to +// generate the input Lock. Without this explicit list, it is not possible to +// compute package imports that may have been removed. Figuring out that +// negative space would require exploring the entire graph to ensure there are +// no in-edges for particular imports. +func LockSatisfiesInputs(l gps.Lock, oldimports []string, m gps.RootManifest, rpt pkgtree.PackageTree) LockSatisfaction { + if l == nil { + return LockSatisfaction{nolock: true} + } + + var ig *pkgtree.IgnoredRuleset + var req map[string]bool + if m != nil { + ig = m.IgnoredPackages() + req = m.RequiredPackages() + } + + rm, _ := rpt.ToReachMap(true, true, false, ig) + reach := rm.FlattenFn(paths.IsStandardImportPath) + + inlock := make(map[string]bool, len(oldimports)) + ininputs := make(map[string]bool, len(reach)+len(req)) + pkgDiff := make(map[string]lockUnsatisfy) + + for _, imp := range reach { + ininputs[imp] = true + } + + for imp := range req { + ininputs[imp] = true + } + + for _, imp := range oldimports { + inlock[imp] = true + } + + lsat := LockSatisfaction{ + badovr: make(constraintMismatches), + badconstraint: make(constraintMismatches), + } + + for ip := range ininputs { + if !inlock[ip] { + pkgDiff[ip] = missingFromLock + } else { + // So we don't have to revisit it below + delete(inlock, ip) + } + } + + for ip := range inlock { + if !ininputs[ip] { + pkgDiff[ip] = inAdditionToLock + } + } + + for ip, typ := range pkgDiff { + if typ == missingFromLock { + lsat.missingPkgs = append(lsat.missingPkgs, ip) + } else { + lsat.excessPkgs = append(lsat.excessPkgs, ip) + } + } + + eff := findEffectualConstraints(m, ininputs) + ovr := m.Overrides() + constraints := m.DependencyConstraints() + + for _, lp := range l.Projects() { + pr := lp.Ident().ProjectRoot + + if pp, has := ovr[pr]; has && !pp.Constraint.Matches(lp.Version()) { + lsat.badovr[pr] = constraintMismatch{ + c: pp.Constraint, + v: lp.Version(), + } + } + + if pp, has := constraints[pr]; has && eff[string(pr)] && !pp.Constraint.Matches(lp.Version()) { + lsat.badconstraint[pr] = constraintMismatch{ + c: pp.Constraint, + v: lp.Version(), + } + } + } + + return lsat +} diff --git a/project.go b/project.go index 5321014f26..d2677e8866 100644 --- a/project.go +++ b/project.go @@ 
-219,148 +219,6 @@ func (p *Project) GetDirectDependencyNames(sm gps.SourceManager) (pkgtree.Packag return ptree, directDeps, nil } -type lockUnsatisfy uint8 - -const ( - missingFromLock lockUnsatisfy = iota - inAdditionToLock -) - -type constraintMismatch struct { - c gps.Constraint - v gps.Version -} - -type constraintMismatches map[gps.ProjectRoot]constraintMismatch - -type LockSatisfaction struct { - nolock bool - missingPkgs, excessPkgs []string - pkgs map[string]lockUnsatisfy - badovr, badconstraint constraintMismatches -} - -// Passed is a shortcut method to check if any problems with the evaluted lock -// were identified. -func (ls LockSatisfaction) Passed() bool { - if ls.nolock { - return false - } - - if len(ls.pkgs) > 0 { - return false - } - - if len(ls.badovr) > 0 { - return false - } - - if len(ls.badconstraint) > 0 { - return false - } - - return true -} - -func (ls LockSatisfaction) MissingPackages() []string { - return ls.missingPkgs -} - -func (ls LockSatisfaction) ExcessPackages() []string { - return ls.excessPkgs -} - -func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]constraintMismatch { - return ls.badovr -} - -func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]constraintMismatch { - return ls.badconstraint -} - -// LockSatisfiesInputs determines whether the Project's lock satisfies all the -// requirements indicated by the inputs (Manifest and RootPackageTree). -func (p *Project) LockSatisfiesInputs(sm gps.SourceManager) (LockSatisfaction, error) { - if p.Lock == nil { - return LockSatisfaction{nolock: true}, nil - } - - ptree, err := p.ParseRootPackageTree() - if err != nil { - return LockSatisfaction{}, err - } - - var ig *pkgtree.IgnoredRuleset - var req map[string]bool - if p.Manifest != nil { - ig = p.Manifest.IgnoredPackages() - req = p.Manifest.RequiredPackages() - } - - rm, _ := ptree.ToReachMap(true, true, false, ig) - reach := rm.FlattenFn(paths.IsStandardImportPath) - - inlock := make(map[string]bool, len(p.Lock.SolveMeta.InputImports)) - ininputs := make(map[string]bool, len(reach)+len(req)) - - for _, imp := range reach { - ininputs[imp] = true - } - - for imp := range req { - ininputs[imp] = true - } - - for _, imp := range p.Lock.SolveMeta.InputImports { - inlock[imp] = true - } - - lsat := LockSatisfaction{ - badovr: make(constraintMismatches), - badconstraint: make(constraintMismatches), - } - - for ip := range ininputs { - if !inlock[ip] { - lsat.pkgs[ip] = missingFromLock - } else { - // So we don't have to revisit it below - delete(inlock, ip) - } - } - - for ip := range inlock { - if !ininputs[ip] { - lsat.pkgs[ip] = inAdditionToLock - } - } - - ineff := make(map[string]bool) - for _, pr := range p.FindIneffectualConstraints(sm) { - ineff[string(pr)] = true - } - - for _, lp := range p.Lock.Projects() { - pr := lp.Ident().ProjectRoot - - if pp, has := p.Manifest.Ovr[pr]; has && !pp.Constraint.Matches(lp.Version()) { - lsat.badovr[pr] = constraintMismatch{ - c: pp.Constraint, - v: lp.Version(), - } - } - - if pp, has := p.Manifest.Constraints[pr]; has && !ineff[string(pr)] && !pp.Constraint.Matches(lp.Version()) { - lsat.badconstraint[pr] = constraintMismatch{ - c: pp.Constraint, - v: lp.Version(), - } - } - } - - return lsat, nil -} - // FindIneffectualConstraints looks for constraint rules expressed in the // manifest that will have no effect during solving, as they are specified for // projects that are not direct dependencies of the Project. 
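As orientation between patches: a hypothetical caller of the new verify package might look roughly like the sketch below. The helper name checkSync, its package, and the error wording are illustrative assumptions; only verify.LockSatisfiesInputs and the LockSatisfaction accessors come from the patch above.

    package main // hypothetical caller, outside gps and verify

    import (
        "fmt"

        "github.com/golang/dep/gps"
        "github.com/golang/dep/gps/pkgtree"
        "github.com/golang/dep/gps/verify"
    )

    // checkSync compares the current manifest and package tree against the
    // import list previously recorded in the lock, and reports any drift.
    func checkSync(l gps.Lock, oldImports []string, m gps.RootManifest, rpt pkgtree.PackageTree) error {
        lsat := verify.LockSatisfiesInputs(l, oldImports, m, rpt)
        if lsat.Passed() {
            return nil // lock still satisfies the inputs
        }
        return fmt.Errorf("lock out of sync: missing %v, excess %v",
            lsat.MissingPackages(), lsat.ExcessPackages())
    }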
From 485c74eec646da05e3ef64cc2272ec636b15e491 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Sun, 24 Jun 2018 21:47:16 -0400 Subject: [PATCH 05/25] dep: Add foundation for verified, pruned vendor THis includes changes across both dep and gps towards transparently working with vendor trees that are both configurably pruned and verifiable. --- gps/lockdiff.go | 41 +++-- gps/lockdiff_test.go | 43 ----- gps/pkgtree/digest.go | 67 +++++++- gps/pkgtree/digest_test.go | 6 +- gps/prune.go | 41 +++++ gps/solve_basic_test.go | 4 + gps/source.go | 34 ++++ gps/source_manager.go | 36 +++- lock.go | 56 +++++-- lock_test.go | 74 +++++--- testdata/lock/error0.toml | 3 - testdata/lock/error1.toml | 4 +- testdata/lock/error2.toml | 3 - testdata/lock/golden0.toml | 4 +- testdata/lock/golden1.toml | 4 +- testdata/txn_writer/expected_diff_output.txt | 2 - testdata/txn_writer/expected_lock.toml | 4 +- txn_writer.go | 168 ++++++++++++++++++- txn_writer_test.go | 8 +- 19 files changed, 476 insertions(+), 126 deletions(-) diff --git a/gps/lockdiff.go b/gps/lockdiff.go index 35720b67e2..1f7da66b62 100644 --- a/gps/lockdiff.go +++ b/gps/lockdiff.go @@ -5,7 +5,6 @@ package gps import ( - "encoding/hex" "fmt" "sort" "strings" @@ -44,10 +43,9 @@ func (diff *StringDiff) String() string { // LockDiff is the set of differences between an existing lock file and an updated lock file. // Fields are only populated when there is a difference, otherwise they are empty. type LockDiff struct { - HashDiff *StringDiff - Add []LockedProjectDiff - Remove []LockedProjectDiff - Modify []LockedProjectDiff + Add []LockedProjectDiff + Remove []LockedProjectDiff + Modify []LockedProjectDiff } // LockedProjectDiff contains the before and after snapshot of a project reference. @@ -79,12 +77,6 @@ func DiffLocks(l1 Lock, l2 Lock) *LockDiff { diff := LockDiff{} - h1 := hex.EncodeToString(l1.InputsDigest()) - h2 := hex.EncodeToString(l2.InputsDigest()) - if h1 != h2 { - diff.HashDiff = &StringDiff{Previous: h1, Current: h2} - } - var i2next int for i1 := 0; i1 < len(p1); i1++ { lp1 := p1[i1] @@ -128,12 +120,37 @@ func DiffLocks(l1 Lock, l2 Lock) *LockDiff { diff.Add = append(diff.Add, add) } - if diff.HashDiff == nil && len(diff.Add) == 0 && len(diff.Remove) == 0 && len(diff.Modify) == 0 { + if len(diff.Add) == 0 && len(diff.Remove) == 0 && len(diff.Modify) == 0 { return nil // The locks are the equivalent } return &diff } +// DiffFor checks to see if there was a diff for the provided ProjectRoot. The +// first return value is a 0 if there was no diff, 1 if it was added, 2 if it +// was removed, and 3 if it was modified. 
+func (ld *LockDiff) DiffFor(pr ProjectRoot) (uint8, LockedProjectDiff) { + for _, lpd := range ld.Add { + if lpd.Name == pr { + return 1, lpd + } + } + + for _, lpd := range ld.Remove { + if lpd.Name == pr { + return 2, lpd + } + } + + for _, lpd := range ld.Modify { + if lpd.Name == pr { + return 3, lpd + } + } + + return 0, LockedProjectDiff{} +} + func buildLockedProjectDiff(lp LockedProject) LockedProjectDiff { s2 := lp.Ident().Source r2, b2, v2 := VersionComponentStrings(lp.Version()) diff --git a/gps/lockdiff_test.go b/gps/lockdiff_test.go index 5e9d33697f..fa6fd2d60f 100644 --- a/gps/lockdiff_test.go +++ b/gps/lockdiff_test.go @@ -420,39 +420,8 @@ func TestDiffLocks_ModifyProjects(t *testing.T) { } } -func TestDiffLocks_ModifyHash(t *testing.T) { - h1, _ := hex.DecodeString("abc123") - l1 := safeLock{ - h: h1, - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, - } - - h2, _ := hex.DecodeString("def456") - l2 := safeLock{ - h: h2, - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, - } - - diff := DiffLocks(l1, l2) - if diff == nil { - t.Fatal("Expected the diff to be populated") - } - - want := "abc123 -> def456" - got := diff.HashDiff.String() - if got != want { - t.Fatalf("Expected diff.HashDiff to be '%s', got '%s'", want, got) - } -} - func TestDiffLocks_EmptyInitialLock(t *testing.T) { - h2, _ := hex.DecodeString("abc123") l2 := safeLock{ - h: h2, p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, @@ -460,12 +429,6 @@ func TestDiffLocks_EmptyInitialLock(t *testing.T) { diff := DiffLocks(nil, l2) - wantHash := "+ abc123" - gotHash := diff.HashDiff.String() - if gotHash != wantHash { - t.Fatalf("Expected diff.HashDiff to be '%s', got '%s'", wantHash, gotHash) - } - if len(diff.Add) != 1 { t.Fatalf("Expected diff.Add to contain 1 project, got %d", len(diff.Add)) } @@ -482,12 +445,6 @@ func TestDiffLocks_EmptyFinalLock(t *testing.T) { diff := DiffLocks(l1, nil) - wantHash := "- abc123" - gotHash := diff.HashDiff.String() - if gotHash != wantHash { - t.Fatalf("Expected diff.HashDiff to be '%s', got '%s'", wantHash, gotHash) - } - if len(diff.Remove) != 1 { t.Fatalf("Expected diff.Remove to contain 1 project, got %d", len(diff.Remove)) } diff --git a/gps/pkgtree/digest.go b/gps/pkgtree/digest.go index 31ed243ab3..d1f671992d 100644 --- a/gps/pkgtree/digest.go +++ b/gps/pkgtree/digest.go @@ -8,15 +8,24 @@ import ( "bytes" "crypto/sha256" "encoding/binary" + "encoding/hex" + "fmt" "hash" "io" "os" "path/filepath" "strconv" + "strings" "github.com/pkg/errors" ) +// HashVersion is an arbitrary number that identifies the hash algorithm used by +// the directory hasher. +// +// 1: SHA256, as implemented in crypto/sha256 +const HashVersion = 1 + const osPathSeparator = string(filepath.Separator) // lineEndingReader is a `io.Reader` that converts CRLF sequences to LF. @@ -151,7 +160,7 @@ type dirWalkClosure struct { // While filepath.Walk could have been used, that standard library function // skips symbolic links, and for now, we want the hash to include the symbolic // link referents. 
-func DigestFromDirectory(osDirname string) ([]byte, error) { +func DigestFromDirectory(osDirname string) (VersionedDigest, error) { osDirname = filepath.Clean(osDirname) // Create a single hash instance for the entire operation, rather than a new @@ -255,10 +264,15 @@ func DigestFromDirectory(osDirname string) ([]byte, error) { } return err }) + if err != nil { - return nil, err + return VersionedDigest{}, err } - return closure.someHash.Sum(nil), nil + + return VersionedDigest{ + HashVersion: HashVersion, + Digest: closure.someHash.Sum(nil), + }, nil } // VendorStatus represents one of a handful of possible status conditions for a @@ -286,6 +300,11 @@ const ( // DigestMismatchInLock is used when the digest for a dependency listed in // the lock file does not match what is calculated from the file system. DigestMismatchInLock + + // HashVersionMismatch indicates that the hashing algorithm used to generate + // the digest being compared against is not the same as the one used by the + // current program. + HashVersionMismatch ) func (ls VendorStatus) String() string { @@ -300,6 +319,8 @@ func (ls VendorStatus) String() string { return "empty digest in lock" case DigestMismatchInLock: return "mismatch" + case HashVersionMismatch: + return "hasher changed" } return "unknown" } @@ -315,6 +336,38 @@ type fsnode struct { myIndex, parentIndex int // index of this node and its parent in the tree's slice } +// VersionedDigest comprises both a hash digest, and a simple integer indicating +// the version of the hash algorithm that produced the digest. +type VersionedDigest struct { + HashVersion int + Digest []byte +} + +func (vd VersionedDigest) String() string { + return fmt.Sprintf("%s:%s", strconv.Itoa(vd.HashVersion), hex.EncodeToString(vd.Digest)) +} + +// ParseVersionedDigest decodes the string representation of versioned digest +// information - a colon-separated string with a version number in the first +// part and the hex-encdoed hash digest in the second - as a VersionedDigest. +func ParseVersionedDigest(input string) (VersionedDigest, error) { + var vd VersionedDigest + var err error + + parts := strings.Split(input, ":") + if len(parts) != 2 { + return VersionedDigest{}, errors.Errorf("expected two colon-separated components in the versioned hash digest, got %q", input) + } + if vd.Digest, err = hex.DecodeString(parts[1]); err != nil { + return VersionedDigest{}, err + } + if vd.HashVersion, err = strconv.Atoi(parts[0]); err != nil { + return VersionedDigest{}, err + } + + return vd, nil +} + // VerifyDepTree verifies a dependency tree according to expected digest sums, // and returns an associative array of file system nodes and their respective // vendor status conditions. @@ -325,7 +378,7 @@ type fsnode struct { // platform where the file system path separator is a character other than // solidus, one particular dependency would be represented as // "github.com/alice/alice1". -func VerifyDepTree(osDirname string, wantSums map[string][]byte) (map[string]VendorStatus, error) { +func VerifyDepTree(osDirname string, wantDigests map[string][]byte) (map[string]VendorStatus, error) { osDirname = filepath.Clean(osDirname) // Ensure top level pathname is a directory @@ -387,7 +440,7 @@ func VerifyDepTree(osDirname string, wantSums map[string][]byte) (map[string]Ven // project is later found while traversing the vendor root hierarchy, its // status will be updated to reflect whether its digest is empty, or, // whether or not it matches the expected digest. 
- for slashPathname := range wantSums { + for slashPathname := range wantDigests { slashStatus[slashPathname] = NotInTree } @@ -400,14 +453,14 @@ func VerifyDepTree(osDirname string, wantSums map[string][]byte) (map[string]Ven slashPathname := filepath.ToSlash(currentNode.osRelative) osPathname := filepath.Join(osDirname, currentNode.osRelative) - if expectedSum, ok := wantSums[slashPathname]; ok { + if expectedSum, ok := wantDigests[slashPathname]; ok { ls := EmptyDigestInLock if len(expectedSum) > 0 { projectSum, err := DigestFromDirectory(osPathname) if err != nil { return nil, errors.Wrap(err, "cannot compute dependency hash") } - if bytes.Equal(projectSum, expectedSum) { + if bytes.Equal(projectSum.Digest, expectedSum) { ls = NoMismatch } else { ls = DigestMismatchInLock diff --git a/gps/pkgtree/digest_test.go b/gps/pkgtree/digest_test.go index 0569340357..77dd298de2 100644 --- a/gps/pkgtree/digest_test.go +++ b/gps/pkgtree/digest_test.go @@ -122,7 +122,7 @@ func TestDigestFromDirectory(t *testing.T) { if err != nil { t.Fatal(err) } - if !bytes.Equal(got, want) { + if !bytes.Equal(got.Digest, want) { t.Errorf("\n(GOT):\n\t%#v\n(WNT):\n\t%#v", got, want) } }) @@ -133,7 +133,7 @@ func TestDigestFromDirectory(t *testing.T) { if err != nil { t.Fatal(err) } - if !bytes.Equal(got, want) { + if !bytes.Equal(got.Digest, want) { t.Errorf("\n(GOT):\n\t%#v\n(WNT):\n\t%#v", got, want) } }) @@ -186,7 +186,7 @@ func TestVerifyDepTree(t *testing.T) { if err != nil { t.Error(err) } - if !bytes.Equal(got, want) { + if !bytes.Equal(got.Digest, want) { t.Errorf("%q\n(GOT):\n\t%#v\n(WNT):\n\t%#v", k, got, want) } } diff --git a/gps/prune.go b/gps/prune.go index b705874145..b0a7781493 100644 --- a/gps/prune.go +++ b/gps/prune.go @@ -5,6 +5,8 @@ package gps import ( + "bytes" + "fmt" "os" "path/filepath" "sort" @@ -57,6 +59,45 @@ type CascadingPruneOptions struct { PerProjectOptions map[ProjectRoot]PruneOptionSet } +func ParsePruneOptions(input string) (PruneOptions, error) { + var po PruneOptions + for _, char := range input { + switch char { + case 'T': + po |= PruneGoTestFiles + case 'U': + po |= PruneUnusedPackages + case 'N': + po |= PruneNonGoFiles + case 'V': + po |= PruneNestedVendorDirs + default: + return 0, errors.Errorf("unknown pruning code %q", char) + } + } + + return po, nil +} + +func (po PruneOptions) String() string { + var buf bytes.Buffer + + if po&PruneGoTestFiles != 0 { + fmt.Fprintf(&buf, "T") + } + if po&PruneUnusedPackages != 0 { + fmt.Fprintf(&buf, "U") + } + if po&PruneNonGoFiles != 0 { + fmt.Fprintf(&buf, "N") + } + if po&PruneNestedVendorDirs != 0 { + fmt.Fprintf(&buf, "V") + } + + return buf.String() +} + // PruneOptionsFor returns the PruneOptions bits for the given project, // indicating which pruning rules should be applied to the project's code. 
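The pruneopts letter codes introduced here are what get persisted per project in Gopkg.lock. A brief usage sketch of the parse/format round trip; the particular option combination is arbitrary.

package main

import (
	"fmt"
	"log"

	"github.com/golang/dep/gps"
)

func main() {
	// "U" = prune unused packages, "T" = prune go test files; "N" (non-Go
	// files) and "V" (nested vendor directories) are the other two codes.
	po, err := gps.ParsePruneOptions("UT")
	if err != nil {
		log.Fatal(err)
	}

	// dep adds the nested-vendor bit itself when reading the lock, so "V"
	// never needs to appear in the on-disk string.
	po |= gps.PruneNestedVendorDirs

	// String() emits the letter codes for every enabled option.
	fmt.Println(po.String())
}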
// diff --git a/gps/solve_basic_test.go b/gps/solve_basic_test.go index 092c9ca709..3482aa7f07 100644 --- a/gps/solve_basic_test.go +++ b/gps/solve_basic_test.go @@ -1454,6 +1454,10 @@ func (sm *depspecSourceManager) ExportProject(context.Context, ProjectIdentifier return fmt.Errorf("dummy sm doesn't support exporting") } +func (sm *depspecSourceManager) ExportPrunedProject(context.Context, LockedProject, PruneOptions, string) error { + return fmt.Errorf("dummy sm doesn't support exporting") +} + func (sm *depspecSourceManager) DeduceProjectRoot(ip string) (ProjectRoot, error) { fip := toFold(ip) for _, ds := range sm.allSpecs() { diff --git a/gps/source.go b/gps/source.go index 0d9deb14d2..1a1a0456a0 100644 --- a/gps/source.go +++ b/gps/source.go @@ -357,6 +357,35 @@ func (sg *sourceGateway) exportVersionTo(ctx context.Context, v Version, to stri return err } +func (sg *sourceGateway) exportPrunedVersionTo(ctx context.Context, lp LockedProject, prune PruneOptions, to string) error { + sg.mu.Lock() + defer sg.mu.Unlock() + + err := sg.require(ctx, sourceExistsLocally) + if err != nil { + return err + } + + r, err := sg.convertToRevision(ctx, lp.Version()) + if err != nil { + return err + } + + if fastprune, ok := sg.src.(sourceFastPrune); ok { + return sg.suprvsr.do(ctx, sg.src.upstreamURL(), ctExportTree, func(ctx context.Context) error { + return fastprune.exportPrunedRevisionTo(ctx, r, lp.Packages(), prune, to) + }) + } + + if err = sg.suprvsr.do(ctx, sg.src.upstreamURL(), ctExportTree, func(ctx context.Context) error { + return sg.src.exportRevisionTo(ctx, r, to) + }); err != nil { + return err + } + + return PruneProject(to, lp, prune) +} + func (sg *sourceGateway) getManifestAndLock(ctx context.Context, pr ProjectRoot, v Version, an ProjectAnalyzer) (Manifest, Lock, error) { sg.mu.Lock() defer sg.mu.Unlock() @@ -674,3 +703,8 @@ type source interface { // requires the source to exist locally. listVersionsRequiresLocal() bool } + +type sourceFastPrune interface { + source + exportPrunedRevisionTo(context.Context, Revision, []string, PruneOptions, string) error +} diff --git a/gps/source_manager.go b/gps/source_manager.go index 277e66a6ee..16c3f4816d 100644 --- a/gps/source_manager.go +++ b/gps/source_manager.go @@ -104,6 +104,15 @@ type SourceManager interface { // provided version, to the provided directory. ExportProject(context.Context, ProjectIdentifier, Version, string) error + // ExportPrunedProject writes out the tree corresponding to the provided + // LockedProject, the provided version, to the provided directory, applying + // the provided pruning options. + // + // The first return value is the hex-encoded string representation of the + // hash, including colon-separated leaders indicating the version of the + // hashing function used, and the prune options that were applied. + ExportPrunedProject(context.Context, LockedProject, PruneOptions, string) error + // DeduceProjectRoot takes an import path and deduces the corresponding // project/source root. DeduceProjectRoot(ip string) (ProjectRoot, error) @@ -113,9 +122,9 @@ type SourceManager interface { // In general, these URLs differ only by protocol (e.g. https vs. ssh), not path SourceURLsForPath(ip string) ([]*url.URL, error) - // Release lets go of any locks held by the SourceManager. Once called, it is - // no longer safe to call methods against it; all method calls will - // immediately result in errors. + // Release lets go of any locks held by the SourceManager. 
Once called, it + // is no longer allowed to call methods of that SourceManager; all + // method calls will immediately result in errors. Release() // InferConstraint tries to puzzle out what kind of version is given in a string - @@ -396,9 +405,9 @@ func (e CouldNotCreateLockError) Error() string { return e.Err.Error() } -// Release lets go of any resources held by the SourceManager. Once called, it is no -// longer safe to call methods against it; all method calls will immediately -// result in errors. +// Release lets go of any locks held by the SourceManager. Once called, it is no +// longer allowed to call methods of that SourceManager; all method calls will +// immediately result in errors. func (sm *SourceMgr) Release() { atomic.StoreInt32(&sm.releasing, 1) @@ -550,6 +559,21 @@ func (sm *SourceMgr) ExportProject(ctx context.Context, id ProjectIdentifier, v return srcg.exportVersionTo(ctx, v, to) } +// ExportPrunedProject writes out a tree of the provided LockedProject, applying +// provided pruning rules as appropriate. +func (sm *SourceMgr) ExportPrunedProject(ctx context.Context, lp LockedProject, prune PruneOptions, to string) error { + if atomic.LoadInt32(&sm.releasing) == 1 { + return ErrSourceManagerIsReleased + } + + srcg, err := sm.srcCoord.getSourceGatewayFor(ctx, lp.Ident()) + if err != nil { + return err + } + + return srcg.exportPrunedVersionTo(ctx, lp, prune, to) +} + // DeduceProjectRoot takes an import path and deduces the corresponding // project/source root. // diff --git a/lock.go b/lock.go index 644621077b..ce6d5d4f03 100644 --- a/lock.go +++ b/lock.go @@ -10,6 +10,8 @@ import ( "sort" "github.com/golang/dep/gps" + "github.com/golang/dep/gps/pkgtree" + "github.com/golang/dep/gps/verify" "github.com/pelletier/go-toml" "github.com/pkg/errors" ) @@ -23,7 +25,8 @@ type Lock struct { P []gps.LockedProject } -// SolveMeta holds solver meta data. +// SolveMeta holds metadata about the solving process that created the lock that +// is not specific to any individual project. 
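ExportPrunedProject is the building block the later DeltaWriter work relies on: export a single locked project with its pruning rules applied, then hash exactly what ended up on disk. Below is a hypothetical helper sketching that sequence; the function and package names are assumptions, not code from this series.

package sketch

import (
	"context"

	"github.com/golang/dep/gps"
	"github.com/golang/dep/gps/pkgtree"
)

// exportAndHash writes one pruned project to dir and returns the digest of the
// resulting tree, i.e. the value a lock file could later be verified against.
func exportAndHash(ctx context.Context, sm gps.SourceManager, lp gps.LockedProject, po gps.PruneOptions, dir string) (pkgtree.VersionedDigest, error) {
	if err := sm.ExportPrunedProject(ctx, lp, po, dir); err != nil {
		return pkgtree.VersionedDigest{}, err
	}
	return pkgtree.DigestFromDirectory(dir)
}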
type SolveMeta struct { AnalyzerName string AnalyzerVersion int @@ -46,12 +49,14 @@ type solveMeta struct { } type rawLockedProject struct { - Name string `toml:"name"` - Branch string `toml:"branch,omitempty"` - Revision string `toml:"revision"` - Version string `toml:"version,omitempty"` - Source string `toml:"source,omitempty"` - Packages []string `toml:"packages"` + Name string `toml:"name"` + Branch string `toml:"branch,omitempty"` + Revision string `toml:"revision"` + Version string `toml:"version,omitempty"` + Source string `toml:"source,omitempty"` + Packages []string `toml:"packages"` + PruneOpts string `toml:"pruneopts"` + Digest string `toml:"digest"` } func readLock(r io.Reader) (*Lock, error) { @@ -100,7 +105,26 @@ func fromRawLock(raw rawLock) (*Lock, error) { ProjectRoot: gps.ProjectRoot(ld.Name), Source: ld.Source, } - l.P[i] = gps.NewLockedProject(id, v, ld.Packages) + + var err error + vp := verify.VerifiableProject{ + LockedProject: gps.NewLockedProject(id, v, ld.Packages), + } + if ld.Digest != "" { + vp.Digest, err = pkgtree.ParseVersionedDigest(ld.Digest) + if err != nil { + return nil, err + } + } + + po, err := gps.ParsePruneOptions(ld.PruneOpts) + if err != nil { + return nil, errors.Errorf("%s in prune options for %s", err.Error(), ld.Name) + } + // Add the vendor pruning bit so that gps doesn't get confused + vp.PruneOpts = po | gps.PruneNestedVendorDirs + + l.P[i] = vp } return l, nil @@ -141,14 +165,14 @@ func (l *Lock) toRaw() rawLock { SolverName: l.SolveMeta.SolverName, SolverVersion: l.SolveMeta.SolverVersion, }, - Projects: make([]rawLockedProject, len(l.P)), + Projects: make([]rawLockedProject, 0, len(l.P)), } sort.Slice(l.P, func(i, j int) bool { return l.P[i].Ident().Less(l.P[j].Ident()) }) - for k, lp := range l.P { + for _, lp := range l.P { id := lp.Ident() ld := rawLockedProject{ Name: string(id.ProjectRoot), @@ -159,7 +183,15 @@ func (l *Lock) toRaw() rawLock { v := lp.Version() ld.Revision, ld.Branch, ld.Version = gps.VersionComponentStrings(v) - raw.Projects[k] = ld + // This will panic if the lock isn't the expected dynamic type. We can + // relax this later if it turns out to create real problems, but there's + // no intended case in which this is untrue, so it's preferable to start + // by failing hard if those expectations aren't met. + vp := lp.(verify.VerifiableProject) + ld.Digest = vp.Digest.String() + ld.PruneOpts = (vp.PruneOpts & ^gps.PruneNestedVendorDirs).String() + + raw.Projects = append(raw.Projects, ld) } return raw @@ -176,7 +208,7 @@ func (l *Lock) MarshalTOML() ([]byte, error) { // LockFromSolution converts a gps.Solution to dep's representation of a lock. // -// Data is defensively copied wherever necessary to ensure the resulting *lock +// Data is defensively copied wherever necessary to ensure the resulting *Lock // shares no memory with the input solution. 
func LockFromSolution(in gps.Solution) *Lock { p := in.Projects() diff --git a/lock_test.go b/lock_test.go index b7a7ac4238..c2b8a3d970 100644 --- a/lock_test.go +++ b/lock_test.go @@ -10,6 +10,8 @@ import ( "testing" "github.com/golang/dep/gps" + "github.com/golang/dep/gps/pkgtree" + "github.com/golang/dep/gps/verify" "github.com/golang/dep/internal/test" ) @@ -26,12 +28,20 @@ func TestReadLock(t *testing.T) { } want := &Lock{ + SolveMeta: SolveMeta{InputImports: []string{}}, P: []gps.LockedProject{ - gps.NewLockedProject( - gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, - gps.NewBranch("master").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), - []string{"."}, - ), + verify.VerifiableProject{ + LockedProject: gps.NewLockedProject( + gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, + gps.NewBranch("master").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), + []string{"."}, + ), + PruneOpts: gps.PruneOptions(1), + Digest: pkgtree.VersionedDigest{ + HashVersion: pkgtree.HashVersion, + Digest: []byte("foo"), + }, + }, }, } @@ -48,12 +58,20 @@ func TestReadLock(t *testing.T) { } want = &Lock{ + SolveMeta: SolveMeta{InputImports: []string{}}, P: []gps.LockedProject{ - gps.NewLockedProject( - gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, - gps.NewVersion("0.12.2").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), - []string{"."}, - ), + verify.VerifiableProject{ + LockedProject: gps.NewLockedProject( + gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, + gps.NewVersion("0.12.2").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), + []string{"."}, + ), + PruneOpts: gps.PruneOptions(15), + Digest: pkgtree.VersionedDigest{ + HashVersion: pkgtree.HashVersion, + Digest: []byte("foo"), + }, + }, }, } @@ -70,11 +88,18 @@ func TestWriteLock(t *testing.T) { want := h.GetTestFileString(golden) l := &Lock{ P: []gps.LockedProject{ - gps.NewLockedProject( - gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, - gps.NewBranch("master").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), - []string{"."}, - ), + verify.VerifiableProject{ + LockedProject: gps.NewLockedProject( + gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, + gps.NewBranch("master").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), + []string{"."}, + ), + PruneOpts: gps.PruneOptions(1), + Digest: pkgtree.VersionedDigest{ + HashVersion: pkgtree.HashVersion, + Digest: []byte("foo"), + }, + }, }, } @@ -97,11 +122,18 @@ func TestWriteLock(t *testing.T) { want = h.GetTestFileString(golden) l = &Lock{ P: []gps.LockedProject{ - gps.NewLockedProject( - gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, - gps.NewVersion("0.12.2").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), - []string{"."}, - ), + verify.VerifiableProject{ + LockedProject: gps.NewLockedProject( + gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/golang/dep")}, + gps.NewVersion("0.12.2").Pair(gps.Revision("d05d5aca9f895d19e9265839bffeadd74a2d2ecb")), + []string{"."}, + ), + PruneOpts: gps.PruneOptions(15), + Digest: pkgtree.VersionedDigest{ + HashVersion: pkgtree.HashVersion, + Digest: []byte("foo"), + }, + }, }, } @@ -131,7 +163,7 @@ func TestReadLockErrors(t *testing.T) { file string }{ {"specified both", "lock/error0.toml"}, - {"invalid hash", "lock/error1.toml"}, + {"odd 
length", "lock/error1.toml"}, {"no branch or version", "lock/error2.toml"}, } diff --git a/testdata/lock/error0.toml b/testdata/lock/error0.toml index 80eb22b1be..5b6b7f9892 100644 --- a/testdata/lock/error0.toml +++ b/testdata/lock/error0.toml @@ -1,6 +1,3 @@ -[solve-meta] - inputs-digest = "2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e" - [[projects]] name = "github.com/golang/dep" branch = "master" diff --git a/testdata/lock/error1.toml b/testdata/lock/error1.toml index 2d83237fc9..41ff9ffdbb 100644 --- a/testdata/lock/error1.toml +++ b/testdata/lock/error1.toml @@ -3,6 +3,4 @@ branch = "master" revision = "d05d5aca9f895d19e9265839bffeadd74a2d2ecb" packages = ["."] - -[solve-meta] - inputs-digest = "000aaa2a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e" + digest = "1:000aaa2a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e" diff --git a/testdata/lock/error2.toml b/testdata/lock/error2.toml index f692f4d976..17b00d104c 100644 --- a/testdata/lock/error2.toml +++ b/testdata/lock/error2.toml @@ -1,6 +1,3 @@ [[projects]] name = "github.com/golang/dep" packages = ["."] - -[solve-meta] - inputs-digest = "2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e" diff --git a/testdata/lock/golden0.toml b/testdata/lock/golden0.toml index 2ba4a82d5d..dedbcfaa5c 100644 --- a/testdata/lock/golden0.toml +++ b/testdata/lock/golden0.toml @@ -1,13 +1,15 @@ [[projects]] branch = "master" + digest = "1:666f6f" name = "github.com/golang/dep" packages = ["."] + pruneopts = "" revision = "d05d5aca9f895d19e9265839bffeadd74a2d2ecb" [solve-meta] analyzer-name = "" analyzer-version = 0 - inputs-digest = "2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e" + input-imports = [] solver-name = "" solver-version = 0 diff --git a/testdata/lock/golden1.toml b/testdata/lock/golden1.toml index 1a0e183a47..292fdb2d85 100644 --- a/testdata/lock/golden1.toml +++ b/testdata/lock/golden1.toml @@ -1,13 +1,15 @@ [[projects]] + digest = "1:666f6f" name = "github.com/golang/dep" packages = ["."] + pruneopts = "TUN" revision = "d05d5aca9f895d19e9265839bffeadd74a2d2ecb" version = "0.12.2" [solve-meta] analyzer-name = "" analyzer-version = 0 - inputs-digest = "2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e" + input-imports = [] solver-name = "" solver-version = 0 diff --git a/testdata/txn_writer/expected_diff_output.txt b/testdata/txn_writer/expected_diff_output.txt index bbfe78f82b..7fe4662ccd 100644 --- a/testdata/txn_writer/expected_diff_output.txt +++ b/testdata/txn_writer/expected_diff_output.txt @@ -1,5 +1,3 @@ -Memo: 595716d270828e763c811ef79c9c41f85b1d1bfbdfe85280036405c03772206c -> 2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e - Add: [[projects]] name = "github.com/sdboyer/deptest" diff --git a/testdata/txn_writer/expected_lock.toml b/testdata/txn_writer/expected_lock.toml index 8c9310fd3b..09f38c7ace 100644 --- a/testdata/txn_writer/expected_lock.toml +++ b/testdata/txn_writer/expected_lock.toml @@ -2,14 +2,16 @@ [[projects]] + digest = "0:" name = "github.com/sdboyer/dep-test" packages = ["."] + pruneopts = "" revision = "2a3a211e171803acb82d1d5d42ceb53228f51751" version = "1.0.0" [solve-meta] analyzer-name = "" analyzer-version = 0 - inputs-digest = "595716d270828e763c811ef79c9c41f85b1d1bfbdfe85280036405c03772206c" + input-imports = [] solver-name = "" solver-version = 0 diff --git a/txn_writer.go b/txn_writer.go index 5bae47df3f..3455cffd10 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -6,6 +6,7 @@ package dep 
import ( "bytes" + "context" "fmt" "io/ioutil" "log" @@ -13,6 +14,8 @@ import ( "path/filepath" "github.com/golang/dep/gps" + "github.com/golang/dep/gps/pkgtree" + "github.com/golang/dep/gps/verify" "github.com/golang/dep/internal/fs" "github.com/pelletier/go-toml" "github.com/pkg/errors" @@ -187,10 +190,6 @@ func toRawLockedProjectDiffs(diffs []gps.LockedProjectDiff) rawLockedProjectDiff func formatLockDiff(diff gps.LockDiff) (string, error) { var buf bytes.Buffer - if diff.HashDiff != nil { - buf.WriteString(fmt.Sprintf("Memo: %s\n\n", diff.HashDiff)) - } - writeDiffs := func(diffs []gps.LockedProjectDiff) error { raw := toRawLockedProjectDiffs(diffs) chunk, err := toml.Marshal(raw) @@ -489,3 +488,164 @@ func hasDotGit(path string) bool { _, err := os.Stat(gitfilepath) return err == nil } + +type DeltaWriter struct { + lock *Lock + lockDiff *gps.LockDiff + pruneOptions gps.CascadingPruneOptions + vendorDir string + changed map[gps.ProjectRoot]changeType + status map[string]pkgtree.VendorStatus +} + +type changeType uint8 + +const ( + noChange changeType = iota + solveChanged + pruneChanged + hashChanged + // FIXME need added/removed up here +) + +// NewDeltaWriter prepares a vendor writer that will construct a vendor +// directory by writing out only those projects that actually need to be written +// out - they have changed in some way, or they lack the necessary hash +// information to be verified. +func NewDeltaWriter(oldLock, newLock *Lock, prune gps.CascadingPruneOptions, vendorDir string) (TransactionWriter, error) { + sw := &DeltaWriter{ + lock: newLock, + pruneOptions: prune, + vendorDir: vendorDir, + changed: make(map[gps.ProjectRoot]changeType), + } + + if newLock == nil { + return nil, errors.New("must provide a non-nil newlock") + } + + _, err := os.Stat(vendorDir) + if err != nil && os.IsNotExist(err) { + // Provided dir does not exist, so there's no disk contents to compare + // against. Fall back to the old SafeWriter. + return NewSafeWriter(nil, oldLock, newLock, VendorOnChanged, prune) + } + + sw.lockDiff = gps.DiffLocks(oldLock, newLock) + + // 1. find all the ones that truly changed in solve + // 2. find the ones that only changed pruneopts + // 3. find the ones that (already) had a mismatch with what's in vendor + sums := make(map[string][]byte) + + for _, lp := range newLock.Projects() { + pr := lp.Ident().ProjectRoot + // TODO(sdboyer) Not the best heuristic to assume that a PPS indicates + if vp, ok := lp.(verify.VerifiableProject); !ok { + sw.changed[pr] = solveChanged + sums[string(pr)] = []byte{} + } else { + sums[string(pr)] = vp.Digest.Digest + sw.changed[pr] = pruneChanged + //if _, has := sw.changed[pr]; !has && vp.PruneOpts != prune.PruneOptionsFor(pr) { + //} + } + } + + status, err := pkgtree.VerifyDepTree(vendorDir, sums) + if err != nil { + return nil, err + } + + for spr, stat := range status { + pr := gps.ProjectRoot(spr) + switch stat { + case pkgtree.NotInLock, pkgtree.NotInTree: + // FIXME + case pkgtree.EmptyDigestInLock, pkgtree.DigestMismatchInLock: + if _, has := sw.changed[pr]; !has { + sw.changed[gps.ProjectRoot(pr)] = hashChanged + } + } + } + + return sw, nil +} + +// Write executes the planned changes. +// +// This writes recreated projects to a new directory, then moves in existing, +// unchanged projects from the original vendor directory. If any failures occur, +// reasonable attempts are made to roll back the changes. 
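A hypothetical call-site sketch of how a dep command might wire this up; the helper name writeVendor and the use of proj.AbsRoot are assumptions for illustration. NewDeltaWriter compares the new lock against the old one and the on-disk vendor tree, and transparently falls back to the existing SafeWriter when there is no vendor directory to diff against.

package sketch

import (
	"log"
	"path/filepath"

	"github.com/golang/dep"
	"github.com/golang/dep/gps"
)

func writeVendor(proj *dep.Project, solution gps.Solution, prune gps.CascadingPruneOptions, sm gps.SourceManager, logger *log.Logger) error {
	newLock := dep.LockFromSolution(solution)

	tw, err := dep.NewDeltaWriter(proj.Lock, newLock, prune, filepath.Join(proj.AbsRoot, "vendor"))
	if err != nil {
		return err
	}

	// Write expects the parent of the vendor directory, i.e. the project root.
	return tw.Write(proj.AbsRoot, sm, false, logger)
}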
+func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, logger *log.Logger) error { + // TODO(sdboyer) remove path from the signature for this + if path != filepath.Dir(dw.vendorDir) { + return fmt.Errorf("target path (%q) must be the parent of the original vendor path (%q)", path, dw.vendorDir) + } + + lpath := filepath.Join(filepath.Dir(path), LockName) + vpath := filepath.Join(path, "vendor") + + // Write out all the deltas + projs := make(map[gps.ProjectRoot]gps.LockedProject) + for _, lp := range dw.lock.Projects() { + projs[lp.Ident().ProjectRoot] = lp + } + + //for pr, reason := range dw.changed { + for pr, _ := range dw.changed { + to := filepath.FromSlash(filepath.Join(vpath, string(pr))) + po := dw.pruneOptions.PruneOptionsFor(pr) + err := sm.ExportPrunedProject(context.TODO(), projs[pr], po, to) + if err != nil { + return errors.Wrapf(err, "failed to export %s", pr) + } + digest, err := pkgtree.DigestFromDirectory(to) + if err != nil { + return errors.Wrapf(err, "failed to hash %s", pr) + } + + // Update the new Lock with verification information. + for k, lp := range dw.lock.P { + if lp.Ident().ProjectRoot == pr { + dw.lock.P[k] = verify.VerifiableProject{ + LockedProject: lp, + PruneOpts: po, + Digest: digest, + } + } + } + } + + // Write out the lock last, now that it's updated with digests + l, err := dw.lock.MarshalTOML() + if err != nil { + return errors.Wrap(err, "failed to marshal lock to TOML") + } + + if err = ioutil.WriteFile(lpath, append(lockFileComment, l...), 0666); err != nil { + return errors.Wrap(err, "failed to write lock file to temp dir") + } + + // Remove all the now-unnecessary bits from vendor. + for path, vs := range dw.status { + if vs == pkgtree.NotInLock { + err = os.RemoveAll(path) + if err != nil { + return errors.Wrapf(err, "vendor state may be inconsistent, could not remove %s", path) + } + } + } + + return nil +} + +func (dw *DeltaWriter) PrintPreparedActions(output *log.Logger, verbose bool) error { + // FIXME + return nil +} + +type TransactionWriter interface { + PrintPreparedActions(output *log.Logger, verbose bool) error + Write(path string, sm gps.SourceManager, examples bool, logger *log.Logger) error +} diff --git a/txn_writer_test.go b/txn_writer_test.go index 1ec6f7fbe7..747dd80ff7 100644 --- a/txn_writer_test.go +++ b/txn_writer_test.go @@ -261,7 +261,7 @@ func TestSafeWriter_ManifestAndUnmodifiedLockWithForceVendor(t *testing.T) { } } -func TestSafeWriter_ModifiedLock(t *testing.T) { +func testSafeWriter_ModifiedLock(t *testing.T) { test.NeedsExternalNetwork(t) test.NeedsGit(t) @@ -275,7 +275,7 @@ func TestSafeWriter_ModifiedLock(t *testing.T) { originalLock := new(Lock) *originalLock = *pc.Project.Lock - originalLock.SolveMeta.InputsDigest = []byte{} // zero out the input hash to ensure non-equivalency + //originalLock.SolveMeta.InputsDigest = []byte{} // zero out the input hash to ensure non-equivalency sw, _ := NewSafeWriter(nil, originalLock, pc.Project.Lock, VendorOnChanged, defaultCascadingPruneOptions()) // Verify prepared actions @@ -311,7 +311,7 @@ func TestSafeWriter_ModifiedLock(t *testing.T) { } } -func TestSafeWriter_ModifiedLockSkipVendor(t *testing.T) { +func testSafeWriter_ModifiedLockSkipVendor(t *testing.T) { test.NeedsExternalNetwork(t) test.NeedsGit(t) @@ -325,7 +325,7 @@ func TestSafeWriter_ModifiedLockSkipVendor(t *testing.T) { originalLock := new(Lock) *originalLock = *pc.Project.Lock - originalLock.SolveMeta.InputsDigest = []byte{} // zero out the input hash to ensure non-equivalency + 
//originalLock.SolveMeta.InputsDigest = []byte{} // zero out the input hash to ensure non-equivalency sw, _ := NewSafeWriter(nil, originalLock, pc.Project.Lock, VendorNever, defaultCascadingPruneOptions()) // Verify prepared actions From 4cb57f57c3e64532440ae4ad069a731378e0deb8 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Mon, 25 Jun 2018 21:11:18 -0400 Subject: [PATCH 06/25] dep: Make DeltaWriter use temp sibling vendor dir --- txn_writer.go | 71 ++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 51 insertions(+), 20 deletions(-) diff --git a/txn_writer.go b/txn_writer.go index 3455cffd10..f78e9c76ee 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -7,7 +7,6 @@ package dep import ( "bytes" "context" - "fmt" "io/ioutil" "log" "os" @@ -580,26 +579,41 @@ func NewDeltaWriter(oldLock, newLock *Lock, prune gps.CascadingPruneOptions, ven func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, logger *log.Logger) error { // TODO(sdboyer) remove path from the signature for this if path != filepath.Dir(dw.vendorDir) { - return fmt.Errorf("target path (%q) must be the parent of the original vendor path (%q)", path, dw.vendorDir) + return errors.Errorf("target path (%q) must be the parent of the original vendor path (%q)", path, dw.vendorDir) } - lpath := filepath.Join(filepath.Dir(path), LockName) - vpath := filepath.Join(path, "vendor") + lpath := filepath.Join(path, LockName) + vpath := dw.vendorDir - // Write out all the deltas + // Write the modified projects to a new adjacent directory. We use an + // adjacent directory to minimize the possibility of cross-filesystem renames + // becoming expensive copies, and to make removal of unneeded projects implicit + // and automatic. + vnewpath := vpath + ".new" + if _, err := os.Stat(vnewpath); err == nil { + return errors.Errorf("scratch directory %s already exists", vnewpath) + } + err := os.MkdirAll(vnewpath, os.FileMode(0777)) + if err != nil { + return errors.Wrapf(err, "error while creating scratch directory at %s", vnewpath) + } + + // Write out all the deltas to the newpath projs := make(map[gps.ProjectRoot]gps.LockedProject) for _, lp := range dw.lock.Projects() { projs[lp.Ident().ProjectRoot] = lp } + // TODO(sdboyer) add a txn/rollback layer, like the safewriter? //for pr, reason := range dw.changed { for pr, _ := range dw.changed { - to := filepath.FromSlash(filepath.Join(vpath, string(pr))) + to := filepath.FromSlash(filepath.Join(vnewpath, string(pr))) po := dw.pruneOptions.PruneOptionsFor(pr) - err := sm.ExportPrunedProject(context.TODO(), projs[pr], po, to) - if err != nil { + + if err := sm.ExportPrunedProject(context.TODO(), projs[pr], po, to); err != nil { return errors.Wrapf(err, "failed to export %s", pr) } + digest, err := pkgtree.DigestFromDirectory(to) if err != nil { return errors.Wrapf(err, "failed to hash %s", pr) @@ -617,24 +631,41 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l } } - // Write out the lock last, now that it's updated with digests + // Changed projects are fully populated. Now, iterate over the lock's + // projects and move any remaining ones not in the changed list to vnewpath. 
+ for _, lp := range dw.lock.Projects() { + pr := lp.Ident().ProjectRoot + tgt := filepath.Join(vnewpath, string(pr)) + err := os.MkdirAll(filepath.Dir(tgt), os.FileMode(0777)) + if err != nil { + return errors.Wrapf(err, "error creating parent directory in vendor for %s", tgt) + } + + if _, has := dw.changed[pr]; !has { + err = fs.RenameWithFallback(filepath.Join(vpath, string(pr)), tgt) + if err != nil { + return errors.Wrapf(err, "error moving unchanged project %s into scratch vendor dir", pr) + } + } + } + + err = os.RemoveAll(vpath) + if err != nil { + return errors.Wrap(err, "failed to remove original vendor directory") + } + err = fs.RenameWithFallback(vnewpath, vpath) + if err != nil { + return errors.Wrap(err, "failed to put new vendor directory into place") + } + + // Write out the lock last, now that it's fully updated with digests. l, err := dw.lock.MarshalTOML() if err != nil { return errors.Wrap(err, "failed to marshal lock to TOML") } if err = ioutil.WriteFile(lpath, append(lockFileComment, l...), 0666); err != nil { - return errors.Wrap(err, "failed to write lock file to temp dir") - } - - // Remove all the now-unnecessary bits from vendor. - for path, vs := range dw.status { - if vs == pkgtree.NotInLock { - err = os.RemoveAll(path) - if err != nil { - return errors.Wrapf(err, "vendor state may be inconsistent, could not remove %s", path) - } - } + return errors.Wrap(err, "failed to write new lock file") } return nil From bce4a363ac5db950281ce14741eab3904aff41a1 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Mon, 25 Jun 2018 23:12:56 -0400 Subject: [PATCH 07/25] gps: Remove all reference to InputsDigest We're now relying entirely on real validation - no more hash digest comparisons and meaningless conflicts! --- cmd/dep/hash_in.go | 51 --- gps/hash.go | 133 ------- gps/hash_test.go | 686 -------------------------------- gps/lock.go | 58 +-- gps/lock_test.go | 49 --- gps/lockdiff_test.go | 20 +- gps/prune.go | 8 +- gps/solution.go | 18 +- gps/solve_basic_test.go | 10 - gps/solver.go | 10 +- gps/source_cache_bolt_encode.go | 39 +- gps/source_cache_bolt_test.go | 4 +- gps/source_cache_test.go | 4 +- 13 files changed, 68 insertions(+), 1022 deletions(-) delete mode 100644 cmd/dep/hash_in.go delete mode 100644 gps/hash.go delete mode 100644 gps/hash_test.go diff --git a/cmd/dep/hash_in.go b/cmd/dep/hash_in.go deleted file mode 100644 index ed7f356934..0000000000 --- a/cmd/dep/hash_in.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package main - -import ( - "flag" - - "github.com/golang/dep" - "github.com/golang/dep/gps" - "github.com/golang/dep/gps/pkgtree" - "github.com/pkg/errors" -) - -func (cmd *hashinCommand) Name() string { return "hash-inputs" } -func (cmd *hashinCommand) Args() string { return "" } -func (cmd *hashinCommand) ShortHelp() string { return "" } -func (cmd *hashinCommand) LongHelp() string { return "" } -func (cmd *hashinCommand) Hidden() bool { return true } - -func (cmd *hashinCommand) Register(fs *flag.FlagSet) {} - -type hashinCommand struct{} - -func (hashinCommand) Run(ctx *dep.Ctx, args []string) error { - p, err := ctx.LoadProject() - if err != nil { - return err - } - - sm, err := ctx.SourceManager() - if err != nil { - return err - } - sm.UseDefaultSignalHandling() - defer sm.Release() - - params := p.MakeParams() - params.RootPackageTree, err = pkgtree.ListPackages(p.ResolvedAbsRoot, string(p.ImportRoot)) - if err != nil { - return errors.Wrap(err, "gps.ListPackages") - } - - s, err := gps.Prepare(params, sm) - if err != nil { - return errors.Wrap(err, "prepare solver") - } - ctx.Out.Println(gps.HashingInputsAsString(s)) - return nil -} diff --git a/gps/hash.go b/gps/hash.go deleted file mode 100644 index 556933bcf3..0000000000 --- a/gps/hash.go +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gps - -import ( - "bytes" - "crypto/sha256" - "io" - "sort" - "strconv" - "strings" -) - -// string headers used to demarcate sections in hash input creation -const ( - hhConstraints = "-CONSTRAINTS-" - hhImportsReqs = "-IMPORTS/REQS-" - hhIgnores = "-IGNORES-" - hhOverrides = "-OVERRIDES-" - hhAnalyzer = "-ANALYZER-" -) - -// HashInputs computes a hash digest of all data in SolveParams and the -// RootManifest that act as function inputs to Solve(). -// -// The digest returned from this function is the same as the digest that would -// be included with a Solve() Result. As such, it's appropriate for comparison -// against the digest stored in a lock file, generated by a previous Solve(): if -// the digests match, then manifest and lock are in sync, and a Solve() is -// unnecessary. -// -// (Basically, this is for memoization.) -func (s *solver) HashInputs() (digest []byte) { - h := sha256.New() - s.writeHashingInputs(h) - - hd := h.Sum(nil) - digest = hd[:] - return -} - -func (s *solver) writeHashingInputs(w io.Writer) { - writeString := func(s string) { - // Skip zero-length string writes; it doesn't affect the real hash - // calculation, and keeps misleading newlines from showing up in the - // debug output. - if s != "" { - // All users of writeHashingInputs cannot error on Write(), so just - // ignore it - w.Write([]byte(s)) - } - } - - // We write "section headers" into the hash purely to ease scanning when - // debugging this input-constructing algorithm; as long as the headers are - // constant, then they're effectively a no-op. - writeString(hhConstraints) - - // getApplicableConstraints will apply overrides, incorporate requireds, - // apply local ignores, drop stdlib imports, and finally trim out - // ineffectual constraints. - for _, pd := range s.rd.getApplicableConstraints(s.stdLibFn) { - writeString(string(pd.Ident.ProjectRoot)) - writeString(pd.Ident.Source) - writeString(pd.Constraint.typedString()) - } - - // Write out each discrete import, including those derived from requires. 
- writeString(hhImportsReqs) - imports := s.rd.externalImportList(s.stdLibFn) - sort.Strings(imports) - for _, im := range imports { - writeString(im) - } - - // Add ignores, skipping any that point under the current project root; - // those will have already been implicitly incorporated by the import - // lister. - writeString(hhIgnores) - - ig := s.rd.ir.ToSlice() - sort.Strings(ig) - for _, igp := range ig { - // Typical prefix comparison checks will erroneously fail if the wildcard - // is present. Trim it off, if present. - tigp := strings.TrimSuffix(igp, "*") - if !strings.HasPrefix(tigp, s.rd.rpt.ImportRoot) || !isPathPrefixOrEqual(s.rd.rpt.ImportRoot, tigp) { - writeString(igp) - } - } - - // Overrides *also* need their own special entry distinct from basic - // constraints, to represent the unique effects they can have on the entire - // solving process beyond root's immediate scope. - writeString(hhOverrides) - for _, pc := range s.rd.ovr.asSortedSlice() { - writeString(string(pc.Ident.ProjectRoot)) - if pc.Ident.Source != "" { - writeString(pc.Ident.Source) - } - if pc.Constraint != nil { - writeString(pc.Constraint.typedString()) - } - } - - writeString(hhAnalyzer) - ai := s.rd.an.Info() - writeString(ai.Name) - writeString(strconv.Itoa(ai.Version)) -} - -// bytes.Buffer wrapper that injects newlines after each call to Write(). -type nlbuf bytes.Buffer - -func (buf *nlbuf) Write(p []byte) (n int, err error) { - n, _ = (*bytes.Buffer)(buf).Write(p) - (*bytes.Buffer)(buf).WriteByte('\n') - return n + 1, nil -} - -// HashingInputsAsString returns the raw input data used by Solver.HashInputs() -// as a string. -// -// This is primarily intended for debugging purposes. -func HashingInputsAsString(s Solver) string { - ts := s.(*solver) - buf := new(nlbuf) - ts.writeHashingInputs(buf) - - return (*bytes.Buffer)(buf).String() -} diff --git a/gps/hash_test.go b/gps/hash_test.go deleted file mode 100644 index 1be02f6870..0000000000 --- a/gps/hash_test.go +++ /dev/null @@ -1,686 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gps - -import ( - "bytes" - "crypto/sha256" - "fmt" - "strings" - "testing" - "text/tabwriter" - - "github.com/golang/dep/gps/pkgtree" -) - -func TestHashInputs(t *testing.T) { - fix := basicFixtures["shared dependency with overlapping constraints"] - - params := SolveParameters{ - RootDir: string(fix.ds[0].n), - RootPackageTree: fix.rootTree(), - Manifest: fix.rootmanifest(), - ProjectAnalyzer: naiveAnalyzer{}, - stdLibFn: func(string) bool { return false }, - mkBridgeFn: overrideMkBridge, - } - - s, err := Prepare(params, newdepspecSM(fix.ds, nil)) - if err != nil { - t.Fatalf("Unexpected error while prepping solver: %s", err) - } - - dig := s.HashInputs() - h := sha256.New() - - elems := []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - hhAnalyzer, - "naive-analyzer", - "1", - } - for _, v := range elems { - h.Write([]byte(v)) - } - correct := h.Sum(nil) - - if !bytes.Equal(dig, correct) { - t.Errorf("Hashes are not equal. 
Inputs:\n%s", diffHashingInputs(s, elems)) - } else if strings.Join(elems, "\n")+"\n" != HashingInputsAsString(s) { - t.Errorf("Hashes are equal, but hashing input strings are not:\n%s", diffHashingInputs(s, elems)) - } -} - -func TestHashInputsReqsIgs(t *testing.T) { - fix := basicFixtures["shared dependency with overlapping constraints"] - - rm := fix.rootmanifest().(simpleRootManifest).dup() - rm.ig = pkgtree.NewIgnoredRuleset([]string{"foo", "bar"}) - - params := SolveParameters{ - RootDir: string(fix.ds[0].n), - RootPackageTree: fix.rootTree(), - Manifest: rm, - ProjectAnalyzer: naiveAnalyzer{}, - stdLibFn: func(string) bool { return false }, - mkBridgeFn: overrideMkBridge, - } - - s, err := Prepare(params, newdepspecSM(fix.ds, nil)) - if err != nil { - t.Fatalf("Unexpected error while prepping solver: %s", err) - } - - dig := s.HashInputs() - h := sha256.New() - - elems := []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - "bar", - "foo", - hhOverrides, - hhAnalyzer, - "naive-analyzer", - "1", - } - for _, v := range elems { - h.Write([]byte(v)) - } - correct := h.Sum(nil) - - if !bytes.Equal(dig, correct) { - t.Errorf("Hashes are not equal. Inputs:\n%s", diffHashingInputs(s, elems)) - } - - // Add requires - rm.req = map[string]bool{ - "baz": true, - "qux": true, - } - - params.Manifest = rm - - s, err = Prepare(params, newdepspecSM(fix.ds, nil)) - if err != nil { - t.Fatalf("Unexpected error while prepping solver: %s", err) - } - - dig = s.HashInputs() - h = sha256.New() - - elems = []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - "baz", - "qux", - hhIgnores, - "bar", - "foo", - hhOverrides, - hhAnalyzer, - "naive-analyzer", - "1", - } - for _, v := range elems { - h.Write([]byte(v)) - } - correct = h.Sum(nil) - - if !bytes.Equal(dig, correct) { - t.Errorf("Hashes are not equal. Inputs:\n%s", diffHashingInputs(s, elems)) - } - - // remove ignores, just test requires alone - rm.ig = nil - params.Manifest = rm - - s, err = Prepare(params, newdepspecSM(fix.ds, nil)) - if err != nil { - t.Fatalf("Unexpected error while prepping solver: %s", err) - } - - dig = s.HashInputs() - h = sha256.New() - - elems = []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - "baz", - "qux", - hhIgnores, - hhOverrides, - hhAnalyzer, - "naive-analyzer", - "1", - } - for _, v := range elems { - h.Write([]byte(v)) - } - correct = h.Sum(nil) - - if !bytes.Equal(dig, correct) { - t.Errorf("Hashes are not equal. 
Inputs:\n%s", diffHashingInputs(s, elems)) - } -} - -func TestHashInputsOverrides(t *testing.T) { - basefix := basicFixtures["shared dependency with overlapping constraints"] - - // Set up base state that we'll mutate over the course of each test - rm := basefix.rootmanifest().(simpleRootManifest).dup() - params := SolveParameters{ - RootDir: string(basefix.ds[0].n), - RootPackageTree: basefix.rootTree(), - Manifest: rm, - ProjectAnalyzer: naiveAnalyzer{}, - stdLibFn: func(string) bool { return false }, - mkBridgeFn: overrideMkBridge, - } - - table := []struct { - name string - mut func() - elems []string - }{ - { - name: "override source; not imported, no deps pp", - mut: func() { - // First case - override just source, on something without - // corresponding project properties in the dependencies from - // root - rm.ovr = map[ProjectRoot]ProjectProperties{ - "c": { - Source: "car", - }, - } - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - "c", - "car", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override source; required, no deps pp", - mut: func() { - // Put c into the requires list, which should make it show up under - // constraints - rm.req = map[string]bool{ - "c": true, - } - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - "c", - "car", - "any-*", // Any isn't included under the override, but IS for the constraint b/c it's equivalent - hhImportsReqs, - "a", - "b", - "c", - hhIgnores, - hhOverrides, - "c", - "car", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override source; required & imported, no deps pp", - mut: func() { - // Put c in the root's imports - poe := params.RootPackageTree.Packages["root"] - poe.P.Imports = []string{"a", "b", "c"} - params.RootPackageTree.Packages["root"] = poe - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - "c", - "car", - "any-*", // Any isn't included under the override, but IS for the constraint b/c it's equivalent - hhImportsReqs, - "a", - "b", - "c", - hhIgnores, - hhOverrides, - "c", - "car", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override source; imported, no deps pp", - mut: func() { - // Take c out of requires list - now it's only imported - rm.req = nil - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - "c", - "car", - "any-*", - hhImportsReqs, - "a", - "b", - "c", - hhIgnores, - hhOverrides, - "c", - "car", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "other override constraint; not imported, no deps pp", - mut: func() { - // Override not in root, just with constraint - rm.ovr["d"] = ProjectProperties{ - Constraint: NewBranch("foobranch"), - } - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - "c", - "car", - "any-*", - hhImportsReqs, - "a", - "b", - "c", - hhIgnores, - hhOverrides, - "c", - "car", - "d", - "b-foobranch", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override constraint; not imported, no deps pp", - mut: func() { - // Remove the "c" pkg from imports for remainder of tests - poe := params.RootPackageTree.Packages["root"] - poe.P.Imports = []string{"a", "b"} - params.RootPackageTree.Packages["root"] = poe - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - "c", - "car", - "d", - "b-foobranch", - 
hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override both; not imported, no deps pp", - mut: func() { - // Override not in root, both constraint and network name - rm.ovr["c"] = ProjectProperties{ - Source: "groucho", - Constraint: NewBranch("plexiglass"), - } - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - "c", - "groucho", - "b-plexiglass", - "d", - "b-foobranch", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override constraint; imported, with constraint", - mut: func() { - // Override dep present in root, just constraint - rm.ovr["a"] = ProjectProperties{ - Constraint: NewVersion("fluglehorn"), - } - }, - elems: []string{ - hhConstraints, - "a", - "pv-fluglehorn", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - "a", - "pv-fluglehorn", - "c", - "groucho", - "b-plexiglass", - "d", - "b-foobranch", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override source; imported, with constraint", - mut: func() { - // Override in root, only network name - rm.ovr["a"] = ProjectProperties{ - Source: "nota", - } - }, - elems: []string{ - hhConstraints, - "a", - "nota", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - "a", - "nota", - "c", - "groucho", - "b-plexiglass", - "d", - "b-foobranch", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "override both; imported, with constraint", - mut: func() { - // Override in root, network name and constraint - rm.ovr["a"] = ProjectProperties{ - Source: "nota", - Constraint: NewVersion("fluglehorn"), - } - }, - elems: []string{ - hhConstraints, - "a", - "nota", - "pv-fluglehorn", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - "a", - "nota", - "pv-fluglehorn", - "c", - "groucho", - "b-plexiglass", - "d", - "b-foobranch", - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - } - - for _, fix := range table { - fix.mut() - params.Manifest = rm - - s, err := Prepare(params, newdepspecSM(basefix.ds, nil)) - if err != nil { - t.Errorf("(fix: %q) Unexpected error while prepping solver: %s", fix.name, err) - continue - } - - h := sha256.New() - for _, v := range fix.elems { - h.Write([]byte(v)) - } - - if !bytes.Equal(s.HashInputs(), h.Sum(nil)) { - t.Errorf("(fix: %q) Hashes are not equal. 
Inputs:\n%s", fix.name, diffHashingInputs(s, fix.elems)) - } - } -} - -func diffHashingInputs(s Solver, wnt []string) string { - actual := HashingInputsAsString(s) - got := strings.Split(actual, "\n") - // got has a trailing empty, add that to wnt - wnt = append(wnt, "") - - lg, lw := len(got), len(wnt) - - var buf bytes.Buffer - tw := tabwriter.NewWriter(&buf, 4, 4, 2, ' ', 0) - fmt.Fprintln(tw, " (GOT) \t (WANT) \t") - - lmiss, rmiss := ">>>>>>>>>>", "<<<<<<<<<<" - if lg == lw { - // same length makes the loop pretty straightforward - for i := 0; i < lg; i++ { - fmt.Fprintf(tw, "%s\t%s\t\n", got[i], wnt[i]) - } - } else if lg > lw { - offset := 0 - for i := 0; i < lg; i++ { - if lw <= i-offset { - fmt.Fprintf(tw, "%s\t%s\t\n", got[i], rmiss) - } else if got[i] != wnt[i-offset] && i+1 < lg && got[i+1] == wnt[i-offset] { - // if the next slot is a match, realign by skipping this one and - // bumping the offset - fmt.Fprintf(tw, "%s\t%s\t\n", got[i], rmiss) - offset++ - } else { - fmt.Fprintf(tw, "%s\t%s\t\n", got[i], wnt[i-offset]) - } - } - } else { - offset := 0 - for i := 0; i < lw; i++ { - if lg <= i-offset { - fmt.Fprintf(tw, "%s\t%s\t\n", lmiss, wnt[i]) - } else if got[i-offset] != wnt[i] && i+1 < lw && got[i-offset] == wnt[i+1] { - // if the next slot is a match, realign by skipping this one and - // bumping the offset - fmt.Fprintf(tw, "%s\t%s\t\n", lmiss, wnt[i]) - offset++ - } else { - fmt.Fprintf(tw, "%s\t%s\t\n", got[i-offset], wnt[i]) - } - } - } - - tw.Flush() - return buf.String() -} - -func TestHashInputsIneffectualWildcardIgs(t *testing.T) { - fix := basicFixtures["shared dependency with overlapping constraints"] - - rm := fix.rootmanifest().(simpleRootManifest).dup() - - params := SolveParameters{ - RootDir: string(fix.ds[0].n), - RootPackageTree: fix.rootTree(), - Manifest: rm, - ProjectAnalyzer: naiveAnalyzer{}, - stdLibFn: func(string) bool { return false }, - mkBridgeFn: overrideMkBridge, - } - - cases := []struct { - name string - ignoreMap []string - elems []string - }{ - { - name: "no wildcard ignores", - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - hhOverrides, - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - { - name: "different wildcard ignores", - ignoreMap: []string{ - "foobar*", - "foobarbaz*", - "foozapbar*", - }, - elems: []string{ - hhConstraints, - "a", - "sv-1.0.0", - "b", - "sv-1.0.0", - hhImportsReqs, - "a", - "b", - hhIgnores, - "foobar*", - "foozapbar*", - hhOverrides, - hhAnalyzer, - "naive-analyzer", - "1", - }, - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - - rm.ig = pkgtree.NewIgnoredRuleset(c.ignoreMap) - - params.Manifest = rm - - s, err := Prepare(params, newdepspecSM(fix.ds, nil)) - if err != nil { - t.Fatalf("Unexpected error while prepping solver: %s", err) - } - - dig := s.HashInputs() - h := sha256.New() - - for _, v := range c.elems { - h.Write([]byte(v)) - } - correct := h.Sum(nil) - - if !bytes.Equal(dig, correct) { - t.Errorf("Hashes are not equal. Inputs:\n%s", diffHashingInputs(s, c.elems)) - } - }) - } -} diff --git a/gps/lock.go b/gps/lock.go index d382fd03ff..91fd0e23e8 100644 --- a/gps/lock.go +++ b/gps/lock.go @@ -5,7 +5,6 @@ package gps import ( - "bytes" "fmt" "sort" ) @@ -18,35 +17,21 @@ import ( // solution is all that would be necessary to constitute a lock file, though // tools can include whatever other information they want in their storage. 
type Lock interface { - // The hash digest of inputs to gps that resulted in this lock data. - InputsDigest() []byte - // Projects returns the list of LockedProjects contained in the lock data. Projects() []LockedProject } -// LocksAreEq checks if two locks are equivalent. This checks that -// all contained LockedProjects are equal, and optionally (if `checkHash` is -// true) whether the locks' input hashes are equal. -func LocksAreEq(l1, l2 Lock, checkHash bool) bool { - // Cheapest ops first - if checkHash && !bytes.Equal(l1.InputsDigest(), l2.InputsDigest()) { - return false - } - - p1, p2 := l1.Projects(), l2.Projects() - if len(p1) != len(p2) { - return false - } - - p1, p2 = sortLockedProjects(p1), sortLockedProjects(p2) - - for k, lp := range p1 { - if !lp.Eq(p2[k]) { - return false - } - } - return true +// LockWithImports composes Lock to also add a method that reports all the +// imports that were present when generating the Lock. +// +// This information can be rederived, but it requires doing whole-graph +// analysis; tracking the information separately makes verification tasks +// easier, especially determining if an input import has been removed. +type LockWithImports interface { + Lock + // The set of imports (and required statements) that were the inputs that + // generated this Lock. + InputImports() []string } // sortLockedProjects returns a sorted copy of lps, or itself if already sorted. @@ -85,18 +70,11 @@ type lockedProject struct { } // SimpleLock is a helper for tools to easily describe lock data when they know -// that no hash, or other complex information, is available. +// that input imports are unavailable. type SimpleLock []LockedProject var _ Lock = SimpleLock{} -// InputsDigest always returns an empty string for SimpleLock. This makes it useless -// as a stable lock to be written to disk, but still useful for some ephemeral -// purposes. -func (SimpleLock) InputsDigest() []byte { - return nil -} - // Projects returns the entire contents of the SimpleLock. 
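A minimal sketch (not part of this change) of a tool-side lock type satisfying the new LockWithImports interface; when such a lock is handed to gps, prepLock below carries the recorded input imports along with the projects.

package sketch

import "github.com/golang/dep/gps"

// toolLock is a hypothetical lock implementation that records both the solved
// projects and the imports that were the inputs to the solve.
type toolLock struct {
	projects []gps.LockedProject
	imports  []string
}

func (l toolLock) Projects() []gps.LockedProject { return l.projects }
func (l toolLock) InputImports() []string        { return l.imports }

// Compile-time check that toolLock satisfies the composed interface.
var _ gps.LockWithImports = toolLock{}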
func (l SimpleLock) Projects() []LockedProject { return l @@ -229,12 +207,12 @@ func (lp lockedProject) String() string { } type safeLock struct { - h []byte p []LockedProject + i []string } -func (sl safeLock) InputsDigest() []byte { - return sl.h +func (sl safeLock) InputImports() []string { + return sl.i } func (sl safeLock) Projects() []LockedProject { @@ -250,10 +228,14 @@ func prepLock(l Lock) safeLock { pl := l.Projects() rl := safeLock{ - h: l.InputsDigest(), p: make([]LockedProject, len(pl)), } copy(rl.p, pl) + if lwi, ok := l.(LockWithImports); ok { + rl.i = make([]string, len(lwi.InputImports())) + copy(rl.i, lwi.InputImports()) + } + return rl } diff --git a/gps/lock_test.go b/gps/lock_test.go index 2c58942c24..2833993eec 100644 --- a/gps/lock_test.go +++ b/gps/lock_test.go @@ -83,55 +83,6 @@ func TestLockedProjectsEq(t *testing.T) { } } -func TestLocksAreEq(t *testing.T) { - gpl := NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("278a227dfc3d595a33a77ff3f841fd8ca1bc8cd0"), []string{"gps"}) - svpl := NewLockedProject(mkPI("github.com/Masterminds/semver"), NewVersion("v2.0.0").Pair("foo"), []string{"semver"}) - bbbt := NewLockedProject(mkPI("github.com/beeblebrox/browntown"), NewBranch("master").Pair("63fc17eb7966a6f4cc0b742bf42731c52c4ac740"), []string{"browntown", "smoochies"}) - - l1 := solution{ - hd: []byte("foo"), - p: []LockedProject{ - gpl, - bbbt, - svpl, - }, - } - - l2 := solution{ - p: []LockedProject{ - svpl, - gpl, - }, - } - - if LocksAreEq(l1, l2, true) { - t.Fatal("should have failed on hash check") - } - - if LocksAreEq(l1, l2, false) { - t.Fatal("should have failed on length check") - } - - l2.p = append(l2.p, bbbt) - - if !LocksAreEq(l1, l2, false) { - t.Fatal("should be eq, must have failed on individual lp check") - } - - // ensure original input sort order is maintained - if !l1.p[0].Eq(gpl) { - t.Error("checking equality resorted l1") - } - if !l2.p[0].Eq(svpl) { - t.Error("checking equality resorted l2") - } - - l1.p[0] = NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.11.0"), []string{"gps"}) - if LocksAreEq(l1, l2, false) { - t.Error("should fail when individual lp were not eq") - } -} - func TestLockedProjectsString(t *testing.T) { tt := []struct { name string diff --git a/gps/lockdiff_test.go b/gps/lockdiff_test.go index fa6fd2d60f..6fe1d36205 100644 --- a/gps/lockdiff_test.go +++ b/gps/lockdiff_test.go @@ -6,7 +6,6 @@ package gps import ( "bytes" - "encoding/hex" "testing" ) @@ -194,13 +193,13 @@ func TestDiffProjects_RemovePackages(t *testing.T) { func TestDiffLocks_NoChange(t *testing.T) { l1 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } l2 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, @@ -214,13 +213,13 @@ func TestDiffLocks_NoChange(t *testing.T) { func TestDiffLocks_AddProjects(t *testing.T) { l1 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, } l2 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux", Source: "https://github.com/mcfork/bazqux.git"}, @@ -298,7 +297,7 @@ func TestDiffLocks_AddProjects(t 
*testing.T) { func TestDiffLocks_RemoveProjects(t *testing.T) { l1 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{ pi: ProjectIdentifier{ProjectRoot: "github.com/a/thing", Source: "https://github.com/mcfork/athing.git"}, @@ -310,7 +309,7 @@ func TestDiffLocks_RemoveProjects(t *testing.T) { }, } l2 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, }, @@ -381,7 +380,7 @@ func TestDiffLocks_RemoveProjects(t *testing.T) { func TestDiffLocks_ModifyProjects(t *testing.T) { l1 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bu"}, v: NewVersion("v1.0.0")}, @@ -389,7 +388,7 @@ func TestDiffLocks_ModifyProjects(t *testing.T) { }, } l2 := safeLock{ - h: []byte("abc123"), + //h: []byte("abc123"), p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v2.0.0")}, @@ -435,9 +434,8 @@ func TestDiffLocks_EmptyInitialLock(t *testing.T) { } func TestDiffLocks_EmptyFinalLock(t *testing.T) { - h1, _ := hex.DecodeString("abc123") l1 := safeLock{ - h: h1, + //h: h1, p: []LockedProject{ lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, }, diff --git a/gps/prune.go b/gps/prune.go index b0a7781493..98aebffd19 100644 --- a/gps/prune.go +++ b/gps/prune.go @@ -82,14 +82,14 @@ func ParsePruneOptions(input string) (PruneOptions, error) { func (po PruneOptions) String() string { var buf bytes.Buffer - if po&PruneGoTestFiles != 0 { - fmt.Fprintf(&buf, "T") + if po&PruneNonGoFiles != 0 { + fmt.Fprintf(&buf, "N") } if po&PruneUnusedPackages != 0 { fmt.Fprintf(&buf, "U") } - if po&PruneNonGoFiles != 0 { - fmt.Fprintf(&buf, "N") + if po&PruneGoTestFiles != 0 { + fmt.Fprintf(&buf, "T") } if po&PruneNestedVendorDirs != 0 { fmt.Fprintf(&buf, "V") diff --git a/gps/solution.go b/gps/solution.go index e76510f769..2b1f1ed310 100644 --- a/gps/solution.go +++ b/gps/solution.go @@ -18,7 +18,7 @@ import ( // A Solution is returned by a solver run. It is mostly just a Lock, with some // additional methods that report information about the solve run. type Solution interface { - Lock + LockWithImports // The name of the ProjectAnalyzer used in generating this solution. AnalyzerName() string // The version of the ProjectAnalyzer used in generating this solution. @@ -31,15 +31,15 @@ type Solution interface { } type solution struct { - // A list of the projects selected by the solver. + // The projects selected by the solver. p []LockedProject + // The import inputs that created this solution (including requires). 
+ i []string + // The number of solutions that were attempted att int - // The hash digest of the input opts - hd []byte - // The analyzer info analyzerInfo ProjectAnalyzerInfo @@ -153,12 +153,12 @@ func (r solution) Projects() []LockedProject { return r.p } -func (r solution) Attempts() int { - return r.att +func (r solution) InputImports() []string { + return r.i } -func (r solution) InputsDigest() []byte { - return r.hd +func (r solution) Attempts() int { + return r.att } func (r solution) AnalyzerName() string { diff --git a/gps/solve_basic_test.go b/gps/solve_basic_test.go index 3482aa7f07..3665daf2a3 100644 --- a/gps/solve_basic_test.go +++ b/gps/solve_basic_test.go @@ -1559,11 +1559,6 @@ func (ds depspec) DependencyConstraints() ProjectConstraints { type fixLock []LockedProject -// impl Lock interface -func (fixLock) InputsDigest() []byte { - return []byte("fooooorooooofooorooofoo") -} - // impl Lock interface func (l fixLock) Projects() []LockedProject { return l @@ -1571,11 +1566,6 @@ func (l fixLock) Projects() []LockedProject { type dummyLock struct{} -// impl Lock interface -func (dummyLock) InputsDigest() []byte { - return []byte("fooooorooooofooorooofoo") -} - // impl Lock interface func (dummyLock) Projects() []LockedProject { return nil diff --git a/gps/solver.go b/gps/solver.go index d42f451490..dc5f4cd778 100644 --- a/gps/solver.go +++ b/gps/solver.go @@ -323,14 +323,6 @@ func Prepare(params SolveParameters, sm SourceManager) (Solver, error) { // a "lock file" - and/or use it to write out a directory tree of dependencies, // suitable to be a vendor directory, via CreateVendorTree. type Solver interface { - // HashInputs hashes the unique inputs to this solver, returning the hash - // digest. It is guaranteed that, if the resulting digest is equal to the - // digest returned from a previous Solution.InputHash(), that that Solution - // is valid for this Solver's inputs. - // - // In such a case, it may not be necessary to run Solve() at all. - HashInputs() []byte - // Solve initiates a solving run. It will either abort due to a canceled // Context, complete successfully with a Solution, or fail with an // informative error. 
@@ -455,7 +447,7 @@ func (s *solver) Solve(ctx context.Context) (Solution, error) { solv: s, } soln.analyzerInfo = s.rd.an.Info() - soln.hd = s.HashInputs() + soln.i = s.rd.externalImportList(s.stdLibFn) // Convert ProjectAtoms into LockedProjects soln.p = make([]LockedProject, len(all)) diff --git a/gps/source_cache_bolt_encode.go b/gps/source_cache_bolt_encode.go index e1273aee2b..d91d9018b8 100644 --- a/gps/source_cache_bolt_encode.go +++ b/gps/source_cache_bolt_encode.go @@ -6,6 +6,7 @@ package gps import ( "encoding/binary" + "strings" "time" "github.com/boltdb/bolt" @@ -17,19 +18,19 @@ import ( ) var ( - cacheKeyComment = []byte("c") - cacheKeyConstraint = cacheKeyComment - cacheKeyError = []byte("e") - cacheKeyHash = []byte("h") - cacheKeyIgnored = []byte("i") - cacheKeyImport = cacheKeyIgnored - cacheKeyLock = []byte("l") - cacheKeyName = []byte("n") - cacheKeyOverride = []byte("o") - cacheKeyPTree = []byte("p") - cacheKeyRequired = []byte("r") - cacheKeyRevision = cacheKeyRequired - cacheKeyTestImport = []byte("t") + cacheKeyComment = []byte("c") + cacheKeyConstraint = cacheKeyComment + cacheKeyError = []byte("e") + cacheKeyInputImports = []byte("m") + cacheKeyIgnored = []byte("i") + cacheKeyImport = cacheKeyIgnored + cacheKeyLock = []byte("l") + cacheKeyName = []byte("n") + cacheKeyOverride = []byte("o") + cacheKeyPTree = []byte("p") + cacheKeyRequired = []byte("r") + cacheKeyRevision = cacheKeyRequired + cacheKeyTestImport = []byte("t") cacheRevision = byte('r') cacheVersion = byte('v') @@ -306,10 +307,11 @@ func lockedProjectFromCache(m *pb.LockedProject) (LockedProject, error) { // cachePutLock stores the Lock as fields in the bolt.Bucket. func cachePutLock(b *bolt.Bucket, l Lock) error { - // InputHash - if v := l.InputsDigest(); len(v) > 0 { - if err := b.Put(cacheKeyHash, v); err != nil { - return errors.Wrap(err, "failed to put hash") + // Input imports, if present. + if lwp, ok := l.(LockWithImports); ok && len(lwp.InputImports()) > 0 { + byt := []byte(strings.Join(lwp.InputImports(), "#")) + if err := b.Put(cacheKeyInputImports, byt); err != nil { + return errors.Wrap(err, "failed to put input imports") } } @@ -341,8 +343,9 @@ func cachePutLock(b *bolt.Bucket, l Lock) error { // cacheGetLock returns a new *safeLock with the fields retrieved from the bolt.Bucket. 
func cacheGetLock(b *bolt.Bucket) (*safeLock, error) { l := &safeLock{ - h: b.Get(cacheKeyHash), + i: strings.Split(string(b.Get(cacheKeyInputImports)), "#"), } + if locked := b.Bucket(cacheKeyLock); locked != nil { var msg pb.LockedProject err := locked.ForEach(func(_, v []byte) error { diff --git a/gps/source_cache_bolt_test.go b/gps/source_cache_bolt_test.go index c8ad87e54a..a661ab82ea 100644 --- a/gps/source_cache_bolt_test.go +++ b/gps/source_cache_bolt_test.go @@ -53,7 +53,7 @@ func TestBoltCacheTimeout(t *testing.T) { } lock := &safeLock{ - h: []byte("test_hash"), + //h: []byte("test_hash"), p: []LockedProject{ NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"gps"}), NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.10.0"), nil), @@ -233,7 +233,7 @@ func TestBoltCacheTimeout(t *testing.T) { } newLock := &safeLock{ - h: []byte("new_test_hash"), + //h: []byte("new_test_hash"), p: []LockedProject{ NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v1"), []string{"gps"}), }, diff --git a/gps/source_cache_test.go b/gps/source_cache_test.go index 9e15b65ac9..14d6d0d448 100644 --- a/gps/source_cache_test.go +++ b/gps/source_cache_test.go @@ -116,7 +116,7 @@ func (test singleSourceCacheTest) run(t *testing.T) { ig: pkgtree.NewIgnoredRuleset([]string{"a", "b"}), } var l Lock = &safeLock{ - h: []byte("test_hash"), + //h: []byte("test_hash"), p: []LockedProject{ NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"gps"}), NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.10.0"), nil), @@ -163,7 +163,7 @@ func (test singleSourceCacheTest) run(t *testing.T) { ig: pkgtree.NewIgnoredRuleset([]string{"c", "d"}), } l = &safeLock{ - h: []byte("different_test_hash"), + //h: []byte("different_test_hash"), p: []LockedProject{ NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("278a227dfc3d595a33a77ff3f841fd8ca1bc8cd0"), []string{"gps"}), NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.11.0"), []string{"gps"}), From 0b2482d6889006792f63d8aa6fce6fdbad6c87f6 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Tue, 26 Jun 2018 00:30:41 -0400 Subject: [PATCH 08/25] verify: Relocate lock diffing and tree hashing Both of these subsystems make more sense in the verification package than in gps itself. --- gps/{pkgtree => verify}/digest.go | 2 +- gps/{pkgtree => verify}/digest_test.go | 2 +- gps/{pkgtree => verify}/dirwalk.go | 2 +- gps/verify/lock.go | 8 +- gps/{ => verify}/lockdiff.go | 39 ++++-- gps/{ => verify}/lockdiff_test.go | 170 +++++++++---------------- 6 files changed, 94 insertions(+), 129 deletions(-) rename gps/{pkgtree => verify}/digest.go (99%) rename gps/{pkgtree => verify}/digest_test.go (99%) rename gps/{pkgtree => verify}/dirwalk.go (99%) rename gps/{ => verify}/lockdiff.go (86%) rename gps/{ => verify}/lockdiff_test.go (67%) diff --git a/gps/pkgtree/digest.go b/gps/verify/digest.go similarity index 99% rename from gps/pkgtree/digest.go rename to gps/verify/digest.go index d1f671992d..3ce0edfa1d 100644 --- a/gps/pkgtree/digest.go +++ b/gps/verify/digest.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package pkgtree +package verify import ( "bytes" diff --git a/gps/pkgtree/digest_test.go b/gps/verify/digest_test.go similarity index 99% rename from gps/pkgtree/digest_test.go rename to gps/verify/digest_test.go index 77dd298de2..36a7912d03 100644 --- a/gps/pkgtree/digest_test.go +++ b/gps/verify/digest_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package pkgtree +package verify import ( "bytes" diff --git a/gps/pkgtree/dirwalk.go b/gps/verify/dirwalk.go similarity index 99% rename from gps/pkgtree/dirwalk.go rename to gps/verify/dirwalk.go index 350c1606c3..4010c4a03f 100644 --- a/gps/pkgtree/dirwalk.go +++ b/gps/verify/dirwalk.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package pkgtree +package verify import ( "os" diff --git a/gps/verify/lock.go b/gps/verify/lock.go index 87932d63c9..3997bf9d86 100644 --- a/gps/verify/lock.go +++ b/gps/verify/lock.go @@ -1,3 +1,7 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + package verify import ( @@ -13,11 +17,9 @@ import ( type VerifiableProject struct { gps.LockedProject PruneOpts gps.PruneOptions - Digest pkgtree.VersionedDigest + Digest VersionedDigest } -type LockDiff struct{} - type lockUnsatisfy uint8 const ( diff --git a/gps/lockdiff.go b/gps/verify/lockdiff.go similarity index 86% rename from gps/lockdiff.go rename to gps/verify/lockdiff.go index 1f7da66b62..b0f230b893 100644 --- a/gps/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -2,12 +2,14 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package gps +package verify import ( "fmt" "sort" "strings" + + "github.com/golang/dep/gps" ) // StringDiff represents a modified string value. @@ -40,6 +42,21 @@ func (diff *StringDiff) String() string { return diff.Current } +// sortLockedProjects returns a sorted copy of lps, or itself if already sorted. +func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { + if len(lps) <= 1 || sort.SliceIsSorted(lps, func(i, j int) bool { + return lps[i].Ident().Less(lps[j].Ident()) + }) { + return lps + } + cp := make([]gps.LockedProject, len(lps)) + copy(cp, lps) + sort.Slice(cp, func(i, j int) bool { + return cp[i].Ident().Less(cp[j].Ident()) + }) + return cp +} + // LockDiff is the set of differences between an existing lock file and an updated lock file. // Fields are only populated when there is a difference, otherwise they are empty. type LockDiff struct { @@ -51,7 +68,7 @@ type LockDiff struct { // LockedProjectDiff contains the before and after snapshot of a project reference. // Fields are only populated when there is a difference, otherwise they are empty. type LockedProjectDiff struct { - Name ProjectRoot + Name gps.ProjectRoot Source *StringDiff Version *StringDiff Branch *StringDiff @@ -61,13 +78,13 @@ type LockedProjectDiff struct { // DiffLocks compares two locks and identifies the differences between them. // Returns nil if there are no differences. 
-func DiffLocks(l1 Lock, l2 Lock) *LockDiff { +func DiffLocks(l1, l2 gps.Lock) *LockDiff { // Default nil locks to empty locks, so that we can still generate a diff if l1 == nil { - l1 = &SimpleLock{} + l1 = &gps.SimpleLock{} } if l2 == nil { - l2 = &SimpleLock{} + l2 = &gps.SimpleLock{} } p1, p2 := l1.Projects(), l2.Projects() @@ -129,7 +146,7 @@ func DiffLocks(l1 Lock, l2 Lock) *LockDiff { // DiffFor checks to see if there was a diff for the provided ProjectRoot. The // first return value is a 0 if there was no diff, 1 if it was added, 2 if it // was removed, and 3 if it was modified. -func (ld *LockDiff) DiffFor(pr ProjectRoot) (uint8, LockedProjectDiff) { +func (ld *LockDiff) DiffFor(pr gps.ProjectRoot) (uint8, LockedProjectDiff) { for _, lpd := range ld.Add { if lpd.Name == pr { return 1, lpd @@ -151,9 +168,9 @@ func (ld *LockDiff) DiffFor(pr ProjectRoot) (uint8, LockedProjectDiff) { return 0, LockedProjectDiff{} } -func buildLockedProjectDiff(lp LockedProject) LockedProjectDiff { +func buildLockedProjectDiff(lp gps.LockedProject) LockedProjectDiff { s2 := lp.Ident().Source - r2, b2, v2 := VersionComponentStrings(lp.Version()) + r2, b2, v2 := gps.VersionComponentStrings(lp.Version()) var rev, version, branch, source *StringDiff if s2 != "" { @@ -185,7 +202,7 @@ func buildLockedProjectDiff(lp LockedProject) LockedProjectDiff { // DiffProjects compares two projects and identifies the differences between them. // Returns nil if there are no differences. -func DiffProjects(lp1 LockedProject, lp2 LockedProject) *LockedProjectDiff { +func DiffProjects(lp1, lp2 gps.LockedProject) *LockedProjectDiff { diff := LockedProjectDiff{Name: lp1.Ident().ProjectRoot} s1 := lp1.Ident().Source @@ -194,8 +211,8 @@ func DiffProjects(lp1 LockedProject, lp2 LockedProject) *LockedProjectDiff { diff.Source = &StringDiff{Previous: s1, Current: s2} } - r1, b1, v1 := VersionComponentStrings(lp1.Version()) - r2, b2, v2 := VersionComponentStrings(lp2.Version()) + r1, b1, v1 := gps.VersionComponentStrings(lp1.Version()) + r2, b2, v2 := gps.VersionComponentStrings(lp2.Version()) if r1 != r2 { diff.Revision = &StringDiff{Previous: r1, Current: r2} } diff --git a/gps/lockdiff_test.go b/gps/verify/lockdiff_test.go similarity index 67% rename from gps/lockdiff_test.go rename to gps/verify/lockdiff_test.go index 6fe1d36205..0470a545ae 100644 --- a/gps/lockdiff_test.go +++ b/gps/verify/lockdiff_test.go @@ -2,13 +2,26 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package gps +package verify import ( "bytes" "testing" + + "github.com/golang/dep/gps" ) +// mkPI creates a ProjectIdentifier with the ProjectRoot as the provided +// string, and the Source unset. +// +// Call normalize() on the returned value if you need the Source to be be +// equal to the ProjectRoot. 
+func mkPI(root string) gps.ProjectIdentifier { + return gps.ProjectIdentifier{ + ProjectRoot: gps.ProjectRoot(root), + } +} + func TestStringDiff_NoChange(t *testing.T) { diff := StringDiff{Previous: "foo", Current: "foo"} want := "foo" @@ -45,8 +58,8 @@ func TestStringDiff_Modify(t *testing.T) { } func TestDiffProjects_NoChange(t *testing.T) { - p1 := NewLockedProject(mkPI("github.com/golang/dep/gps"), NewVersion("v0.10.0"), []string{"gps"}) - p2 := NewLockedProject(mkPI("github.com/golang/dep/gps"), NewVersion("v0.10.0"), []string{"gps"}) + p1 := gps.NewLockedProject(mkPI("github.com/golang/dep/gps"), gps.NewVersion("v0.10.0"), []string{"gps"}) + p2 := gps.NewLockedProject(mkPI("github.com/golang/dep/gps"), gps.NewVersion("v0.10.0"), []string{"gps"}) diff := DiffProjects(p1, p2) if diff != nil { @@ -55,19 +68,9 @@ func TestDiffProjects_NoChange(t *testing.T) { } func TestDiffProjects_Modify(t *testing.T) { - p1 := lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, - v: NewBranch("master"), - r: "abc123", - pkgs: []string{"baz", "qux"}, - } - - p2 := lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, - v: NewVersion("v1.0.0"), - r: "def456", - pkgs: []string{"baz", "derp"}, - } + p1 := gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewBranch("master").Pair("abc123"), []string{"baz", "qux"}) + p2 := gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, + gps.NewVersion("v1.0.0").Pair("def456"), []string{"baz", "derp"}) diff := DiffProjects(p1, p2) if diff == nil { @@ -116,19 +119,9 @@ func TestDiffProjects_Modify(t *testing.T) { } func TestDiffProjects_AddPackages(t *testing.T) { - p1 := lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, - v: NewBranch("master"), - r: "abc123", - pkgs: []string{"foobar"}, - } - - p2 := lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, - v: NewVersion("v1.0.0"), - r: "def456", - pkgs: []string{"bazqux", "foobar", "zugzug"}, - } + p1 := gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewBranch("master").Pair("abc123"), []string{"foobar"}) + p2 := gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, + gps.NewVersion("v1.0.0").Pair("def456"), []string{"bazqux", "foobar", "zugzug"}) diff := DiffProjects(p1, p2) if diff == nil { @@ -153,19 +146,9 @@ func TestDiffProjects_AddPackages(t *testing.T) { } func TestDiffProjects_RemovePackages(t *testing.T) { - p1 := lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, - v: NewBranch("master"), - r: "abc123", - pkgs: []string{"athing", "foobar"}, - } - - p2 := lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, - v: NewVersion("v1.0.0"), - r: "def456", - pkgs: []string{"bazqux"}, - } + p1 := gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewBranch("master").Pair("abc123"), []string{"athing", "foobar"}) + p2 := gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"}, + gps.NewVersion("v1.0.0").Pair("def456"), []string{"bazqux"}) diff := DiffProjects(p1, p2) if diff == nil { @@ -192,17 +175,11 @@ func TestDiffProjects_RemovePackages(t *testing.T) { } func TestDiffLocks_NoChange(t *testing.T) { - l1 := safeLock{ - //h: 
[]byte("abc123"), - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, + l1 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), } - l2 := safeLock{ - //h: []byte("abc123"), - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, + l2 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), } diff := DiffLocks(l1, l2) @@ -212,24 +189,14 @@ func TestDiffLocks_NoChange(t *testing.T) { } func TestDiffLocks_AddProjects(t *testing.T) { - l1 := safeLock{ - //h: []byte("abc123"), - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, - } - l2 := safeLock{ - //h: []byte("abc123"), - p: []LockedProject{ - lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux", Source: "https://github.com/mcfork/bazqux.git"}, - v: NewVersion("v0.5.0"), - r: "def456", - pkgs: []string{"p1", "p2"}, - }, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zug/zug"}, v: NewVersion("v1.0.0")}, - }, + l1 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), + } + l2 := gps.SimpleLock{ + gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/baz/qux", Source: "https://github.com/mcfork/bazqux.git"}, + gps.NewVersion("v0.5.0").Pair("def456"), []string{"p1", "p2"}), + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), + gps.NewLockedProject(mkPI("github.com/zug/zug"), gps.NewVersion("v1.0.0"), nil), } diff := DiffLocks(l1, l2) @@ -296,23 +263,13 @@ func TestDiffLocks_AddProjects(t *testing.T) { } func TestDiffLocks_RemoveProjects(t *testing.T) { - l1 := safeLock{ - //h: []byte("abc123"), - p: []LockedProject{ - lockedProject{ - pi: ProjectIdentifier{ProjectRoot: "github.com/a/thing", Source: "https://github.com/mcfork/athing.git"}, - v: NewBranch("master"), - r: "def456", - pkgs: []string{"p1", "p2"}, - }, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, - } - l2 := safeLock{ - //h: []byte("abc123"), - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, - }, + l1 := gps.SimpleLock{ + gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/a/thing", Source: "https://github.com/mcfork/athing.git"}, + gps.NewBranch("master").Pair("def456"), []string{"p1", "p2"}), + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), + } + l2 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/baz/qux"), gps.NewVersion("v1.0.0"), nil), } diff := DiffLocks(l1, l2) @@ -379,22 +336,16 @@ func TestDiffLocks_RemoveProjects(t *testing.T) { } func TestDiffLocks_ModifyProjects(t *testing.T) { - l1 := safeLock{ - //h: []byte("abc123"), - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bu"}, v: NewVersion("v1.0.0")}, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zig/zag"}, v: NewVersion("v1.0.0")}, - }, - } - l2 := safeLock{ - //h: []byte("abc123"), - p: []LockedProject{ - lockedProject{pi: 
ProjectIdentifier{ProjectRoot: "github.com/baz/qux"}, v: NewVersion("v1.0.0")}, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v2.0.0")}, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zig/zag"}, v: NewVersion("v2.0.0")}, - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/zug/zug"}, v: NewVersion("v1.0.0")}, - }, + l1 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), + gps.NewLockedProject(mkPI("github.com/foo/bu"), gps.NewVersion("v1.0.0"), nil), + gps.NewLockedProject(mkPI("github.com/zig/zag"), gps.NewVersion("v1.0.0"), nil), + } + l2 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/baz/qux"), gps.NewVersion("v1.0.0"), nil), + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v2.0.0"), nil), + gps.NewLockedProject(mkPI("github.com/zig/zag"), gps.NewVersion("v2.0.0"), nil), + gps.NewLockedProject(mkPI("github.com/zug/zug"), gps.NewVersion("v1.0.0"), nil), } diff := DiffLocks(l1, l2) @@ -420,10 +371,8 @@ func TestDiffLocks_ModifyProjects(t *testing.T) { } func TestDiffLocks_EmptyInitialLock(t *testing.T) { - l2 := safeLock{ - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, + l2 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), } diff := DiffLocks(nil, l2) @@ -434,11 +383,8 @@ func TestDiffLocks_EmptyInitialLock(t *testing.T) { } func TestDiffLocks_EmptyFinalLock(t *testing.T) { - l1 := safeLock{ - //h: h1, - p: []LockedProject{ - lockedProject{pi: ProjectIdentifier{ProjectRoot: "github.com/foo/bar"}, v: NewVersion("v1.0.0")}, - }, + l1 := gps.SimpleLock{ + gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil), } diff := DiffLocks(l1, nil) From df2c26b7774797c869fa842d05069b49f5d36c79 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Thu, 28 Jun 2018 01:32:17 -0400 Subject: [PATCH 09/25] dep: Get DeltaWriter into a working state This encompasses the first pass at the new, more abstracted diffing system, and the DeltaWriter implementation on top of it. Tests are needed, but cursory testing indicates that we successfully capture all types of diffs and regenerate only the subset of projects that actually need to be touched. 
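A rough sketch of the selection logic follows. This is illustrative only, not
the DeltaWriter code itself: the helper name projectsNeedingRewrite is made up
for this note, and it assumes only the APIs this series introduces
(verify.DiffLocks2, verify.VendorStatus, verify.NoMismatch).

package sketch

import (
	"github.com/golang/dep/gps"
	"github.com/golang/dep/gps/verify"
)

// projectsNeedingRewrite (hypothetical helper) shows the shape of the
// decision a delta-based writer makes: a project's vendor subtree is
// regenerated only if its lock entry changed, or if its on-disk contents
// no longer match the digest recorded in the lock.
func projectsNeedingRewrite(oldLock, newLock gps.Lock, status map[string]verify.VendorStatus) map[gps.ProjectRoot]bool {
	touch := make(map[gps.ProjectRoot]bool)

	// Lock-level differences: projects added, removed, or modified between
	// the old and new locks.
	diff := verify.DiffLocks2(oldLock, newLock)
	for pr, pdiff := range diff.ProjectDiffs {
		if pdiff.Changed() {
			touch[pr] = true
		}
	}

	// Vendor-level differences: anything whose hashed contents disagree with
	// the lock must be rewritten even if its lock entry is unchanged.
	for root, st := range status {
		if st != verify.NoMismatch {
			touch[gps.ProjectRoot(root)] = true
		}
	}

	return touch
}

Combining the lock diff with the per-project digest check is what lets the
writer limit regeneration to the projects that actually changed, rather than
repopulating every project under vendor/.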
--- Gopkg.lock | 43 ++++- cmd/dep/ensure.go | 123 +++++++-------- cmd/dep/ensure_test.go | 4 +- cmd/dep/init.go | 4 +- cmd/dep/integration_test.go | 28 ---- cmd/dep/main.go | 1 - cmd/dep/prune.go | 10 -- cmd/dep/root_analyzer.go | 3 +- cmd/dep/status.go | 9 +- gps/solver.go | 13 +- gps/source_cache.go | 17 ++ gps/source_cache_bolt_encode.go | 5 +- gps/source_cache_bolt_test.go | 22 +-- gps/source_cache_test.go | 24 +-- gps/verify/digest.go | 13 +- gps/verify/lockdiff.go | 245 ++++++++++++++++++++++++++++- internal/feedback/feedback.go | 7 +- internal/feedback/feedback_test.go | 3 +- lock.go | 42 +++-- lock_test.go | 17 +- project.go | 3 + testdata/lock/golden1.toml | 2 +- txn_writer.go | 169 +++++++++++++------- 23 files changed, 574 insertions(+), 233 deletions(-) diff --git a/Gopkg.lock b/Gopkg.lock index a65c3106ee..a1aa86afc9 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -3,91 +3,132 @@ [[projects]] branch = "2.x" + digest = "1:ee2887fecb4d923fa90f8dd9cf33e876bf9260fed62f2ca5a5c3f41b4eb07683" name = "github.com/Masterminds/semver" packages = ["."] + pruneopts = "NUT" revision = "24642bd0573145a5ee04f9be773641695289be46" [[projects]] + digest = "1:442020d26d1f891d5014cae4353b6ff589562c2b303504627de3660adf3fb217" name = "github.com/Masterminds/vcs" packages = ["."] + pruneopts = "NUT" revision = "3084677c2c188840777bff30054f2b553729d329" version = "v1.11.1" [[projects]] branch = "master" + digest = "1:60861e762bdbe39c4c7bf292c291329b731c9925388fd41125888f5c1c595feb" name = "github.com/armon/go-radix" packages = ["."] + pruneopts = "NUT" revision = "4239b77079c7b5d1243b7b4736304ce8ddb6f0f2" [[projects]] + digest = "1:a12d94258c5298ead75e142e8001224bf029f302fed9e96cd39c0eaf90f3954d" name = "github.com/boltdb/bolt" packages = ["."] + pruneopts = "NUT" revision = "2f1ce7a837dcb8da3ec595b1dac9d0632f0f99e8" version = "v1.3.1" [[projects]] + digest = "1:9f35c1344b56e5868d511d231f215edd0650aa572664f856444affdd256e43e4" name = "github.com/golang/protobuf" packages = ["proto"] + pruneopts = "NUT" revision = "925541529c1fa6821df4e44ce2723319eb2be768" version = "v1.0.0" [[projects]] + digest = "1:f5169729244becc423886eae4d72547e28ac3f13f861bed8a9d749bc7238a1c3" name = "github.com/jmank88/nuts" packages = ["."] + pruneopts = "NUT" revision = "8b28145dffc87104e66d074f62ea8080edfad7c8" version = "v0.3.0" [[projects]] branch = "master" + digest = "1:01af3a6abe28784782680e1f75ef8767cfc5d4b230dc156ff7eb8db395cbbfd2" name = "github.com/nightlyone/lockfile" packages = ["."] + pruneopts = "NUT" revision = "e83dc5e7bba095e8d32fb2124714bf41f2a30cb5" [[projects]] + digest = "1:13b8f1a2ce177961dc9231606a52f709fab896c565f3988f60a7f6b4e543a902" name = "github.com/pelletier/go-toml" packages = ["."] + pruneopts = "NUT" revision = "acdc4509485b587f5e675510c4f2c63e90ff68a8" version = "v1.1.0" [[projects]] + digest = "1:5cf3f025cbee5951a4ee961de067c8a89fc95a5adabead774f82822efabab121" name = "github.com/pkg/errors" packages = ["."] + pruneopts = "NUT" revision = "645ef00459ed84a119197bfb8d8205042c6df63d" version = "v0.8.0" [[projects]] branch = "master" + digest = "1:abb4b60c28323cde32c193ce6083bb600fac462d1780cf83461b4c23ed5ce904" name = "github.com/sdboyer/constext" packages = ["."] + pruneopts = "NUT" revision = "836a144573533ea4da4e6929c235fd348aed1c80" [[projects]] branch = "master" + digest = "1:6ad2104db8f34b8656382ef0a7297b9a5cc42e7bdce95d968e02b92fc97470d1" name = "golang.org/x/net" packages = ["context"] + pruneopts = "NUT" revision = "66aacef3dd8a676686c7ae3716979581e8b03c47" [[projects]] branch = "master" + 
digest = "1:39ebcc2b11457b703ae9ee2e8cca0f68df21969c6102cb3b705f76cca0ea0239" name = "golang.org/x/sync" packages = ["errgroup"] + pruneopts = "NUT" revision = "f52d1811a62927559de87708c8913c1650ce4f26" [[projects]] branch = "master" + digest = "1:51912e607c5e28a89fdc7e41d3377b92086ab7f76ded236765dbf98d0a704c5d" name = "golang.org/x/sys" packages = ["unix"] + pruneopts = "NUT" revision = "bb24a47a89eac6c1227fbcb2ae37a8b9ed323366" [[projects]] branch = "v2" + digest = "1:13e704c08924325be00f96e47e7efe0bfddf0913cdfc237423c83f9b183ff590" name = "gopkg.in/yaml.v2" packages = ["."] + pruneopts = "NUT" revision = "d670f9405373e636a5a2765eea47fac0c9bc91a4" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "460ad7112866da4b9a0a626aa3e2fe80699c17bf871afb73b93f836418fb9298" + input-imports = [ + "github.com/Masterminds/semver", + "github.com/Masterminds/vcs", + "github.com/armon/go-radix", + "github.com/boltdb/bolt", + "github.com/golang/protobuf/proto", + "github.com/jmank88/nuts", + "github.com/nightlyone/lockfile", + "github.com/pelletier/go-toml", + "github.com/pkg/errors", + "github.com/sdboyer/constext", + "golang.org/x/sync/errgroup", + "gopkg.in/yaml.v2" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 7bc066cc31..0024482b5a 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -5,7 +5,6 @@ package main import ( - "bytes" "context" "flag" "fmt" @@ -21,6 +20,7 @@ import ( "github.com/golang/dep/gps" "github.com/golang/dep/gps/paths" "github.com/golang/dep/gps/pkgtree" + "github.com/golang/dep/gps/verify" "github.com/pkg/errors" ) @@ -184,6 +184,33 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { return cmd.runVendorOnly(ctx, args, p, sm, params) } + statchan := make(chan map[string]verify.VendorStatus) + var lps []gps.LockedProject + if p.Lock != nil { + lps = p.Lock.Projects() + } + go func(vendorDir string, p []gps.LockedProject) { + // Make sure vendor dir exists + err := os.MkdirAll(vendorDir, os.FileMode(0777)) + if err != nil { + ctx.Err.Printf("Error creating vendor directory: %q", err.Error()) + // TODO(sdboyer) handle these better + os.Exit(1) + } + + sums := make(map[string]verify.VersionedDigest) + for _, lp := range p { + sums[string(lp.Ident().ProjectRoot)] = lp.(verify.VerifiableProject).Digest + } + + status, err := verify.VerifyDepTree(vendorDir, sums) + if err != nil { + ctx.Err.Printf("Error while verifying vendor directory: %q", err.Error()) + os.Exit(1) + } + statchan <- status + }(filepath.Join(p.AbsRoot, "vendor"), lps) + params.RootPackageTree, err = p.ParseRootPackageTree() if err != nil { return err @@ -212,11 +239,11 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { } if cmd.add { - return cmd.runAdd(ctx, args, p, sm, params) + return cmd.runAdd(ctx, args, p, sm, params, statchan) } else if cmd.update { - return cmd.runUpdate(ctx, args, p, sm, params) + return cmd.runUpdate(ctx, args, p, sm, params, statchan) } - return cmd.runDefault(ctx, args, p, sm, params) + return cmd.runDefault(ctx, args, p, sm, params, statchan) } func (cmd *ensureCommand) validateFlags() error { @@ -246,7 +273,7 @@ func (cmd *ensureCommand) vendorBehavior() dep.VendorBehavior { return dep.VendorOnChanged } -func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { +func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params 
gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { // Bare ensure doesn't take any args. if len(args) != 0 { return errors.New("dep ensure only takes spec arguments with -add or -update") @@ -256,52 +283,37 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return err } - if lsat, err := p.LockSatisfiesInputs(sm); err != nil { - return err - } else if !lsat.Passes() { - if ctx.Verbose { - ctx.Out.Printf("%s was already in sync with imports and %s\n", dep.LockName, dep.ManifestName) - } + lock := p.Lock + if lock != nil { + lsat := verify.LockSatisfiesInputs(p.Lock, p.Lock.SolveMeta.InputImports, p.Manifest, params.RootPackageTree) + if !lsat.Passed() { + // TODO(sdboyer) print out what bits are unsatisfied here + solver, err := gps.Prepare(params, sm) + if err != nil { + return errors.Wrap(err, "prepare solver") + } - if cmd.noVendor { + if cmd.noVendor && cmd.dryRun { + return errors.New("Gopkg.lock was not up to date") + } + + solution, err := solver.Solve(context.TODO()) + if err != nil { + return handleAllTheFailuresOfTheWorld(err) + } + lock = dep.LockFromSolution(solution, p.Manifest.PruneOptions) + } else if cmd.noVendor { // The user said not to touch vendor/, so definitely nothing to do. return nil } - sw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorOnChanged, p.Manifest.PruneOptions) - if err != nil { - return err - } - - if cmd.dryRun { - return sw.PrintPreparedActions(ctx.Out, ctx.Verbose) - } - - var logger *log.Logger - if ctx.Verbose { - logger = ctx.Err - } - return errors.WithMessage(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } - solver, err := gps.Prepare(params, sm) - if err != nil { - return errors.Wrap(err, "prepare solver") - } - - if cmd.noVendor && cmd.dryRun { - return errors.New("Gopkg.lock was not up to date") - } - - solution, err := solver.Solve(context.TODO()) - if err != nil { - return handleAllTheFailuresOfTheWorld(err) - } - - sw, err := dep.NewSafeWriter(nil, p.Lock, dep.LockFromSolution(solution), cmd.vendorBehavior(), p.Manifest.PruneOptions) + sw, err := dep.NewDeltaWriter(p.Lock, lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) if err != nil { return err } + if cmd.dryRun { return sw.PrintPreparedActions(ctx.Out, ctx.Verbose) } @@ -310,7 +322,7 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project if ctx.Verbose { logger = ctx.Err } - return errors.Wrap(sw.Write(p.AbsRoot, sm, false, logger), "grouped write of manifest, lock and vendor") + return errors.WithMessage(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { @@ -339,7 +351,7 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj return errors.WithMessage(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } -func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { +func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { if p.Lock == nil { return errors.Errorf("-update works by updating the versions recorded in %s, but %s does not exist", dep.LockName, dep.LockName) } @@ -348,14 +360,6 
@@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return err } - // We'll need to discard this prepared solver as later work changes params, - // but solver preparation is cheap and worth doing up front in order to - // perform the fastpath check of hash comparison. - solver, err := gps.Prepare(params, sm) - if err != nil { - return errors.Wrap(err, "fastpath solver prepare") - } - // When -update is specified without args, allow every dependency to change // versions, regardless of the lock file. if len(args) == 0 { @@ -367,7 +371,7 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, } // Re-prepare a solver now that our params are complete. - solver, err = gps.Prepare(params, sm) + solver, err := gps.Prepare(params, sm) if err != nil { return errors.Wrap(err, "fastpath solver prepare") } @@ -379,7 +383,7 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return handleAllTheFailuresOfTheWorld(err) } - sw, err := dep.NewSafeWriter(nil, p.Lock, dep.LockFromSolution(solution), cmd.vendorBehavior(), p.Manifest.PruneOptions) + sw, err := dep.NewSafeWriter(nil, p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), cmd.vendorBehavior(), p.Manifest.PruneOptions) if err != nil { return err } @@ -394,7 +398,7 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return errors.Wrap(sw.Write(p.AbsRoot, sm, false, logger), "grouped write of manifest, lock and vendor") } -func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { +func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { if len(args) == 0 { return errors.New("must specify at least one project or package to -add") } @@ -411,15 +415,6 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm return errors.Wrap(err, "fastpath solver prepare") } - // Compare the hashes. If they're not equal, bail out and ask the user to - // run a straight `dep ensure` before updating. This is handholding the - // user a bit, but the extra effort required is minimal, and it ensures the - // user is isolating variables in the event of solve problems (was it the - // "pending" changes, or the -add that caused the problem?). 
- if p.Lock != nil && !bytes.Equal(p.Lock.InputsDigest(), solver.HashInputs()) { - ctx.Out.Printf("Warning: %s is out of sync with %s or the project's imports.", dep.LockName, dep.ManifestName) - } - rm, _ := params.RootPackageTree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages()) // TODO(sdboyer) re-enable this once we ToReachMap() intelligently filters out normally-excluded (_*, .*), dirs from errmap @@ -678,7 +673,7 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm } sort.Strings(reqlist) - sw, err := dep.NewSafeWriter(nil, p.Lock, dep.LockFromSolution(solution), dep.VendorOnChanged, p.Manifest.PruneOptions) + sw, err := dep.NewSafeWriter(nil, p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), dep.VendorOnChanged, p.Manifest.PruneOptions) if err != nil { return err } diff --git a/cmd/dep/ensure_test.go b/cmd/dep/ensure_test.go index 0b75d3bfb8..0b101c170d 100644 --- a/cmd/dep/ensure_test.go +++ b/cmd/dep/ensure_test.go @@ -50,11 +50,11 @@ func TestInvalidEnsureFlagCombinations(t *testing.T) { // anything other than the error being non-nil. For now, it works well // because a panic will quickly result if the initial arg length validation // checks are incorrectly handled. - if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}); err == nil { + if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}, nil); err == nil { t.Errorf("no args to plain ensure with -vendor-only") } ec.vendorOnly = false - if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}); err == nil { + if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}, nil); err == nil { t.Errorf("no args to plain ensure") } } diff --git a/cmd/dep/init.go b/cmd/dep/init.go index dc3f38d9ff..1570e2acb6 100644 --- a/cmd/dep/init.go +++ b/cmd/dep/init.go @@ -157,7 +157,7 @@ func (cmd *initCommand) Run(ctx *dep.Ctx, args []string) error { err = handleAllTheFailuresOfTheWorld(err) return errors.Wrap(err, "init failed: unable to solve the dependency graph") } - p.Lock = dep.LockFromSolution(soln) + p.Lock = dep.LockFromSolution(soln, p.Manifest.PruneOptions) rootAnalyzer.FinalizeRootManifestAndLock(p.Manifest, p.Lock, copyLock) @@ -168,7 +168,7 @@ func (cmd *initCommand) Run(ctx *dep.Ctx, args []string) error { return errors.Wrap(err, "init failed: unable to recalculate the lock digest") } - p.Lock.SolveMeta.InputsDigest = s.HashInputs() + //p.Lock.SolveMeta.InputsDigest = s.HashInputs() // Pass timestamp (yyyyMMddHHmmss format) as suffix to backup name. vendorbak, err := dep.BackupVendor(filepath.Join(root, "vendor"), time.Now().Format("20060102150405")) diff --git a/cmd/dep/integration_test.go b/cmd/dep/integration_test.go index 1468300fb9..d000899168 100644 --- a/cmd/dep/integration_test.go +++ b/cmd/dep/integration_test.go @@ -74,34 +74,6 @@ func TestDepCachedir(t *testing.T) { initPath := filepath.Join("testdata", "cachedir") - t.Run("env-cachedir", func(t *testing.T) { - t.Parallel() - testProj := integration.NewTestProject(t, initPath, wd, runMain) - defer testProj.Cleanup() - - testProj.TempDir("cachedir") - cachedir := testProj.Path("cachedir") - testProj.Setenv("DEPCACHEDIR", cachedir) - - // Running `dep ensure` will pull in the dependency into cachedir. - err = testProj.DoRun([]string{"ensure"}) - if err != nil { - // Log the error output from running `dep ensure`, could be useful. 
- t.Logf("`dep ensure` error output: \n%s", testProj.GetStderr()) - t.Errorf("got an unexpected error: %s", err) - } - - // Check that the cache was created in the cachedir. Our fixture has the dependency - // `github.com/sdboyer/deptest` - _, err = os.Stat(testProj.Path("cachedir", "sources", "https---github.com-sdboyer-deptest")) - if err != nil { - if os.IsNotExist(err) { - t.Error("expected cachedir to have been populated but none was found") - } else { - t.Errorf("got an unexpected error: %s", err) - } - } - }) t.Run("env-invalid-cachedir", func(t *testing.T) { t.Parallel() testProj := integration.NewTestProject(t, initPath, wd, runMain) diff --git a/cmd/dep/main.go b/cmd/dep/main.go index 57f0a196dd..a4a2efcb42 100644 --- a/cmd/dep/main.go +++ b/cmd/dep/main.go @@ -91,7 +91,6 @@ func (c *Config) Run() int { &statusCommand{}, &ensureCommand{}, &pruneCommand{}, - &hashinCommand{}, &versionCommand{}, } diff --git a/cmd/dep/prune.go b/cmd/dep/prune.go index 30f3efb5b7..d02f7debf4 100644 --- a/cmd/dep/prune.go +++ b/cmd/dep/prune.go @@ -5,7 +5,6 @@ package main import ( - "bytes" "flag" "io/ioutil" "log" @@ -75,19 +74,10 @@ func (cmd *pruneCommand) Run(ctx *dep.Ctx, args []string) error { params.TraceLogger = ctx.Err } - s, err := gps.Prepare(params, sm) - if err != nil { - return errors.Wrap(err, "could not set up solver for input hashing") - } - if p.Lock == nil { return errors.Errorf("Gopkg.lock must exist for prune to know what files are safe to remove.") } - if !bytes.Equal(s.HashInputs(), p.Lock.SolveMeta.InputsDigest) { - return errors.Errorf("Gopkg.lock is out of sync; run dep ensure before pruning.") - } - pruneLogger := ctx.Err if !ctx.Verbose { pruneLogger = log.New(ioutil.Discard, "", 0) diff --git a/cmd/dep/root_analyzer.go b/cmd/dep/root_analyzer.go index 1cdee4dbf1..d3d432686d 100644 --- a/cmd/dep/root_analyzer.go +++ b/cmd/dep/root_analyzer.go @@ -11,6 +11,7 @@ import ( "github.com/golang/dep" "github.com/golang/dep/gps" + "github.com/golang/dep/gps/verify" fb "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/importers" "golang.org/x/sync/errgroup" @@ -167,7 +168,7 @@ func (a *rootAnalyzer) DeriveManifestAndLock(dir string, pr gps.ProjectRoot) (gp func (a *rootAnalyzer) FinalizeRootManifestAndLock(m *dep.Manifest, l *dep.Lock, ol dep.Lock) { // Iterate through the new projects in solved lock and add them to manifest // if they are direct deps and log feedback for all the new projects. - diff := gps.DiffLocks(&ol, l) + diff := verify.DiffLocks(&ol, l) bi := fb.NewBrokenImportFeedback(diff) bi.LogFeedback(a.ctx.Err) for _, y := range l.Projects() { diff --git a/cmd/dep/status.go b/cmd/dep/status.go index ed1311dd03..4e9e0f9ca7 100644 --- a/cmd/dep/status.go +++ b/cmd/dep/status.go @@ -24,6 +24,7 @@ import ( "github.com/golang/dep" "github.com/golang/dep/gps" "github.com/golang/dep/gps/paths" + "github.com/golang/dep/gps/verify" "github.com/pkg/errors" ) @@ -912,11 +913,6 @@ func (cmd *statusCommand) runStatusAll(ctx *dep.Ctx, out outputter, p *dep.Proje return false, 0, err } - s, err := gps.Prepare(params, sm) - if err != nil { - return false, 0, errors.Wrapf(err, "could not set up solver for input hashing") - } - // Errors while collecting constraints should not fail the whole status run. // It should count the error and tell the user about incomplete results. 
cm, ccerrs := collectConstraints(ctx, p, sm) @@ -932,7 +928,8 @@ func (cmd *statusCommand) runStatusAll(ctx *dep.Ctx, out outputter, p *dep.Proje return slp[i].Ident().Less(slp[j].Ident()) }) - if bytes.Equal(s.HashInputs(), p.Lock.SolveMeta.InputsDigest) { + lsat := verify.LockSatisfiesInputs(p.Lock, p.Lock.SolveMeta.InputImports, p.Manifest, params.RootPackageTree) + if lsat.Passed() { // If these are equal, we're guaranteed that the lock is a transitively // complete picture of all deps. That eliminates the need for at least // some checks. diff --git a/gps/solver.go b/gps/solver.go index dc5f4cd778..c9b541d953 100644 --- a/gps/solver.go +++ b/gps/solver.go @@ -450,11 +450,16 @@ func (s *solver) Solve(ctx context.Context) (Solution, error) { soln.i = s.rd.externalImportList(s.stdLibFn) // Convert ProjectAtoms into LockedProjects - soln.p = make([]LockedProject, len(all)) - k := 0 + soln.p = make([]LockedProject, 0, len(all)) for pa, pl := range all { - soln.p[k] = pa2lp(pa, pl) - k++ + lp := pa2lp(pa, pl) + // Pass back the original inputlp directly if it Eqs what was + // selected. + if inputlp, has := s.rd.rlm[lp.Ident().ProjectRoot]; has && lp.Eq(inputlp) { + lp = inputlp + } + + soln.p = append(soln.p, lp) } } diff --git a/gps/source_cache.go b/gps/source_cache.go index 1123f317d8..fe3c9b7266 100644 --- a/gps/source_cache.go +++ b/gps/source_cache.go @@ -271,3 +271,20 @@ func (c *singleSourceCacheMemory) toUnpaired(v Version) (UnpairedVersion, bool) panic(fmt.Sprintf("unknown version type %T", v)) } } + +// TODO(sdboyer) remove once source caching can be moved into separate package +func locksAreEq(l1, l2 Lock) bool { + p1, p2 := l1.Projects(), l2.Projects() + if len(p1) != len(p2) { + return false + } + + p1, p2 = sortLockedProjects(p1), sortLockedProjects(p2) + + for k, lp := range p1 { + if !lp.Eq(p2[k]) { + return false + } + } + return true +} diff --git a/gps/source_cache_bolt_encode.go b/gps/source_cache_bolt_encode.go index d91d9018b8..5e2c255345 100644 --- a/gps/source_cache_bolt_encode.go +++ b/gps/source_cache_bolt_encode.go @@ -342,8 +342,9 @@ func cachePutLock(b *bolt.Bucket, l Lock) error { // cacheGetLock returns a new *safeLock with the fields retrieved from the bolt.Bucket. 
func cacheGetLock(b *bolt.Bucket) (*safeLock, error) { - l := &safeLock{ - i: strings.Split(string(b.Get(cacheKeyInputImports)), "#"), + l := &safeLock{} + if ii := b.Get(cacheKeyInputImports); len(ii) > 0 { + l.i = strings.Split(string(ii), "#") } if locked := b.Bucket(cacheKeyLock); locked != nil { diff --git a/gps/source_cache_bolt_test.go b/gps/source_cache_bolt_test.go index a661ab82ea..d0bf0fce97 100644 --- a/gps/source_cache_bolt_test.go +++ b/gps/source_cache_bolt_test.go @@ -53,7 +53,6 @@ func TestBoltCacheTimeout(t *testing.T) { } lock := &safeLock{ - //h: []byte("test_hash"), p: []LockedProject{ NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"gps"}), NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.10.0"), nil), @@ -120,8 +119,9 @@ func TestBoltCacheTimeout(t *testing.T) { t.Error("no manifest and lock found for revision") } compareManifests(t, manifest, gotM) - if dl := DiffLocks(lock, gotL); dl != nil { - t.Errorf("lock differences:\n\t %#v", dl) + // TODO(sdboyer) use DiffLocks after refactoring to avoid import cycles + if !locksAreEq(lock, gotL) { + t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", lock, gotL) } got, ok := c.getPackageTree(rev, root) @@ -162,8 +162,9 @@ func TestBoltCacheTimeout(t *testing.T) { t.Error("no manifest and lock found for revision") } compareManifests(t, manifest, gotM) - if dl := DiffLocks(lock, gotL); dl != nil { - t.Errorf("lock differences:\n\t %#v", dl) + // TODO(sdboyer) use DiffLocks after refactoring to avoid import cycles + if !locksAreEq(lock, gotL) { + t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", lock, gotL) } gotPtree, ok := c.getPackageTree(rev, root) @@ -195,8 +196,9 @@ func TestBoltCacheTimeout(t *testing.T) { t.Error("no manifest and lock found for revision") } compareManifests(t, manifest, gotM) - if dl := DiffLocks(lock, gotL); dl != nil { - t.Errorf("lock differences:\n\t %#v", dl) + // TODO(sdboyer) use DiffLocks after refactoring to avoid import cycles + if !locksAreEq(lock, gotL) { + t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", lock, gotL) } got, ok := c.getPackageTree(rev, root) @@ -233,7 +235,6 @@ func TestBoltCacheTimeout(t *testing.T) { } newLock := &safeLock{ - //h: []byte("new_test_hash"), p: []LockedProject{ NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v1"), []string{"gps"}), }, @@ -283,8 +284,9 @@ func TestBoltCacheTimeout(t *testing.T) { t.Error("no manifest and lock found for revision") } compareManifests(t, newManifest, gotM) - if dl := DiffLocks(newLock, gotL); dl != nil { - t.Errorf("lock differences:\n\t %#v", dl) + // TODO(sdboyer) use DiffLocks after refactoring to avoid import cycles + if !locksAreEq(lock, gotL) { + t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", lock, gotL) } got, ok := c.getPackageTree(rev, root) diff --git a/gps/source_cache_test.go b/gps/source_cache_test.go index 14d6d0d448..5e39063311 100644 --- a/gps/source_cache_test.go +++ b/gps/source_cache_test.go @@ -116,13 +116,12 @@ func (test singleSourceCacheTest) run(t *testing.T) { ig: pkgtree.NewIgnoredRuleset([]string{"a", "b"}), } var l Lock = &safeLock{ - //h: []byte("test_hash"), p: []LockedProject{ - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"gps"}), - NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.10.0"), nil), - NewLockedProject(mkPI("github.com/sdboyer/gps3"), NewVersion("v0.10.0"), []string{"gps", "flugle"}), - NewLockedProject(mkPI("foo"), NewVersion("nada"), 
[]string{"foo"}), - NewLockedProject(mkPI("github.com/sdboyer/gps4"), NewVersion("v0.10.0"), []string{"flugle", "gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("anything"), []string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.10.0").Pair("whatever"), nil), + NewLockedProject(mkPI("github.com/sdboyer/gps3"), NewVersion("v0.10.0").Pair("again"), []string{"gps", "flugle"}), + NewLockedProject(mkPI("foo"), NewVersion("nada").Pair("itsaliving"), []string{"foo"}), + NewLockedProject(mkPI("github.com/sdboyer/gps4"), NewVersion("v0.10.0").Pair("meow"), []string{"flugle", "gps"}), }, } c.setManifestAndLock(rev, testAnalyzerInfo, m, l) @@ -140,8 +139,9 @@ func (test singleSourceCacheTest) run(t *testing.T) { t.Error("no manifest and lock found for revision") } compareManifests(t, m, gotM) - if dl := DiffLocks(l, gotL); dl != nil { - t.Errorf("lock differences:\n\t %#v", dl) + // TODO(sdboyer) use DiffLocks after refactoring to avoid import cycles + if !locksAreEq(l, gotL) { + t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", l, gotL) } m = &simpleRootManifest{ @@ -163,10 +163,9 @@ func (test singleSourceCacheTest) run(t *testing.T) { ig: pkgtree.NewIgnoredRuleset([]string{"c", "d"}), } l = &safeLock{ - //h: []byte("different_test_hash"), p: []LockedProject{ NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("278a227dfc3d595a33a77ff3f841fd8ca1bc8cd0"), []string{"gps"}), - NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.11.0"), []string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.11.0").Pair("anything"), []string{"gps"}), NewLockedProject(mkPI("github.com/sdboyer/gps3"), Revision("278a227dfc3d595a33a77ff3f841fd8ca1bc8cd0"), []string{"gps"}), }, } @@ -185,8 +184,9 @@ func (test singleSourceCacheTest) run(t *testing.T) { t.Error("no manifest and lock found for revision") } compareManifests(t, m, gotM) - if dl := DiffLocks(l, gotL); dl != nil { - t.Errorf("lock differences:\n\t %#v", dl) + // TODO(sdboyer) use DiffLocks after refactoring to avoid import cycles + if !locksAreEq(l, gotL) { + t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", l, gotL) } }) diff --git a/gps/verify/digest.go b/gps/verify/digest.go index 3ce0edfa1d..b636352104 100644 --- a/gps/verify/digest.go +++ b/gps/verify/digest.go @@ -347,6 +347,11 @@ func (vd VersionedDigest) String() string { return fmt.Sprintf("%s:%s", strconv.Itoa(vd.HashVersion), hex.EncodeToString(vd.Digest)) } +// IsEmpty indicates if the VersionedDigest is the zero value. +func (vd VersionedDigest) IsEmpty() bool { + return vd.HashVersion == 0 && len(vd.Digest) == 0 +} + // ParseVersionedDigest decodes the string representation of versioned digest // information - a colon-separated string with a version number in the first // part and the hex-encdoed hash digest in the second - as a VersionedDigest. @@ -378,7 +383,7 @@ func ParseVersionedDigest(input string) (VersionedDigest, error) { // platform where the file system path separator is a character other than // solidus, one particular dependency would be represented as // "github.com/alice/alice1". 
-func VerifyDepTree(osDirname string, wantDigests map[string][]byte) (map[string]VendorStatus, error) { +func VerifyDepTree(osDirname string, wantDigests map[string]VersionedDigest) (map[string]VendorStatus, error) { osDirname = filepath.Clean(osDirname) // Ensure top level pathname is a directory @@ -455,12 +460,14 @@ func VerifyDepTree(osDirname string, wantDigests map[string][]byte) (map[string] if expectedSum, ok := wantDigests[slashPathname]; ok { ls := EmptyDigestInLock - if len(expectedSum) > 0 { + if expectedSum.HashVersion != HashVersion { + ls = HashVersionMismatch + } else if len(expectedSum.Digest) > 0 { projectSum, err := DigestFromDirectory(osPathname) if err != nil { return nil, errors.Wrap(err, "cannot compute dependency hash") } - if bytes.Equal(projectSum.Digest, expectedSum) { + if bytes.Equal(projectSum.Digest, expectedSum.Digest) { ls = NoMismatch } else { ls = DigestMismatchInLock diff --git a/gps/verify/lockdiff.go b/gps/verify/lockdiff.go index b0f230b893..7d0b90b54a 100644 --- a/gps/verify/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -5,6 +5,7 @@ package verify import ( + "bytes" "fmt" "sort" "strings" @@ -49,8 +50,10 @@ func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { }) { return lps } + cp := make([]gps.LockedProject, len(lps)) copy(cp, lps) + sort.Slice(cp, func(i, j int) bool { return cp[i].Ident().Less(cp[j].Ident()) }) @@ -65,6 +68,12 @@ type LockDiff struct { Modify []LockedProjectDiff } +type LockDiff2 struct { + AddedImportInputs []string + RemovedImportInputs []string + ProjectDiffs map[gps.ProjectRoot]LockedProjectDiff2 +} + // LockedProjectDiff contains the before and after snapshot of a project reference. // Fields are only populated when there is a difference, otherwise they are empty. type LockedProjectDiff struct { @@ -76,15 +85,247 @@ type LockedProjectDiff struct { Packages []StringDiff } +type LockedProjectDiff2 struct { + Name gps.ProjectRoot + ProjectRemoved, ProjectAdded bool + LockedProjectPartsDiff +} + +type LockedProjectPartsDiff struct { + PackagesAdded, PackagesRemoved []string + VersionBefore, VersionAfter gps.UnpairedVersion + RevisionBefore, RevisionAfter gps.Revision + SourceBefore, SourceAfter string + PruneOptsBefore, PruneOptsAfter gps.PruneOptions + HashChanged, HashVersionChanged bool +} + +// DiffLocks compares two locks and identifies the differences between them. +// Returns nil if there are no differences. 
+func DiffLocks2(l1, l2 gps.Lock) LockDiff2 { + // Default nil locks to empty locks, so that we can still generate a diff + if l1 == nil { + if l2 == nil { + return LockDiff2{} + } + l1 = gps.SimpleLock{} + } + if l2 == nil { + l2 = gps.SimpleLock{} + } + + p1, p2 := l1.Projects(), l2.Projects() + + p1 = sortLockedProjects(p1) + p2 = sortLockedProjects(p2) + + diff := LockDiff2{ + ProjectDiffs: make(map[gps.ProjectRoot]LockedProjectDiff2), + } + + var i2next int + for i1 := 0; i1 < len(p1); i1++ { + lp1 := p1[i1] + pr1 := lp1.Ident().ProjectRoot + + lpd := LockedProjectDiff2{ + Name: pr1, + } + + for i2 := i2next; i2 < len(p2); i2++ { + lp2 := p2[i2] + pr2 := lp2.Ident().ProjectRoot + + switch strings.Compare(string(pr1), string(pr2)) { + case 0: // Found a matching project + lpd.LockedProjectPartsDiff = DiffProjects2(lp1, lp2) + i2next = i2 + 1 // Don't visit this project again + case +1: // Found a new project + diff.ProjectDiffs[pr2] = LockedProjectDiff2{ + Name: pr2, + ProjectAdded: true, + } + i2next = i2 + 1 // Don't visit this project again + continue // Keep looking for a matching project + case -1: // Project has been removed, handled below + lpd.ProjectRemoved = true + } + + break // Done evaluating this project, move onto the next + } + + diff.ProjectDiffs[pr1] = lpd + } + + // Anything that still hasn't been evaluated are adds + for i2 := i2next; i2 < len(p2); i2++ { + lp2 := p2[i2] + pr2 := lp2.Ident().ProjectRoot + diff.ProjectDiffs[pr2] = LockedProjectDiff2{ + Name: pr2, + ProjectAdded: true, + } + } + + // Only do the import inputs if both of the locks fulfill the interface, AND + // both have non-empty inputs. + il1, ok1 := l1.(gps.LockWithImports) + il2, ok2 := l2.(gps.LockWithImports) + + if ok1 && ok2 && len(il1.InputImports()) > 0 && len(il2.InputImports()) > 0 { + diff.AddedImportInputs, diff.RemovedImportInputs = findAddedAndRemoved(il1.InputImports(), il2.InputImports()) + } + + return diff +} + +func findAddedAndRemoved(l1, l2 []string) (add, remove []string) { + // Computing package add/removes could probably be optimized to O(n), but + // it's not critical path for any known case, so not worth the effort right now. 
+ p1, p2 := make(map[string]bool, len(l1)), make(map[string]bool, len(l2)) + + for _, pkg := range l1 { + p1[pkg] = true + } + for _, pkg := range l2 { + p2[pkg] = true + } + + for pkg := range p1 { + if !p2[pkg] { + remove = append(remove, pkg) + } + } + for pkg := range p2 { + if !p1[pkg] { + add = append(add, pkg) + } + } + + return add, remove +} + +func DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDiff { + ld := LockedProjectPartsDiff{ + SourceBefore: lp1.Ident().Source, + SourceAfter: lp2.Ident().Source, + } + + ld.PackagesRemoved, ld.PackagesAdded = findAddedAndRemoved(lp1.Packages(), lp2.Packages()) + + switch v := lp1.Version().(type) { + case gps.PairedVersion: + ld.VersionBefore, ld.RevisionBefore = v.Unpair(), v.Revision() + case gps.Revision: + ld.RevisionBefore = v + case gps.UnpairedVersion: + // This should ideally never happen + ld.VersionBefore = v + } + + switch v := lp2.Version().(type) { + case gps.PairedVersion: + ld.VersionAfter, ld.RevisionAfter = v.Unpair(), v.Revision() + case gps.Revision: + ld.RevisionAfter = v + case gps.UnpairedVersion: + // This should ideally never happen + ld.VersionAfter = v + } + + vp1, ok1 := lp1.(VerifiableProject) + vp2, ok2 := lp2.(VerifiableProject) + + if ok1 && ok2 { + ld.PruneOptsBefore, ld.PruneOptsAfter = vp1.PruneOpts, vp2.PruneOpts + + // Only consider hashes for diffing if neither were the zero value. + if !vp1.Digest.IsEmpty() && !vp2.Digest.IsEmpty() { + if vp1.Digest.HashVersion != vp2.Digest.HashVersion { + ld.HashVersionChanged = true + } + if !bytes.Equal(vp1.Digest.Digest, vp2.Digest.Digest) { + ld.HashChanged = true + } + } + } + + return ld +} + +func (ld LockDiff2) Changed() bool { + if len(ld.AddedImportInputs) > 0 || len(ld.RemovedImportInputs) > 0 { + return true + } + + for _, ld := range ld.ProjectDiffs { + if ld.Changed() { + return true + } + } + + return false +} + +func (ld LockedProjectDiff2) Changed() bool { + return ld.WasRemoved() || ld.WasAdded() || ld.RevisionChanged() || ld.VersionChanged() || ld.SourceChanged() || ld.PackagesChanged() || ld.HashChanged || ld.HashVersionChanged +} + +func (ld LockedProjectDiff2) WasRemoved() bool { + return ld.ProjectRemoved +} + +func (ld LockedProjectDiff2) WasAdded() bool { + return ld.ProjectAdded +} + +func (ld LockedProjectPartsDiff) SourceChanged() bool { + return ld.SourceBefore != ld.SourceAfter +} + +func (ld LockedProjectPartsDiff) VersionChanged() bool { + if ld.VersionBefore == nil && ld.VersionAfter == nil { + return false + } else if (ld.VersionBefore == nil || ld.VersionAfter == nil) || (ld.VersionBefore.Type() != ld.VersionAfter.Type()) { + return true + } else if !ld.VersionBefore.Matches(ld.VersionAfter) { + return true + } + + return false +} + +func (ld LockedProjectPartsDiff) VersionTypeChanged() bool { + if ld.VersionBefore == nil && ld.VersionAfter == nil { + return false + } else if (ld.VersionBefore == nil || ld.VersionAfter == nil) || (ld.VersionBefore.Type() != ld.VersionAfter.Type()) { + return true + } + + return false +} + +func (ld LockedProjectPartsDiff) RevisionChanged() bool { + return ld.RevisionBefore != ld.RevisionAfter +} + +func (ld LockedProjectPartsDiff) PackagesChanged() bool { + return len(ld.PackagesAdded) > 0 || len(ld.PackagesRemoved) > 0 +} + +func (ld LockedProjectPartsDiff) PruneOptsChanged() bool { + return ld.PruneOptsBefore != ld.PruneOptsAfter +} + // DiffLocks compares two locks and identifies the differences between them. // Returns nil if there are no differences. 
func DiffLocks(l1, l2 gps.Lock) *LockDiff { // Default nil locks to empty locks, so that we can still generate a diff if l1 == nil { - l1 = &gps.SimpleLock{} + l1 = gps.SimpleLock{} } if l2 == nil { - l2 = &gps.SimpleLock{} + l2 = gps.SimpleLock{} } p1, p2 := l1.Projects(), l2.Projects() diff --git a/internal/feedback/feedback.go b/internal/feedback/feedback.go index 20c4dc4dcb..2f20cbcbca 100644 --- a/internal/feedback/feedback.go +++ b/internal/feedback/feedback.go @@ -10,6 +10,7 @@ import ( "log" "github.com/golang/dep/gps" + "github.com/golang/dep/gps/verify" ) const ( @@ -87,7 +88,7 @@ type brokenImport interface { } type modifiedImport struct { - source, branch, revision, version *gps.StringDiff + source, branch, revision, version *verify.StringDiff projectPath string } @@ -123,7 +124,7 @@ func (mi modifiedImport) String() string { } type removedImport struct { - source, branch, revision, version *gps.StringDiff + source, branch, revision, version *verify.StringDiff projectPath string } @@ -157,7 +158,7 @@ type BrokenImportFeedback struct { // NewBrokenImportFeedback builds a feedback entry that compares an initially // imported, unsolved lock to the same lock after it has been solved. -func NewBrokenImportFeedback(ld *gps.LockDiff) *BrokenImportFeedback { +func NewBrokenImportFeedback(ld *verify.LockDiff) *BrokenImportFeedback { bi := &BrokenImportFeedback{} for _, lpd := range ld.Modify { // Ignore diffs where it's just a modified package set diff --git a/internal/feedback/feedback_test.go b/internal/feedback/feedback_test.go index cd3b4f1ca2..9baf0fb185 100644 --- a/internal/feedback/feedback_test.go +++ b/internal/feedback/feedback_test.go @@ -12,6 +12,7 @@ import ( "github.com/golang/dep" "github.com/golang/dep/gps" + "github.com/golang/dep/gps/verify" _ "github.com/golang/dep/internal/test" // DO NOT REMOVE, allows go test ./... 
-update to work ) @@ -150,7 +151,7 @@ func TestFeedback_BrokenImport(t *testing.T) { P: []gps.LockedProject{gps.NewLockedProject(c.altPID, c.currentVersion, nil)}, } log := log2.New(buf, "", 0) - feedback := NewBrokenImportFeedback(gps.DiffLocks(&ol, &l)) + feedback := NewBrokenImportFeedback(verify.DiffLocks(&ol, &l)) feedback.LogFeedback(log) got := strings.TrimSpace(buf.String()) if c.want != got { diff --git a/lock.go b/lock.go index ce6d5d4f03..fce0643b6d 100644 --- a/lock.go +++ b/lock.go @@ -10,7 +10,6 @@ import ( "sort" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/pkgtree" "github.com/golang/dep/gps/verify" "github.com/pelletier/go-toml" "github.com/pkg/errors" @@ -77,7 +76,7 @@ func readLock(r io.Reader) (*Lock, error) { func fromRawLock(raw rawLock) (*Lock, error) { l := &Lock{ - P: make([]gps.LockedProject, len(raw.Projects)), + P: make([]gps.LockedProject, 0, len(raw.Projects)), } l.SolveMeta.AnalyzerName = raw.SolveMeta.AnalyzerName @@ -86,7 +85,7 @@ func fromRawLock(raw rawLock) (*Lock, error) { l.SolveMeta.SolverVersion = raw.SolveMeta.SolverVersion l.SolveMeta.InputImports = raw.SolveMeta.InputImports - for i, ld := range raw.Projects { + for _, ld := range raw.Projects { r := gps.Revision(ld.Revision) var v gps.Version = r @@ -111,7 +110,7 @@ func fromRawLock(raw rawLock) (*Lock, error) { LockedProject: gps.NewLockedProject(id, v, ld.Packages), } if ld.Digest != "" { - vp.Digest, err = pkgtree.ParseVersionedDigest(ld.Digest) + vp.Digest, err = verify.ParseVersionedDigest(ld.Digest) if err != nil { return nil, err } @@ -124,24 +123,23 @@ func fromRawLock(raw rawLock) (*Lock, error) { // Add the vendor pruning bit so that gps doesn't get confused vp.PruneOpts = po | gps.PruneNestedVendorDirs - l.P[i] = vp + l.P = append(l.P, vp) } return l, nil } -// InputsDigest returns the hash of inputs which produced this lock data. -// -// TODO(sdboyer) remove, this is now deprecated -func (l *Lock) InputsDigest() []byte { - return nil -} - // Projects returns the list of LockedProjects contained in the lock data. func (l *Lock) Projects() []gps.LockedProject { return l.P } +// InputImports reports the list of input imports that were used in generating +// this Lock. +func (l *Lock) InputImports() []string { + return l.SolveMeta.InputImports +} + // HasProjectWithRoot checks if the lock contains a project with the provided // ProjectRoot. // @@ -162,6 +160,7 @@ func (l *Lock) toRaw() rawLock { SolveMeta: solveMeta{ AnalyzerName: l.SolveMeta.AnalyzerName, AnalyzerVersion: l.SolveMeta.AnalyzerVersion, + InputImports: l.SolveMeta.InputImports, SolverName: l.SolveMeta.SolverName, SolverVersion: l.SolveMeta.SolverVersion, }, @@ -207,22 +206,35 @@ func (l *Lock) MarshalTOML() ([]byte, error) { } // LockFromSolution converts a gps.Solution to dep's representation of a lock. +// It makes sure that that the provided prune options are set correctly, as the +// solver does not use VerifiableProjects for new selections it makes. // // Data is defensively copied wherever necessary to ensure the resulting *Lock // shares no memory with the input solution. 
-func LockFromSolution(in gps.Solution) *Lock { +func LockFromSolution(in gps.Solution, prune gps.CascadingPruneOptions) *Lock { p := in.Projects() l := &Lock{ SolveMeta: SolveMeta{ AnalyzerName: in.AnalyzerName(), AnalyzerVersion: in.AnalyzerVersion(), + InputImports: in.InputImports(), SolverName: in.SolverName(), SolverVersion: in.SolverVersion(), }, - P: make([]gps.LockedProject, len(p)), + P: make([]gps.LockedProject, 0, len(p)), + } + + for _, lp := range p { + if vp, ok := lp.(verify.VerifiableProject); ok { + l.P = append(l.P, vp) + } else { + l.P = append(l.P, verify.VerifiableProject{ + LockedProject: lp, + PruneOpts: prune.PruneOptionsFor(lp.Ident().ProjectRoot), + }) + } } - copy(l.P, p) return l } diff --git a/lock_test.go b/lock_test.go index c2b8a3d970..e6acc742f1 100644 --- a/lock_test.go +++ b/lock_test.go @@ -10,7 +10,6 @@ import ( "testing" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/pkgtree" "github.com/golang/dep/gps/verify" "github.com/golang/dep/internal/test" ) @@ -37,8 +36,8 @@ func TestReadLock(t *testing.T) { []string{"."}, ), PruneOpts: gps.PruneOptions(1), - Digest: pkgtree.VersionedDigest{ - HashVersion: pkgtree.HashVersion, + Digest: verify.VersionedDigest{ + HashVersion: verify.HashVersion, Digest: []byte("foo"), }, }, @@ -67,8 +66,8 @@ func TestReadLock(t *testing.T) { []string{"."}, ), PruneOpts: gps.PruneOptions(15), - Digest: pkgtree.VersionedDigest{ - HashVersion: pkgtree.HashVersion, + Digest: verify.VersionedDigest{ + HashVersion: verify.HashVersion, Digest: []byte("foo"), }, }, @@ -95,8 +94,8 @@ func TestWriteLock(t *testing.T) { []string{"."}, ), PruneOpts: gps.PruneOptions(1), - Digest: pkgtree.VersionedDigest{ - HashVersion: pkgtree.HashVersion, + Digest: verify.VersionedDigest{ + HashVersion: verify.HashVersion, Digest: []byte("foo"), }, }, @@ -129,8 +128,8 @@ func TestWriteLock(t *testing.T) { []string{"."}, ), PruneOpts: gps.PruneOptions(15), - Digest: pkgtree.VersionedDigest{ - HashVersion: pkgtree.HashVersion, + Digest: verify.VersionedDigest{ + HashVersion: verify.HashVersion, Digest: []byte("foo"), }, }, diff --git a/project.go b/project.go index d2677e8866..0247ae9b40 100644 --- a/project.go +++ b/project.go @@ -105,6 +105,9 @@ type Project struct { Manifest *Manifest Lock *Lock // Optional RootPackageTree pkgtree.PackageTree + // If populated, contains the results of comparing the Lock against the + // current vendor tree, per verify.VerifyDepTree(). + //VendorStatus map[string]verify.VendorStatus } // SetRoot sets the project AbsRoot and ResolvedAbsRoot. If root is not a symlink, ResolvedAbsRoot will be set to root. 
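For call sites, the visible change is the new prune-options argument. A minimal sketch of the updated call, as the ensure command uses it later in this series (the newLock variable name is illustrative only):

    newLock := dep.LockFromSolution(solution, p.Manifest.PruneOptions)

Any project the solver produced without verification data is wrapped in a verify.VerifiableProject on the way through, so the resulting lock always carries per-project prune options.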
diff --git a/testdata/lock/golden1.toml b/testdata/lock/golden1.toml index 292fdb2d85..4ffbce35b9 100644 --- a/testdata/lock/golden1.toml +++ b/testdata/lock/golden1.toml @@ -3,7 +3,7 @@ digest = "1:666f6f" name = "github.com/golang/dep" packages = ["."] - pruneopts = "TUN" + pruneopts = "NUT" revision = "d05d5aca9f895d19e9265839bffeadd74a2d2ecb" version = "0.12.2" diff --git a/txn_writer.go b/txn_writer.go index f78e9c76ee..791497f5e9 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -7,13 +7,13 @@ package dep import ( "bytes" "context" + "fmt" "io/ioutil" "log" "os" "path/filepath" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/pkgtree" "github.com/golang/dep/gps/verify" "github.com/golang/dep/internal/fs" "github.com/pelletier/go-toml" @@ -65,7 +65,7 @@ var lockFileComment = []byte(`# This file is autogenerated, do not edit; changes type SafeWriter struct { Manifest *Manifest lock *Lock - lockDiff *gps.LockDiff + lockDiff *verify.LockDiff writeVendor bool writeLock bool pruneOptions gps.CascadingPruneOptions @@ -98,7 +98,7 @@ func NewSafeWriter(manifest *Manifest, oldLock, newLock *Lock, vendor VendorBeha return nil, errors.New("must provide newLock when oldLock is specified") } - sw.lockDiff = gps.DiffLocks(oldLock, newLock) + sw.lockDiff = verify.DiffLocks(oldLock, newLock) if sw.lockDiff != nil { sw.writeLock = true } @@ -131,7 +131,7 @@ func (sw *SafeWriter) HasManifest() bool { } type rawStringDiff struct { - *gps.StringDiff + *verify.StringDiff } // MarshalTOML serializes the diff as a string. @@ -148,7 +148,7 @@ type rawLockedProjectDiff struct { Packages []rawStringDiff `toml:"packages,omitempty"` } -func toRawLockedProjectDiff(diff gps.LockedProjectDiff) rawLockedProjectDiff { +func toRawLockedProjectDiff(diff verify.LockedProjectDiff) rawLockedProjectDiff { // this is a shallow copy since we aren't modifying the raw diff raw := rawLockedProjectDiff{Name: diff.Name} if diff.Source != nil { @@ -174,7 +174,7 @@ type rawLockedProjectDiffs struct { Projects []rawLockedProjectDiff `toml:"projects"` } -func toRawLockedProjectDiffs(diffs []gps.LockedProjectDiff) rawLockedProjectDiffs { +func toRawLockedProjectDiffs(diffs []verify.LockedProjectDiff) rawLockedProjectDiffs { raw := rawLockedProjectDiffs{ Projects: make([]rawLockedProjectDiff, len(diffs)), } @@ -186,10 +186,10 @@ func toRawLockedProjectDiffs(diffs []gps.LockedProjectDiff) rawLockedProjectDiff return raw } -func formatLockDiff(diff gps.LockDiff) (string, error) { +func formatLockDiff(diff verify.LockDiff) (string, error) { var buf bytes.Buffer - writeDiffs := func(diffs []gps.LockedProjectDiff) error { + writeDiffs := func(diffs []verify.LockedProjectDiff) error { raw := toRawLockedProjectDiffs(diffs) chunk, err := toml.Marshal(raw) if err != nil { @@ -309,17 +309,6 @@ func (sw *SafeWriter) Write(root string, sm gps.SourceManager, examples bool, lo } } - if sw.writeLock { - l, err := sw.lock.MarshalTOML() - if err != nil { - return errors.Wrap(err, "failed to marshal lock to TOML") - } - - if err = ioutil.WriteFile(filepath.Join(td, LockName), append(lockFileComment, l...), 0666); err != nil { - return errors.Wrap(err, "failed to write lock file to temp dir") - } - } - if sw.writeVendor { var onWrite func(gps.WriteProgress) if logger != nil { @@ -331,6 +320,26 @@ func (sw *SafeWriter) Write(root string, sm gps.SourceManager, examples bool, lo if err != nil { return errors.Wrap(err, "error while writing out vendor tree") } + + for k, lp := range sw.lock.Projects() { + vp := lp.(verify.VerifiableProject) + 
vp.Digest, err = verify.DigestFromDirectory(filepath.Join(td, "vendor", string(lp.Ident().ProjectRoot))) + if err != nil { + return errors.Wrapf(err, "error while hashing tree of %s in vendor", lp.Ident().ProjectRoot) + } + sw.lock.P[k] = vp + } + } + + if sw.writeLock { + l, err := sw.lock.MarshalTOML() + if err != nil { + return errors.Wrap(err, "failed to marshal lock to TOML") + } + + if err = ioutil.WriteFile(filepath.Join(td, LockName), append(lockFileComment, l...), 0666); err != nil { + return errors.Wrap(err, "failed to write lock file to temp dir") + } } // Ensure vendor/.git is preserved if present @@ -490,11 +499,11 @@ func hasDotGit(path string) bool { type DeltaWriter struct { lock *Lock - lockDiff *gps.LockDiff + lockDiff verify.LockDiff2 pruneOptions gps.CascadingPruneOptions vendorDir string changed map[gps.ProjectRoot]changeType - status map[string]pkgtree.VendorStatus + status map[string]verify.VendorStatus } type changeType uint8 @@ -502,16 +511,18 @@ type changeType uint8 const ( noChange changeType = iota solveChanged - pruneChanged - hashChanged - // FIXME need added/removed up here + hashMismatch + hashVersionMismatch + missingFromTree + projectAdded + projectRemoved ) // NewDeltaWriter prepares a vendor writer that will construct a vendor // directory by writing out only those projects that actually need to be written // out - they have changed in some way, or they lack the necessary hash // information to be verified. -func NewDeltaWriter(oldLock, newLock *Lock, prune gps.CascadingPruneOptions, vendorDir string) (TransactionWriter, error) { +func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string) (TransactionWriter, error) { sw := &DeltaWriter{ lock: newLock, pruneOptions: prune, @@ -530,40 +541,39 @@ func NewDeltaWriter(oldLock, newLock *Lock, prune gps.CascadingPruneOptions, ven return NewSafeWriter(nil, oldLock, newLock, VendorOnChanged, prune) } - sw.lockDiff = gps.DiffLocks(oldLock, newLock) + sw.lockDiff = verify.DiffLocks2(oldLock, newLock) - // 1. find all the ones that truly changed in solve - // 2. find the ones that only changed pruneopts - // 3. find the ones that (already) had a mismatch with what's in vendor - sums := make(map[string][]byte) + for pr, lpd := range sw.lockDiff.ProjectDiffs { + // Turn off all the hash diffing markers in the lock, unless we already + // know there's a mismatch. We don't want to rely on them for our case, + // as we're not sure they'll have been correctly populated in the new + // lock at this point. 
+ lpd.HashVersionChanged, lpd.HashChanged = false, false + sw.lockDiff.ProjectDiffs[pr] = lpd - for _, lp := range newLock.Projects() { - pr := lp.Ident().ProjectRoot - // TODO(sdboyer) Not the best heuristic to assume that a PPS indicates - if vp, ok := lp.(verify.VerifiableProject); !ok { - sw.changed[pr] = solveChanged - sums[string(pr)] = []byte{} - } else { - sums[string(pr)] = vp.Digest.Digest - sw.changed[pr] = pruneChanged - //if _, has := sw.changed[pr]; !has && vp.PruneOpts != prune.PruneOptionsFor(pr) { - //} + if lpd.Changed() { + if lpd.WasAdded() { + sw.changed[pr] = projectAdded + } else if lpd.WasRemoved() { + sw.changed[pr] = projectRemoved + } else { + sw.changed[pr] = solveChanged + } } } - status, err := pkgtree.VerifyDepTree(vendorDir, sums) - if err != nil { - return nil, err - } - for spr, stat := range status { pr := gps.ProjectRoot(spr) - switch stat { - case pkgtree.NotInLock, pkgtree.NotInTree: - // FIXME - case pkgtree.EmptyDigestInLock, pkgtree.DigestMismatchInLock: - if _, has := sw.changed[pr]; !has { - sw.changed[gps.ProjectRoot(pr)] = hashChanged + // These cases only matter if there was no change already recorded via + // the differ. + if _, has := sw.changed[pr]; !has { + switch stat { + case verify.NotInTree: + sw.changed[pr] = missingFromTree + case verify.EmptyDigestInLock, verify.DigestMismatchInLock: + sw.changed[pr] = hashMismatch + case verify.HashVersionMismatch: + sw.changed[pr] = hashVersionMismatch } } } @@ -604,17 +614,57 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l projs[lp.Ident().ProjectRoot] = lp } + dropped := []gps.ProjectRoot{} // TODO(sdboyer) add a txn/rollback layer, like the safewriter? - //for pr, reason := range dw.changed { - for pr, _ := range dw.changed { + for pr, reason := range dw.changed { to := filepath.FromSlash(filepath.Join(vnewpath, string(pr))) po := dw.pruneOptions.PruneOptionsFor(pr) - + lpd := dw.lockDiff.ProjectDiffs[pr] + + switch reason { + case noChange: + panic(fmt.Sprintf("wtf, no change for %s", pr)) + case solveChanged: + if lpd.SourceChanged() { + logger.Printf("Writing %s: source changed (%s -> %s)", pr, lpd.SourceBefore, lpd.SourceAfter) + } else if lpd.VersionChanged() { + logger.Printf("Writing %s: version changed (%s -> %s)", pr, lpd.VersionBefore, lpd.VersionAfter) + } else if lpd.RevisionChanged() { + logger.Printf("Writing %s: revision changed (%s -> %s)", pr, lpd.RevisionBefore, lpd.RevisionAfter) + } else if lpd.PackagesChanged() { + la, lr := len(lpd.PackagesAdded), len(lpd.PackagesRemoved) + if la > 0 && lr > 0 { + logger.Printf("Writing %s: packages changed (%v added, %v removed)", pr, la, lr) + } else if la > 0 { + logger.Printf("Writing %s: packages changed (%v added)", pr, la) + } else { + logger.Printf("Writing %s: packages changed (%v removed)", pr, lr) + } + } else if lpd.PruneOptsChanged() { + // Override what's on the lockdiff with the extra info we have; + // this lets us excise PruneNestedVendorDirs and get the real + // value from the input param in place. 
+ old := lpd.PruneOptsBefore & ^gps.PruneNestedVendorDirs + new := lpd.PruneOptsAfter & ^gps.PruneNestedVendorDirs + logger.Printf("Writing %s: prune options changed (%s -> %s)", pr, old, new) + } + case hashMismatch: + logger.Printf("Writing %s: hash mismatch between Gopkg.lock and vendor contents", pr) + case hashVersionMismatch: + logger.Printf("Writing %s: hashing algorithm mismatch", pr) + case projectAdded: + logger.Printf("Writing new project %s", pr) + case projectRemoved: + dropped = append(dropped, pr) + continue + case missingFromTree: + logger.Printf("Writing %s: missing from vendor", pr) + } if err := sm.ExportPrunedProject(context.TODO(), projs[pr], po, to); err != nil { return errors.Wrapf(err, "failed to export %s", pr) } - digest, err := pkgtree.DigestFromDirectory(to) + digest, err := verify.DigestFromDirectory(to) if err != nil { return errors.Wrapf(err, "failed to hash %s", pr) } @@ -622,6 +672,8 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l // Update the new Lock with verification information. for k, lp := range dw.lock.P { if lp.Ident().ProjectRoot == pr { + vp := lp.(verify.VerifiableProject) + vp.Digest = digest dw.lock.P[k] = verify.VerifiableProject{ LockedProject: lp, PruneOpts: po, @@ -649,6 +701,11 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l } } + for _, pr := range dropped { + // Kind of a lie to print this here. ĀÆ\_(惄)_/ĀÆ + logger.Printf("Discarding unused project %s", pr) + } + err = os.RemoveAll(vpath) if err != nil { return errors.Wrap(err, "failed to remove original vendor directory") From 13ec2118d26dddef04afb410dbdf9fa3bb234935 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Thu, 28 Jun 2018 10:28:52 -0400 Subject: [PATCH 10/25] gps: Diff->Delta, and bitfield for change checking Also convert the SafeWriter to use LockDelta. --- gps/verify/digest.go | 2 +- gps/verify/digest_test.go | 90 +++++++++++---- gps/verify/lockdiff.go | 152 ++++++++++++++++++------- testdata/txn_writer/original_lock.toml | 14 --- testdata/txn_writer/updated_lock.toml | 21 ---- txn_writer.go | 94 ++++----------- txn_writer_test.go | 33 ------ 7 files changed, 199 insertions(+), 207 deletions(-) delete mode 100644 testdata/txn_writer/original_lock.toml delete mode 100644 testdata/txn_writer/updated_lock.toml diff --git a/gps/verify/digest.go b/gps/verify/digest.go index b636352104..9f0042289a 100644 --- a/gps/verify/digest.go +++ b/gps/verify/digest.go @@ -294,7 +294,7 @@ const ( // EmptyDigestInLock is used when the digest for a dependency listed in the // lock file is the empty string. While this is a special case of - // DigestMismatchInLock, keeping both cases discrete is a desired feature. + // DigestMismatchInLock, separating the cases is a desired feature. 
EmptyDigestInLock // DigestMismatchInLock is used when the digest for a dependency listed in diff --git a/gps/verify/digest_test.go b/gps/verify/digest_test.go index 36a7912d03..c092312177 100644 --- a/gps/verify/digest_test.go +++ b/gps/verify/digest_test.go @@ -152,15 +152,6 @@ func TestVerifyDepTree(t *testing.T) { "launchpad.net/match": {0x7e, 0x10, 0x6, 0x2f, 0x8, 0x3, 0x3c, 0x76, 0xae, 0xbc, 0xa4, 0xc9, 0xec, 0x73, 0x67, 0x15, 0x70, 0x2b, 0x0, 0x89, 0x27, 0xbb, 0x61, 0x9d, 0xc7, 0xc3, 0x39, 0x46, 0x3, 0x91, 0xb7, 0x3b}, } - status, err := VerifyDepTree(vendorRoot, wantSums) - if err != nil { - t.Fatal(err) - } - - if got, want := len(status), 7; got != want { - t.Errorf("\n(GOT): %v; (WNT): %v", got, want) - } - checkStatus := func(t *testing.T, status map[string]VendorStatus, key string, want VendorStatus) { got, ok := status[key] if !ok { @@ -172,25 +163,74 @@ func TestVerifyDepTree(t *testing.T) { } } - checkStatus(t, status, "github.com/alice/match", NoMismatch) - checkStatus(t, status, "github.com/alice/mismatch", DigestMismatchInLock) - checkStatus(t, status, "github.com/alice/notInLock", NotInLock) - checkStatus(t, status, "github.com/bob/match", NoMismatch) - checkStatus(t, status, "github.com/bob/emptyDigest", EmptyDigestInLock) - checkStatus(t, status, "github.com/charlie/notInTree", NotInTree) - checkStatus(t, status, "launchpad.net/match", NoMismatch) - - if t.Failed() { - for k, want := range wantSums { - got, err := DigestFromDirectory(filepath.Join(vendorRoot, k)) - if err != nil { - t.Error(err) + t.Run("normal", func(t *testing.T) { + t.Parallel() + wantDigests := make(map[string]VersionedDigest) + for k, v := range wantSums { + wantDigests[k] = VersionedDigest{ + HashVersion: HashVersion, + Digest: v, } - if !bytes.Equal(got.Digest, want) { - t.Errorf("%q\n(GOT):\n\t%#v\n(WNT):\n\t%#v", k, got, want) + } + + status, err := VerifyDepTree(vendorRoot, wantDigests) + if err != nil { + t.Fatal(err) + } + + if got, want := len(status), 7; got != want { + t.Errorf("Unexpected result count from VerifyDepTree:\n\t(GOT): %v\n\t(WNT): %v", got, want) + } + + checkStatus(t, status, "github.com/alice/match", NoMismatch) + checkStatus(t, status, "github.com/alice/mismatch", DigestMismatchInLock) + checkStatus(t, status, "github.com/alice/notInLock", NotInLock) + checkStatus(t, status, "github.com/bob/match", NoMismatch) + checkStatus(t, status, "github.com/bob/emptyDigest", EmptyDigestInLock) + checkStatus(t, status, "github.com/charlie/notInTree", NotInTree) + checkStatus(t, status, "launchpad.net/match", NoMismatch) + + if t.Failed() { + for k, want := range wantSums { + got, err := DigestFromDirectory(filepath.Join(vendorRoot, k)) + if err != nil { + t.Error(err) + } + if !bytes.Equal(got.Digest, want) { + t.Errorf("Digest mismatch for %q\n(GOT):\n\t%#v\n(WNT):\n\t%#v", k, got, want) + } } } - } + + }) + + t.Run("hashv-mismatch", func(t *testing.T) { + t.Parallel() + wantDigests := make(map[string]VersionedDigest) + for k, v := range wantSums { + wantDigests[k] = VersionedDigest{ + HashVersion: HashVersion + 1, + Digest: v, + } + } + + status, err := VerifyDepTree(vendorRoot, wantDigests) + if err != nil { + t.Fatal(err) + } + + if got, want := len(status), 7; got != want { + t.Errorf("Unexpected result count from VerifyDepTree:\n\t(GOT): %v\n\t(WNT): %v", got, want) + } + + checkStatus(t, status, "github.com/alice/match", HashVersionMismatch) + checkStatus(t, status, "github.com/alice/mismatch", HashVersionMismatch) + checkStatus(t, status, "github.com/alice/notInLock", 
NotInLock) + checkStatus(t, status, "github.com/bob/match", HashVersionMismatch) + checkStatus(t, status, "github.com/bob/emptyDigest", HashVersionMismatch) + checkStatus(t, status, "github.com/charlie/notInTree", NotInTree) + checkStatus(t, status, "launchpad.net/match", HashVersionMismatch) + }) } func BenchmarkDigestFromDirectory(b *testing.B) { diff --git a/gps/verify/lockdiff.go b/gps/verify/lockdiff.go index 7d0b90b54a..3a742dc90d 100644 --- a/gps/verify/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -68,10 +68,10 @@ type LockDiff struct { Modify []LockedProjectDiff } -type LockDiff2 struct { +type LockDelta struct { AddedImportInputs []string RemovedImportInputs []string - ProjectDiffs map[gps.ProjectRoot]LockedProjectDiff2 + ProjectDeltas map[gps.ProjectRoot]LockedProjectDelta } // LockedProjectDiff contains the before and after snapshot of a project reference. @@ -85,13 +85,13 @@ type LockedProjectDiff struct { Packages []StringDiff } -type LockedProjectDiff2 struct { +type LockedProjectDelta struct { Name gps.ProjectRoot ProjectRemoved, ProjectAdded bool - LockedProjectPartsDiff + LockedProjectPartsDelta } -type LockedProjectPartsDiff struct { +type LockedProjectPartsDelta struct { PackagesAdded, PackagesRemoved []string VersionBefore, VersionAfter gps.UnpairedVersion RevisionBefore, RevisionAfter gps.Revision @@ -100,13 +100,13 @@ type LockedProjectPartsDiff struct { HashChanged, HashVersionChanged bool } -// DiffLocks compares two locks and identifies the differences between them. -// Returns nil if there are no differences. -func DiffLocks2(l1, l2 gps.Lock) LockDiff2 { +// DiffLocks2 compares two locks and computes a semantically rich delta between +// them. +func DiffLocks2(l1, l2 gps.Lock) LockDelta { // Default nil locks to empty locks, so that we can still generate a diff if l1 == nil { if l2 == nil { - return LockDiff2{} + return LockDelta{} } l1 = gps.SimpleLock{} } @@ -119,8 +119,8 @@ func DiffLocks2(l1, l2 gps.Lock) LockDiff2 { p1 = sortLockedProjects(p1) p2 = sortLockedProjects(p2) - diff := LockDiff2{ - ProjectDiffs: make(map[gps.ProjectRoot]LockedProjectDiff2), + diff := LockDelta{ + ProjectDeltas: make(map[gps.ProjectRoot]LockedProjectDelta), } var i2next int @@ -128,7 +128,7 @@ func DiffLocks2(l1, l2 gps.Lock) LockDiff2 { lp1 := p1[i1] pr1 := lp1.Ident().ProjectRoot - lpd := LockedProjectDiff2{ + lpd := LockedProjectDelta{ Name: pr1, } @@ -138,10 +138,10 @@ func DiffLocks2(l1, l2 gps.Lock) LockDiff2 { switch strings.Compare(string(pr1), string(pr2)) { case 0: // Found a matching project - lpd.LockedProjectPartsDiff = DiffProjects2(lp1, lp2) + lpd.LockedProjectPartsDelta = DiffProjects2(lp1, lp2) i2next = i2 + 1 // Don't visit this project again case +1: // Found a new project - diff.ProjectDiffs[pr2] = LockedProjectDiff2{ + diff.ProjectDeltas[pr2] = LockedProjectDelta{ Name: pr2, ProjectAdded: true, } @@ -154,14 +154,14 @@ func DiffLocks2(l1, l2 gps.Lock) LockDiff2 { break // Done evaluating this project, move onto the next } - diff.ProjectDiffs[pr1] = lpd + diff.ProjectDeltas[pr1] = lpd } // Anything that still hasn't been evaluated are adds for i2 := i2next; i2 < len(p2); i2++ { lp2 := p2[i2] pr2 := lp2.Ident().ProjectRoot - diff.ProjectDiffs[pr2] = LockedProjectDiff2{ + diff.ProjectDeltas[pr2] = LockedProjectDelta{ Name: pr2, ProjectAdded: true, } @@ -205,8 +205,8 @@ func findAddedAndRemoved(l1, l2 []string) (add, remove []string) { return add, remove } -func DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDiff { - ld := LockedProjectPartsDiff{ +func 
DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDelta { + ld := LockedProjectPartsDelta{ SourceBefore: lp1.Ident().Source, SourceAfter: lp2.Ident().Source, } @@ -239,27 +239,48 @@ func DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDiff { if ok1 && ok2 { ld.PruneOptsBefore, ld.PruneOptsAfter = vp1.PruneOpts, vp2.PruneOpts - // Only consider hashes for diffing if neither were the zero value. - if !vp1.Digest.IsEmpty() && !vp2.Digest.IsEmpty() { - if vp1.Digest.HashVersion != vp2.Digest.HashVersion { - ld.HashVersionChanged = true - } - if !bytes.Equal(vp1.Digest.Digest, vp2.Digest.Digest) { - ld.HashChanged = true - } + if vp1.Digest.HashVersion != vp2.Digest.HashVersion { + ld.HashVersionChanged = true + } + if !bytes.Equal(vp1.Digest.Digest, vp2.Digest.Digest) { + ld.HashChanged = true } + } else if ok1 { + ld.PruneOptsBefore = vp1.PruneOpts + ld.HashVersionChanged = true + ld.HashChanged = true + } else if ok2 { + ld.PruneOptsAfter = vp2.PruneOpts + ld.HashVersionChanged = true + ld.HashChanged = true } return ld } -func (ld LockDiff2) Changed() bool { - if len(ld.AddedImportInputs) > 0 || len(ld.RemovedImportInputs) > 0 { +type DeltaDimension uint16 + +const ( + InputImportsChanged DeltaDimension = 1 << iota + ProjectAdded + ProjectRemoved + SourceChanged + VersionChanged + RevisionChanged + PackagesChanged + PruneOptsChanged + HashVersionChanged + HashChanged + AnyChanged = (1 << iota) - 1 +) + +func (ld LockDelta) Changed(flags DeltaDimension) bool { + if flags&InputImportsChanged != 0 && (len(ld.AddedImportInputs) > 0 || len(ld.RemovedImportInputs) > 0) { return true } - for _, ld := range ld.ProjectDiffs { - if ld.Changed() { + for _, ld := range ld.ProjectDeltas { + if ld.Changed(AnyChanged) { return true } } @@ -267,23 +288,69 @@ func (ld LockDiff2) Changed() bool { return false } -func (ld LockedProjectDiff2) Changed() bool { - return ld.WasRemoved() || ld.WasAdded() || ld.RevisionChanged() || ld.VersionChanged() || ld.SourceChanged() || ld.PackagesChanged() || ld.HashChanged || ld.HashVersionChanged +// Changed indicates whether the delta contains a change along the dimensions +// with their corresponding bits set. +// +// For example, if only the Revision changed, and this method is called with +// SourceChanged | VersionChanged, it will return false; if it is called with +// VersionChanged | RevisionChanged, it will return true. 
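+//
+// For example, a caller that wants to ignore hash-only differences (the
+// pattern dep's txn_writer follows via its anyExceptHash helper) can pass
+// AnyChanged & ^HashChanged & ^HashVersionChanged.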
+func (ld LockedProjectDelta) Changed(flags DeltaDimension) bool { + if flags&ProjectAdded != 0 && ld.WasAdded() { + return true + } + + if flags&ProjectRemoved != 0 && ld.WasRemoved() { + return true + } + + return ld.LockedProjectPartsDelta.Changed(flags & ^ProjectAdded & ^ProjectRemoved) } -func (ld LockedProjectDiff2) WasRemoved() bool { +func (ld LockedProjectDelta) WasRemoved() bool { return ld.ProjectRemoved } -func (ld LockedProjectDiff2) WasAdded() bool { +func (ld LockedProjectDelta) WasAdded() bool { return ld.ProjectAdded } -func (ld LockedProjectPartsDiff) SourceChanged() bool { +func (ld LockedProjectPartsDelta) Changed(flags DeltaDimension) bool { + if flags&SourceChanged != 0 && ld.SourceChanged() { + return true + } + + if flags&RevisionChanged != 0 && ld.RevisionChanged() { + return true + } + + if flags&PruneOptsChanged != 0 && ld.PruneOptsChanged() { + return true + } + + if flags&HashChanged != 0 && ld.HashChanged { + return true + } + + if flags&HashVersionChanged != 0 && ld.HashVersionChanged { + return true + } + + if flags&VersionChanged != 0 && ld.VersionChanged() { + return true + } + + if flags&PackagesChanged != 0 && ld.PackagesChanged() { + return true + } + + return false +} + +func (ld LockedProjectPartsDelta) SourceChanged() bool { return ld.SourceBefore != ld.SourceAfter } -func (ld LockedProjectPartsDiff) VersionChanged() bool { +func (ld LockedProjectPartsDelta) VersionChanged() bool { if ld.VersionBefore == nil && ld.VersionAfter == nil { return false } else if (ld.VersionBefore == nil || ld.VersionAfter == nil) || (ld.VersionBefore.Type() != ld.VersionAfter.Type()) { @@ -295,7 +362,7 @@ func (ld LockedProjectPartsDiff) VersionChanged() bool { return false } -func (ld LockedProjectPartsDiff) VersionTypeChanged() bool { +func (ld LockedProjectPartsDelta) VersionTypeChanged() bool { if ld.VersionBefore == nil && ld.VersionAfter == nil { return false } else if (ld.VersionBefore == nil || ld.VersionAfter == nil) || (ld.VersionBefore.Type() != ld.VersionAfter.Type()) { @@ -305,15 +372,15 @@ func (ld LockedProjectPartsDiff) VersionTypeChanged() bool { return false } -func (ld LockedProjectPartsDiff) RevisionChanged() bool { +func (ld LockedProjectPartsDelta) RevisionChanged() bool { return ld.RevisionBefore != ld.RevisionAfter } -func (ld LockedProjectPartsDiff) PackagesChanged() bool { +func (ld LockedProjectPartsDelta) PackagesChanged() bool { return len(ld.PackagesAdded) > 0 || len(ld.PackagesRemoved) > 0 } -func (ld LockedProjectPartsDiff) PruneOptsChanged() bool { +func (ld LockedProjectPartsDelta) PruneOptsChanged() bool { return ld.PruneOptsBefore != ld.PruneOptsAfter } @@ -518,3 +585,8 @@ func DiffProjects(lp1, lp2 gps.LockedProject) *LockedProjectDiff { } return &diff } + +type VendorDiff struct { + LockDelta LockDelta + VendorStatus map[string]VendorStatus +} diff --git a/testdata/txn_writer/original_lock.toml b/testdata/txn_writer/original_lock.toml deleted file mode 100644 index 2651064873..0000000000 --- a/testdata/txn_writer/original_lock.toml +++ /dev/null @@ -1,14 +0,0 @@ -[solve-meta] - inputs-digest = "595716d270828e763c811ef79c9c41f85b1d1bfbdfe85280036405c03772206c" - -[[projects]] - name = "github.com/foo/bar" - branch = "master" - revision = "f24338400f072ef18125ae0fbe6b06fe6d1783e7" - packages = ["placeholder", "util"] - -[[projects]] - name = "github.com/stuff/placeholder" - version = "2.0.0" - revision = "6694017eeb4e20fd277b049bf29dba4895c97234" - packages = ["."] diff --git a/testdata/txn_writer/updated_lock.toml 
b/testdata/txn_writer/updated_lock.toml deleted file mode 100644 index 81ae83ba34..0000000000 --- a/testdata/txn_writer/updated_lock.toml +++ /dev/null @@ -1,21 +0,0 @@ -[solve-meta] - inputs-digest = "2252a285ab27944a4d7adcba8dbd03980f59ba652f12db39fa93b927c345593e" - -[[projects]] - name = "github.com/foo/bar" - source = "http://github.example.com/foo/bar" - version = "1.2.0" - revision = "2a3a211e171803acb82d1d5d42ceb53228f51751" - packages = ["thing","util"] - -[[projects]] - name = "github.com/stuff/realthing" - version = "2.0.0" - revision = "1f02e52d6bac308da54ab84a234c58a98ca82347" - packages = ["."] - -[[projects]] - name = "github.com/sdboyer/deptest" - packages = ["."] - revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" - version = "v1.0.0" diff --git a/txn_writer.go b/txn_writer.go index 791497f5e9..d15d670150 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -5,7 +5,6 @@ package dep import ( - "bytes" "context" "fmt" "io/ioutil" @@ -16,10 +15,14 @@ import ( "github.com/golang/dep/gps" "github.com/golang/dep/gps/verify" "github.com/golang/dep/internal/fs" - "github.com/pelletier/go-toml" "github.com/pkg/errors" ) +const ( + // Helper consts for common diff-checking patterns. + anyExceptHash verify.DeltaDimension = verify.AnyChanged & ^verify.HashVersionChanged & ^verify.HashChanged +) + // Example string to be written to the manifest file // if no dependencies are found in the project // during `dep init` @@ -65,7 +68,7 @@ var lockFileComment = []byte(`# This file is autogenerated, do not edit; changes type SafeWriter struct { Manifest *Manifest lock *Lock - lockDiff *verify.LockDiff + lockDiff verify.LockDelta writeVendor bool writeLock bool pruneOptions gps.CascadingPruneOptions @@ -98,8 +101,8 @@ func NewSafeWriter(manifest *Manifest, oldLock, newLock *Lock, vendor VendorBeha return nil, errors.New("must provide newLock when oldLock is specified") } - sw.lockDiff = verify.DiffLocks(oldLock, newLock) - if sw.lockDiff != nil { + sw.lockDiff = verify.DiffLocks2(oldLock, newLock) + if sw.lockDiff.Changed(anyExceptHash) { sw.writeLock = true } } else if newLock != nil { @@ -110,7 +113,7 @@ func NewSafeWriter(manifest *Manifest, oldLock, newLock *Lock, vendor VendorBeha case VendorAlways: sw.writeVendor = true case VendorOnChanged: - sw.writeVendor = sw.lockDiff != nil || (newLock != nil && oldLock == nil) + sw.writeVendor = sw.lockDiff.Changed(anyExceptHash & ^verify.InputImportsChanged) || (newLock != nil && oldLock == nil) } if sw.writeVendor && newLock == nil { @@ -186,47 +189,6 @@ func toRawLockedProjectDiffs(diffs []verify.LockedProjectDiff) rawLockedProjectD return raw } -func formatLockDiff(diff verify.LockDiff) (string, error) { - var buf bytes.Buffer - - writeDiffs := func(diffs []verify.LockedProjectDiff) error { - raw := toRawLockedProjectDiffs(diffs) - chunk, err := toml.Marshal(raw) - if err != nil { - return err - } - buf.Write(chunk) - buf.WriteString("\n") - return nil - } - - if len(diff.Add) > 0 { - buf.WriteString("Add:") - err := writeDiffs(diff.Add) - if err != nil { - return "", errors.Wrap(err, "Unable to format LockDiff.Add") - } - } - - if len(diff.Remove) > 0 { - buf.WriteString("Remove:") - err := writeDiffs(diff.Remove) - if err != nil { - return "", errors.Wrap(err, "Unable to format LockDiff.Remove") - } - } - - if len(diff.Modify) > 0 { - buf.WriteString("Modify:") - err := writeDiffs(diff.Modify) - if err != nil { - return "", errors.Wrap(err, "Unable to format LockDiff.Modify") - } - } - - return buf.String(), nil -} - // VendorBehavior 
defines when the vendor directory should be written. type VendorBehavior int @@ -455,23 +417,14 @@ func (sw *SafeWriter) PrintPreparedActions(output *log.Logger, verbose bool) err } if sw.writeLock { - if sw.lockDiff == nil { - if verbose { - l, err := sw.lock.MarshalTOML() - if err != nil { - return errors.Wrap(err, "ensure DryRun cannot serialize lock") - } - output.Printf("Would have written the following %s:\n%s\n", LockName, string(l)) - } else { - output.Printf("Would have written %s.\n", LockName) - } - } else { - output.Printf("Would have written the following changes to %s:\n", LockName) - diff, err := formatLockDiff(*sw.lockDiff) + if verbose { + l, err := sw.lock.MarshalTOML() if err != nil { - return errors.Wrap(err, "ensure DryRun cannot serialize the lock diff") + return errors.Wrap(err, "ensure DryRun cannot serialize lock") } - output.Println(diff) + output.Printf("Would have written the following %s:\n%s\n", LockName, string(l)) + } else { + output.Printf("Would have written %s.\n", LockName) } } @@ -499,7 +452,7 @@ func hasDotGit(path string) bool { type DeltaWriter struct { lock *Lock - lockDiff verify.LockDiff2 + lockDiff verify.LockDelta pruneOptions gps.CascadingPruneOptions vendorDir string changed map[gps.ProjectRoot]changeType @@ -543,15 +496,10 @@ func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatu sw.lockDiff = verify.DiffLocks2(oldLock, newLock) - for pr, lpd := range sw.lockDiff.ProjectDiffs { - // Turn off all the hash diffing markers in the lock, unless we already - // know there's a mismatch. We don't want to rely on them for our case, - // as we're not sure they'll have been correctly populated in the new - // lock at this point. - lpd.HashVersionChanged, lpd.HashChanged = false, false - sw.lockDiff.ProjectDiffs[pr] = lpd - - if lpd.Changed() { + for pr, lpd := range sw.lockDiff.ProjectDeltas { + // Hash changes aren't relevant at this point, as they could be empty + // and therefore a symptom of a solver change. 
+ if lpd.Changed(anyExceptHash) { if lpd.WasAdded() { sw.changed[pr] = projectAdded } else if lpd.WasRemoved() { @@ -619,7 +567,7 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l for pr, reason := range dw.changed { to := filepath.FromSlash(filepath.Join(vnewpath, string(pr))) po := dw.pruneOptions.PruneOptionsFor(pr) - lpd := dw.lockDiff.ProjectDiffs[pr] + lpd := dw.lockDiff.ProjectDeltas[pr] switch reason { case noChange: diff --git a/txn_writer_test.go b/txn_writer_test.go index 747dd80ff7..fad6279d3c 100644 --- a/txn_writer_test.go +++ b/txn_writer_test.go @@ -501,39 +501,6 @@ func TestSafeWriter_NewLockSkipVendor(t *testing.T) { } } -func TestSafeWriter_DiffLocks(t *testing.T) { - test.NeedsExternalNetwork(t) - test.NeedsGit(t) - - h := test.NewHelper(t) - defer h.Cleanup() - - pc := NewTestProjectContext(h, safeWriterProject) - defer pc.Release() - pc.CopyFile(LockName, "txn_writer/original_lock.toml") - pc.Load() - - ulf := h.GetTestFile("txn_writer/updated_lock.toml") - defer ulf.Close() - updatedLock, err := readLock(ulf) - h.Must(err) - - sw, _ := NewSafeWriter(nil, pc.Project.Lock, updatedLock, VendorOnChanged, defaultCascadingPruneOptions()) - - // Verify lock diff - diff := sw.lockDiff - if diff == nil { - t.Fatal("Expected the payload to contain a diff of the lock files") - } - - output, err := formatLockDiff(*diff) - h.Must(err) - goldenOutput := "txn_writer/expected_diff_output.txt" - if err = pc.ShouldMatchGolden(goldenOutput, output); err != nil { - t.Fatal(err) - } -} - func TestHasDotGit(t *testing.T) { // Create a tempdir with .git file td, err := ioutil.TempDir(os.TempDir(), "dotGitFile") From 6b47f58a62bc04cca25a3aeeecd5b83cfe2a4666 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Sun, 1 Jul 2018 15:13:22 -0400 Subject: [PATCH 11/25] dep: Tell the user why we're solving Add output to all of the information we assemble when checking if the Lock satisfies the current input set. Also some refactoring of the ctx.LoadProject() process to have fewer partial states. 
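In rough outline, the check-and-report flow that ensure now follows is (a condensed sketch of the diff below; error handling and verbosity gating omitted):

    lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree)
    if !lsat.Passed() {
        for _, missing := range lsat.MissingImports() {
            ctx.Out.Printf("\t%s is missing from input-imports\n", missing)
        }
        // ...and likewise for ExcessImports, UnmatchedOverrides and
        // UnmatchedConstraints, before falling back to a full solve.
    }

All identifiers above appear in the changes that follow.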
--- Gopkg.lock | 2 +- cmd/dep/ensure.go | 63 ++++++++++++++---------------- cmd/dep/init.go | 6 +-- cmd/dep/status.go | 18 +++------ context.go | 50 ++++++++++++++++++++++++ gps/verify/digest.go | 4 +- gps/verify/lock.go | 89 ++++++++++++++++++++++++++---------------- gps/verify/lockdiff.go | 2 +- lock.go | 13 ++++++ manifest.go | 7 ++++ project.go | 75 +++++++++++++++-------------------- txn_writer.go | 73 ++++++++++++++++++++++++---------- 12 files changed, 253 insertions(+), 149 deletions(-) diff --git a/Gopkg.lock b/Gopkg.lock index a1aa86afc9..02bde4018b 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -128,7 +128,7 @@ "github.com/pkg/errors", "github.com/sdboyer/constext", "golang.org/x/sync/errgroup", - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v2", ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 0024482b5a..93d1a2b469 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -211,11 +211,6 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { statchan <- status }(filepath.Join(p.AbsRoot, "vendor"), lps) - params.RootPackageTree, err = p.ParseRootPackageTree() - if err != nil { - return err - } - if fatal, err := checkErrors(params.RootPackageTree.Packages, p.Manifest.IgnoredPackages()); err != nil { if fatal { return err @@ -283,20 +278,32 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return err } - lock := p.Lock + lock := p.ChangedLock if lock != nil { - lsat := verify.LockSatisfiesInputs(p.Lock, p.Lock.SolveMeta.InputImports, p.Manifest, params.RootPackageTree) + lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree) if !lsat.Passed() { - // TODO(sdboyer) print out what bits are unsatisfied here + if ctx.Verbose { + ctx.Out.Println("Gopkg.lock is out of sync with Gopkg.toml and project code:") + for _, missing := range lsat.MissingImports() { + ctx.Out.Printf("\t%s is missing from input-imports\n", missing) + } + for _, excess := range lsat.ExcessImports() { + ctx.Out.Printf("\t%s is in input-imports, but isn't imported\n", excess) + } + for pr, unmatched := range lsat.UnmatchedOverrides() { + ctx.Out.Printf("\t%s is at %s, which is not allowed by override %s\n", pr, unmatched.V, unmatched.C) + } + for pr, unmatched := range lsat.UnmatchedConstraints() { + ctx.Out.Printf("\t%s is at %s, which is not allowed by constraint %s\n", pr, unmatched.V, unmatched.C) + } + ctx.Out.Println() + } + solver, err := gps.Prepare(params, sm) if err != nil { return errors.Wrap(err, "prepare solver") } - if cmd.noVendor && cmd.dryRun { - return errors.New("Gopkg.lock was not up to date") - } - solution, err := solver.Solve(context.TODO()) if err != nil { return handleAllTheFailuresOfTheWorld(err) @@ -306,23 +313,22 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project // The user said not to touch vendor/, so definitely nothing to do. 
return nil } - } - sw, err := dep.NewDeltaWriter(p.Lock, lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) + dw, err := dep.NewDeltaWriter(p.Lock, lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) if err != nil { return err } if cmd.dryRun { - return sw.PrintPreparedActions(ctx.Out, ctx.Verbose) + return dw.PrintPreparedActions(ctx.Out, ctx.Verbose) } var logger *log.Logger if ctx.Verbose { logger = ctx.Err } - return errors.WithMessage(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") + return errors.WithMessage(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { @@ -333,9 +339,10 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj if p.Lock == nil { return errors.Errorf("no %s exists from which to populate vendor/", dep.LockName) } + // Pass the same lock as old and new so that the writer will observe no // difference and choose not to write it out. - sw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorAlways, p.Manifest.PruneOptions) + sw, err := dep.NewSafeWriter(nil, p.Lock, p.ChangedLock, dep.VendorAlways, p.Manifest.PruneOptions) if err != nil { return err } @@ -383,19 +390,19 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return handleAllTheFailuresOfTheWorld(err) } - sw, err := dep.NewSafeWriter(nil, p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), cmd.vendorBehavior(), p.Manifest.PruneOptions) + dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) if err != nil { return err } if cmd.dryRun { - return sw.PrintPreparedActions(ctx.Out, ctx.Verbose) + return dw.PrintPreparedActions(ctx.Out, ctx.Verbose) } var logger *log.Logger if ctx.Verbose { logger = ctx.Err } - return errors.Wrap(sw.Write(p.AbsRoot, sm, false, logger), "grouped write of manifest, lock and vendor") + return errors.Wrap(dw.Write(p.AbsRoot, sm, false, logger), "grouped write of manifest, lock and vendor") } func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { @@ -417,16 +424,6 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm rm, _ := params.RootPackageTree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages()) - // TODO(sdboyer) re-enable this once we ToReachMap() intelligently filters out normally-excluded (_*, .*), dirs from errmap - //rm, errmap := params.RootPackageTree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages()) - // Having some problematic internal packages isn't cause for termination, - // but the user needs to be warned. - //for fail, err := range errmap { - //if _, is := err.Err.(*build.NoGoError); !is { - //ctx.Err.Printf("Warning: %s, %s", fail, err) - //} - //} - // Compile unique sets of 1) all external packages imported or required, and // 2) the project roots under which they fall. 
exmap := make(map[string]bool) @@ -673,20 +670,20 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm } sort.Strings(reqlist) - sw, err := dep.NewSafeWriter(nil, p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), dep.VendorOnChanged, p.Manifest.PruneOptions) + dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) if err != nil { return err } if cmd.dryRun { - return sw.PrintPreparedActions(ctx.Out, ctx.Verbose) + return dw.PrintPreparedActions(ctx.Out, ctx.Verbose) } var logger *log.Logger if ctx.Verbose { logger = ctx.Err } - if err := errors.Wrap(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor"); err != nil { + if err := errors.Wrap(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor"); err != nil { return err } diff --git a/cmd/dep/init.go b/cmd/dep/init.go index 1570e2acb6..5bdff0b345 100644 --- a/cmd/dep/init.go +++ b/cmd/dep/init.go @@ -102,12 +102,12 @@ func (cmd *initCommand) Run(ctx *dep.Ctx, args []string) error { ctx.Out.Println("Getting direct dependencies...") } - ptree, directDeps, err := p.GetDirectDependencyNames(sm) + directDeps, err := p.GetDirectDependencyNames(sm) if err != nil { return errors.Wrap(err, "init failed: unable to determine direct dependencies") } if ctx.Verbose { - ctx.Out.Printf("Checked %d directories for packages.\nFound %d direct dependencies.\n", len(ptree.Packages), len(directDeps)) + ctx.Out.Printf("Checked %d directories for packages.\nFound %d direct dependencies.\n", len(p.RootPackageTree.Packages), len(directDeps)) } // Initialize with imported data, then fill in the gaps using the GOPATH @@ -133,7 +133,7 @@ func (cmd *initCommand) Run(ctx *dep.Ctx, args []string) error { params := gps.SolveParameters{ RootDir: root, - RootPackageTree: ptree, + RootPackageTree: p.RootPackageTree, Manifest: p.Manifest, Lock: p.Lock, ProjectAnalyzer: rootAnalyzer, diff --git a/cmd/dep/status.go b/cmd/dep/status.go index 4e9e0f9ca7..35f2f1860f 100644 --- a/cmd/dep/status.go +++ b/cmd/dep/status.go @@ -329,13 +329,13 @@ type dotOutput struct { func (out *dotOutput) BasicHeader() error { out.g = new(graphviz).New() - ptree, err := out.p.ParseRootPackageTree() + ptree := out.p.RootPackageTree // TODO(sdboyer) should be true, true, false, out.p.Manifest.IgnoredPackages() prm, _ := ptree.ToReachMap(true, false, false, nil) out.g.createNode(string(out.p.ImportRoot), "", prm.FlattenFn(paths.IsStandardImportPath)) - return err + return nil } func (out *dotOutput) BasicFooter() error { @@ -649,10 +649,7 @@ func (os OldStatus) marshalJSON() *rawOldStatus { func (cmd *statusCommand) runOld(ctx *dep.Ctx, out oldOutputter, p *dep.Project, sm gps.SourceManager) error { // While the network churns on ListVersions() requests, statically analyze // code from the current project. - ptree, err := p.ParseRootPackageTree() - if err != nil { - return err - } + ptree := p.RootPackageTree // Set up a solver in order to check the InputHash. params := gps.SolveParameters{ @@ -888,10 +885,7 @@ type MissingStatus struct { func (cmd *statusCommand) runStatusAll(ctx *dep.Ctx, out outputter, p *dep.Project, sm gps.SourceManager) (hasMissingPkgs bool, errCount int, err error) { // While the network churns on ListVersions() requests, statically analyze // code from the current project. 
- ptree, err := p.ParseRootPackageTree() - if err != nil { - return false, 0, err - } + ptree := p.RootPackageTree // Set up a solver in order to check the InputHash. params := gps.SolveParameters{ @@ -928,7 +922,7 @@ func (cmd *statusCommand) runStatusAll(ctx *dep.Ctx, out outputter, p *dep.Proje return slp[i].Ident().Less(slp[j].Ident()) }) - lsat := verify.LockSatisfiesInputs(p.Lock, p.Lock.SolveMeta.InputImports, p.Manifest, params.RootPackageTree) + lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree) if lsat.Passed() { // If these are equal, we're guaranteed that the lock is a transitively // complete picture of all deps. That eliminates the need for at least @@ -1305,7 +1299,7 @@ func collectConstraints(ctx *dep.Ctx, p *dep.Project, sm gps.SourceManager) (con // Collect the complete set of direct project dependencies, incorporating // requireds and ignores appropriately. - _, directDeps, err := p.GetDirectDependencyNames(sm) + directDeps, err := p.GetDirectDependencyNames(sm) if err != nil { // Return empty collection, not nil, if we fail here. return constraintCollection, []error{errors.Wrap(err, "failed to get direct dependencies")} diff --git a/context.go b/context.go index d7b403ab8c..91eb924427 100644 --- a/context.go +++ b/context.go @@ -9,9 +9,13 @@ import ( "os" "path/filepath" "runtime" + "sort" "time" "github.com/golang/dep/gps" + "github.com/golang/dep/gps/paths" + "github.com/golang/dep/gps/pkgtree" + "github.com/golang/dep/gps/verify" "github.com/golang/dep/internal/fs" "github.com/pkg/errors" ) @@ -188,9 +192,55 @@ func (c *Ctx) LoadProject() (*Project, error) { return nil, errors.Wrapf(err, "error while parsing %s", lp) } + // Parse in the root package tree. + ptree, err := p.parseRootPackageTree() + if err != nil { + return nil, err + } + + // If there's a current Lock, apply the input and pruneopt changes that we + // can know without solving. + if p.Lock != nil { + p.ChangedLock = p.Lock.dup() + p.ChangedLock.SolveMeta.InputImports = externalImportList(ptree, p.Manifest) + + for k, lp := range p.ChangedLock.Projects() { + vp := lp.(verify.VerifiableProject) + vp.PruneOpts = p.Manifest.PruneOptions.PruneOptionsFor(lp.Ident().ProjectRoot) + p.ChangedLock.P[k] = vp + } + } + return p, nil } +func externalImportList(rpt pkgtree.PackageTree, m gps.RootManifest) []string { + rm, _ := rpt.ToReachMap(true, true, false, m.IgnoredPackages()) + reach := rm.FlattenFn(paths.IsStandardImportPath) + req := m.RequiredPackages() + + // If there are any requires, slide them into the reach list, as well. + if len(req) > 0 { + // Make a map of imports that are both in the import path list and the + // required list to avoid duplication. + skip := make(map[string]bool, len(req)) + for _, r := range reach { + if req[r] { + skip[r] = true + } + } + + for r := range req { + if !skip[r] { + reach = append(reach, r) + } + } + } + + sort.Strings(reach) + return reach +} + // DetectProjectGOPATH attempt to find the GOPATH containing the project. // // If p.AbsRoot is not a symlink and is within a GOPATH, the GOPATH containing p.AbsRoot is returned. 
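With these changes, p.Lock keeps mirroring Gopkg.lock as it exists on disk, while p.ChangedLock carries the freshly computed input-imports and prune options. Assuming a lock is present, a consumer can then ask the delta machinery from the previous patch whether anything besides hashes changed; a minimal sketch (the real call sites use the anyExceptHash helper in txn_writer.go):

    delta := verify.DiffLocks2(p.Lock, p.ChangedLock)
    if delta.Changed(verify.AnyChanged & ^verify.HashChanged & ^verify.HashVersionChanged) {
        // Gopkg.lock is out of sync with the project inputs and needs rewriting.
    }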
diff --git a/gps/verify/digest.go b/gps/verify/digest.go index 9f0042289a..0191c95057 100644 --- a/gps/verify/digest.go +++ b/gps/verify/digest.go @@ -461,7 +461,9 @@ func VerifyDepTree(osDirname string, wantDigests map[string]VersionedDigest) (ma if expectedSum, ok := wantDigests[slashPathname]; ok { ls := EmptyDigestInLock if expectedSum.HashVersion != HashVersion { - ls = HashVersionMismatch + if !expectedSum.IsEmpty() { + ls = HashVersionMismatch + } } else if len(expectedSum.Digest) > 0 { projectSum, err := DigestFromDirectory(osPathname) if err != nil { diff --git a/gps/verify/lock.go b/gps/verify/lock.go index 3997bf9d86..1e9bf3d3e2 100644 --- a/gps/verify/lock.go +++ b/gps/verify/lock.go @@ -20,28 +20,24 @@ type VerifiableProject struct { Digest VersionedDigest } -type lockUnsatisfy uint8 - -const ( - missingFromLock lockUnsatisfy = iota - inAdditionToLock -) - -type constraintMismatch struct { - c gps.Constraint - v gps.Version +// ConstraintMismatch is a two-tuple of a gps.Version, and a gps.Constraint that +// does not allow that version. +type ConstraintMismatch struct { + C gps.Constraint + V gps.Version } -type constraintMismatches map[gps.ProjectRoot]constraintMismatch - +// LockSatisfaction holds the compound result of LockSatisfiesInputs, allowing +// the caller to inspect each of several orthogonal possible types of failure. type LockSatisfaction struct { nolock bool missingPkgs, excessPkgs []string - badovr, badconstraint constraintMismatches + badovr, badconstraint map[gps.ProjectRoot]ConstraintMismatch } -// Passed is a shortcut method to check if any problems with the evaluted lock -// were identified. +// Passed is a shortcut method that indicates whether there were any ways in +// which the Lock did not satisfy the inputs. It will return true only if no +// problems were found. func (ls LockSatisfaction) Passed() bool { if ls.nolock { return false @@ -66,19 +62,27 @@ func (ls LockSatisfaction) Passed() bool { return true } -func (ls LockSatisfaction) MissingPackages() []string { +// MissingImports reports the set of import paths that were present in the +// inputs but missing in the Lock. +func (ls LockSatisfaction) MissingImports() []string { return ls.missingPkgs } -func (ls LockSatisfaction) ExcessPackages() []string { +// ExcessImports reports the set of import paths that were present in the Lock +// but absent from the inputs. +func (ls LockSatisfaction) ExcessImports() []string { return ls.excessPkgs } -func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]constraintMismatch { +// UnmatchedOverrides reports any override rules that were not satisfied by the +// corresponding LockedProject in the Lock. +func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]ConstraintMismatch { return ls.badovr } -func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]constraintMismatch { +// UnmatchedOverrides reports any normal, non-override constraint rules that +// were not satisfied by the corresponding LockedProject in the Lock. 
+func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]ConstraintMismatch { return ls.badconstraint } @@ -87,6 +91,8 @@ func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[strin xt := radix.New() for pr, _ := range m.DependencyConstraints() { + // FIXME(sdboyer) this has the trailing slash ambiguity problem; adapt + // code from the solver xt.Insert(string(pr), nil) } @@ -107,7 +113,7 @@ func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[strin // compute package imports that may have been removed. Figuring out that // negative space would require exploring the entire graph to ensure there are // no in-edges for particular imports. -func LockSatisfiesInputs(l gps.Lock, oldimports []string, m gps.RootManifest, rpt pkgtree.PackageTree) LockSatisfaction { +func LockSatisfiesInputs(l gps.LockWithImports, m gps.RootManifest, rpt pkgtree.PackageTree) LockSatisfaction { if l == nil { return LockSatisfaction{nolock: true} } @@ -122,8 +128,15 @@ func LockSatisfiesInputs(l gps.Lock, oldimports []string, m gps.RootManifest, rp rm, _ := rpt.ToReachMap(true, true, false, ig) reach := rm.FlattenFn(paths.IsStandardImportPath) - inlock := make(map[string]bool, len(oldimports)) + inlock := make(map[string]bool, len(l.InputImports())) ininputs := make(map[string]bool, len(reach)+len(req)) + + type lockUnsatisfy uint8 + const ( + missingFromLock lockUnsatisfy = iota + inAdditionToLock + ) + pkgDiff := make(map[string]lockUnsatisfy) for _, imp := range reach { @@ -134,13 +147,13 @@ func LockSatisfiesInputs(l gps.Lock, oldimports []string, m gps.RootManifest, rp ininputs[imp] = true } - for _, imp := range oldimports { + for _, imp := range l.InputImports() { inlock[imp] = true } lsat := LockSatisfaction{ - badovr: make(constraintMismatches), - badconstraint: make(constraintMismatches), + badovr: make(map[gps.ProjectRoot]ConstraintMismatch), + badconstraint: make(map[gps.ProjectRoot]ConstraintMismatch), } for ip := range ininputs { @@ -152,6 +165,12 @@ func LockSatisfiesInputs(l gps.Lock, oldimports []string, m gps.RootManifest, rp } } + // Something in the missing list might already be in the packages list, + // because another package in the depgraph imports it. We could make a + // special case for that, but it would break the simplicity of the model and + // complicate the notion of LockSatisfaction.Passed(), so let's see if we + // can get away without it. + for ip := range inlock { if !ininputs[ip] { pkgDiff[ip] = inAdditionToLock @@ -167,23 +186,27 @@ func LockSatisfiesInputs(l gps.Lock, oldimports []string, m gps.RootManifest, rp } eff := findEffectualConstraints(m, ininputs) - ovr := m.Overrides() - constraints := m.DependencyConstraints() + ovr, constraints := m.Overrides(), m.DependencyConstraints() for _, lp := range l.Projects() { pr := lp.Ident().ProjectRoot - if pp, has := ovr[pr]; has && !pp.Constraint.Matches(lp.Version()) { - lsat.badovr[pr] = constraintMismatch{ - c: pp.Constraint, - v: lp.Version(), + if pp, has := ovr[pr]; has { + if !pp.Constraint.Matches(lp.Version()) { + lsat.badovr[pr] = ConstraintMismatch{ + C: pp.Constraint, + V: lp.Version(), + } } + // The constraint isn't considered if we have an override, + // independent of whether the override is satisfied. 
+ continue } if pp, has := constraints[pr]; has && eff[string(pr)] && !pp.Constraint.Matches(lp.Version()) { - lsat.badconstraint[pr] = constraintMismatch{ - c: pp.Constraint, - v: lp.Version(), + lsat.badconstraint[pr] = ConstraintMismatch{ + C: pp.Constraint, + V: lp.Version(), } } } diff --git a/gps/verify/lockdiff.go b/gps/verify/lockdiff.go index 3a742dc90d..d8cb087af0 100644 --- a/gps/verify/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -211,7 +211,7 @@ func DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDelta { SourceAfter: lp2.Ident().Source, } - ld.PackagesRemoved, ld.PackagesAdded = findAddedAndRemoved(lp1.Packages(), lp2.Packages()) + ld.PackagesAdded, ld.PackagesRemoved = findAddedAndRemoved(lp1.Packages(), lp2.Packages()) switch v := lp1.Version().(type) { case gps.PairedVersion: diff --git a/lock.go b/lock.go index fce0643b6d..a30a13d531 100644 --- a/lock.go +++ b/lock.go @@ -154,6 +154,19 @@ func (l *Lock) HasProjectWithRoot(root gps.ProjectRoot) bool { return false } +func (l *Lock) dup() *Lock { + l2 := &Lock{ + SolveMeta: l.SolveMeta, + P: make([]gps.LockedProject, len(l.P)), + } + + l2.SolveMeta.InputImports = make([]string, len(l.SolveMeta.InputImports)) + copy(l2.SolveMeta.InputImports, l.SolveMeta.InputImports) + copy(l2.P, l.P) + + return l2 +} + // toRaw converts the manifest into a representation suitable to write to the lock file func (l *Lock) toRaw() rawLock { raw := rawLock{ diff --git a/manifest.go b/manifest.go index 453de7af0f..19e121283c 100644 --- a/manifest.go +++ b/manifest.go @@ -609,6 +609,9 @@ func (m *Manifest) Overrides() gps.ProjectConstraints { // IgnoredPackages returns a set of import paths to ignore. func (m *Manifest) IgnoredPackages() *pkgtree.IgnoredRuleset { + if m == nil { + return pkgtree.NewIgnoredRuleset(nil) + } return pkgtree.NewIgnoredRuleset(m.Ignored) } @@ -627,6 +630,10 @@ func (m *Manifest) HasConstraintsOn(root gps.ProjectRoot) bool { // RequiredPackages returns a set of import paths to require. func (m *Manifest) RequiredPackages() map[string]bool { + if m == nil { + return map[string]bool{} + } + if len(m.Required) == 0 { return nil } diff --git a/project.go b/project.go index 0247ae9b40..5de3ce53cf 100644 --- a/project.go +++ b/project.go @@ -11,7 +11,6 @@ import ( "sort" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/paths" "github.com/golang/dep/gps/pkgtree" "github.com/golang/dep/internal/fs" "github.com/pkg/errors" @@ -101,13 +100,19 @@ type Project struct { // If AbsRoot is not a symlink, then ResolvedAbsRoot should equal AbsRoot. ResolvedAbsRoot string // ImportRoot is the import path of the project's root directory. - ImportRoot gps.ProjectRoot - Manifest *Manifest - Lock *Lock // Optional + ImportRoot gps.ProjectRoot + // The Manifest, as read from Gopkg.toml on disk. + Manifest *Manifest + // The Lock, as read from Gopkg.lock on disk. + Lock *Lock // Optional + // The above Lock, with changes applied to it. There are two possible classes of + // changes: + // 1. Changes to InputImports + // 2. Changes to per-project prune options + ChangedLock *Lock + // The PackageTree representing the project, with hidden and ignored + // packages already trimmed. RootPackageTree pkgtree.PackageTree - // If populated, contains the results of comparing the Lock against the - // current vendor tree, per verify.VerifyDepTree(). - //VendorStatus map[string]verify.VendorStatus } // SetRoot sets the project AbsRoot and ResolvedAbsRoot. If root is not a symlink, ResolvedAbsRoot will be set to root. 
@@ -127,25 +132,28 @@ func (p *Project) MakeParams() gps.SolveParameters { params := gps.SolveParameters{ RootDir: p.AbsRoot, ProjectAnalyzer: Analyzer{}, + RootPackageTree: p.RootPackageTree, } if p.Manifest != nil { params.Manifest = p.Manifest } - if p.Lock != nil { - params.Lock = p.Lock + // It should be impossible for p.ChangedLock to be nil if p.Lock is non-nil; + // we always want to use the former for solving. + if p.ChangedLock != nil { + params.Lock = p.ChangedLock } return params } -// ParseRootPackageTree analyzes the root project's disk contents to create a +// parseRootPackageTree analyzes the root project's disk contents to create a // PackageTree, trimming out packages that are not relevant for root projects // along the way. // // The resulting tree is cached internally at p.RootPackageTree. -func (p *Project) ParseRootPackageTree() (pkgtree.PackageTree, error) { +func (p *Project) parseRootPackageTree() (pkgtree.PackageTree, error) { if p.RootPackageTree.Packages == nil { ptree, err := pkgtree.ListPackages(p.ResolvedAbsRoot, string(p.ImportRoot)) if err != nil { @@ -177,49 +185,28 @@ func (p *Project) ParseRootPackageTree() (pkgtree.PackageTree, error) { // This function will correctly utilize ignores and requireds from an existing // manifest, if one is present, but will also do the right thing without a // manifest. -func (p *Project) GetDirectDependencyNames(sm gps.SourceManager) (pkgtree.PackageTree, map[gps.ProjectRoot]bool, error) { - ptree, err := p.ParseRootPackageTree() - if err != nil { - return pkgtree.PackageTree{}, nil, err - } - - var ig *pkgtree.IgnoredRuleset - var req map[string]bool - if p.Manifest != nil { - ig = p.Manifest.IgnoredPackages() - req = p.Manifest.RequiredPackages() - } - - rm, _ := ptree.ToReachMap(true, true, false, ig) - reach := rm.FlattenFn(paths.IsStandardImportPath) - - if len(req) > 0 { - // Make a map of imports that are both in the import path list and the - // required list to avoid duplication. 
- skip := make(map[string]bool, len(req)) - for _, r := range reach { - if req[r] { - skip[r] = true - } - } - - for r := range req { - if !skip[r] { - reach = append(reach, r) - } +func (p *Project) GetDirectDependencyNames(sm gps.SourceManager) (map[gps.ProjectRoot]bool, error) { + var reach []string + if p.ChangedLock != nil { + reach = p.ChangedLock.InputImports() + } else { + ptree, err := p.parseRootPackageTree() + if err != nil { + return nil, err } + reach = externalImportList(ptree, p.Manifest) } directDeps := map[gps.ProjectRoot]bool{} for _, ip := range reach { pr, err := sm.DeduceProjectRoot(ip) if err != nil { - return pkgtree.PackageTree{}, nil, err + return nil, err } directDeps[pr] = true } - return ptree, directDeps, nil + return directDeps, nil } // FindIneffectualConstraints looks for constraint rules expressed in the @@ -233,7 +220,7 @@ func (p *Project) FindIneffectualConstraints(sm gps.SourceManager) []gps.Project return nil } - _, dd, err := p.GetDirectDependencyNames(sm) + dd, err := p.GetDirectDependencyNames(sm) if err != nil { return nil } diff --git a/txn_writer.go b/txn_writer.go index d15d670150..ffccfc4de5 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -5,6 +5,7 @@ package dep import ( + "bytes" "context" "fmt" "io/ioutil" @@ -466,6 +467,7 @@ const ( solveChanged hashMismatch hashVersionMismatch + hashAbsent missingFromTree projectAdded projectRemoved @@ -518,10 +520,12 @@ func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatu switch stat { case verify.NotInTree: sw.changed[pr] = missingFromTree - case verify.EmptyDigestInLock, verify.DigestMismatchInLock: + case verify.DigestMismatchInLock: sw.changed[pr] = hashMismatch case verify.HashVersionMismatch: sw.changed[pr] = hashVersionMismatch + case verify.EmptyDigestInLock: + sw.changed[pr] = hashAbsent } } } @@ -564,29 +568,51 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l dropped := []gps.ProjectRoot{} // TODO(sdboyer) add a txn/rollback layer, like the safewriter? 
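+	// Progress counters: i counts the projects written so far and tot is the
+	// total number of changed projects, so the per-project log lines assembled
+	// below can be prefixed with an "(n/total)" indicator.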
+ i := 0 + tot := len(dw.changed) for pr, reason := range dw.changed { + if reason == projectRemoved { + dropped = append(dropped, pr) + continue + } + to := filepath.FromSlash(filepath.Join(vnewpath, string(pr))) po := dw.pruneOptions.PruneOptionsFor(pr) + if err := sm.ExportPrunedProject(context.TODO(), projs[pr], po, to); err != nil { + return errors.Wrapf(err, "failed to export %s", pr) + } + + i++ lpd := dw.lockDiff.ProjectDeltas[pr] + v, id := projs[pr].Version(), projs[pr].Ident() + var buf bytes.Buffer + fmt.Fprintf(&buf, "(%d/%d) Wrote %s@%s: ", i, tot, id, v) switch reason { case noChange: panic(fmt.Sprintf("wtf, no change for %s", pr)) case solveChanged: if lpd.SourceChanged() { - logger.Printf("Writing %s: source changed (%s -> %s)", pr, lpd.SourceBefore, lpd.SourceAfter) + fmt.Fprintf(&buf, "source changed (%s -> %s)", lpd.SourceBefore, lpd.SourceAfter) } else if lpd.VersionChanged() { - logger.Printf("Writing %s: version changed (%s -> %s)", pr, lpd.VersionBefore, lpd.VersionAfter) + bv, av := "(none)", "(none)" + if lpd.VersionBefore != nil { + bv = lpd.VersionBefore.String() + } + if lpd.VersionAfter != nil { + av = lpd.VersionAfter.String() + } + fmt.Fprintf(&buf, "version changed (%s -> %s)", bv, av) } else if lpd.RevisionChanged() { - logger.Printf("Writing %s: revision changed (%s -> %s)", pr, lpd.RevisionBefore, lpd.RevisionAfter) + fmt.Fprintf(&buf, "revision changed (%s -> %s)", lpd.RevisionBefore, lpd.RevisionAfter) } else if lpd.PackagesChanged() { la, lr := len(lpd.PackagesAdded), len(lpd.PackagesRemoved) if la > 0 && lr > 0 { - logger.Printf("Writing %s: packages changed (%v added, %v removed)", pr, la, lr) + fmt.Fprintf(&buf, "packages changed (%v added, %v removed)", la, lr) } else if la > 0 { - logger.Printf("Writing %s: packages changed (%v added)", pr, la) + fmt.Fprintf(&buf, "packages changed (%v added)", la) } else { - logger.Printf("Writing %s: packages changed (%v removed)", pr, lr) + fmt.Fprintf(&buf, "packages changed (%v removed)", lr) } } else if lpd.PruneOptsChanged() { // Override what's on the lockdiff with the extra info we have; @@ -594,23 +620,20 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l // value from the input param in place. 
 				old := lpd.PruneOptsBefore & ^gps.PruneNestedVendorDirs
 				new := lpd.PruneOptsAfter & ^gps.PruneNestedVendorDirs
-				logger.Printf("Writing %s: prune options changed (%s -> %s)", pr, old, new)
+				fmt.Fprintf(&buf, "prune options changed (%s -> %s)", old, new)
 			}
 		case hashMismatch:
-			logger.Printf("Writing %s: hash mismatch between Gopkg.lock and vendor contents", pr)
+			fmt.Fprintf(&buf, "hash mismatch between Gopkg.lock and vendor contents")
 		case hashVersionMismatch:
-			logger.Printf("Writing %s: hashing algorithm mismatch", pr)
+			fmt.Fprintf(&buf, "hashing algorithm mismatch")
+		case hashAbsent:
+			fmt.Fprintf(&buf, "hash digest absent from lock")
 		case projectAdded:
-			logger.Printf("Writing new project %s", pr)
-		case projectRemoved:
-			dropped = append(dropped, pr)
-			continue
+			fmt.Fprintf(&buf, "new project")
 		case missingFromTree:
-			logger.Printf("Writing %s: missing from vendor", pr)
-		}
-		if err := sm.ExportPrunedProject(context.TODO(), projs[pr], po, to); err != nil {
-			return errors.Wrapf(err, "failed to export %s", pr)
+			fmt.Fprint(&buf, "missing from vendor")
 		}
+		logger.Print(buf.String())
 
 		digest, err := verify.DigestFromDirectory(to)
 		if err != nil {
@@ -649,9 +672,17 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l
 		}
 	}
 
-	for _, pr := range dropped {
-		// Kind of a lie to print this here. ¯\_(ツ)_/¯
-		logger.Printf("Discarding unused project %s", pr)
+	for i, pr := range dropped {
+		// Kind of a lie to print this. ¯\_(ツ)_/¯
+		logger.Printf("(%d/%d) Removed unused project %s", tot-(len(dropped)-i-1), tot, pr)
+	}
+
+	// Ensure vendor/.git is preserved if present
+	if hasDotGit(vpath) {
+		err = fs.RenameWithFallback(filepath.Join(vpath, ".git"), filepath.Join(vnewpath, "vendor/.git"))
+		if _, ok := err.(*os.LinkError); ok {
+			return errors.Wrap(err, "failed to preserve vendor/.git")
+		}
 	}
 
 	err = os.RemoveAll(vpath)

From f00e82813fbe1a3b42cbddc066c2b65786e19777 Mon Sep 17 00:00:00 2001
From: sam boyer
Date: Tue, 3 Jul 2018 01:39:02 -0400
Subject: [PATCH 12/25] dep: Update scads of tests

Tests are now almost completely working, after updating all the outputs
to the new lock format. There is also an assortment of other fixes in
here, mostly related to fixing nil pointer panics that were uncovered
by fixing up these tests.
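
For reference, the new lock format drops the inputs-digest memo from
[solve-meta] and instead records the solve's inputs directly: an
input-imports list in [solve-meta], plus per-project digest and pruneopts
fields. An illustrative [solve-meta] stanza, with values taken from one of
the updated fixtures below:

  [solve-meta]
    analyzer-name = "dep"
    analyzer-version = 1
    input-imports = ["github.com/sdboyer/deptest"]
    solver-name = "gps-cdcl"
    solver-version = 1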
--- Gopkg.lock | 2 +- cmd/dep/ensure.go | 70 ++-- cmd/dep/init.go | 2 - cmd/dep/root_analyzer.go | 3 +- cmd/dep/status.go | 6 +- .../add/all-new-double-spec/final/Gopkg.lock | 9 +- .../add/all-new-double/final/Gopkg.lock | 10 +- .../ensure/add/all-new-spec/final/Gopkg.lock | 9 +- .../ensure/add/all-new/final/Gopkg.lock | 9 +- .../ensure/add/desync/final/Gopkg.lock | 12 +- .../ensure/add/desync/stdout.txt | 1 - .../errs/double-diff-spec/final/Gopkg.lock | 4 +- .../add/errs/self-add/case2/final/Gopkg.lock | 2 +- .../add/exists-imports/final/Gopkg.lock | 4 +- .../final/Gopkg.lock | 9 +- .../default/hasheq-novendor/final/Gopkg.lock | 4 +- .../ensure/default/hasheq/final/Gopkg.lock | 4 +- .../hashneq-novendor-dry/final/Gopkg.lock | 16 - .../hashneq-novendor-dry/final/Gopkg.toml | 4 - .../hashneq-novendor-dry/initial/Gopkg.lock | 16 - .../hashneq-novendor-dry/initial/Gopkg.toml | 4 - .../hashneq-novendor-dry/initial/main.go | 12 - .../hashneq-novendor-dry/testcase.json | 6 - .../ensure/empty/case1/final/Gopkg.lock | 4 +- .../ensure/empty/case2/final/Gopkg.lock | 4 +- .../ensure/empty/case3/final/Gopkg.lock | 4 +- .../ensure/pkg-errors/case1/final/Gopkg.lock | 2 +- .../wildcard-ignore/final/Gopkg.lock | 4 +- .../pkg-ignored/wildcard-ignore/stdout.txt | 11 - .../pkg-ignored/wildcard-ignore/testcase.json | 3 +- .../wildcard-other-root/final/Gopkg.lock | 2 +- .../wildcard-other-root/stdout.txt | 9 - .../wildcard-other-root/testcase.json | 3 +- .../ensure/update/case1/final/Gopkg.lock | 9 +- .../ensure/update/desync/final/Gopkg.lock | 9 +- .../ensure/update/desync/stdout.txt | 1 - .../ensure/update/novendor/final/Gopkg.lock | 4 +- .../harness_tests/init/case1/final/Gopkg.lock | 9 +- .../harness_tests/init/case2/final/Gopkg.lock | 9 +- .../harness_tests/init/case3/final/Gopkg.lock | 9 +- .../harness_tests/init/case4/final/Gopkg.lock | 9 +- .../init/glide/case1/final/Gopkg.lock | 11 +- .../init/glide/case2/final/Gopkg.lock | 6 +- .../init/glide/case3/final/Gopkg.lock | 6 +- .../init/glide/case4/final/Gopkg.lock | 6 +- .../init/glide/corrupt-glide/final/Gopkg.lock | 8 +- .../direct-trans-no-conflict/final/Gopkg.lock | 9 +- .../trans-trans-unspecified/final/Gopkg.lock | 11 +- .../init/glide/trans-trans/final/Gopkg.lock | 11 +- .../init/glock/case1/final/Gopkg.lock | 6 +- .../init/godep/case1/final/Gopkg.lock | 6 +- .../init/govend/case1/final/Gopkg.lock | 6 +- .../init/govendor/case1/final/Gopkg.lock | 6 +- .../init/gvt/case1/final/Gopkg.lock | 12 +- .../init/skip-hidden/final/Gopkg.lock | 4 +- .../init/vndr/case1/final/Gopkg.lock | 6 +- .../status/case1/dot/final/Gopkg.lock | 9 +- .../status/case1/json/final/Gopkg.lock | 9 +- .../status/case1/table/final/Gopkg.lock | 9 +- .../status/case1/template/final/Gopkg.lock | 9 +- .../ignore_lock_mismatch/final/Gopkg.lock | 9 - .../ignore_lock_mismatch/final/Gopkg.toml | 2 - .../ignore_lock_mismatch/initial/Gopkg.lock | 9 - .../ignore_lock_mismatch/initial/Gopkg.toml | 2 - .../ignore_lock_mismatch/initial/main.go | 12 - .../status/ignore_lock_mismatch/testcase.json | 7 - .../status/old_constraints/final/Gopkg.lock | 12 +- .../status/old_constraints/stdout.txt | 2 +- .../override_constraint/final/Gopkg.lock | 6 +- .../revision_constraint/final/Gopkg.lock | 6 +- context.go | 51 ++- gps/verify/lockdiff.go | 333 ++++-------------- internal/feedback/feedback.go | 12 +- internal/feedback/feedback_test.go | 12 +- internal/feedback/lockdiff.go | 252 +++++++++++++ .../feedback}/lockdiff_test.go | 2 +- lock.go | 6 + manifest.go | 2 +- project.go | 2 +- project_test.go | 1 
+ testdata/txn_writer/expected_lock.toml | 2 +- txn_writer.go | 7 +- 82 files changed, 697 insertions(+), 525 deletions(-) delete mode 100644 cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.lock delete mode 100644 cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.toml delete mode 100644 cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.lock delete mode 100644 cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.toml delete mode 100644 cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/main.go delete mode 100644 cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/testcase.json delete mode 100644 cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/stdout.txt delete mode 100644 cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/stdout.txt delete mode 100644 cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.lock delete mode 100644 cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.toml delete mode 100644 cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.lock delete mode 100644 cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.toml delete mode 100644 cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/main.go delete mode 100644 cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/testcase.json create mode 100644 internal/feedback/lockdiff.go rename {gps/verify => internal/feedback}/lockdiff_test.go (99%) diff --git a/Gopkg.lock b/Gopkg.lock index 02bde4018b..a1aa86afc9 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -128,7 +128,7 @@ "github.com/pkg/errors", "github.com/sdboyer/constext", "golang.org/x/sync/errgroup", - "gopkg.in/yaml.v2", + "gopkg.in/yaml.v2" ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 93d1a2b469..cf01349eb2 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -278,6 +278,7 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return err } + var solve bool lock := p.ChangedLock if lock != nil { lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree) @@ -298,21 +299,26 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project } ctx.Out.Println() } - - solver, err := gps.Prepare(params, sm) - if err != nil { - return errors.Wrap(err, "prepare solver") - } - - solution, err := solver.Solve(context.TODO()) - if err != nil { - return handleAllTheFailuresOfTheWorld(err) - } - lock = dep.LockFromSolution(solution, p.Manifest.PruneOptions) + solve = true } else if cmd.noVendor { // The user said not to touch vendor/, so definitely nothing to do. 
 			return nil
 		}
+	} else {
+		solve = true
+	}
+
+	if solve {
+		solver, err := gps.Prepare(params, sm)
+		if err != nil {
+			return errors.Wrap(err, "prepare solver")
+		}
+
+		solution, err := solver.Solve(context.TODO())
+		if err != nil {
+			return handleAllTheFailuresOfTheWorld(err)
+		}
+		lock = dep.LockFromSolution(solution, p.Manifest.PruneOptions)
 	}
 
 	dw, err := dep.NewDeltaWriter(p.Lock, lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"))
@@ -414,35 +420,29 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm
 		return err
 	}
 
-	// We'll need to discard this prepared solver as later work changes params,
-	// but solver preparation is cheap and worth doing up front in order to
-	// perform the fastpath check of hash comparison.
-	solver, err := gps.Prepare(params, sm)
-	if err != nil {
-		return errors.Wrap(err, "fastpath solver prepare")
-	}
-
-	rm, _ := params.RootPackageTree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages())
-
 	// Compile unique sets of 1) all external packages imported or required, and
 	// 2) the project roots under which they fall.
 	exmap := make(map[string]bool)
-	exrmap := make(map[gps.ProjectRoot]bool)
-
-	for _, ex := range append(rm.FlattenFn(paths.IsStandardImportPath), p.Manifest.Required...) {
-		exmap[ex] = true
-		root, err := sm.DeduceProjectRoot(ex)
-		if err != nil {
-			// This should be very uncommon to hit, as it entails that we
-			// couldn't deduce the root for an import, but that some previous
-			// solve run WAS able to deduce the root. It's most likely to occur
-			// if the user has e.g. not connected to their organization's VPN,
-			// and thus cannot access an internal go-get metadata service.
-			return errors.Wrapf(err, "could not deduce project root for %s", ex)
+	if p.ChangedLock != nil {
+		for _, imp := range p.ChangedLock.InputImports() {
+			exmap[imp] = true
+		}
+	} else {
+		// The only time we'll hit this branch is if there's no lock at all, in
+		// which case the import set has to be computed from the root package
+		// tree and the manifest's required list.
+		rm, _ := p.RootPackageTree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages())
+		for _, imp := range rm.FlattenFn(paths.IsStandardImportPath) {
+			exmap[imp] = true
+		}
+		for imp := range p.Manifest.RequiredPackages() {
+			exmap[imp] = true
 		}
-		exrmap[root] = true
 	}
+
+	//exrmap, err := p.GetDirectDependencyNames(sm)
+	//if err != nil {
+	//return err
+	//}
+
 	// Note: these flags are only partially used by the latter parts of the
 	// algorithm; rather, it relies on inference. However, they remain in their
 	// entirety as future needs may make further use of them, being a handy,
@@ -620,7 +620,7 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm
 	}
 
 	// Re-prepare a solver now that our params are complete.
-	solver, err = gps.Prepare(params, sm)
+	solver, err := gps.Prepare(params, sm)
 	if err != nil {
 		return errors.Wrap(err, "fastpath solver prepare")
 	}
diff --git a/cmd/dep/init.go b/cmd/dep/init.go
index 5bdff0b345..cc2fa83231 100644
--- a/cmd/dep/init.go
+++ b/cmd/dep/init.go
@@ -168,8 +168,6 @@ func (cmd *initCommand) Run(ctx *dep.Ctx, args []string) error {
 		return errors.Wrap(err, "init failed: unable to recalculate the lock digest")
 	}
 
-	//p.Lock.SolveMeta.InputsDigest = s.HashInputs()
-
 	// Pass timestamp (yyyyMMddHHmmss format) as suffix to backup name.
vendorbak, err := dep.BackupVendor(filepath.Join(root, "vendor"), time.Now().Format("20060102150405")) if err != nil { diff --git a/cmd/dep/root_analyzer.go b/cmd/dep/root_analyzer.go index d3d432686d..72ba9c22ab 100644 --- a/cmd/dep/root_analyzer.go +++ b/cmd/dep/root_analyzer.go @@ -11,7 +11,6 @@ import ( "github.com/golang/dep" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/verify" fb "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/importers" "golang.org/x/sync/errgroup" @@ -168,7 +167,7 @@ func (a *rootAnalyzer) DeriveManifestAndLock(dir string, pr gps.ProjectRoot) (gp func (a *rootAnalyzer) FinalizeRootManifestAndLock(m *dep.Manifest, l *dep.Lock, ol dep.Lock) { // Iterate through the new projects in solved lock and add them to manifest // if they are direct deps and log feedback for all the new projects. - diff := verify.DiffLocks(&ol, l) + diff := fb.DiffLocks(&ol, l) bi := fb.NewBrokenImportFeedback(diff) bi.LogFeedback(a.ctx.Err) for _, y := range l.Projects() { diff --git a/cmd/dep/status.go b/cmd/dep/status.go index 35f2f1860f..77e7671f26 100644 --- a/cmd/dep/status.go +++ b/cmd/dep/status.go @@ -7,7 +7,6 @@ package main import ( "bytes" "context" - "encoding/hex" "encoding/json" "flag" "fmt" @@ -779,7 +778,6 @@ func newRawMetadata(metadata *dep.SolveMeta) rawDetailMetadata { return rawDetailMetadata{ AnalyzerName: metadata.AnalyzerName, AnalyzerVersion: metadata.AnalyzerVersion, - InputsDigest: hex.EncodeToString(metadata.InputsDigest), SolverName: metadata.SolverName, SolverVersion: metadata.SolverVersion, } @@ -921,6 +919,10 @@ func (cmd *statusCommand) runStatusAll(ctx *dep.Ctx, out outputter, p *dep.Proje sort.Slice(slp, func(i, j int) bool { return slp[i].Ident().Less(slp[j].Ident()) }) + slcp := p.ChangedLock.Projects() + sort.Slice(slcp, func(i, j int) bool { + return slcp[i].Ident().Less(slcp[j].Ident()) + }) lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree) if lsat.Passed() { diff --git a/cmd/dep/testdata/harness_tests/ensure/add/all-new-double-spec/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/all-new-double-spec/final/Gopkg.lock index eddb25596a..cb57bfd519 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/all-new-double-spec/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/all-new-double-spec/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] branch = "master" + digest = "1:d08235d21a5df95ab12e1eb0191ffe9c4ceb4fa8005f079f6815e8ff507855d3" name = "github.com/sdboyer/deptesttres" packages = ["."] + pruneopts = "UT" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "645b5b52e1bfb9e3db1cefde758485e009edfe5bad611b490582d94467f9c1b0" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptesttres" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/all-new-double/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/all-new-double/final/Gopkg.lock index 8481da4451..3cb5eedc0b 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/all-new-double/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/all-new-double/final/Gopkg.lock @@ -2,23 +2,31 @@ [[projects]] + digest = 
"1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] branch = "master" + digest = "1:d62f7f8be8f431ede67fae7f90d75f923dddc627b309b9134ea1db95f0e34e6d" name = "github.com/sdboyer/deptesttres" packages = [ ".", "subp" ] + pruneopts = "UT" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "432bc141db9511df4e1b5754c6c4d8cf4dd8b4f8d5a13fd7d189c17c14e000b7" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptesttres", + "github.com/sdboyer/deptesttres/subp" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/all-new-spec/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/all-new-spec/final/Gopkg.lock index eddb25596a..cb57bfd519 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/all-new-spec/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/all-new-spec/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] branch = "master" + digest = "1:d08235d21a5df95ab12e1eb0191ffe9c4ceb4fa8005f079f6815e8ff507855d3" name = "github.com/sdboyer/deptesttres" packages = ["."] + pruneopts = "UT" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "645b5b52e1bfb9e3db1cefde758485e009edfe5bad611b490582d94467f9c1b0" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptesttres" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/all-new/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/all-new/final/Gopkg.lock index 5531a3e128..977af25e08 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/all-new/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/all-new/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] branch = "master" + digest = "1:d08235d21a5df95ab12e1eb0191ffe9c4ceb4fa8005f079f6815e8ff507855d3" name = "github.com/sdboyer/deptesttres" packages = ["."] + pruneopts = "UT" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "8f0b74fd1169808bd0e31dd7ad6c601c7b8f7ef25eec9e8a45e72b8a384ebb5c" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptesttres" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/desync/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/desync/final/Gopkg.lock index 2987289328..2e150705d1 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/desync/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/desync/final/Gopkg.lock @@ -2,26 +2,36 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = 
"3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [[projects]] branch = "master" + digest = "1:0dba41ffdf62b10cbbd79009edceb0eaf635031e854fb456fdd5be154802f8d3" name = "github.com/sdboyer/deptesttres" packages = ["."] + pruneopts = "" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "86240895e0ee5788e7e8bb56e0d77afd58009a491b69f6835e546db9e5dacfcd" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos", + "github.com/sdboyer/deptesttres" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/desync/stdout.txt b/cmd/dep/testdata/harness_tests/ensure/add/desync/stdout.txt index c0d7219848..a7dfa2d65e 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/desync/stdout.txt +++ b/cmd/dep/testdata/harness_tests/ensure/add/desync/stdout.txt @@ -1,2 +1 @@ -Warning: Gopkg.lock is out of sync with Gopkg.toml or the project's imports. Fetching sources... diff --git a/cmd/dep/testdata/harness_tests/ensure/add/errs/double-diff-spec/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/errs/double-diff-spec/final/Gopkg.lock index 66ef021c68..b34d4cfd9f 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/errs/double-diff-spec/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/errs/double-diff-spec/final/Gopkg.lock @@ -3,13 +3,15 @@ [[projects]] branch = "master" + digest = "1:d08235d21a5df95ab12e1eb0191ffe9c4ceb4fa8005f079f6815e8ff507855d3" name = "github.com/sdboyer/deptesttres" packages = ["."] + pruneopts = "UT" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "342afd8c8a616d084eb7b67bf3a891710eca3ce5abc3cf60af0dae4ccfdcd001" + input-imports = ["github.com/sdboyer/deptesttres"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/errs/self-add/case2/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/errs/self-add/case2/final/Gopkg.lock index bef2d0092e..10ef811182 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/errs/self-add/case2/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/errs/self-add/case2/final/Gopkg.lock @@ -4,6 +4,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "ab4fef131ee828e96ba67d31a7d690bd5f2f42040c6766b1b12fe856f87e0ff7" + input-imports = [] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/exists-imports/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/exists-imports/final/Gopkg.lock index 66ef021c68..39b9ac12c9 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/exists-imports/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/exists-imports/final/Gopkg.lock @@ -3,13 +3,15 @@ [[projects]] branch = "master" + digest = "1:0dba41ffdf62b10cbbd79009edceb0eaf635031e854fb456fdd5be154802f8d3" name = "github.com/sdboyer/deptesttres" packages = ["."] + pruneopts = "" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "342afd8c8a616d084eb7b67bf3a891710eca3ce5abc3cf60af0dae4ccfdcd001" + input-imports = 
["github.com/sdboyer/deptesttres"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/add/exists-manifest-constraint/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/add/exists-manifest-constraint/final/Gopkg.lock index e235bce702..b22de034ea 100644 --- a/cmd/dep/testdata/harness_tests/ensure/add/exists-manifest-constraint/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/add/exists-manifest-constraint/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] branch = "master" + digest = "1:0dba41ffdf62b10cbbd79009edceb0eaf635031e854fb456fdd5be154802f8d3" name = "github.com/sdboyer/deptesttres" packages = ["."] + pruneopts = "" revision = "54aaeb0023e1f3dcf5f98f31dd8c565457945a12" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "d1fe1d4f4dd98b75908b524bd73d43a4b9e3ce0b9522ea6ce9d6c9ea15190c1d" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptesttres" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hasheq-novendor/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/default/hasheq-novendor/final/Gopkg.lock index c7f497e7a1..188ece4f77 100644 --- a/cmd/dep/testdata/harness_tests/ensure/default/hasheq-novendor/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/default/hasheq-novendor/final/Gopkg.lock @@ -2,14 +2,16 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "14b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hasheq/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/default/hasheq/final/Gopkg.lock index c7f497e7a1..188ece4f77 100644 --- a/cmd/dep/testdata/harness_tests/ensure/default/hasheq/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/default/hasheq/final/Gopkg.lock @@ -2,14 +2,16 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "14b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.lock deleted file mode 100644 index 11cb12c378..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.lock +++ /dev/null @@ -1,16 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
- - -[[projects]] - name = "github.com/sdboyer/deptest" - packages = ["."] - revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" - version = "v1.0.0" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - # manually modified hash digest, it will not match any known inputs - inputs-digest = "94b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.toml b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.toml deleted file mode 100644 index e242e02114..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/final/Gopkg.toml +++ /dev/null @@ -1,4 +0,0 @@ - -[[constraint]] - name = "github.com/sdboyer/deptest" - version = "1.0.0" diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.lock deleted file mode 100644 index 11cb12c378..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.lock +++ /dev/null @@ -1,16 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[[projects]] - name = "github.com/sdboyer/deptest" - packages = ["."] - revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" - version = "v1.0.0" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - # manually modified hash digest, it will not match any known inputs - inputs-digest = "94b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.toml b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.toml deleted file mode 100644 index e242e02114..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/Gopkg.toml +++ /dev/null @@ -1,4 +0,0 @@ - -[[constraint]] - name = "github.com/sdboyer/deptest" - version = "1.0.0" diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/main.go b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/main.go deleted file mode 100644 index 1fe0d19d6a..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/initial/main.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package main - -import ( - _ "github.com/sdboyer/deptest" -) - -func main() { -} diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/testcase.json b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/testcase.json deleted file mode 100644 index 2e54069437..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-novendor-dry/testcase.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "commands": [ - ["ensure", "-no-vendor", "-dry-run"] - ], - "error-expected": "Gopkg.lock was not up to date" -} diff --git a/cmd/dep/testdata/harness_tests/ensure/empty/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/empty/case1/final/Gopkg.lock index c7f497e7a1..49180c231e 100644 --- a/cmd/dep/testdata/harness_tests/ensure/empty/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/empty/case1/final/Gopkg.lock @@ -2,14 +2,16 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "14b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/empty/case2/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/empty/case2/final/Gopkg.lock index 02a1eabe8f..a09c4263a0 100644 --- a/cmd/dep/testdata/harness_tests/ensure/empty/case2/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/empty/case2/final/Gopkg.lock @@ -2,14 +2,16 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "e7725ea56516a42a641aaaf5d48754258d9f3c59949cb8a0e8a21b1ab6e07179" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/empty/case3/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/empty/case3/final/Gopkg.lock index d2153e3747..c00429e5d4 100644 --- a/cmd/dep/testdata/harness_tests/ensure/empty/case3/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/empty/case3/final/Gopkg.lock @@ -3,13 +3,15 @@ [[projects]] branch = "master" + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "e5c16e09ed6f0a1a2b3cf472c34b7fd50861dd070e81d5e623f72e8173f0c065" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/pkg-errors/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/pkg-errors/case1/final/Gopkg.lock index bef2d0092e..10ef811182 100644 --- a/cmd/dep/testdata/harness_tests/ensure/pkg-errors/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/pkg-errors/case1/final/Gopkg.lock @@ -4,6 +4,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "ab4fef131ee828e96ba67d31a7d690bd5f2f42040c6766b1b12fe856f87e0ff7" + input-imports = [] solver-name = 
"gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/final/Gopkg.lock index 944e8436fa..c00429e5d4 100644 --- a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/final/Gopkg.lock @@ -3,13 +3,15 @@ [[projects]] branch = "master" + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "5210e61a67f6e64dabb1eb8f28df2dbeeedfca1588c102067a6ec8a35e0b15f9" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/stdout.txt b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/stdout.txt deleted file mode 100644 index 74542e41e5..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/stdout.txt +++ /dev/null @@ -1,11 +0,0 @@ --CONSTRAINTS- -github.com/sdboyer/deptest -b-master --IMPORTS/REQS- -github.com/sdboyer/deptest --IGNORES- --OVERRIDES- --ANALYZER- -dep -1 - diff --git a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/testcase.json b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/testcase.json index 5641e85616..729de9d0f4 100644 --- a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/testcase.json +++ b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-ignore/testcase.json @@ -1,7 +1,6 @@ { "commands": [ - ["ensure"], - ["hash-inputs"] + ["ensure"] ], "error-expected": "", "vendor-final": [ diff --git a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/final/Gopkg.lock index 53e42dcc48..10ef811182 100644 --- a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/final/Gopkg.lock @@ -4,6 +4,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "b02b7a80e20404724ba5dbffab28e772017b03800916327f58bff0da86071b6a" + input-imports = [] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/stdout.txt b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/stdout.txt deleted file mode 100644 index a273de0e56..0000000000 --- a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/stdout.txt +++ /dev/null @@ -1,9 +0,0 @@ --CONSTRAINTS- --IMPORTS/REQS- --IGNORES- -github.com/sdboyer/deptest* --OVERRIDES- --ANALYZER- -dep -1 - diff --git a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/testcase.json b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/testcase.json index 4f16d1c611..7c94832d8a 100644 --- a/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/testcase.json +++ b/cmd/dep/testdata/harness_tests/ensure/pkg-ignored/wildcard-other-root/testcase.json @@ -1,7 +1,6 @@ { "commands": [ - ["ensure"], - ["hash-inputs"] + ["ensure"] ], "error-expected": "", "vendor-final": [] diff --git 
a/cmd/dep/testdata/harness_tests/ensure/update/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/update/case1/final/Gopkg.lock index df995b1e94..e060be7657 100644 --- a/cmd/dep/testdata/harness_tests/ensure/update/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/update/case1/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1b381263a360eafafe3ef7f9be626672668d17250a3c9a8debd169d1b5e2eebb" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/update/desync/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/update/desync/final/Gopkg.lock index df995b1e94..e060be7657 100644 --- a/cmd/dep/testdata/harness_tests/ensure/update/desync/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/update/desync/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1b381263a360eafafe3ef7f9be626672668d17250a3c9a8debd169d1b5e2eebb" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/ensure/update/desync/stdout.txt b/cmd/dep/testdata/harness_tests/ensure/update/desync/stdout.txt index 9adb1974eb..e69de29bb2 100644 --- a/cmd/dep/testdata/harness_tests/ensure/update/desync/stdout.txt +++ b/cmd/dep/testdata/harness_tests/ensure/update/desync/stdout.txt @@ -1 +0,0 @@ -Warning: Gopkg.lock is out of sync with Gopkg.toml or the project's imports. 
diff --git a/cmd/dep/testdata/harness_tests/ensure/update/novendor/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/update/novendor/final/Gopkg.lock index c7f497e7a1..188ece4f77 100644 --- a/cmd/dep/testdata/harness_tests/ensure/update/novendor/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/update/novendor/final/Gopkg.lock @@ -2,14 +2,16 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "14b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/case1/final/Gopkg.lock index 15b4e08bac..bbaecf622f 100644 --- a/cmd/dep/testdata/harness_tests/init/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/case1/final/Gopkg.lock @@ -2,19 +2,26 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v0.8.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "a0196baa11ea047dd65037287451d36b861b00ea" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1b381263a360eafafe3ef7f9be626672668d17250a3c9a8debd169d1b5e2eebb" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/case2/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/case2/final/Gopkg.lock index 608d5a8d97..a5fdd6b108 100644 --- a/cmd/dep/testdata/harness_tests/init/case2/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/case2/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v0.8.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "ced51326ad990b11098d8076d0f7d72d89eee1ba6e8dacc7bc73be05cddac438" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/case3/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/case3/final/Gopkg.lock index c4f18284da..6f9ea67f7e 100644 --- a/cmd/dep/testdata/harness_tests/init/case3/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/case3/final/Gopkg.lock @@ -3,18 +3,25 @@ [[projects]] branch = "master" + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" [[projects]] + digest = 
"1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "a0196baa11ea047dd65037287451d36b861b00ea" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "af9a783a5430dabcaaf44683c09e2b729e1c0d61f13bfdf6677c4fd0b41387ca" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/case4/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/case4/final/Gopkg.lock index e076e162c8..75fc1a6682 100644 --- a/cmd/dep/testdata/harness_tests/init/case4/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/case4/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "a6ba2237d28d125b55fc6c86e94e33363f1dfd880d471118d36d7587398c30b4" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/case1/final/Gopkg.lock index 7455467c42..852bb14531 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/case1/final/Gopkg.lock @@ -2,25 +2,34 @@ [[projects]] + digest = "1:4f2c2c251356e56fdbe13960044263cdbde63355689e21db07267c4d0de33f3f" name = "github.com/carolynvs/deptest-subpkg" packages = ["subby"] + pruneopts = "UT" revision = "6c41d90f78bb1015696a2ad591debfa8971512d5" [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "def34af0f7cd619e1601eb68bdabf399c9b36a79c2081306adefa0ced03d182b" + input-imports = [ + "github.com/carolynvs/deptest-subpkg/subby", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/case2/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/case2/final/Gopkg.lock index ac445c05d2..09b9e1e14b 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/case2/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/case2/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = 
"1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1ed417a0bec57ffe988fae1cba8f3d49994fb893394d61844e0b3c96d69573fe" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/case3/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/case3/final/Gopkg.lock index 9c2e59f243..a7bf0c2ab3 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/case3/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/case3/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:41a463620bcc5eba54d225d6108f58da4be08bc6307ecc9d17c6d1a5c1f2df30" name = "github.com/carolynvs/deptestglide" packages = ["."] + pruneopts = "UT" revision = "aa7fea6e17ca281c6f210afb93fc3c98ef29a695" version = "v0.1.1" [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "87996f836c70eac9c14221085412cbc96eb98cc6a0782c0724575a56511abe8d" + input-imports = ["github.com/carolynvs/deptestglide"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/case4/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/case4/final/Gopkg.lock index 1aadf7f962..f52bc428f9 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/case4/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/case4/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1ed417a0bec57ffe988fae1cba8f3d49994fb893394d61844e0b3c96d69573fe" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/corrupt-glide/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/corrupt-glide/final/Gopkg.lock index da4412685f..45f3b2de0f 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/corrupt-glide/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/corrupt-glide/final/Gopkg.lock @@ -2,26 +2,32 @@ [[projects]] + digest = "1:c0ee004f748a2e0a166f94d0aae3e4b34d0cb1aa95672075969feded052cde73" name = "github.com/ChinmayR/deptestglideA" packages = ["."] + pruneopts = "UT" revision = "cead75b1cde64ae1353ddbf73f6089303d6001b4" version = "v0.3.0" [[projects]] + digest = "1:855fce2344c810402e7e6d34a1e7e21f6b5e161689d0c3c086f920a212e3b074" name = "github.com/ChinmayR/deptestglideB" packages = ["."] + pruneopts = "UT" revision = "571b81795d767461736e6d0ca69e5f9840bdbf0e" version = "v0.5.0" [[projects]] + digest = "1:2cb412b34b26e26b270605d2c54e94a01b5f018ca060a87543bb3b72e21dca07" name = 
"github.com/ChinmayR/deptestglideC" packages = ["."] + pruneopts = "UT" revision = "4d3546304e8a1ceb6bb01e7e6201e852abb8ae4d" version = "v0.1.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "86bfffc8c6e5de1a4f6c613dcd88d5b76d8b5b17bf797320eb6842bf9239837d" + input-imports = ["github.com/ChinmayR/deptestglideA"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/direct-trans-no-conflict/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/direct-trans-no-conflict/final/Gopkg.lock index acbf835f22..aa049360e8 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/direct-trans-no-conflict/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/direct-trans-no-conflict/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:2bb2f3f169ad31382b7b41969518a99fe8974f4f5a737b6c30501a36f2fd40dc" name = "github.com/ChinmayR/deptestglideA" packages = ["."] + pruneopts = "UT" revision = "26ab0f16d85723be5ff44e5b4bd2a8e0f3a34989" version = "v0.2.0" [[projects]] + digest = "1:d35fc62a5ecad295b86623f47a2b3d6ce4e81cd9584c04b41d05c9cafea9137e" name = "github.com/ChinmayR/deptestglideB" packages = ["."] + pruneopts = "UT" revision = "143bb0e8f4cc3a3227a2d250f99d08ee879c7909" version = "v0.2.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "9f6691009992b85820af581ac5f81b1537fd791351a83ec852c7a553939dbae5" + input-imports = [ + "github.com/ChinmayR/deptestglideA", + "github.com/ChinmayR/deptestglideB" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/trans-trans-unspecified/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/trans-trans-unspecified/final/Gopkg.lock index 2cea226574..3d8decf695 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/trans-trans-unspecified/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/trans-trans-unspecified/final/Gopkg.lock @@ -2,26 +2,35 @@ [[projects]] + digest = "1:f3ebbb24c30241998a9b891d83113b4edd70b7d710fac33a4a20cb7b135f2677" name = "github.com/ChinmayR/deptestglideA" packages = ["."] + pruneopts = "UT" revision = "703f28fdee407d70dcc4cb774a0fbb82fa70daa9" version = "v0.4.0" [[projects]] + digest = "1:1c78f2479f39bf0b209d0ec082acfb2816ad3c79813ac49a57ce8997a6039b29" name = "github.com/ChinmayR/deptestglideB" packages = ["."] + pruneopts = "UT" revision = "55b6737d9d84461196123a51baa02b156abc4543" version = "v0.4.0" [[projects]] + digest = "1:2cb412b34b26e26b270605d2c54e94a01b5f018ca060a87543bb3b72e21dca07" name = "github.com/ChinmayR/deptestglideC" packages = ["."] + pruneopts = "UT" revision = "4d3546304e8a1ceb6bb01e7e6201e852abb8ae4d" version = "v0.1.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "178a09819c33edcef8eb9c4ed26cc9053aed45e4f04645085feaef7921c8688c" + input-imports = [ + "github.com/ChinmayR/deptestglideA", + "github.com/ChinmayR/deptestglideB" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glide/trans-trans/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glide/trans-trans/final/Gopkg.lock index 1184b2c41b..868fafff4c 100644 --- a/cmd/dep/testdata/harness_tests/init/glide/trans-trans/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glide/trans-trans/final/Gopkg.lock @@ -2,26 +2,35 @@ [[projects]] + digest = "1:698cd4951cb265ae57d473cc883630bd2d5cc9a472fe513acd54886751cb0457" name = "github.com/ChinmayR/deptestglideA" packages = ["."] + pruneopts = "UT" 
revision = "2f77d68963bb3dff94b88330d930cb59714cd2fc" version = "v0.5.0" [[projects]] + digest = "1:0ed6d2f0ec01022dbca6d19f6a89a4200a9430c51f07309446c3751591fc3c39" name = "github.com/ChinmayR/deptestglideB" packages = ["."] + pruneopts = "UT" revision = "7f8abdec9e29a008d40cfcbb0848b82cc4000d25" version = "v0.3.0" [[projects]] + digest = "1:4f14135d41f9b3692c6ac4e9defe4ea020ddeb41a169ba26fd1abdd193e097cd" name = "github.com/ChinmayR/deptestglideC" packages = ["."] + pruneopts = "UT" revision = "73ba3c1897d21e64bec0b89a026a1acb6604e846" version = "v0.2.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "e4ba06c77ad87955efd40ad92c8ddc0cd6dba1137fecdc958f02cc79b1f64202" + input-imports = [ + "github.com/ChinmayR/deptestglideA", + "github.com/ChinmayR/deptestglideB" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/glock/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/glock/case1/final/Gopkg.lock index ac445c05d2..09b9e1e14b 100644 --- a/cmd/dep/testdata/harness_tests/init/glock/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/glock/case1/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1ed417a0bec57ffe988fae1cba8f3d49994fb893394d61844e0b3c96d69573fe" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/godep/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/godep/case1/final/Gopkg.lock index ac445c05d2..09b9e1e14b 100644 --- a/cmd/dep/testdata/harness_tests/init/godep/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/godep/case1/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1ed417a0bec57ffe988fae1cba8f3d49994fb893394d61844e0b3c96d69573fe" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/govend/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/govend/case1/final/Gopkg.lock index ac445c05d2..09b9e1e14b 100644 --- a/cmd/dep/testdata/harness_tests/init/govend/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/govend/case1/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" 
[[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1ed417a0bec57ffe988fae1cba8f3d49994fb893394d61844e0b3c96d69573fe" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/govendor/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/govendor/case1/final/Gopkg.lock index 529e48897d..09b9e1e14b 100644 --- a/cmd/dep/testdata/harness_tests/init/govendor/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/govendor/case1/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "9cc662f2e1b80c8df205d9d667fe2c47825a06961ceae378f44a8290d01dd359" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/gvt/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/gvt/case1/final/Gopkg.lock index 1eb5e5e887..ce29be8565 100644 --- a/cmd/dep/testdata/harness_tests/init/gvt/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/gvt/case1/final/Gopkg.lock @@ -2,27 +2,37 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" source = "https://github.com/carolynvs/deptest" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [[projects]] branch = "v2" + digest = "1:10978cfda94a2069ac38ed0884b606aafe89f4578ff700b7845b02201a2d6b51" name = "gopkg.in/yaml.v2" packages = ["."] + pruneopts = "UT" revision = "f7716cbe52baa25d2e9b0d0da546fcf909fc16b4" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "d1681978cbca0e845950451461e0d69b58c5e896d9fd10ec5c159a4db3175161" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos", + "gopkg.in/yaml.v2" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/skip-hidden/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/skip-hidden/final/Gopkg.lock index c7f497e7a1..49180c231e 100644 --- a/cmd/dep/testdata/harness_tests/init/skip-hidden/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/skip-hidden/final/Gopkg.lock @@ -2,14 +2,16 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [solve-meta] analyzer-name = "dep" 
analyzer-version = 1 - inputs-digest = "14b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/init/vndr/case1/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/init/vndr/case1/final/Gopkg.lock index ac445c05d2..09b9e1e14b 100644 --- a/cmd/dep/testdata/harness_tests/init/vndr/case1/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/init/vndr/case1/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "UT" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "UT" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1ed417a0bec57ffe988fae1cba8f3d49994fb893394d61844e0b3c96d69573fe" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/case1/dot/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/case1/dot/final/Gopkg.lock index 77278d07bc..6dd996ff8c 100644 --- a/cmd/dep/testdata/harness_tests/status/case1/dot/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/status/case1/dot/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v0.8.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1b381263a360eafafe3ef7f9be626672668d17250a3c9a8debd169d1b5e2eebb" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/case1/json/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/case1/json/final/Gopkg.lock index 77278d07bc..6dd996ff8c 100644 --- a/cmd/dep/testdata/harness_tests/status/case1/json/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/status/case1/json/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v0.8.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1b381263a360eafafe3ef7f9be626672668d17250a3c9a8debd169d1b5e2eebb" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/case1/table/final/Gopkg.lock 
b/cmd/dep/testdata/harness_tests/status/case1/table/final/Gopkg.lock index 77278d07bc..6dd996ff8c 100644 --- a/cmd/dep/testdata/harness_tests/status/case1/table/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/status/case1/table/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v0.8.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1b381263a360eafafe3ef7f9be626672668d17250a3c9a8debd169d1b5e2eebb" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/case1/template/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/case1/template/final/Gopkg.lock index 77278d07bc..6dd996ff8c 100644 --- a/cmd/dep/testdata/harness_tests/status/case1/template/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/status/case1/template/final/Gopkg.lock @@ -2,20 +2,27 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v0.8.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1b381263a360eafafe3ef7f9be626672668d17250a3c9a8debd169d1b5e2eebb" + input-imports = [ + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.lock deleted file mode 100644 index bef2d0092e..0000000000 --- a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.lock +++ /dev/null @@ -1,9 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
- - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "ab4fef131ee828e96ba67d31a7d690bd5f2f42040c6766b1b12fe856f87e0ff7" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.toml b/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.toml deleted file mode 100644 index 418ac251f8..0000000000 --- a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/final/Gopkg.toml +++ /dev/null @@ -1,2 +0,0 @@ -ignored = ["github.com/sdboyer/deptestdos"] - diff --git a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.lock deleted file mode 100644 index bef2d0092e..0000000000 --- a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.lock +++ /dev/null @@ -1,9 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "ab4fef131ee828e96ba67d31a7d690bd5f2f42040c6766b1b12fe856f87e0ff7" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.toml b/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.toml deleted file mode 100644 index 418ac251f8..0000000000 --- a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/Gopkg.toml +++ /dev/null @@ -1,2 +0,0 @@ -ignored = ["github.com/sdboyer/deptestdos"] - diff --git a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/main.go b/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/main.go deleted file mode 100644 index 6fa0454844..0000000000 --- a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/initial/main.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package main - -import ( - _ "github.com/sdboyer/deptestdos" -) - -func main() { -} diff --git a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/testcase.json b/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/testcase.json deleted file mode 100644 index dc1776d5f2..0000000000 --- a/cmd/dep/testdata/harness_tests/status/ignore_lock_mismatch/testcase.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "commands": [ - ["status"] - ], - "error-expected": "This happens when Gopkg.toml is modified", - "vendor-final": [] -} diff --git a/cmd/dep/testdata/harness_tests/status/old_constraints/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/old_constraints/final/Gopkg.lock index 7f844d35d6..27b9d7d0f6 100644 --- a/cmd/dep/testdata/harness_tests/status/old_constraints/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/status/old_constraints/final/Gopkg.lock @@ -2,26 +2,36 @@ [[projects]] + digest = "1:9f15720b74cca39adad1ea61f19e1aee73ed1a83cc3922521101fc758fa75715" name = "github.com/carolynvs/go-dep-test" packages = ["."] + pruneopts = "" revision = "b9c5511fa463628e6251554db29a4be161d02aed" version = "0.1.0" [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "c89811fc98c9a1310c94dc63b84f364d13c46ea3a40bd2cba7d77377ab346543" + input-imports = [ + "github.com/carolynvs/go-dep-test", + "github.com/sdboyer/deptest", + "github.com/sdboyer/deptestdos" + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/cmd/dep/testdata/harness_tests/status/old_constraints/stdout.txt b/cmd/dep/testdata/harness_tests/status/old_constraints/stdout.txt index ad557f414b..ec1c1a398e 100644 --- a/cmd/dep/testdata/harness_tests/status/old_constraints/stdout.txt +++ b/cmd/dep/testdata/harness_tests/status/old_constraints/stdout.txt @@ -1,2 +1,2 @@ PROJECT CONSTRAINT REVISION LATEST -github.com/carolynvs/go-dep-test ^0.1.0 b9c5511 4069198 +github.com/carolynvs/go-dep-test ^0.1.0 b9c5511 4069198 diff --git a/cmd/dep/testdata/harness_tests/status/override_constraint/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/override_constraint/final/Gopkg.lock index f987a57915..304b102928 100644 --- a/cmd/dep/testdata/harness_tests/status/override_constraint/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/status/override_constraint/final/Gopkg.lock @@ -2,20 +2,24 @@ [[projects]] + digest = "1:6a4b7ea94689d9d4f231605ecc0248fbcbf16419d8571adb59c00396e37bbfc2" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "3f4c3bea144e112a69bbe5d8d01c1b09a544253f" version = "v0.8.1" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "5c607206be5decd28e6263ffffdcee067266015e" version = "v2.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "1c4444f47ab5d5c484634d1a0c95d99beb879a37337bc0d7aecbd97cf79b6cb1" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git 
a/cmd/dep/testdata/harness_tests/status/revision_constraint/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/status/revision_constraint/final/Gopkg.lock index 204b990861..4e58519b10 100644 --- a/cmd/dep/testdata/harness_tests/status/revision_constraint/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/status/revision_constraint/final/Gopkg.lock @@ -2,19 +2,23 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [[projects]] + digest = "1:d71dc37a7f6ffbbe0c768f28d904acade8f068cbd96c6e6f0885425d3c3b8df9" name = "github.com/sdboyer/deptestdos" packages = ["."] + pruneopts = "" revision = "a0196baa11ea047dd65037287451d36b861b00ea" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "a64abd431f23d6fbc8d83aef311d33ab12b3a6c74a46c271e89c2542c98bbb9a" + input-imports = ["github.com/sdboyer/deptestdos"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/context.go b/context.go index 91eb924427..d1af270dfe 100644 --- a/context.go +++ b/context.go @@ -175,40 +175,39 @@ func (c *Ctx) LoadProject() (*Project, error) { return nil, errors.Wrapf(err, "error while parsing %s", mp) } - lp := filepath.Join(p.AbsRoot, LockName) - lf, err := os.Open(lp) - if err != nil { - if os.IsNotExist(err) { - // It's fine for the lock not to exist - return p, nil - } - // But if a lock does exist and we can't open it, that's a problem - return nil, errors.Wrapf(err, "could not open %s", lp) - } - defer lf.Close() - - p.Lock, err = readLock(lf) - if err != nil { - return nil, errors.Wrapf(err, "error while parsing %s", lp) - } - // Parse in the root package tree. ptree, err := p.parseRootPackageTree() if err != nil { return nil, err } - // If there's a current Lock, apply the input and pruneopt changes that we - // can know without solving. - if p.Lock != nil { - p.ChangedLock = p.Lock.dup() - p.ChangedLock.SolveMeta.InputImports = externalImportList(ptree, p.Manifest) + lp := filepath.Join(p.AbsRoot, LockName) + lf, err := os.Open(lp) + if err == nil { + defer lf.Close() + + p.Lock, err = readLock(lf) + if err != nil { + return nil, errors.Wrapf(err, "error while parsing %s", lp) + } + + // If there's a current Lock, apply the input and pruneopt changes that we + // can know without solving. + if p.Lock != nil { + p.ChangedLock = p.Lock.dup() + p.ChangedLock.SolveMeta.InputImports = externalImportList(ptree, p.Manifest) - for k, lp := range p.ChangedLock.Projects() { - vp := lp.(verify.VerifiableProject) - vp.PruneOpts = p.Manifest.PruneOptions.PruneOptionsFor(lp.Ident().ProjectRoot) - p.ChangedLock.P[k] = vp + for k, lp := range p.ChangedLock.Projects() { + vp := lp.(verify.VerifiableProject) + vp.PruneOpts = p.Manifest.PruneOptions.PruneOptionsFor(lp.Ident().ProjectRoot) + p.ChangedLock.P[k] = vp + } } + + } else if !os.IsNotExist(err) { + // It's fine for the lock not to exist, but if a file does exist and we + // can't open it, that's a problem. + return nil, errors.Wrapf(err, "could not open %s", lp) } return p, nil diff --git a/gps/verify/lockdiff.go b/gps/verify/lockdiff.go index d8cb087af0..32309d62d8 100644 --- a/gps/verify/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -6,43 +6,12 @@ package verify import ( "bytes" - "fmt" "sort" "strings" "github.com/golang/dep/gps" ) -// StringDiff represents a modified string value. 
-// * Added: Previous = nil, Current != nil -// * Deleted: Previous != nil, Current = nil -// * Modified: Previous != nil, Current != nil -// * No Change: Previous = Current, or a nil pointer -type StringDiff struct { - Previous string - Current string -} - -func (diff *StringDiff) String() string { - if diff == nil { - return "" - } - - if diff.Previous == "" && diff.Current != "" { - return fmt.Sprintf("+ %s", diff.Current) - } - - if diff.Previous != "" && diff.Current == "" { - return fmt.Sprintf("- %s", diff.Previous) - } - - if diff.Previous != diff.Current { - return fmt.Sprintf("%s -> %s", diff.Previous, diff.Current) - } - - return diff.Current -} - // sortLockedProjects returns a sorted copy of lps, or itself if already sorted. func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { if len(lps) <= 1 || sort.SliceIsSorted(lps, func(i, j int) bool { @@ -60,31 +29,12 @@ func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { return cp } -// LockDiff is the set of differences between an existing lock file and an updated lock file. -// Fields are only populated when there is a difference, otherwise they are empty. -type LockDiff struct { - Add []LockedProjectDiff - Remove []LockedProjectDiff - Modify []LockedProjectDiff -} - type LockDelta struct { AddedImportInputs []string RemovedImportInputs []string ProjectDeltas map[gps.ProjectRoot]LockedProjectDelta } -// LockedProjectDiff contains the before and after snapshot of a project reference. -// Fields are only populated when there is a difference, otherwise they are empty. -type LockedProjectDiff struct { - Name gps.ProjectRoot - Source *StringDiff - Version *StringDiff - Branch *StringDiff - Revision *StringDiff - Packages []StringDiff -} - type LockedProjectDelta struct { Name gps.ProjectRoot ProjectRemoved, ProjectAdded bool @@ -258,7 +208,9 @@ func DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDelta { return ld } -type DeltaDimension uint16 +// DeltaDimension defines a bitset enumerating all of the different dimensions +// along which a Lock, and its constitutent components, can change. +type DeltaDimension uint32 const ( InputImportsChanged DeltaDimension = 1 << iota @@ -274,13 +226,17 @@ const ( AnyChanged = (1 << iota) - 1 ) -func (ld LockDelta) Changed(flags DeltaDimension) bool { - if flags&InputImportsChanged != 0 && (len(ld.AddedImportInputs) > 0 || len(ld.RemovedImportInputs) > 0) { +// Changed indicates whether the delta contains a change along the dimensions +// with their corresponding bits set. +// +// This implementation checks the topmost-level Lock properties +func (ld LockDelta) Changed(dims DeltaDimension) bool { + if dims&InputImportsChanged != 0 && (len(ld.AddedImportInputs) > 0 || len(ld.RemovedImportInputs) > 0) { return true } for _, ld := range ld.ProjectDeltas { - if ld.Changed(AnyChanged) { + if ld.Changed(dims & ^InputImportsChanged) { return true } } @@ -288,6 +244,19 @@ func (ld LockDelta) Changed(flags DeltaDimension) bool { return false } +func (ld LockDelta) Changes(DeltaDimension) DeltaDimension { + var dd DeltaDimension + if len(ld.AddedImportInputs) > 0 || len(ld.RemovedImportInputs) > 0 { + dd |= InputImportsChanged + } + + for _, ld := range ld.ProjectDeltas { + dd |= ld.Changes() + } + + return dd +} + // Changed indicates whether the delta contains a change along the dimensions // with their corresponding bits set. 
// @@ -306,6 +275,19 @@ func (ld LockedProjectDelta) Changed(flags DeltaDimension) bool { return ld.LockedProjectPartsDelta.Changed(flags & ^ProjectAdded & ^ProjectRemoved) } +func (ld LockedProjectDelta) Changes() DeltaDimension { + var dd DeltaDimension + if ld.WasAdded() { + dd |= ProjectAdded + } + + if ld.WasRemoved() { + dd |= ProjectRemoved + } + + return dd | ld.LockedProjectPartsDelta.Changes() +} + func (ld LockedProjectDelta) WasRemoved() bool { return ld.ProjectRemoved } @@ -318,27 +300,21 @@ func (ld LockedProjectPartsDelta) Changed(flags DeltaDimension) bool { if flags&SourceChanged != 0 && ld.SourceChanged() { return true } - if flags&RevisionChanged != 0 && ld.RevisionChanged() { return true } - if flags&PruneOptsChanged != 0 && ld.PruneOptsChanged() { return true } - if flags&HashChanged != 0 && ld.HashChanged { return true } - if flags&HashVersionChanged != 0 && ld.HashVersionChanged { return true } - if flags&VersionChanged != 0 && ld.VersionChanged() { return true } - if flags&PackagesChanged != 0 && ld.PackagesChanged() { return true } @@ -346,6 +322,33 @@ func (ld LockedProjectPartsDelta) Changed(flags DeltaDimension) bool { return false } +func (ld LockedProjectPartsDelta) Changes() DeltaDimension { + var dd DeltaDimension + if ld.SourceChanged() { + dd |= SourceChanged + } + if ld.RevisionChanged() { + dd |= RevisionChanged + } + if ld.PruneOptsChanged() { + dd |= PruneOptsChanged + } + if ld.HashChanged { + dd |= HashChanged + } + if ld.HashVersionChanged { + dd |= HashVersionChanged + } + if ld.VersionChanged() { + dd |= VersionChanged + } + if ld.PackagesChanged() { + dd |= PackagesChanged + } + + return dd +} + func (ld LockedProjectPartsDelta) SourceChanged() bool { return ld.SourceBefore != ld.SourceAfter } @@ -384,209 +387,7 @@ func (ld LockedProjectPartsDelta) PruneOptsChanged() bool { return ld.PruneOptsBefore != ld.PruneOptsAfter } -// DiffLocks compares two locks and identifies the differences between them. -// Returns nil if there are no differences. 
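As a rough illustration of how the DeltaDimension bitset and the Changed/Changes methods above fit together, here is a minimal, hypothetical sketch; only the verify identifiers introduced in the hunks above are assumed, and the reportLockChanges helper is invented for the example:

package example

import (
	"fmt"

	"github.com/golang/dep/gps/verify"
)

// reportLockChanges is a hypothetical caller: it picks the dimensions it
// cares about, asks the delta whether any of them changed, and then inspects
// the full set of changed dimensions returned by Changes.
func reportLockChanges(delta verify.LockDelta) {
	// Dimensions that would force a re-solve in this example.
	dims := verify.InputImportsChanged | verify.VersionChanged | verify.RevisionChanged
	if delta.Changed(dims) {
		fmt.Println("lock differs along a dimension that matters here")
	}

	// Changes reports every dimension that differs, so individual bits can be
	// tested after a single call.
	if delta.Changes(verify.AnyChanged)&verify.PruneOptsChanged != 0 {
		fmt.Println("prune options changed")
	}
}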
-func DiffLocks(l1, l2 gps.Lock) *LockDiff { - // Default nil locks to empty locks, so that we can still generate a diff - if l1 == nil { - l1 = gps.SimpleLock{} - } - if l2 == nil { - l2 = gps.SimpleLock{} - } - - p1, p2 := l1.Projects(), l2.Projects() - - p1 = sortLockedProjects(p1) - p2 = sortLockedProjects(p2) - - diff := LockDiff{} - - var i2next int - for i1 := 0; i1 < len(p1); i1++ { - lp1 := p1[i1] - pr1 := lp1.Ident().ProjectRoot - - var matched bool - for i2 := i2next; i2 < len(p2); i2++ { - lp2 := p2[i2] - pr2 := lp2.Ident().ProjectRoot - - switch strings.Compare(string(pr1), string(pr2)) { - case 0: // Found a matching project - matched = true - pdiff := DiffProjects(lp1, lp2) - if pdiff != nil { - diff.Modify = append(diff.Modify, *pdiff) - } - i2next = i2 + 1 // Don't evaluate to this again - case +1: // Found a new project - add := buildLockedProjectDiff(lp2) - diff.Add = append(diff.Add, add) - i2next = i2 + 1 // Don't evaluate to this again - continue // Keep looking for a matching project - case -1: // Project has been removed, handled below - continue - } - - break // Done evaluating this project, move onto the next - } - - if !matched { - remove := buildLockedProjectDiff(lp1) - diff.Remove = append(diff.Remove, remove) - } - } - - // Anything that still hasn't been evaluated are adds - for i2 := i2next; i2 < len(p2); i2++ { - lp2 := p2[i2] - add := buildLockedProjectDiff(lp2) - diff.Add = append(diff.Add, add) - } - - if len(diff.Add) == 0 && len(diff.Remove) == 0 && len(diff.Modify) == 0 { - return nil // The locks are the equivalent - } - return &diff -} - -// DiffFor checks to see if there was a diff for the provided ProjectRoot. The -// first return value is a 0 if there was no diff, 1 if it was added, 2 if it -// was removed, and 3 if it was modified. -func (ld *LockDiff) DiffFor(pr gps.ProjectRoot) (uint8, LockedProjectDiff) { - for _, lpd := range ld.Add { - if lpd.Name == pr { - return 1, lpd - } - } - - for _, lpd := range ld.Remove { - if lpd.Name == pr { - return 2, lpd - } - } - - for _, lpd := range ld.Modify { - if lpd.Name == pr { - return 3, lpd - } - } - - return 0, LockedProjectDiff{} -} - -func buildLockedProjectDiff(lp gps.LockedProject) LockedProjectDiff { - s2 := lp.Ident().Source - r2, b2, v2 := gps.VersionComponentStrings(lp.Version()) - - var rev, version, branch, source *StringDiff - if s2 != "" { - source = &StringDiff{Previous: s2, Current: s2} - } - if r2 != "" { - rev = &StringDiff{Previous: r2, Current: r2} - } - if b2 != "" { - branch = &StringDiff{Previous: b2, Current: b2} - } - if v2 != "" { - version = &StringDiff{Previous: v2, Current: v2} - } - - add := LockedProjectDiff{ - Name: lp.Ident().ProjectRoot, - Source: source, - Revision: rev, - Version: version, - Branch: branch, - Packages: make([]StringDiff, len(lp.Packages())), - } - for i, pkg := range lp.Packages() { - add.Packages[i] = StringDiff{Previous: pkg, Current: pkg} - } - return add -} - -// DiffProjects compares two projects and identifies the differences between them. -// Returns nil if there are no differences. 
-func DiffProjects(lp1, lp2 gps.LockedProject) *LockedProjectDiff { - diff := LockedProjectDiff{Name: lp1.Ident().ProjectRoot} - - s1 := lp1.Ident().Source - s2 := lp2.Ident().Source - if s1 != s2 { - diff.Source = &StringDiff{Previous: s1, Current: s2} - } - - r1, b1, v1 := gps.VersionComponentStrings(lp1.Version()) - r2, b2, v2 := gps.VersionComponentStrings(lp2.Version()) - if r1 != r2 { - diff.Revision = &StringDiff{Previous: r1, Current: r2} - } - if b1 != b2 { - diff.Branch = &StringDiff{Previous: b1, Current: b2} - } - if v1 != v2 { - diff.Version = &StringDiff{Previous: v1, Current: v2} - } - - p1 := lp1.Packages() - p2 := lp2.Packages() - if !sort.StringsAreSorted(p1) { - p1 = make([]string, len(p1)) - copy(p1, lp1.Packages()) - sort.Strings(p1) - } - if !sort.StringsAreSorted(p2) { - p2 = make([]string, len(p2)) - copy(p2, lp2.Packages()) - sort.Strings(p2) - } - - var i2next int - for i1 := 0; i1 < len(p1); i1++ { - pkg1 := p1[i1] - - var matched bool - for i2 := i2next; i2 < len(p2); i2++ { - pkg2 := p2[i2] - - switch strings.Compare(pkg1, pkg2) { - case 0: // Found matching package - matched = true - i2next = i2 + 1 // Don't evaluate to this again - case +1: // Found a new package - add := StringDiff{Current: pkg2} - diff.Packages = append(diff.Packages, add) - i2next = i2 + 1 // Don't evaluate to this again - continue // Keep looking for a match - case -1: // Package has been removed (handled below) - continue - } - - break // Done evaluating this package, move onto the next - } - - if !matched { - diff.Packages = append(diff.Packages, StringDiff{Previous: pkg1}) - } - } - - // Anything that still hasn't been evaluated are adds - for i2 := i2next; i2 < len(p2); i2++ { - pkg2 := p2[i2] - add := StringDiff{Current: pkg2} - diff.Packages = append(diff.Packages, add) - } - - if diff.Source == nil && diff.Version == nil && diff.Revision == nil && len(diff.Packages) == 0 { - return nil // The projects are equivalent - } - return &diff -} - -type VendorDiff struct { - LockDelta LockDelta - VendorStatus map[string]VendorStatus -} +//type VendorDiff struct { +//LockDelta LockDelta +//VendorStatus map[string]VendorStatus +//} diff --git a/internal/feedback/feedback.go b/internal/feedback/feedback.go index 2f20cbcbca..69e2c7235e 100644 --- a/internal/feedback/feedback.go +++ b/internal/feedback/feedback.go @@ -10,7 +10,6 @@ import ( "log" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/verify" ) const ( @@ -88,7 +87,7 @@ type brokenImport interface { } type modifiedImport struct { - source, branch, revision, version *verify.StringDiff + source, branch, revision, version *StringDiff projectPath string } @@ -124,7 +123,7 @@ func (mi modifiedImport) String() string { } type removedImport struct { - source, branch, revision, version *verify.StringDiff + source, branch, revision, version *StringDiff projectPath string } @@ -158,10 +157,13 @@ type BrokenImportFeedback struct { // NewBrokenImportFeedback builds a feedback entry that compares an initially // imported, unsolved lock to the same lock after it has been solved. 
-func NewBrokenImportFeedback(ld *verify.LockDiff) *BrokenImportFeedback { +func NewBrokenImportFeedback(ld *LockDiff) *BrokenImportFeedback { bi := &BrokenImportFeedback{} + if ld == nil { + return bi + } + for _, lpd := range ld.Modify { - // Ignore diffs where it's just a modified package set if lpd.Branch == nil && lpd.Revision == nil && lpd.Source == nil && lpd.Version == nil { continue } diff --git a/internal/feedback/feedback_test.go b/internal/feedback/feedback_test.go index 9baf0fb185..7008adbd90 100644 --- a/internal/feedback/feedback_test.go +++ b/internal/feedback/feedback_test.go @@ -10,9 +10,7 @@ import ( "strings" "testing" - "github.com/golang/dep" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/verify" _ "github.com/golang/dep/internal/test" // DO NOT REMOVE, allows go test ./... -update to work ) @@ -144,14 +142,14 @@ func TestFeedback_BrokenImport(t *testing.T) { for _, c := range cases { t.Run(c.name, func(t *testing.T) { buf := &bytes.Buffer{} - ol := dep.Lock{ - P: []gps.LockedProject{gps.NewLockedProject(c.pID, c.oldVersion, nil)}, + ol := gps.SimpleLock{ + gps.NewLockedProject(c.pID, c.oldVersion, nil), } - l := dep.Lock{ - P: []gps.LockedProject{gps.NewLockedProject(c.altPID, c.currentVersion, nil)}, + l := gps.SimpleLock{ + gps.NewLockedProject(c.altPID, c.currentVersion, nil), } log := log2.New(buf, "", 0) - feedback := NewBrokenImportFeedback(verify.DiffLocks(&ol, &l)) + feedback := NewBrokenImportFeedback(DiffLocks(&ol, &l)) feedback.LogFeedback(log) got := strings.TrimSpace(buf.String()) if c.want != got { diff --git a/internal/feedback/lockdiff.go b/internal/feedback/lockdiff.go new file mode 100644 index 0000000000..d7fe535241 --- /dev/null +++ b/internal/feedback/lockdiff.go @@ -0,0 +1,252 @@ +package feedback + +import ( + "fmt" + "sort" + "strings" + + "github.com/golang/dep/gps" +) + +// StringDiff represents a modified string value. +// * Added: Previous = nil, Current != nil +// * Deleted: Previous != nil, Current = nil +// * Modified: Previous != nil, Current != nil +// * No Change: Previous = Current, or a nil pointer +type StringDiff struct { + Previous string + Current string +} + +func (diff *StringDiff) String() string { + if diff == nil { + return "" + } + + if diff.Previous == "" && diff.Current != "" { + return fmt.Sprintf("+ %s", diff.Current) + } + + if diff.Previous != "" && diff.Current == "" { + return fmt.Sprintf("- %s", diff.Previous) + } + + if diff.Previous != diff.Current { + return fmt.Sprintf("%s -> %s", diff.Previous, diff.Current) + } + + return diff.Current +} + +// LockDiff is the set of differences between an existing lock file and an updated lock file. +// Fields are only populated when there is a difference, otherwise they are empty. +type LockDiff struct { + Add []LockedProjectDiff + Remove []LockedProjectDiff + Modify []LockedProjectDiff +} + +// LockedProjectDiff contains the before and after snapshot of a project reference. +// Fields are only populated when there is a difference, otherwise they are empty. +type LockedProjectDiff struct { + Name gps.ProjectRoot + Source *StringDiff + Version *StringDiff + Branch *StringDiff + Revision *StringDiff + Packages []StringDiff +} + +// DiffLocks compares two locks and identifies the differences between them. +// Returns nil if there are no differences. 
+func DiffLocks(l1, l2 gps.Lock) *LockDiff { + // Default nil locks to empty locks, so that we can still generate a diff + if l1 == nil { + l1 = gps.SimpleLock{} + } + if l2 == nil { + l2 = gps.SimpleLock{} + } + + p1, p2 := l1.Projects(), l2.Projects() + + p1 = sortLockedProjects(p1) + p2 = sortLockedProjects(p2) + + diff := LockDiff{} + + var i2next int + for i1 := 0; i1 < len(p1); i1++ { + lp1 := p1[i1] + pr1 := lp1.Ident().ProjectRoot + + var matched bool + for i2 := i2next; i2 < len(p2); i2++ { + lp2 := p2[i2] + pr2 := lp2.Ident().ProjectRoot + + switch strings.Compare(string(pr1), string(pr2)) { + case 0: // Found a matching project + matched = true + pdiff := DiffProjects(lp1, lp2) + if pdiff != nil { + diff.Modify = append(diff.Modify, *pdiff) + } + i2next = i2 + 1 // Don't evaluate to this again + case +1: // Found a new project + add := buildLockedProjectDiff(lp2) + diff.Add = append(diff.Add, add) + i2next = i2 + 1 // Don't evaluate to this again + continue // Keep looking for a matching project + case -1: // Project has been removed, handled below + continue + } + + break // Done evaluating this project, move onto the next + } + + if !matched { + remove := buildLockedProjectDiff(lp1) + diff.Remove = append(diff.Remove, remove) + } + } + + // Anything that still hasn't been evaluated are adds + for i2 := i2next; i2 < len(p2); i2++ { + lp2 := p2[i2] + add := buildLockedProjectDiff(lp2) + diff.Add = append(diff.Add, add) + } + + if len(diff.Add) == 0 && len(diff.Remove) == 0 && len(diff.Modify) == 0 { + return nil // The locks are the equivalent + } + return &diff +} + +func buildLockedProjectDiff(lp gps.LockedProject) LockedProjectDiff { + s2 := lp.Ident().Source + r2, b2, v2 := gps.VersionComponentStrings(lp.Version()) + + var rev, version, branch, source *StringDiff + if s2 != "" { + source = &StringDiff{Previous: s2, Current: s2} + } + if r2 != "" { + rev = &StringDiff{Previous: r2, Current: r2} + } + if b2 != "" { + branch = &StringDiff{Previous: b2, Current: b2} + } + if v2 != "" { + version = &StringDiff{Previous: v2, Current: v2} + } + + add := LockedProjectDiff{ + Name: lp.Ident().ProjectRoot, + Source: source, + Revision: rev, + Version: version, + Branch: branch, + Packages: make([]StringDiff, len(lp.Packages())), + } + for i, pkg := range lp.Packages() { + add.Packages[i] = StringDiff{Previous: pkg, Current: pkg} + } + return add +} + +// DiffProjects compares two projects and identifies the differences between them. +// Returns nil if there are no differences. 
+func DiffProjects(lp1, lp2 gps.LockedProject) *LockedProjectDiff { + diff := LockedProjectDiff{Name: lp1.Ident().ProjectRoot} + + s1 := lp1.Ident().Source + s2 := lp2.Ident().Source + if s1 != s2 { + diff.Source = &StringDiff{Previous: s1, Current: s2} + } + + r1, b1, v1 := gps.VersionComponentStrings(lp1.Version()) + r2, b2, v2 := gps.VersionComponentStrings(lp2.Version()) + if r1 != r2 { + diff.Revision = &StringDiff{Previous: r1, Current: r2} + } + if b1 != b2 { + diff.Branch = &StringDiff{Previous: b1, Current: b2} + } + if v1 != v2 { + diff.Version = &StringDiff{Previous: v1, Current: v2} + } + + p1 := lp1.Packages() + p2 := lp2.Packages() + if !sort.StringsAreSorted(p1) { + p1 = make([]string, len(p1)) + copy(p1, lp1.Packages()) + sort.Strings(p1) + } + if !sort.StringsAreSorted(p2) { + p2 = make([]string, len(p2)) + copy(p2, lp2.Packages()) + sort.Strings(p2) + } + + var i2next int + for i1 := 0; i1 < len(p1); i1++ { + pkg1 := p1[i1] + + var matched bool + for i2 := i2next; i2 < len(p2); i2++ { + pkg2 := p2[i2] + + switch strings.Compare(pkg1, pkg2) { + case 0: // Found matching package + matched = true + i2next = i2 + 1 // Don't evaluate to this again + case +1: // Found a new package + add := StringDiff{Current: pkg2} + diff.Packages = append(diff.Packages, add) + i2next = i2 + 1 // Don't evaluate to this again + continue // Keep looking for a match + case -1: // Package has been removed (handled below) + continue + } + + break // Done evaluating this package, move onto the next + } + + if !matched { + diff.Packages = append(diff.Packages, StringDiff{Previous: pkg1}) + } + } + + // Anything that still hasn't been evaluated are adds + for i2 := i2next; i2 < len(p2); i2++ { + pkg2 := p2[i2] + add := StringDiff{Current: pkg2} + diff.Packages = append(diff.Packages, add) + } + + if diff.Source == nil && diff.Version == nil && diff.Revision == nil && len(diff.Packages) == 0 { + return nil // The projects are equivalent + } + return &diff +} + +// sortLockedProjects returns a sorted copy of lps, or itself if already sorted. +func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { + if len(lps) <= 1 || sort.SliceIsSorted(lps, func(i, j int) bool { + return lps[i].Ident().Less(lps[j].Ident()) + }) { + return lps + } + + cp := make([]gps.LockedProject, len(lps)) + copy(cp, lps) + + sort.Slice(cp, func(i, j int) bool { + return cp[i].Ident().Less(cp[j].Ident()) + }) + return cp +} diff --git a/gps/verify/lockdiff_test.go b/internal/feedback/lockdiff_test.go similarity index 99% rename from gps/verify/lockdiff_test.go rename to internal/feedback/lockdiff_test.go index 0470a545ae..f49898bd19 100644 --- a/gps/verify/lockdiff_test.go +++ b/internal/feedback/lockdiff_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package verify +package feedback import ( "bytes" diff --git a/lock.go b/lock.go index a30a13d531..cdb35927a2 100644 --- a/lock.go +++ b/lock.go @@ -131,12 +131,18 @@ func fromRawLock(raw rawLock) (*Lock, error) { // Projects returns the list of LockedProjects contained in the lock data. func (l *Lock) Projects() []gps.LockedProject { + if l == nil || l == (*Lock)(nil) { + return nil + } return l.P } // InputImports reports the list of input imports that were used in generating // this Lock. 
func (l *Lock) InputImports() []string { + if l == nil || l == (*Lock)(nil) { + return nil + } return l.SolveMeta.InputImports } diff --git a/manifest.go b/manifest.go index 19e121283c..0eb1195fcc 100644 --- a/manifest.go +++ b/manifest.go @@ -630,7 +630,7 @@ func (m *Manifest) HasConstraintsOn(root gps.ProjectRoot) bool { // RequiredPackages returns a set of import paths to require. func (m *Manifest) RequiredPackages() map[string]bool { - if m == nil { + if m == nil || m == (*Manifest)(nil) { return map[string]bool{} } diff --git a/project.go b/project.go index 5de3ce53cf..3214a3c64d 100644 --- a/project.go +++ b/project.go @@ -173,7 +173,7 @@ func (p *Project) parseRootPackageTree() (pkgtree.PackageTree, error) { // GetDirectDependencyNames returns the set of unique Project Roots that are the // direct dependencies of this Project. // -// A project is considered a direct dependency if at least one of packages in it +// A project is considered a direct dependency if at least one of its packages // is named in either this Project's required list, or if there is at least one // non-ignored import statement from a non-ignored package in the current // project's package tree. diff --git a/project_test.go b/project_test.go index eff7d30e29..9415544789 100644 --- a/project_test.go +++ b/project_test.go @@ -148,6 +148,7 @@ func TestProjectMakeParams(t *testing.T) { Manifest: m, Lock: &Lock{}, } + p.ChangedLock = p.Lock solveParam := p.MakeParams() diff --git a/testdata/txn_writer/expected_lock.toml b/testdata/txn_writer/expected_lock.toml index 09f38c7ace..9b21bdbfe6 100644 --- a/testdata/txn_writer/expected_lock.toml +++ b/testdata/txn_writer/expected_lock.toml @@ -2,7 +2,7 @@ [[projects]] - digest = "0:" + digest = "1:c4844614e2b12233bb037afec536831b92a4f58f7b712432b978d34df291e43a" name = "github.com/sdboyer/dep-test" packages = ["."] pruneopts = "" diff --git a/txn_writer.go b/txn_writer.go index ffccfc4de5..98d76584d7 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -15,6 +15,7 @@ import ( "github.com/golang/dep/gps" "github.com/golang/dep/gps/verify" + "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/fs" "github.com/pkg/errors" ) @@ -135,7 +136,7 @@ func (sw *SafeWriter) HasManifest() bool { } type rawStringDiff struct { - *verify.StringDiff + *feedback.StringDiff } // MarshalTOML serializes the diff as a string. @@ -152,7 +153,7 @@ type rawLockedProjectDiff struct { Packages []rawStringDiff `toml:"packages,omitempty"` } -func toRawLockedProjectDiff(diff verify.LockedProjectDiff) rawLockedProjectDiff { +func toRawLockedProjectDiff(diff feedback.LockedProjectDiff) rawLockedProjectDiff { // this is a shallow copy since we aren't modifying the raw diff raw := rawLockedProjectDiff{Name: diff.Name} if diff.Source != nil { @@ -178,7 +179,7 @@ type rawLockedProjectDiffs struct { Projects []rawLockedProjectDiff `toml:"projects"` } -func toRawLockedProjectDiffs(diffs []verify.LockedProjectDiff) rawLockedProjectDiffs { +func toRawLockedProjectDiffs(diffs []feedback.LockedProjectDiff) rawLockedProjectDiffs { raw := rawLockedProjectDiffs{ Projects: make([]rawLockedProjectDiff, len(diffs)), } From 9d4eca8e3e9dec5f87a3181a504bf07c044c802e Mon Sep 17 00:00:00 2001 From: sam boyer Date: Tue, 3 Jul 2018 20:31:50 -0400 Subject: [PATCH 13/25] gps: Collapse LockWithImports into Lock There was no real need to delineate between Lock and LockWithImports. 
The old Lock had the InputsDigest concept, which was even less feasible for theoretical implementations of Lock to have, so this can't possibly be more harmful. --- gps/lock.go | 24 ++-- gps/solution.go | 2 +- gps/solve_basic_test.go | 10 ++ gps/source_cache.go | 17 +++ gps/source_cache_bolt_encode.go | 8 +- gps/source_cache_bolt_test.go | 17 +-- gps/verify/lock.go | 197 ------------------------------- gps/verify/lockdiff.go | 9 +- gps/verify/locksat.go | 202 ++++++++++++++++++++++++++++++++ 9 files changed, 253 insertions(+), 233 deletions(-) create mode 100644 gps/verify/locksat.go diff --git a/gps/lock.go b/gps/lock.go index 91fd0e23e8..1fb909ecf8 100644 --- a/gps/lock.go +++ b/gps/lock.go @@ -19,18 +19,10 @@ import ( type Lock interface { // Projects returns the list of LockedProjects contained in the lock data. Projects() []LockedProject -} -// LockWithImports composes Lock to also add a method that reports all the -// imports that were present when generating the Lock. -// -// This information can be rederived, but it requires doing whole-graph -// analysis; tracking the information separately makes verification tasks -// easier, especially determining if an input import has been removed. -type LockWithImports interface { - Lock // The set of imports (and required statements) that were the inputs that - // generated this Lock. + // generated this Lock. It is acceptable to return a nil slice from this + // method if the information cannot reasonably be made available. InputImports() []string } @@ -80,6 +72,12 @@ func (l SimpleLock) Projects() []LockedProject { return l } +// InputImports returns a nil string slice, as SimpleLock does not provide a way +// of capturing string slices. +func (l SimpleLock) InputImports() []string { + return nil +} + // NewLockedProject creates a new LockedProject struct with a given // ProjectIdentifier (name and optional upstream source URL), version. and list // of packages required from the project. @@ -232,10 +230,8 @@ func prepLock(l Lock) safeLock { } copy(rl.p, pl) - if lwi, ok := l.(LockWithImports); ok { - rl.i = make([]string, len(lwi.InputImports())) - copy(rl.i, lwi.InputImports()) - } + rl.i = make([]string, len(l.InputImports())) + copy(rl.i, l.InputImports()) return rl } diff --git a/gps/solution.go b/gps/solution.go index 2b1f1ed310..3612a1305f 100644 --- a/gps/solution.go +++ b/gps/solution.go @@ -18,7 +18,7 @@ import ( // A Solution is returned by a solver run. It is mostly just a Lock, with some // additional methods that report information about the solve run. type Solution interface { - LockWithImports + Lock // The name of the ProjectAnalyzer used in generating this solution. AnalyzerName() string // The version of the ProjectAnalyzer used in generating this solution. 
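To make the collapsed interface concrete, here is a minimal sketch of a custom Lock under the shape shown above; the memoryLock type and its fields are illustrative only, not part of the patch:

package example

import "github.com/golang/dep/gps"

// memoryLock is a hypothetical in-memory Lock. Under the collapsed interface
// only two methods are required, and InputImports may legitimately return nil
// when the information was never recorded.
type memoryLock struct {
	projects []gps.LockedProject
	imports  []string
}

// Projects returns the locked projects held in memory.
func (l memoryLock) Projects() []gps.LockedProject { return l.projects }

// InputImports returns the imports that produced this lock, or nil.
func (l memoryLock) InputImports() []string { return l.imports }

// Compile-time check that memoryLock satisfies gps.Lock.
var _ gps.Lock = memoryLock{}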
diff --git a/gps/solve_basic_test.go b/gps/solve_basic_test.go index 3665daf2a3..b961911a19 100644 --- a/gps/solve_basic_test.go +++ b/gps/solve_basic_test.go @@ -1564,9 +1564,19 @@ func (l fixLock) Projects() []LockedProject { return l } +// impl Lock interface +func (fixLock) InputImports() []string { + return nil +} + type dummyLock struct{} // impl Lock interface func (dummyLock) Projects() []LockedProject { return nil } + +// impl Lock interface +func (dummyLock) InputImports() []string { + return nil +} diff --git a/gps/source_cache.go b/gps/source_cache.go index fe3c9b7266..966008a388 100644 --- a/gps/source_cache.go +++ b/gps/source_cache.go @@ -7,6 +7,7 @@ package gps import ( "fmt" "path" + "sort" "strings" "sync" @@ -274,6 +275,22 @@ func (c *singleSourceCacheMemory) toUnpaired(v Version) (UnpairedVersion, bool) // TODO(sdboyer) remove once source caching can be moved into separate package func locksAreEq(l1, l2 Lock) bool { + ii1, ii2 := l1.InputImports(), l2.InputImports() + if len(ii1) != len(ii2) { + return false + } + + ilen := len(ii1) + if ilen > 0 { + sort.Strings(ii1) + sort.Strings(ii2) + for i := 0; i < ilen; i++ { + if ii1[i] != ii2[i] { + return false + } + } + } + p1, p2 := l1.Projects(), l2.Projects() if len(p1) != len(p2) { return false diff --git a/gps/source_cache_bolt_encode.go b/gps/source_cache_bolt_encode.go index 5e2c255345..5b6a903cf6 100644 --- a/gps/source_cache_bolt_encode.go +++ b/gps/source_cache_bolt_encode.go @@ -308,11 +308,9 @@ func lockedProjectFromCache(m *pb.LockedProject) (LockedProject, error) { // cachePutLock stores the Lock as fields in the bolt.Bucket. func cachePutLock(b *bolt.Bucket, l Lock) error { // Input imports, if present. - if lwp, ok := l.(LockWithImports); ok && len(lwp.InputImports()) > 0 { - byt := []byte(strings.Join(lwp.InputImports(), "#")) - if err := b.Put(cacheKeyInputImports, byt); err != nil { - return errors.Wrap(err, "failed to put input imports") - } + byt := []byte(strings.Join(l.InputImports(), "#")) + if err := b.Put(cacheKeyInputImports, byt); err != nil { + return errors.Wrap(err, "failed to put input imports") } // Projects diff --git a/gps/source_cache_bolt_test.go b/gps/source_cache_bolt_test.go index d0bf0fce97..7bfdf2b7a9 100644 --- a/gps/source_cache_bolt_test.go +++ b/gps/source_cache_bolt_test.go @@ -54,11 +54,11 @@ func TestBoltCacheTimeout(t *testing.T) { lock := &safeLock{ p: []LockedProject{ - NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0"), []string{"gps"}), - NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.10.0"), nil), - NewLockedProject(mkPI("github.com/sdboyer/gps3"), NewVersion("v0.10.0"), []string{"gps", "flugle"}), - NewLockedProject(mkPI("foo"), NewVersion("nada"), []string{"foo"}), - NewLockedProject(mkPI("github.com/sdboyer/gps4"), NewVersion("v0.10.0"), []string{"flugle", "gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v0.10.0").Pair("foo"), []string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps2"), NewVersion("v0.10.0").Pair("bar"), nil), + NewLockedProject(mkPI("github.com/sdboyer/gps3"), NewVersion("v0.10.0").Pair("baz"), []string{"gps", "flugle"}), + NewLockedProject(mkPI("foo"), NewVersion("nada").Pair("zero"), []string{"foo"}), + NewLockedProject(mkPI("github.com/sdboyer/gps4"), NewVersion("v0.10.0").Pair("qux"), []string{"flugle", "gps"}), }, } @@ -236,8 +236,9 @@ func TestBoltCacheTimeout(t *testing.T) { newLock := &safeLock{ p: []LockedProject{ - NewLockedProject(mkPI("github.com/sdboyer/gps"), 
NewVersion("v1"), []string{"gps"}), + NewLockedProject(mkPI("github.com/sdboyer/gps"), NewVersion("v1").Pair("rev1"), []string{"gps"}), }, + i: []string{"foo", "bar"}, } newPtree := pkgtree.PackageTree{ @@ -285,8 +286,8 @@ func TestBoltCacheTimeout(t *testing.T) { } compareManifests(t, newManifest, gotM) // TODO(sdboyer) use DiffLocks after refactoring to avoid import cycles - if !locksAreEq(lock, gotL) { - t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", lock, gotL) + if !locksAreEq(newLock, gotL) { + t.Errorf("locks are different:\n\t(GOT): %s\n\t(WNT): %s", newLock, gotL) } got, ok := c.getPackageTree(rev, root) diff --git a/gps/verify/lock.go b/gps/verify/lock.go index 1e9bf3d3e2..d6742a8655 100644 --- a/gps/verify/lock.go +++ b/gps/verify/lock.go @@ -5,10 +5,7 @@ package verify import ( - "github.com/armon/go-radix" "github.com/golang/dep/gps" - "github.com/golang/dep/gps/paths" - "github.com/golang/dep/gps/pkgtree" ) // VerifiableProject composes a LockedProject to indicate what the hash digest @@ -19,197 +16,3 @@ type VerifiableProject struct { PruneOpts gps.PruneOptions Digest VersionedDigest } - -// ConstraintMismatch is a two-tuple of a gps.Version, and a gps.Constraint that -// does not allow that version. -type ConstraintMismatch struct { - C gps.Constraint - V gps.Version -} - -// LockSatisfaction holds the compound result of LockSatisfiesInputs, allowing -// the caller to inspect each of several orthogonal possible types of failure. -type LockSatisfaction struct { - nolock bool - missingPkgs, excessPkgs []string - badovr, badconstraint map[gps.ProjectRoot]ConstraintMismatch -} - -// Passed is a shortcut method that indicates whether there were any ways in -// which the Lock did not satisfy the inputs. It will return true only if no -// problems were found. -func (ls LockSatisfaction) Passed() bool { - if ls.nolock { - return false - } - - if len(ls.missingPkgs) > 0 { - return false - } - - if len(ls.excessPkgs) > 0 { - return false - } - - if len(ls.badovr) > 0 { - return false - } - - if len(ls.badconstraint) > 0 { - return false - } - - return true -} - -// MissingImports reports the set of import paths that were present in the -// inputs but missing in the Lock. -func (ls LockSatisfaction) MissingImports() []string { - return ls.missingPkgs -} - -// ExcessImports reports the set of import paths that were present in the Lock -// but absent from the inputs. -func (ls LockSatisfaction) ExcessImports() []string { - return ls.excessPkgs -} - -// UnmatchedOverrides reports any override rules that were not satisfied by the -// corresponding LockedProject in the Lock. -func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]ConstraintMismatch { - return ls.badovr -} - -// UnmatchedOverrides reports any normal, non-override constraint rules that -// were not satisfied by the corresponding LockedProject in the Lock. 
-func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]ConstraintMismatch { - return ls.badconstraint -} - -func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[string]bool { - eff := make(map[string]bool) - xt := radix.New() - - for pr, _ := range m.DependencyConstraints() { - // FIXME(sdboyer) this has the trailing slash ambiguity problem; adapt - // code from the solver - xt.Insert(string(pr), nil) - } - - for imp := range imports { - if root, _, has := xt.LongestPrefix(imp); has { - eff[root] = true - } - } - - return eff -} - -// LockSatisfiesInputs determines whether the provided Lock satisfies all the -// requirements indicated by the inputs (RootManifest and PackageTree). -// -// The second parameter is expected to be the list of imports that were used to -// generate the input Lock. Without this explicit list, it is not possible to -// compute package imports that may have been removed. Figuring out that -// negative space would require exploring the entire graph to ensure there are -// no in-edges for particular imports. -func LockSatisfiesInputs(l gps.LockWithImports, m gps.RootManifest, rpt pkgtree.PackageTree) LockSatisfaction { - if l == nil { - return LockSatisfaction{nolock: true} - } - - var ig *pkgtree.IgnoredRuleset - var req map[string]bool - if m != nil { - ig = m.IgnoredPackages() - req = m.RequiredPackages() - } - - rm, _ := rpt.ToReachMap(true, true, false, ig) - reach := rm.FlattenFn(paths.IsStandardImportPath) - - inlock := make(map[string]bool, len(l.InputImports())) - ininputs := make(map[string]bool, len(reach)+len(req)) - - type lockUnsatisfy uint8 - const ( - missingFromLock lockUnsatisfy = iota - inAdditionToLock - ) - - pkgDiff := make(map[string]lockUnsatisfy) - - for _, imp := range reach { - ininputs[imp] = true - } - - for imp := range req { - ininputs[imp] = true - } - - for _, imp := range l.InputImports() { - inlock[imp] = true - } - - lsat := LockSatisfaction{ - badovr: make(map[gps.ProjectRoot]ConstraintMismatch), - badconstraint: make(map[gps.ProjectRoot]ConstraintMismatch), - } - - for ip := range ininputs { - if !inlock[ip] { - pkgDiff[ip] = missingFromLock - } else { - // So we don't have to revisit it below - delete(inlock, ip) - } - } - - // Something in the missing list might already be in the packages list, - // because another package in the depgraph imports it. We could make a - // special case for that, but it would break the simplicity of the model and - // complicate the notion of LockSatisfaction.Passed(), so let's see if we - // can get away without it. - - for ip := range inlock { - if !ininputs[ip] { - pkgDiff[ip] = inAdditionToLock - } - } - - for ip, typ := range pkgDiff { - if typ == missingFromLock { - lsat.missingPkgs = append(lsat.missingPkgs, ip) - } else { - lsat.excessPkgs = append(lsat.excessPkgs, ip) - } - } - - eff := findEffectualConstraints(m, ininputs) - ovr, constraints := m.Overrides(), m.DependencyConstraints() - - for _, lp := range l.Projects() { - pr := lp.Ident().ProjectRoot - - if pp, has := ovr[pr]; has { - if !pp.Constraint.Matches(lp.Version()) { - lsat.badovr[pr] = ConstraintMismatch{ - C: pp.Constraint, - V: lp.Version(), - } - } - // The constraint isn't considered if we have an override, - // independent of whether the override is satisfied. 
- continue - } - - if pp, has := constraints[pr]; has && eff[string(pr)] && !pp.Constraint.Matches(lp.Version()) { - lsat.badconstraint[pr] = ConstraintMismatch{ - C: pp.Constraint, - V: lp.Version(), - } - } - } - - return lsat -} diff --git a/gps/verify/lockdiff.go b/gps/verify/lockdiff.go index 32309d62d8..c6ff9e99af 100644 --- a/gps/verify/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -117,14 +117,7 @@ func DiffLocks2(l1, l2 gps.Lock) LockDelta { } } - // Only do the import inputs if both of the locks fulfill the interface, AND - // both have non-empty inputs. - il1, ok1 := l1.(gps.LockWithImports) - il2, ok2 := l2.(gps.LockWithImports) - - if ok1 && ok2 && len(il1.InputImports()) > 0 && len(il2.InputImports()) > 0 { - diff.AddedImportInputs, diff.RemovedImportInputs = findAddedAndRemoved(il1.InputImports(), il2.InputImports()) - } + diff.AddedImportInputs, diff.RemovedImportInputs = findAddedAndRemoved(l1.InputImports(), l2.InputImports()) return diff } diff --git a/gps/verify/locksat.go b/gps/verify/locksat.go new file mode 100644 index 0000000000..fbb8f99878 --- /dev/null +++ b/gps/verify/locksat.go @@ -0,0 +1,202 @@ +package verify + +import ( + radix "github.com/armon/go-radix" + "github.com/golang/dep/gps" + "github.com/golang/dep/gps/paths" + "github.com/golang/dep/gps/pkgtree" +) + +// LockSatisfaction holds the compound result of LockSatisfiesInputs, allowing +// the caller to inspect each of several orthogonal possible types of failure. +type LockSatisfaction struct { + nolock bool + missingPkgs, excessPkgs []string + badovr, badconstraint map[gps.ProjectRoot]ConstraintMismatch +} + +// ConstraintMismatch is a two-tuple of a gps.Version, and a gps.Constraint that +// does not allow that version. +type ConstraintMismatch struct { + C gps.Constraint + V gps.Version +} + +// LockSatisfiesInputs determines whether the provided Lock satisfies all the +// requirements indicated by the inputs (RootManifest and PackageTree). +// +// The second parameter is expected to be the list of imports that were used to +// generate the input Lock. Without this explicit list, it is not possible to +// compute package imports that may have been removed. Figuring out that +// negative space would require exploring the entire graph to ensure there are +// no in-edges for particular imports. 
+func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree) LockSatisfaction { + if l == nil { + return LockSatisfaction{nolock: true} + } + + var ig *pkgtree.IgnoredRuleset + var req map[string]bool + if m != nil { + ig = m.IgnoredPackages() + req = m.RequiredPackages() + } + + rm, _ := rpt.ToReachMap(true, true, false, ig) + reach := rm.FlattenFn(paths.IsStandardImportPath) + + inlock := make(map[string]bool, len(l.InputImports())) + ininputs := make(map[string]bool, len(reach)+len(req)) + + type lockUnsatisfy uint8 + const ( + missingFromLock lockUnsatisfy = iota + inAdditionToLock + ) + + pkgDiff := make(map[string]lockUnsatisfy) + + for _, imp := range reach { + ininputs[imp] = true + } + + for imp := range req { + ininputs[imp] = true + } + + for _, imp := range l.InputImports() { + inlock[imp] = true + } + + lsat := LockSatisfaction{ + badovr: make(map[gps.ProjectRoot]ConstraintMismatch), + badconstraint: make(map[gps.ProjectRoot]ConstraintMismatch), + } + + for ip := range ininputs { + if !inlock[ip] { + pkgDiff[ip] = missingFromLock + } else { + // So we don't have to revisit it below + delete(inlock, ip) + } + } + + // Something in the missing list might already be in the packages list, + // because another package in the depgraph imports it. We could make a + // special case for that, but it would break the simplicity of the model and + // complicate the notion of LockSatisfaction.Passed(), so let's see if we + // can get away without it. + + for ip := range inlock { + if !ininputs[ip] { + pkgDiff[ip] = inAdditionToLock + } + } + + for ip, typ := range pkgDiff { + if typ == missingFromLock { + lsat.missingPkgs = append(lsat.missingPkgs, ip) + } else { + lsat.excessPkgs = append(lsat.excessPkgs, ip) + } + } + + eff := findEffectualConstraints(m, ininputs) + ovr, constraints := m.Overrides(), m.DependencyConstraints() + + for _, lp := range l.Projects() { + pr := lp.Ident().ProjectRoot + + if pp, has := ovr[pr]; has { + if !pp.Constraint.Matches(lp.Version()) { + lsat.badovr[pr] = ConstraintMismatch{ + C: pp.Constraint, + V: lp.Version(), + } + } + // The constraint isn't considered if we have an override, + // independent of whether the override is satisfied. + continue + } + + if pp, has := constraints[pr]; has && eff[string(pr)] && !pp.Constraint.Matches(lp.Version()) { + lsat.badconstraint[pr] = ConstraintMismatch{ + C: pp.Constraint, + V: lp.Version(), + } + } + } + + return lsat +} + +// Passed is a shortcut method that indicates whether there were any ways in +// which the Lock did not satisfy the inputs. It will return true only if no +// problems were found. +func (ls LockSatisfaction) Passed() bool { + if ls.nolock { + return false + } + + if len(ls.missingPkgs) > 0 { + return false + } + + if len(ls.excessPkgs) > 0 { + return false + } + + if len(ls.badovr) > 0 { + return false + } + + if len(ls.badconstraint) > 0 { + return false + } + + return true +} + +// MissingImports reports the set of import paths that were present in the +// inputs but missing in the Lock. +func (ls LockSatisfaction) MissingImports() []string { + return ls.missingPkgs +} + +// ExcessImports reports the set of import paths that were present in the Lock +// but absent from the inputs. +func (ls LockSatisfaction) ExcessImports() []string { + return ls.excessPkgs +} + +// UnmatchedOverrides reports any override rules that were not satisfied by the +// corresponding LockedProject in the Lock. 
+func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]ConstraintMismatch { + return ls.badovr +} + +// UnmatchedOverrides reports any normal, non-override constraint rules that +// were not satisfied by the corresponding LockedProject in the Lock. +func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]ConstraintMismatch { + return ls.badconstraint +} + +func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[string]bool { + eff := make(map[string]bool) + xt := radix.New() + + for pr, _ := range m.DependencyConstraints() { + // FIXME(sdboyer) this has the trailing slash ambiguity problem; adapt + // code from the solver + xt.Insert(string(pr), nil) + } + + for imp := range imports { + if root, _, has := xt.LongestPrefix(imp); has { + eff[root] = true + } + } + + return eff +} From 3c60abcd2fc189f34e807d5f54d20051d919b3ad Mon Sep 17 00:00:00 2001 From: sam boyer Date: Tue, 3 Jul 2018 23:37:29 -0400 Subject: [PATCH 14/25] dep: Make DeltaWriter sensitive to -vendor-only Also a bunch of docs for the verify package. --- cmd/dep/ensure.go | 8 +- .../hashneq-vendoronly/final/Gopkg.lock | 5 +- gps/verify/lockdiff.go | 115 ++++++++++----- txn_writer.go | 138 ++++++++++-------- 4 files changed, 165 insertions(+), 101 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index cf01349eb2..eaf9481993 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -321,7 +321,7 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project lock = dep.LockFromSolution(solution, p.Manifest.PruneOptions) } - dw, err := dep.NewDeltaWriter(p.Lock, lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) + dw, err := dep.NewDeltaWriter(p.Lock, lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior()) if err != nil { return err } @@ -347,7 +347,7 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj } // Pass the same lock as old and new so that the writer will observe no - // difference and choose not to write it out. 
+ // difference and choose not to write it out, instead writing out only sw, err := dep.NewSafeWriter(nil, p.Lock, p.ChangedLock, dep.VendorAlways, p.Manifest.PruneOptions) if err != nil { return err @@ -396,7 +396,7 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return handleAllTheFailuresOfTheWorld(err) } - dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) + dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior()) if err != nil { return err } @@ -670,7 +670,7 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm } sort.Strings(reqlist) - dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor")) + dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior()) if err != nil { return err } diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock index 11cb12c378..188ece4f77 100644 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock @@ -2,15 +2,16 @@ [[projects]] + digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] + pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - # manually modified hash digest, it will not match any known inputs - inputs-digest = "94b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" + input-imports = ["github.com/sdboyer/deptest"] solver-name = "gps-cdcl" solver-version = 1 diff --git a/gps/verify/lockdiff.go b/gps/verify/lockdiff.go index c6ff9e99af..c85c89722a 100644 --- a/gps/verify/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -29,19 +29,27 @@ func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { return cp } +// LockDelta represents all possible differences between two Locks. type LockDelta struct { AddedImportInputs []string RemovedImportInputs []string ProjectDeltas map[gps.ProjectRoot]LockedProjectDelta } +// LockedProjectDelta represents all possible state changes of a LockedProject +// within a Lock. It encapsulates the property-level differences represented by +// a LockedProjectPropertiesDelta, but can also represent existence deltas - a +// given name came to exist, or cease to exist, across two Locks. type LockedProjectDelta struct { Name gps.ProjectRoot ProjectRemoved, ProjectAdded bool - LockedProjectPartsDelta + LockedProjectPropertiesDelta } -type LockedProjectPartsDelta struct { +// LockedProjectPropertiesDelta represents all possible differences between the +// properties of two LockedProjects. It can represent deltas for +// VerifiableProject properties, as well. 
+type LockedProjectPropertiesDelta struct {
 	PackagesAdded, PackagesRemoved  []string
 	VersionBefore, VersionAfter     gps.UnpairedVersion
 	RevisionBefore, RevisionAfter   gps.Revision
@@ -50,9 +58,9 @@ type LockedProjectPartsDelta struct {
 	HashChanged, HashVersionChanged bool
 }
 
-// DiffLocks2 compares two locks and computes a semantically rich delta between
+// DiffLocks compares two locks and computes a semantically rich delta between
 // them.
-func DiffLocks2(l1, l2 gps.Lock) LockDelta {
+func DiffLocks(l1, l2 gps.Lock) LockDelta {
 	// Default nil locks to empty locks, so that we can still generate a diff
 	if l1 == nil {
 		if l2 == nil {
@@ -88,7 +96,7 @@ func DiffLocks2(l1, l2 gps.Lock) LockDelta {
 		switch strings.Compare(string(pr1), string(pr2)) {
 		case 0: // Found a matching project
-			lpd.LockedProjectPartsDelta = DiffProjects2(lp1, lp2)
+			lpd.LockedProjectPropertiesDelta = DiffLockedProjectProperties(lp1, lp2)
 			i2next = i2 + 1 // Don't visit this project again
 		case +1: // Found a new project
 			diff.ProjectDeltas[pr2] = LockedProjectDelta{
@@ -148,8 +156,15 @@ func findAddedAndRemoved(l1, l2 []string) (add, remove []string) {
 	return add, remove
 }
 
-func DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDelta {
-	ld := LockedProjectPartsDelta{
+// DiffLockedProjectProperties takes two gps.LockedProject and computes a delta
+// for each of their component properties.
+//
+// This function is focused exclusively on the properties of a LockedProject. As
+// such, it does not compare the ProjectRoot part of the LockedProject's
+// ProjectIdentifier, as those are names, and the concern here is a difference
+// in properties, not intrinsic identity.
+func DiffLockedProjectProperties(lp1, lp2 gps.LockedProject) LockedProjectPropertiesDelta {
+	ld := LockedProjectPropertiesDelta{
 		SourceBefore: lp1.Ident().Source,
 		SourceAfter:  lp2.Ident().Source,
 	}
@@ -205,6 +220,8 @@ func DiffProjects2(lp1, lp2 gps.LockedProject) LockedProjectPartsDelta {
 // along which a Lock, and its constitutent components, can change.
 type DeltaDimension uint32
 
+// Each flag represents an orthogonal dimension along which Locks can vary with
+// respect to each other.
 const (
 	InputImportsChanged DeltaDimension = 1 << iota
 	ProjectAdded
@@ -237,7 +254,13 @@ func (ld LockDelta) Changed(dims DeltaDimension) bool {
 	return false
 }
 
-func (ld LockDelta) Changes(DeltaDimension) DeltaDimension {
+// Changes returns a bitset indicating the dimensions along which deltas exist across
+// all contents of the LockDelta.
+//
+// This recurses down into the individual LockedProjectDeltas contained within
+// the LockDelta. A single delta along a particular dimension from a single
+// project is sufficient to flip the bit on for that dimension.
+func (ld LockDelta) Changes() DeltaDimension {
 	var dd DeltaDimension
 	if len(ld.AddedImportInputs) > 0 || len(ld.RemovedImportInputs) > 0 {
 		dd |= InputImportsChanged
@@ -256,18 +279,21 @@ func (ld LockDelta) Changes(DeltaDimension) DeltaDimension {
 // For example, if only the Revision changed, and this method is called with
 // SourceChanged | VersionChanged, it will return false; if it is called with
 // VersionChanged | RevisionChanged, it will return true.
-func (ld LockedProjectDelta) Changed(flags DeltaDimension) bool { - if flags&ProjectAdded != 0 && ld.WasAdded() { +func (ld LockedProjectDelta) Changed(dims DeltaDimension) bool { + if dims&ProjectAdded != 0 && ld.WasAdded() { return true } - if flags&ProjectRemoved != 0 && ld.WasRemoved() { + if dims&ProjectRemoved != 0 && ld.WasRemoved() { return true } - return ld.LockedProjectPartsDelta.Changed(flags & ^ProjectAdded & ^ProjectRemoved) + return ld.LockedProjectPropertiesDelta.Changed(dims & ^ProjectAdded & ^ProjectRemoved) } +// Changes returns a bitset indicating the dimensions along which there were +// changes between the compared LockedProjects. This includes both +// existence-level deltas (add/remove) and property-level deltas. func (ld LockedProjectDelta) Changes() DeltaDimension { var dd DeltaDimension if ld.WasAdded() { @@ -278,44 +304,56 @@ func (ld LockedProjectDelta) Changes() DeltaDimension { dd |= ProjectRemoved } - return dd | ld.LockedProjectPartsDelta.Changes() + return dd | ld.LockedProjectPropertiesDelta.Changes() } +// WasRemoved returns true if the named project existed in the first lock, but +// did not exist in the second lock. func (ld LockedProjectDelta) WasRemoved() bool { return ld.ProjectRemoved } +// WasAdded returns true if the named project did not exist in the first lock, +// but did exist in the second lock. func (ld LockedProjectDelta) WasAdded() bool { return ld.ProjectAdded } -func (ld LockedProjectPartsDelta) Changed(flags DeltaDimension) bool { - if flags&SourceChanged != 0 && ld.SourceChanged() { +// Changed indicates whether the delta contains a change along the dimensions +// with their corresponding bits set. +// +// For example, if only the Revision changed, and this method is called with +// SourceChanged | VersionChanged, it will return false; if it is called with +// VersionChanged | RevisionChanged, it will return true. +func (ld LockedProjectPropertiesDelta) Changed(dims DeltaDimension) bool { + if dims&SourceChanged != 0 && ld.SourceChanged() { return true } - if flags&RevisionChanged != 0 && ld.RevisionChanged() { + if dims&RevisionChanged != 0 && ld.RevisionChanged() { return true } - if flags&PruneOptsChanged != 0 && ld.PruneOptsChanged() { + if dims&PruneOptsChanged != 0 && ld.PruneOptsChanged() { return true } - if flags&HashChanged != 0 && ld.HashChanged { + if dims&HashChanged != 0 && ld.HashChanged { return true } - if flags&HashVersionChanged != 0 && ld.HashVersionChanged { + if dims&HashVersionChanged != 0 && ld.HashVersionChanged { return true } - if flags&VersionChanged != 0 && ld.VersionChanged() { + if dims&VersionChanged != 0 && ld.VersionChanged() { return true } - if flags&PackagesChanged != 0 && ld.PackagesChanged() { + if dims&PackagesChanged != 0 && ld.PackagesChanged() { return true } return false } -func (ld LockedProjectPartsDelta) Changes() DeltaDimension { +// Changes returns a bitset indicating the dimensions along which there were +// changes between the compared LockedProjects. +func (ld LockedProjectPropertiesDelta) Changes() DeltaDimension { var dd DeltaDimension if ld.SourceChanged() { dd |= SourceChanged @@ -342,11 +380,17 @@ func (ld LockedProjectPartsDelta) Changes() DeltaDimension { return dd } -func (ld LockedProjectPartsDelta) SourceChanged() bool { +// SourceChanged returns true if the source field differed between the first and +// second locks. 
+func (ld LockedProjectPropertiesDelta) SourceChanged() bool {
 	return ld.SourceBefore != ld.SourceAfter
 }
 
-func (ld LockedProjectPartsDelta) VersionChanged() bool {
+// VersionChanged returns true if the version property differed between the
+// first and second locks. In addition to simple changes (e.g. 1.0.1 -> 1.0.2),
+// this also includes all the possible type changes covered by
+// VersionTypeChanged(), as those necessarily also are version changes.
+func (ld LockedProjectPropertiesDelta) VersionChanged() bool {
 	if ld.VersionBefore == nil && ld.VersionAfter == nil {
 		return false
 	} else if (ld.VersionBefore == nil || ld.VersionAfter == nil) || (ld.VersionBefore.Type() != ld.VersionAfter.Type()) {
@@ -358,7 +402,11 @@ func (ld LockedProjectPartsDelta) VersionChanged() bool {
 	return false
 }
 
-func (ld LockedProjectPartsDelta) VersionTypeChanged() bool {
+// VersionTypeChanged returns true if the type of version differed between the
+// first and second locks. This includes either going from a paired version to a
+// plain revision, or the reverse direction, or the type of unpaired version
+// changing (e.g. branch -> semver).
+func (ld LockedProjectPropertiesDelta) VersionTypeChanged() bool {
 	if ld.VersionBefore == nil && ld.VersionAfter == nil {
 		return false
 	} else if (ld.VersionBefore == nil || ld.VersionAfter == nil) || (ld.VersionBefore.Type() != ld.VersionAfter.Type()) {
@@ -368,19 +416,20 @@ func (ld LockedProjectPartsDelta) VersionTypeChanged() bool {
 	return false
 }
 
-func (ld LockedProjectPartsDelta) RevisionChanged() bool {
+// RevisionChanged returns true if the revision property differed between the
+// first and second locks.
+func (ld LockedProjectPropertiesDelta) RevisionChanged() bool {
 	return ld.RevisionBefore != ld.RevisionAfter
 }
 
-func (ld LockedProjectPartsDelta) PackagesChanged() bool {
+// PackagesChanged returns true if the package set gained or lost members (or
+// both) between the first and second locks.
+func (ld LockedProjectPropertiesDelta) PackagesChanged() bool {
 	return len(ld.PackagesAdded) > 0 || len(ld.PackagesRemoved) > 0
 }
 
-func (ld LockedProjectPartsDelta) PruneOptsChanged() bool {
+// PruneOptsChanged returns true if the pruning flags for the project changed
+// between the first and second locks.
+func (ld LockedProjectPropertiesDelta) PruneOptsChanged() bool {
 	return ld.PruneOptsBefore != ld.PruneOptsAfter
 }
-
-//type VendorDiff struct {
-//LockDelta LockDelta
-//VendorStatus map[string]VendorStatus
-//}
diff --git a/txn_writer.go b/txn_writer.go
index 98d76584d7..a935b61f4a 100644
--- a/txn_writer.go
+++ b/txn_writer.go
@@ -103,7 +103,7 @@ func NewSafeWriter(manifest *Manifest, oldLock, newLock *Lock, vendor VendorBeha
 		return nil, errors.New("must provide newLock when oldLock is specified")
 	}
 
-	sw.lockDiff = verify.DiffLocks2(oldLock, newLock)
+	sw.lockDiff = verify.DiffLocks(oldLock, newLock)
 	if sw.lockDiff.Changed(anyExceptHash) {
 		sw.writeLock = true
 	}
@@ -459,6 +459,7 @@ type DeltaWriter struct {
 	vendorDir    string
 	changed      map[gps.ProjectRoot]changeType
 	status       map[string]verify.VendorStatus
+	behavior     VendorBehavior
 }
 
 type changeType uint8
@@ -478,12 +479,13 @@ const (
 // directory by writing out only those projects that actually need to be written
 // out - they have changed in some way, or they lack the necessary hash
 // information to be verified.
-func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string) (TransactionWriter, error) { +func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string, behavior VendorBehavior) (TransactionWriter, error) { sw := &DeltaWriter{ lock: newLock, pruneOptions: prune, vendorDir: vendorDir, changed: make(map[gps.ProjectRoot]changeType), + behavior: behavior, } if newLock == nil { @@ -494,14 +496,14 @@ func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatu if err != nil && os.IsNotExist(err) { // Provided dir does not exist, so there's no disk contents to compare // against. Fall back to the old SafeWriter. - return NewSafeWriter(nil, oldLock, newLock, VendorOnChanged, prune) + return NewSafeWriter(nil, oldLock, newLock, behavior, prune) } - sw.lockDiff = verify.DiffLocks2(oldLock, newLock) + sw.lockDiff = verify.DiffLocks(oldLock, newLock) for pr, lpd := range sw.lockDiff.ProjectDeltas { // Hash changes aren't relevant at this point, as they could be empty - // and therefore a symptom of a solver change. + // in the new lock, and therefore a symptom of a solver change. if lpd.Changed(anyExceptHash) { if lpd.WasAdded() { sw.changed[pr] = projectAdded @@ -578,7 +580,7 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l } to := filepath.FromSlash(filepath.Join(vnewpath, string(pr))) - po := dw.pruneOptions.PruneOptionsFor(pr) + po := projs[pr].(verify.VerifiableProject).PruneOpts if err := sm.ExportPrunedProject(context.TODO(), projs[pr], po, to); err != nil { return errors.Wrapf(err, "failed to export %s", pr) } @@ -587,54 +589,58 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l lpd := dw.lockDiff.ProjectDeltas[pr] v, id := projs[pr].Version(), projs[pr].Ident() - var buf bytes.Buffer - fmt.Fprintf(&buf, "(%d/%d) Wrote %s@%s: ", i, tot, id, v) - switch reason { - case noChange: - panic(fmt.Sprintf("wtf, no change for %s", pr)) - case solveChanged: - if lpd.SourceChanged() { - fmt.Fprintf(&buf, "source changed (%s -> %s)", lpd.SourceBefore, lpd.SourceAfter) - } else if lpd.VersionChanged() { - bv, av := "(none)", "(none)" - if lpd.VersionBefore != nil { - bv = lpd.VersionBefore.String() + // Only print things if we're actually going to leave behind a new + // vendor dir. 
+ if dw.behavior != VendorNever { + var buf bytes.Buffer + fmt.Fprintf(&buf, "(%d/%d) Wrote %s@%s: ", i, tot, id, v) + switch reason { + case noChange: + panic(fmt.Sprintf("wtf, no change for %s", pr)) + case solveChanged: + if lpd.SourceChanged() { + fmt.Fprintf(&buf, "source changed (%s -> %s)", lpd.SourceBefore, lpd.SourceAfter) + } else if lpd.VersionChanged() { + bv, av := "(none)", "(none)" + if lpd.VersionBefore != nil { + bv = lpd.VersionBefore.String() + } + if lpd.VersionAfter != nil { + av = lpd.VersionAfter.String() + } + fmt.Fprintf(&buf, "version changed (%s -> %s)", bv, av) + } else if lpd.RevisionChanged() { + fmt.Fprintf(&buf, "revision changed (%s -> %s)", lpd.RevisionBefore, lpd.RevisionAfter) + } else if lpd.PackagesChanged() { + la, lr := len(lpd.PackagesAdded), len(lpd.PackagesRemoved) + if la > 0 && lr > 0 { + fmt.Fprintf(&buf, "packages changed (%v added, %v removed)", la, lr) + } else if la > 0 { + fmt.Fprintf(&buf, "packages changed (%v added)", la) + } else { + fmt.Fprintf(&buf, "packages changed (%v removed)", lr) + } + } else if lpd.PruneOptsChanged() { + // Override what's on the lockdiff with the extra info we have; + // this lets us excise PruneNestedVendorDirs and get the real + // value from the input param in place. + old := lpd.PruneOptsBefore & ^gps.PruneNestedVendorDirs + new := lpd.PruneOptsAfter & ^gps.PruneNestedVendorDirs + fmt.Fprintf(&buf, "prune options changed (%s -> %s)", old, new) } - if lpd.VersionAfter != nil { - av = lpd.VersionAfter.String() - } - fmt.Fprintf(&buf, "version changed (%s -> %s)", bv, av) - } else if lpd.RevisionChanged() { - fmt.Fprintf(&buf, "revision changed (%s -> %s)", lpd.RevisionBefore, lpd.RevisionAfter) - } else if lpd.PackagesChanged() { - la, lr := len(lpd.PackagesAdded), len(lpd.PackagesRemoved) - if la > 0 && lr > 0 { - fmt.Fprintf(&buf, "packages changed (%v added, %v removed)", la, lr) - } else if la > 0 { - fmt.Fprintf(&buf, "packages changed (%v added)", la) - } else { - fmt.Fprintf(&buf, "packages changed (%v removed)", lr) - } - } else if lpd.PruneOptsChanged() { - // Override what's on the lockdiff with the extra info we have; - // this lets us excise PruneNestedVendorDirs and get the real - // value from the input param in place. - old := lpd.PruneOptsBefore & ^gps.PruneNestedVendorDirs - new := lpd.PruneOptsAfter & ^gps.PruneNestedVendorDirs - fmt.Fprintf(&buf, "prune options changed (%s -> %s)", old, new) + case hashMismatch: + fmt.Fprintf(&buf, "hash mismatch between Gopkg.lock and vendor contents") + case hashVersionMismatch: + fmt.Fprintf(&buf, "hashing algorithm mismatch") + case hashAbsent: + fmt.Fprintf(&buf, "hash digest absent from lock") + case projectAdded: + fmt.Fprintf(&buf, "new project") + case missingFromTree: + fmt.Fprint(&buf, "missing from vendor") } - case hashMismatch: - fmt.Fprintf(&buf, "hash mismatch between Gopkg.lock and vendor contents") - case hashVersionMismatch: - fmt.Fprintf(&buf, "hashing algorithm mismatch") - case hashAbsent: - fmt.Fprintf(&buf, "hash digest absent from lock") - case projectAdded: - fmt.Fprintf(&buf, "new project") - case missingFromTree: - fmt.Fprint(&buf, "missing from vendor") + logger.Print(buf.String()) } - logger.Print(buf.String()) digest, err := verify.DigestFromDirectory(to) if err != nil { @@ -655,6 +661,25 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l } } + // Write out the lock, now that it's fully updated with digests. 
+ // + // This is OK to do even if -vendor-only, as the way the DeltaWriter are + // constructed mean that the only changes that could happen here are + // pruneopts (which are definitely fine) or input imports (which are less + // fine, but this is enough of an edge case that we can be lazy for now.) + l, err := dw.lock.MarshalTOML() + if err != nil { + return errors.Wrap(err, "failed to marshal lock to TOML") + } + + if err = ioutil.WriteFile(lpath, append(lockFileComment, l...), 0666); err != nil { + return errors.Wrap(err, "failed to write new lock file") + } + + if dw.behavior == VendorNever { + return os.RemoveAll(vnewpath) + } + // Changed projects are fully populated. Now, iterate over the lock's // projects and move any remaining ones not in the changed list to vnewpath. for _, lp := range dw.lock.Projects() { @@ -685,7 +710,6 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l return errors.Wrap(err, "failed to preserve vendor/.git") } } - err = os.RemoveAll(vpath) if err != nil { return errors.Wrap(err, "failed to remove original vendor directory") @@ -695,16 +719,6 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l return errors.Wrap(err, "failed to put new vendor directory into place") } - // Write out the lock last, now that it's fully updated with digests. - l, err := dw.lock.MarshalTOML() - if err != nil { - return errors.Wrap(err, "failed to marshal lock to TOML") - } - - if err = ioutil.WriteFile(lpath, append(lockFileComment, l...), 0666); err != nil { - return errors.Wrap(err, "failed to write new lock file") - } - return nil } From 08a43497b37232b44f6e1ae2b1914f59f4465001 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Wed, 4 Jul 2018 01:13:40 -0400 Subject: [PATCH 15/25] dep: Fix linting issues, update CHANGELOG --- CHANGELOG.md | 5 +++ cmd/dep/init.go | 7 ---- gps/constraint.go | 21 ---------- gps/prune.go | 2 + gps/selection.go | 1 + gps/verify/locksat.go | 4 +- txn_writer.go | 67 ++++-------------------------- txn_writer_test.go | 97 ------------------------------------------- 8 files changed, 18 insertions(+), 186 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d27d586fcf..b905d7c21f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,7 @@ # (next version) +# v0.5.0 + NEW FEATURES: * Add CI tests against go1.10. Drop support for go1.8. ([#1620](https://github.com/golang/dep/pull/1620)). @@ -7,6 +9,8 @@ NEW FEATURES: * List out of date projects in dep status ([#1553](https://github.com/golang/dep/pull/1553)). * Enabled opt-in persistent caching via `DEPCACHEAGE` env var. ([#1711](https://github.com/golang/dep/pull/1711)). * Allow `DEPPROJECTROOT` [environment variable](https://golang.github.io/dep/docs/env-vars.html#depprojectroot) to supersede GOPATH deduction and explicitly set the current project's [root](https://golang.github.io/dep/docs/glossary.html#project-root) ([#1883](https://github.com/golang/dep/pull/1883)). +* `dep ensure` now explains what changes to the code or Gopkg.toml have induced solving ([#1912](https://github.com/golang/dep/pull/1912)). +* Hash digests of vendor contents are now stored in `Gopkg.lock`, and the contents of vendor are only rewritten on change or hash mismatch ([#1912](https://github.com/golang/dep/pull/1912)). BUG FIXES: @@ -17,6 +21,7 @@ IMPROVEMENTS: * Add template operations support in dep status template output ([#1549](https://github.com/golang/dep/pull/1549)). 
* Reduce network access by trusting local source information and only pulling from upstream when necessary ([#1250](https://github.com/golang/dep/pull/1250)). * Update our dependency on Masterminds/semver to follow upstream again now that [Masterminds/semver#67](https://github.com/Masterminds/semver/pull/67) is merged([#1792](https://github.com/golang/dep/pull/1792)). +* `inputs-digest` was removed from `Gopkg.lock` ([#1912](https://github.com/golang/dep/pull/1912)). WIP: * Enable importing external configuration from dependencies during init (#1277). This is feature flagged and disabled by default. diff --git a/cmd/dep/init.go b/cmd/dep/init.go index cc2fa83231..c50d82cbf0 100644 --- a/cmd/dep/init.go +++ b/cmd/dep/init.go @@ -161,13 +161,6 @@ func (cmd *initCommand) Run(ctx *dep.Ctx, args []string) error { rootAnalyzer.FinalizeRootManifestAndLock(p.Manifest, p.Lock, copyLock) - // Run gps.Prepare with appropriate constraint solutions from solve run - // to generate the final lock memo. - s, err = gps.Prepare(params, sm) - if err != nil { - return errors.Wrap(err, "init failed: unable to recalculate the lock digest") - } - // Pass timestamp (yyyyMMddHHmmss format) as suffix to backup name. vendorbak, err := dep.BackupVendor(filepath.Join(root, "vendor"), time.Now().Format("20060102150405")) if err != nil { diff --git a/gps/constraint.go b/gps/constraint.go index 1cb9451960..f7c600316e 100644 --- a/gps/constraint.go +++ b/gps/constraint.go @@ -362,27 +362,6 @@ func pcSliceToMap(l []ProjectConstraint, r ...[]ProjectConstraint) ProjectConstr return final } -func (m ProjectConstraints) asSortedSlice() []ProjectConstraint { - pcs := make([]ProjectConstraint, len(m)) - - k := 0 - for pr, pp := range m { - pcs[k] = ProjectConstraint{ - Ident: ProjectIdentifier{ - ProjectRoot: pr, - Source: pp.Source, - }, - Constraint: pp.Constraint, - } - k++ - } - - sort.SliceStable(pcs, func(i, j int) bool { - return pcs[i].Ident.Less(pcs[j].Ident) - }) - return pcs -} - // overrideAll treats the receiver ProjectConstraints map as a set of override // instructions, and applies overridden values to the ProjectConstraints. // diff --git a/gps/prune.go b/gps/prune.go index 98aebffd19..a0a68fadf3 100644 --- a/gps/prune.go +++ b/gps/prune.go @@ -59,6 +59,8 @@ type CascadingPruneOptions struct { PerProjectOptions map[ProjectRoot]PruneOptionSet } +// ParsePruneOptions extracts PruneOptions from a string using the standard +// encoding. func ParsePruneOptions(input string) (PruneOptions, error) { var po PruneOptions for _, char := range input { diff --git a/gps/selection.go b/gps/selection.go index 727b5206ae..c8d41a9c47 100644 --- a/gps/selection.go +++ b/gps/selection.go @@ -121,6 +121,7 @@ func (s *selection) getRequiredPackagesIn(id ProjectIdentifier) map[string]int { // Suppress unused linting warning. 
var _ = (*selection)(nil).getSelectedPackagesIn +var _ = (*selection)(nil).getProjectImportMap // Compute a list of the unique packages within the given ProjectIdentifier that // are currently selected, and the number of times each package has been diff --git a/gps/verify/locksat.go b/gps/verify/locksat.go index fbb8f99878..0d5a9a4a1f 100644 --- a/gps/verify/locksat.go +++ b/gps/verify/locksat.go @@ -176,7 +176,7 @@ func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]ConstraintMi return ls.badovr } -// UnmatchedOverrides reports any normal, non-override constraint rules that +// UnmatchedConstraints reports any normal, non-override constraint rules that // were not satisfied by the corresponding LockedProject in the Lock. func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]ConstraintMismatch { return ls.badconstraint @@ -186,7 +186,7 @@ func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[strin eff := make(map[string]bool) xt := radix.New() - for pr, _ := range m.DependencyConstraints() { + for pr := range m.DependencyConstraints() { // FIXME(sdboyer) this has the trailing slash ambiguity problem; adapt // code from the solver xt.Insert(string(pr), nil) diff --git a/txn_writer.go b/txn_writer.go index a935b61f4a..e7136779d5 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -15,7 +15,6 @@ import ( "github.com/golang/dep/gps" "github.com/golang/dep/gps/verify" - "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/fs" "github.com/pkg/errors" ) @@ -135,62 +134,6 @@ func (sw *SafeWriter) HasManifest() bool { return sw.Manifest != nil } -type rawStringDiff struct { - *feedback.StringDiff -} - -// MarshalTOML serializes the diff as a string. -func (diff rawStringDiff) MarshalTOML() ([]byte, error) { - return []byte(diff.String()), nil -} - -type rawLockedProjectDiff struct { - Name gps.ProjectRoot `toml:"name"` - Source *rawStringDiff `toml:"source,omitempty"` - Version *rawStringDiff `toml:"version,omitempty"` - Branch *rawStringDiff `toml:"branch,omitempty"` - Revision *rawStringDiff `toml:"revision,omitempty"` - Packages []rawStringDiff `toml:"packages,omitempty"` -} - -func toRawLockedProjectDiff(diff feedback.LockedProjectDiff) rawLockedProjectDiff { - // this is a shallow copy since we aren't modifying the raw diff - raw := rawLockedProjectDiff{Name: diff.Name} - if diff.Source != nil { - raw.Source = &rawStringDiff{diff.Source} - } - if diff.Version != nil { - raw.Version = &rawStringDiff{diff.Version} - } - if diff.Branch != nil { - raw.Branch = &rawStringDiff{diff.Branch} - } - if diff.Revision != nil { - raw.Revision = &rawStringDiff{diff.Revision} - } - raw.Packages = make([]rawStringDiff, len(diff.Packages)) - for i := 0; i < len(diff.Packages); i++ { - raw.Packages[i] = rawStringDiff{&diff.Packages[i]} - } - return raw -} - -type rawLockedProjectDiffs struct { - Projects []rawLockedProjectDiff `toml:"projects"` -} - -func toRawLockedProjectDiffs(diffs []feedback.LockedProjectDiff) rawLockedProjectDiffs { - raw := rawLockedProjectDiffs{ - Projects: make([]rawLockedProjectDiff, len(diffs)), - } - - for i := 0; i < len(diffs); i++ { - raw.Projects[i] = toRawLockedProjectDiff(diffs[i]) - } - - return raw -} - // VendorBehavior defines when the vendor directory should be written. type VendorBehavior int @@ -452,6 +395,9 @@ func hasDotGit(path string) bool { return err == nil } +// DeltaWriter manages batched writes to populate vendor/ and update Gopkg.lock. 
+// Its primary design goal is to minimize writes by only writing things that +// have changed. type DeltaWriter struct { lock *Lock lockDiff verify.LockDelta @@ -479,7 +425,7 @@ const ( // directory by writing out only those projects that actually need to be written // out - they have changed in some way, or they lack the necessary hash // information to be verified. -func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string, behavior VendorBehavior) (TransactionWriter, error) { +func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string, behavior VendorBehavior) (DepWriter, error) { sw := &DeltaWriter{ lock: newLock, pruneOptions: prune, @@ -722,12 +668,15 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l return nil } +// PrintPreparedActions indicates what changes the DeltaWriter plans to make. func (dw *DeltaWriter) PrintPreparedActions(output *log.Logger, verbose bool) error { // FIXME return nil } -type TransactionWriter interface { +// A DepWriter is responsible for writing important dep states to disk - +// Gopkg.lock, vendor, and possibly Gopkg.toml. +type DepWriter interface { PrintPreparedActions(output *log.Logger, verbose bool) error Write(path string, sm gps.SourceManager, examples bool, logger *log.Logger) error } diff --git a/txn_writer_test.go b/txn_writer_test.go index fad6279d3c..0c213edb6d 100644 --- a/txn_writer_test.go +++ b/txn_writer_test.go @@ -261,103 +261,6 @@ func TestSafeWriter_ManifestAndUnmodifiedLockWithForceVendor(t *testing.T) { } } -func testSafeWriter_ModifiedLock(t *testing.T) { - test.NeedsExternalNetwork(t) - test.NeedsGit(t) - - h := test.NewHelper(t) - defer h.Cleanup() - - pc := NewTestProjectContext(h, safeWriterProject) - defer pc.Release() - pc.CopyFile(LockName, safeWriterGoldenLock) - pc.Load() - - originalLock := new(Lock) - *originalLock = *pc.Project.Lock - //originalLock.SolveMeta.InputsDigest = []byte{} // zero out the input hash to ensure non-equivalency - sw, _ := NewSafeWriter(nil, originalLock, pc.Project.Lock, VendorOnChanged, defaultCascadingPruneOptions()) - - // Verify prepared actions - if sw.HasManifest() { - t.Fatal("Did not expect the manifest to be written") - } - if !sw.HasLock() { - t.Fatal("Expected the payload to contain the lock") - } - if !sw.writeLock { - t.Fatal("Expected that the writer should plan to write the lock") - } - if !sw.writeVendor { - t.Fatal("Expected that the writer should plan to write the vendor directory") - } - - // Write changes - err := sw.Write(pc.Project.AbsRoot, pc.SourceManager, true, nil) - h.Must(errors.Wrap(err, "SafeWriter.Write failed")) - - // Verify file system changes - if err := pc.ManifestShouldNotExist(); err != nil { - t.Fatal(err) - } - if err := pc.LockShouldMatchGolden(safeWriterGoldenLock); err != nil { - t.Fatal(err) - } - if err := pc.VendorShouldExist(); err != nil { - t.Fatal(err) - } - if err := pc.VendorFileShouldExist("github.com/sdboyer/dep-test"); err != nil { - t.Fatal(err) - } -} - -func testSafeWriter_ModifiedLockSkipVendor(t *testing.T) { - test.NeedsExternalNetwork(t) - test.NeedsGit(t) - - h := test.NewHelper(t) - defer h.Cleanup() - - pc := NewTestProjectContext(h, safeWriterProject) - defer pc.Release() - pc.CopyFile(LockName, safeWriterGoldenLock) - pc.Load() - - originalLock := new(Lock) - *originalLock = *pc.Project.Lock - //originalLock.SolveMeta.InputsDigest = []byte{} // zero 
out the input hash to ensure non-equivalency - sw, _ := NewSafeWriter(nil, originalLock, pc.Project.Lock, VendorNever, defaultCascadingPruneOptions()) - - // Verify prepared actions - if sw.HasManifest() { - t.Fatal("Did not expect the payload to contain the manifest") - } - if !sw.HasLock() { - t.Fatal("Expected the payload to contain the lock") - } - if !sw.writeLock { - t.Fatal("Expected that the writer should plan to write the lock") - } - if sw.writeVendor { - t.Fatal("Did not expect the payload to contain the vendor directory") - } - - // Write changes - err := sw.Write(pc.Project.AbsRoot, pc.SourceManager, true, nil) - h.Must(errors.Wrap(err, "SafeWriter.Write failed")) - - // Verify file system changes - if err := pc.ManifestShouldNotExist(); err != nil { - t.Fatal(err) - } - if err := pc.LockShouldMatchGolden(safeWriterGoldenLock); err != nil { - t.Fatal(err) - } - if err := pc.VendorShouldNotExist(); err != nil { - t.Fatal(err) - } -} - func TestSafeWriter_ForceVendorWhenVendorAlreadyExists(t *testing.T) { test.NeedsExternalNetwork(t) test.NeedsGit(t) From 812b8c103f8d8a9f481b25eb695261428ce4750b Mon Sep 17 00:00:00 2001 From: sam boyer Date: Wed, 4 Jul 2018 01:40:34 -0400 Subject: [PATCH 16/25] dep: Linting fixes --- cmd/dep/ensure.go | 2 +- gps/manifest.go | 22 ---------------------- gps/verify/digest.go | 4 ++-- gps/verify/digest_test.go | 6 +++--- txn_writer.go | 7 +++---- 5 files changed, 9 insertions(+), 32 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index eaf9481993..66f5c85095 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -203,7 +203,7 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { sums[string(lp.Ident().ProjectRoot)] = lp.(verify.VerifiableProject).Digest } - status, err := verify.VerifyDepTree(vendorDir, sums) + status, err := verify.CheckDepTree(vendorDir, sums) if err != nil { ctx.Err.Printf("Error while verifying vendor directory: %q", err.Error()) os.Exit(1) diff --git a/gps/manifest.go b/gps/manifest.go index 4ae302281c..3974c95997 100644 --- a/gps/manifest.go +++ b/gps/manifest.go @@ -96,28 +96,6 @@ func (m simpleRootManifest) IgnoredPackages() *pkgtree.IgnoredRuleset { func (m simpleRootManifest) RequiredPackages() map[string]bool { return m.req } -func (m simpleRootManifest) dup() simpleRootManifest { - m2 := simpleRootManifest{ - c: make(ProjectConstraints, len(m.c)), - ovr: make(ProjectConstraints, len(m.ovr)), - req: make(map[string]bool, len(m.req)), - } - - for k, v := range m.c { - m2.c[k] = v - } - for k, v := range m.ovr { - m2.ovr[k] = v - } - for k, v := range m.req { - m2.req[k] = v - } - - // IgnoredRulesets are immutable, and safe to reuse. - m2.ig = m.ig - - return m2 -} // prepManifest ensures a manifest is prepared and safe for use by the solver. // This is mostly about ensuring that no outside routine can modify the manifest diff --git a/gps/verify/digest.go b/gps/verify/digest.go index 0191c95057..a424f50daf 100644 --- a/gps/verify/digest.go +++ b/gps/verify/digest.go @@ -373,7 +373,7 @@ func ParseVersionedDigest(input string) (VersionedDigest, error) { return vd, nil } -// VerifyDepTree verifies a dependency tree according to expected digest sums, +// CheckDepTree verifies a dependency tree according to expected digest sums, // and returns an associative array of file system nodes and their respective // vendor status conditions. 
// @@ -383,7 +383,7 @@ func ParseVersionedDigest(input string) (VersionedDigest, error) { // platform where the file system path separator is a character other than // solidus, one particular dependency would be represented as // "github.com/alice/alice1". -func VerifyDepTree(osDirname string, wantDigests map[string]VersionedDigest) (map[string]VendorStatus, error) { +func CheckDepTree(osDirname string, wantDigests map[string]VersionedDigest) (map[string]VendorStatus, error) { osDirname = filepath.Clean(osDirname) // Ensure top level pathname is a directory diff --git a/gps/verify/digest_test.go b/gps/verify/digest_test.go index c092312177..36fccb3dcb 100644 --- a/gps/verify/digest_test.go +++ b/gps/verify/digest_test.go @@ -173,7 +173,7 @@ func TestVerifyDepTree(t *testing.T) { } } - status, err := VerifyDepTree(vendorRoot, wantDigests) + status, err := CheckDepTree(vendorRoot, wantDigests) if err != nil { t.Fatal(err) } @@ -214,7 +214,7 @@ func TestVerifyDepTree(t *testing.T) { } } - status, err := VerifyDepTree(vendorRoot, wantDigests) + status, err := CheckDepTree(vendorRoot, wantDigests) if err != nil { t.Fatal(err) } @@ -252,7 +252,7 @@ func BenchmarkVerifyDepTree(b *testing.B) { prefix := filepath.Join(os.Getenv("GOPATH"), "src") for i := 0; i < b.N; i++ { - _, err := VerifyDepTree(prefix, nil) + _, err := CheckDepTree(prefix, nil) if err != nil { b.Fatal(err) } diff --git a/txn_writer.go b/txn_writer.go index e7136779d5..ed50926d2e 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -404,7 +404,6 @@ type DeltaWriter struct { pruneOptions gps.CascadingPruneOptions vendorDir string changed map[gps.ProjectRoot]changeType - status map[string]verify.VendorStatus behavior VendorBehavior } @@ -425,7 +424,7 @@ const ( // directory by writing out only those projects that actually need to be written // out - they have changed in some way, or they lack the necessary hash // information to be verified. -func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string, behavior VendorBehavior) (DepWriter, error) { +func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string, behavior VendorBehavior) (TreeWriter, error) { sw := &DeltaWriter{ lock: newLock, pruneOptions: prune, @@ -674,9 +673,9 @@ func (dw *DeltaWriter) PrintPreparedActions(output *log.Logger, verbose bool) er return nil } -// A DepWriter is responsible for writing important dep states to disk - +// A TreeWriter is responsible for writing important dep states to disk - // Gopkg.lock, vendor, and possibly Gopkg.toml. -type DepWriter interface { +type TreeWriter interface { PrintPreparedActions(output *log.Logger, verbose bool) error Write(path string, sm gps.SourceManager, examples bool, logger *log.Logger) error } From d22fbb834af8528d8bfff19f2d9b2f7e42bd9b1e Mon Sep 17 00:00:00 2001 From: sam boyer Date: Fri, 6 Jul 2018 22:02:00 -0400 Subject: [PATCH 17/25] dep: Implement DeltaWriter.PrintPreparedActions() This just abstracts and reuses the feedback already utilized by DeltaWriter.Write(). 
--- cmd/dep/ensure.go | 2 +- txn_writer.go | 168 +++++++++++++++++++++++++++------------------- 2 files changed, 100 insertions(+), 70 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 66f5c85095..878ae784ee 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -428,7 +428,7 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm exmap[imp] = true } } else { - // The only time we'll hit this branch is if + // We'll only hit this branch if Gopkg.lock did not exist. rm, _ := p.RootPackageTree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages()) for _, imp := range rm.FlattenFn(paths.IsStandardImportPath) { exmap[imp] = true diff --git a/txn_writer.go b/txn_writer.go index ed50926d2e..9dd1bca6a8 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -5,7 +5,6 @@ package dep import ( - "bytes" "context" "fmt" "io/ioutil" @@ -399,12 +398,11 @@ func hasDotGit(path string) bool { // Its primary design goal is to minimize writes by only writing things that // have changed. type DeltaWriter struct { - lock *Lock - lockDiff verify.LockDelta - pruneOptions gps.CascadingPruneOptions - vendorDir string - changed map[gps.ProjectRoot]changeType - behavior VendorBehavior + lock *Lock + lockDiff verify.LockDelta + vendorDir string + changed map[gps.ProjectRoot]changeType + behavior VendorBehavior } type changeType uint8 @@ -415,6 +413,7 @@ const ( hashMismatch hashVersionMismatch hashAbsent + pruneOptsChanged missingFromTree projectAdded projectRemoved @@ -426,11 +425,10 @@ const ( // information to be verified. func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatus, prune gps.CascadingPruneOptions, vendorDir string, behavior VendorBehavior) (TreeWriter, error) { sw := &DeltaWriter{ - lock: newLock, - pruneOptions: prune, - vendorDir: vendorDir, - changed: make(map[gps.ProjectRoot]changeType), - behavior: behavior, + lock: newLock, + vendorDir: vendorDir, + changed: make(map[gps.ProjectRoot]changeType), + behavior: behavior, } if newLock == nil { @@ -454,6 +452,8 @@ func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatu sw.changed[pr] = projectAdded } else if lpd.WasRemoved() { sw.changed[pr] = projectRemoved + } else if lpd.PruneOptsChanged() { + sw.changed[pr] = pruneOptsChanged } else { sw.changed[pr] = solveChanged } @@ -468,6 +468,8 @@ func NewDeltaWriter(oldLock, newLock *Lock, status map[string]verify.VendorStatu switch stat { case verify.NotInTree: sw.changed[pr] = missingFromTree + case verify.NotInLock: + sw.changed[pr] = projectRemoved case verify.DigestMismatchInLock: sw.changed[pr] = hashMismatch case verify.HashVersionMismatch: @@ -499,9 +501,9 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l // adjacent directory to minimize the possibility of cross-filesystem renames // becoming expensive copies, and to make removal of unneeded projects implicit // and automatic. - vnewpath := vpath + ".new" + vnewpath := vpath + "-new" if _, err := os.Stat(vnewpath); err == nil { - return errors.Errorf("scratch directory %s already exists", vnewpath) + return errors.Errorf("scratch directory %s already exists, please remove it", vnewpath) } err := os.MkdirAll(vnewpath, os.FileMode(0777)) if err != nil { @@ -515,7 +517,6 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l } dropped := []gps.ProjectRoot{} - // TODO(sdboyer) add a txn/rollback layer, like the safewriter? 
i := 0 tot := len(dw.changed) for pr, reason := range dw.changed { @@ -537,54 +538,7 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l // Only print things if we're actually going to leave behind a new // vendor dir. if dw.behavior != VendorNever { - var buf bytes.Buffer - fmt.Fprintf(&buf, "(%d/%d) Wrote %s@%s: ", i, tot, id, v) - switch reason { - case noChange: - panic(fmt.Sprintf("wtf, no change for %s", pr)) - case solveChanged: - if lpd.SourceChanged() { - fmt.Fprintf(&buf, "source changed (%s -> %s)", lpd.SourceBefore, lpd.SourceAfter) - } else if lpd.VersionChanged() { - bv, av := "(none)", "(none)" - if lpd.VersionBefore != nil { - bv = lpd.VersionBefore.String() - } - if lpd.VersionAfter != nil { - av = lpd.VersionAfter.String() - } - fmt.Fprintf(&buf, "version changed (%s -> %s)", bv, av) - } else if lpd.RevisionChanged() { - fmt.Fprintf(&buf, "revision changed (%s -> %s)", lpd.RevisionBefore, lpd.RevisionAfter) - } else if lpd.PackagesChanged() { - la, lr := len(lpd.PackagesAdded), len(lpd.PackagesRemoved) - if la > 0 && lr > 0 { - fmt.Fprintf(&buf, "packages changed (%v added, %v removed)", la, lr) - } else if la > 0 { - fmt.Fprintf(&buf, "packages changed (%v added)", la) - } else { - fmt.Fprintf(&buf, "packages changed (%v removed)", lr) - } - } else if lpd.PruneOptsChanged() { - // Override what's on the lockdiff with the extra info we have; - // this lets us excise PruneNestedVendorDirs and get the real - // value from the input param in place. - old := lpd.PruneOptsBefore & ^gps.PruneNestedVendorDirs - new := lpd.PruneOptsAfter & ^gps.PruneNestedVendorDirs - fmt.Fprintf(&buf, "prune options changed (%s -> %s)", old, new) - } - case hashMismatch: - fmt.Fprintf(&buf, "hash mismatch between Gopkg.lock and vendor contents") - case hashVersionMismatch: - fmt.Fprintf(&buf, "hashing algorithm mismatch") - case hashAbsent: - fmt.Fprintf(&buf, "hash digest absent from lock") - case projectAdded: - fmt.Fprintf(&buf, "new project") - case missingFromTree: - fmt.Fprint(&buf, "missing from vendor") - } - logger.Print(buf.String()) + logger.Printf("(%d/%d) Wrote %s@%s: %s", i, tot, id, v, changeExplanation(reason, lpd)) } digest, err := verify.DigestFromDirectory(to) @@ -607,11 +561,6 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l } // Write out the lock, now that it's fully updated with digests. - // - // This is OK to do even if -vendor-only, as the way the DeltaWriter are - // constructed mean that the only changes that could happen here are - // pruneopts (which are definitely fine) or input imports (which are less - // fine, but this is enough of an edge case that we can be lazy for now.) l, err := dw.lock.MarshalTOML() if err != nil { return errors.Wrap(err, "failed to marshal lock to TOML") @@ -667,9 +616,90 @@ func (dw *DeltaWriter) Write(path string, sm gps.SourceManager, examples bool, l return nil } +// changeExplanation outputs a string explaining what changed for each different +// possible changeType. 
+func changeExplanation(c changeType, lpd verify.LockedProjectDelta) string { + switch c { + case solveChanged: + if lpd.SourceChanged() { + return fmt.Sprintf("source changed (%s -> %s)", lpd.SourceBefore, lpd.SourceAfter) + } else if lpd.VersionChanged() { + bv, av := "(none)", "(none)" + if lpd.VersionBefore != nil { + bv = lpd.VersionBefore.String() + } + if lpd.VersionAfter != nil { + av = lpd.VersionAfter.String() + } + return fmt.Sprintf("version changed (%s -> %s)", bv, av) + } else if lpd.RevisionChanged() { + return fmt.Sprintf("revision changed (%s -> %s)", lpd.RevisionBefore, lpd.RevisionAfter) + } else if lpd.PackagesChanged() { + la, lr := len(lpd.PackagesAdded), len(lpd.PackagesRemoved) + if la > 0 && lr > 0 { + return fmt.Sprintf("packages changed (%v added, %v removed)", la, lr) + } else if la > 0 { + return fmt.Sprintf("packages changed (%v added)", la) + } else { + return fmt.Sprintf("packages changed (%v removed)", lr) + } + } + case pruneOptsChanged: + // Override what's on the lockdiff with the extra info we have; + // this lets us excise PruneNestedVendorDirs and get the real + // value from the input param in place. + old := lpd.PruneOptsBefore & ^gps.PruneNestedVendorDirs + new := lpd.PruneOptsAfter & ^gps.PruneNestedVendorDirs + return fmt.Sprintf("prune options changed (%s -> %s)", old, new) + case hashMismatch: + return "hash mismatch between Gopkg.lock and vendor contents" + case hashVersionMismatch: + return "hashing algorithm mismatch" + case hashAbsent: + return "hash digest absent from lock" + case projectAdded: + return "new project" + case missingFromTree: + return "missing from vendor" + default: + panic(fmt.Sprintf("unrecognized changeType value %v", c)) + } + + return "" +} + // PrintPreparedActions indicates what changes the DeltaWriter plans to make. 
func (dw *DeltaWriter) PrintPreparedActions(output *log.Logger, verbose bool) error { - // FIXME + if verbose { + l, err := dw.lock.MarshalTOML() + if err != nil { + return errors.Wrap(err, "ensure DryRun cannot serialize lock") + } + output.Printf("Would have written the following %s (hash digests may be incorrect):\n%s\n", LockName, string(l)) + } else { + output.Printf("Would have written %s.\n", LockName) + } + + projs := make(map[gps.ProjectRoot]gps.LockedProject) + for _, lp := range dw.lock.Projects() { + projs[lp.Ident().ProjectRoot] = lp + } + + tot := len(dw.changed) + if tot > 0 { + output.Print("Would have updated the following projects in the vendor directory:\n\n") + i := 0 + for pr, reason := range dw.changed { + lpd := dw.lockDiff.ProjectDeltas[pr] + v, id := projs[pr].Version(), projs[pr].Ident() + if reason == projectRemoved { + output.Printf("(%d/%d) Would have removed %s", i, tot, id, v, changeExplanation(reason, lpd)) + } else { + output.Printf("(%d/%d) Would hae written %s@%s: %s", i, tot, id, v, changeExplanation(reason, lpd)) + } + } + } + return nil } From 4b02ee0588f272628f6a4e8a8b8806b657bffc0b Mon Sep 17 00:00:00 2001 From: sam boyer Date: Sat, 7 Jul 2018 23:26:56 -0400 Subject: [PATCH 18/25] dep: Use DeltaWriter on -no-vendor path, as well --- cmd/dep/ensure.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 878ae784ee..011834a44f 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -33,9 +33,9 @@ Project spec: Ensure gets a project into a complete, reproducible, and likely compilable state: - * All non-stdlib imports are fulfilled + * All imports are fulfilled * All rules in Gopkg.toml are respected - * Gopkg.lock records precise versions for all dependencies + * Gopkg.lock records immutable versions for all dependencies * vendor/ is populated according to Gopkg.lock Ensure has fast techniques to determine that some of these steps may be @@ -337,7 +337,7 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return errors.WithMessage(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } -func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { +func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { if len(args) != 0 { return errors.Errorf("dep ensure -vendor-only only populates vendor/ from %s; it takes no spec arguments", dep.LockName) } @@ -347,21 +347,21 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj } // Pass the same lock as old and new so that the writer will observe no - // difference and choose not to write it out, instead writing out only - sw, err := dep.NewSafeWriter(nil, p.Lock, p.ChangedLock, dep.VendorAlways, p.Manifest.PruneOptions) + // difference, and write out only ncessary vendor/ changes. 
+ dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), VendorAlways) if err != nil { return err } if cmd.dryRun { - return sw.PrintPreparedActions(ctx.Out, ctx.Verbose) + return dw.PrintPreparedActions(ctx.Out, ctx.Verbose) } var logger *log.Logger if ctx.Verbose { logger = ctx.Err } - return errors.WithMessage(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") + return errors.WithMessage(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { From 80eeec7b90537bc22bb0e519014ad0d89d119b71 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Sun, 8 Jul 2018 21:20:28 -0400 Subject: [PATCH 19/25] gps/verify: Add tests for LockSatisfaction Also better prepare it for public consumption by exporting its members, renaming some methods and improving docs. --- cmd/dep/ensure.go | 10 +- cmd/dep/status.go | 2 +- gps/verify/helper_types_test.go | 78 ++++++++++ gps/verify/locksat.go | 93 ++++++------ gps/verify/locksat_test.go | 259 ++++++++++++++++++++++++++++++++ 5 files changed, 388 insertions(+), 54 deletions(-) create mode 100644 gps/verify/helper_types_test.go create mode 100644 gps/verify/locksat_test.go diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 011834a44f..1f6f6948d1 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -282,19 +282,19 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project lock := p.ChangedLock if lock != nil { lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree) - if !lsat.Passed() { + if !lsat.Satisfied() { if ctx.Verbose { ctx.Out.Println("Gopkg.lock is out of sync with Gopkg.toml and project code:") - for _, missing := range lsat.MissingImports() { + for _, missing := range lsat.MissingImports { ctx.Out.Printf("\t%s is missing from input-imports\n", missing) } - for _, excess := range lsat.ExcessImports() { + for _, excess := range lsat.ExcessImports { ctx.Out.Printf("\t%s is in input-imports, but isn't imported\n", excess) } - for pr, unmatched := range lsat.UnmatchedOverrides() { + for pr, unmatched := range lsat.UnmetOverrides { ctx.Out.Printf("\t%s is at %s, which is not allowed by override %s\n", pr, unmatched.V, unmatched.C) } - for pr, unmatched := range lsat.UnmatchedConstraints() { + for pr, unmatched := range lsat.UnmetConstraints { ctx.Out.Printf("\t%s is at %s, which is not allowed by constraint %s\n", pr, unmatched.V, unmatched.C) } ctx.Out.Println() diff --git a/cmd/dep/status.go b/cmd/dep/status.go index 77e7671f26..f87c78679e 100644 --- a/cmd/dep/status.go +++ b/cmd/dep/status.go @@ -925,7 +925,7 @@ func (cmd *statusCommand) runStatusAll(ctx *dep.Ctx, out outputter, p *dep.Proje }) lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree) - if lsat.Passed() { + if lsat.Satisfied() { // If these are equal, we're guaranteed that the lock is a transitively // complete picture of all deps. That eliminates the need for at least // some checks. diff --git a/gps/verify/helper_types_test.go b/gps/verify/helper_types_test.go new file mode 100644 index 0000000000..b27cf7eb2e --- /dev/null +++ b/gps/verify/helper_types_test.go @@ -0,0 +1,78 @@ +// Copyright 2018 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package verify + +import ( + "github.com/golang/dep/gps" + "github.com/golang/dep/gps/pkgtree" +) + +// mkPI creates a ProjectIdentifier with the ProjectRoot as the provided +// string, and the Source unset. +// +// Call normalize() on the returned value if you need the Source to be be +// equal to the ProjectRoot. +func mkPI(root string) gps.ProjectIdentifier { + return gps.ProjectIdentifier{ + ProjectRoot: gps.ProjectRoot(root), + } +} + +type safeLock struct { + p []gps.LockedProject + i []string +} + +func (sl safeLock) InputImports() []string { + return sl.i +} + +func (sl safeLock) Projects() []gps.LockedProject { + return sl.p +} + +// simpleRootManifest exists so that we have a safe value to swap into solver +// params when a nil Manifest is provided. +type simpleRootManifest struct { + c, ovr gps.ProjectConstraints + ig *pkgtree.IgnoredRuleset + req map[string]bool +} + +func (m simpleRootManifest) DependencyConstraints() gps.ProjectConstraints { + return m.c +} +func (m simpleRootManifest) Overrides() gps.ProjectConstraints { + return m.ovr +} +func (m simpleRootManifest) IgnoredPackages() *pkgtree.IgnoredRuleset { + return m.ig +} +func (m simpleRootManifest) RequiredPackages() map[string]bool { + return m.req +} + +func (m simpleRootManifest) dup() simpleRootManifest { + m2 := simpleRootManifest{ + c: make(gps.ProjectConstraints), + ovr: make(gps.ProjectConstraints), + ig: pkgtree.NewIgnoredRuleset(m.ig.ToSlice()), + req: make(map[string]bool), + } + + for k, v := range m.c { + m2.c[k] = v + } + + for k, v := range m.ovr { + m2.ovr[k] = v + } + + for k := range m.req { + m2.req[k] = true + } + + return m2 +} diff --git a/gps/verify/locksat.go b/gps/verify/locksat.go index 0d5a9a4a1f..bd0321aa21 100644 --- a/gps/verify/locksat.go +++ b/gps/verify/locksat.go @@ -1,3 +1,7 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + package verify import ( @@ -9,10 +13,26 @@ import ( // LockSatisfaction holds the compound result of LockSatisfiesInputs, allowing // the caller to inspect each of several orthogonal possible types of failure. +// +// The zero value assumes that there was no input lock, which necessarily means +// the inputs were not satisfied. This zero value means we err on the side of +// failure. type LockSatisfaction struct { - nolock bool - missingPkgs, excessPkgs []string - badovr, badconstraint map[gps.ProjectRoot]ConstraintMismatch + // If LockExisted is false, it indicates that a nil gps.Lock was passed to + // LockSatisfiesInputs(). + LockExisted bool + // MissingImports is the set of import paths that were present in the + // inputs but missing in the Lock. + MissingImports []string + // ExcessImports is the set of import paths that were present in the Lock + // but absent from the inputs. + ExcessImports []string + // UnmatchedConstraints reports any normal, non-override constraint rules that + // were not satisfied by the corresponding LockedProject in the Lock. + UnmetConstraints map[gps.ProjectRoot]ConstraintMismatch + // UnmatchedOverrides reports any override rules that were not satisfied by the + // corresponding LockedProject in the Lock. 
+ UnmetOverrides map[gps.ProjectRoot]ConstraintMismatch } // ConstraintMismatch is a two-tuple of a gps.Version, and a gps.Constraint that @@ -30,9 +50,15 @@ type ConstraintMismatch struct { // compute package imports that may have been removed. Figuring out that // negative space would require exploring the entire graph to ensure there are // no in-edges for particular imports. -func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree) LockSatisfaction { +func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, ptree pkgtree.PackageTree) LockSatisfaction { if l == nil { - return LockSatisfaction{nolock: true} + return LockSatisfaction{} + } + + lsat := LockSatisfaction{ + LockExisted: true, + UnmetOverrides: make(map[gps.ProjectRoot]ConstraintMismatch), + UnmetConstraints: make(map[gps.ProjectRoot]ConstraintMismatch), } var ig *pkgtree.IgnoredRuleset @@ -42,7 +68,7 @@ func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree req = m.RequiredPackages() } - rm, _ := rpt.ToReachMap(true, true, false, ig) + rm, _ := ptree.ToReachMap(true, true, false, ig) reach := rm.FlattenFn(paths.IsStandardImportPath) inlock := make(map[string]bool, len(l.InputImports())) @@ -68,11 +94,6 @@ func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree inlock[imp] = true } - lsat := LockSatisfaction{ - badovr: make(map[gps.ProjectRoot]ConstraintMismatch), - badconstraint: make(map[gps.ProjectRoot]ConstraintMismatch), - } - for ip := range ininputs { if !inlock[ip] { pkgDiff[ip] = missingFromLock @@ -96,9 +117,9 @@ func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree for ip, typ := range pkgDiff { if typ == missingFromLock { - lsat.missingPkgs = append(lsat.missingPkgs, ip) + lsat.MissingImports = append(lsat.MissingImports, ip) } else { - lsat.excessPkgs = append(lsat.excessPkgs, ip) + lsat.ExcessImports = append(lsat.ExcessImports, ip) } } @@ -110,7 +131,7 @@ func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree if pp, has := ovr[pr]; has { if !pp.Constraint.Matches(lp.Version()) { - lsat.badovr[pr] = ConstraintMismatch{ + lsat.UnmetOverrides[pr] = ConstraintMismatch{ C: pp.Constraint, V: lp.Version(), } @@ -121,7 +142,7 @@ func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree } if pp, has := constraints[pr]; has && eff[string(pr)] && !pp.Constraint.Matches(lp.Version()) { - lsat.badconstraint[pr] = ConstraintMismatch{ + lsat.UnmetConstraints[pr] = ConstraintMismatch{ C: pp.Constraint, V: lp.Version(), } @@ -131,57 +152,33 @@ func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, rpt pkgtree.PackageTree return lsat } -// Passed is a shortcut method that indicates whether there were any ways in -// which the Lock did not satisfy the inputs. It will return true only if no -// problems were found. -func (ls LockSatisfaction) Passed() bool { - if ls.nolock { +// Satisfied is a shortcut method that indicates whether there were any ways in +// which the Lock did not satisfy the inputs. It will return true only if the +// Lock was satisfactory in all respects vis-a-vis the inputs. 
+func (ls LockSatisfaction) Satisfied() bool { + if !ls.LockExisted { return false } - if len(ls.missingPkgs) > 0 { + if len(ls.MissingImports) > 0 { return false } - if len(ls.excessPkgs) > 0 { + if len(ls.ExcessImports) > 0 { return false } - if len(ls.badovr) > 0 { + if len(ls.UnmetOverrides) > 0 { return false } - if len(ls.badconstraint) > 0 { + if len(ls.UnmetConstraints) > 0 { return false } return true } -// MissingImports reports the set of import paths that were present in the -// inputs but missing in the Lock. -func (ls LockSatisfaction) MissingImports() []string { - return ls.missingPkgs -} - -// ExcessImports reports the set of import paths that were present in the Lock -// but absent from the inputs. -func (ls LockSatisfaction) ExcessImports() []string { - return ls.excessPkgs -} - -// UnmatchedOverrides reports any override rules that were not satisfied by the -// corresponding LockedProject in the Lock. -func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]ConstraintMismatch { - return ls.badovr -} - -// UnmatchedConstraints reports any normal, non-override constraint rules that -// were not satisfied by the corresponding LockedProject in the Lock. -func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]ConstraintMismatch { - return ls.badconstraint -} - func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[string]bool { eff := make(map[string]bool) xt := radix.New() diff --git a/gps/verify/locksat_test.go b/gps/verify/locksat_test.go new file mode 100644 index 0000000000..658e04ceb3 --- /dev/null +++ b/gps/verify/locksat_test.go @@ -0,0 +1,259 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package verify + +import ( + "strings" + "testing" + + "github.com/golang/dep/gps" + "github.com/golang/dep/gps/pkgtree" +) + +type lockUnsatisfactionDimension uint8 + +const ( + noLock lockUnsatisfactionDimension = 1 << iota + missingImports + excessImports + unmatchedOverrides + unmatchedConstraints +) + +func (lsd lockUnsatisfactionDimension) String() string { + var parts []string + for i := uint(0); i < 5; i++ { + if lsd&(1< Date: Mon, 9 Jul 2018 01:18:28 -0400 Subject: [PATCH 20/25] gps/verify: Add LockDiff unit tests These should be nearly comprehensive tests for lock diffing behaviors. They follow the same functional transform table testing pattern as with the lock satisfaction tests. --- gps/verify/helper_types_test.go | 41 +++ gps/verify/lockdiff.go | 98 +++---- gps/verify/lockdiff_test.go | 483 ++++++++++++++++++++++++++++++++ 3 files changed, 567 insertions(+), 55 deletions(-) create mode 100644 gps/verify/lockdiff_test.go diff --git a/gps/verify/helper_types_test.go b/gps/verify/helper_types_test.go index b27cf7eb2e..75d4ddf6f5 100644 --- a/gps/verify/helper_types_test.go +++ b/gps/verify/helper_types_test.go @@ -33,6 +33,37 @@ func (sl safeLock) Projects() []gps.LockedProject { return sl.p } +func (sl safeLock) dup() safeLock { + sl2 := safeLock{ + i: make([]string, len(sl.i)), + p: make([]gps.LockedProject, 0, len(sl.p)), + } + copy(sl2.i, sl.i) + + for _, lp := range sl.p { + // Only for use with VerifiableProjects. 
+ sl2.p = append(sl2.p, lp.(VerifiableProject).dup()) + } + + return sl2 +} + +func (vp VerifiableProject) dup() VerifiableProject { + pkglist := make([]string, len(vp.Packages())) + copy(pkglist, vp.Packages()) + hashbytes := make([]byte, len(vp.Digest.Digest)) + copy(hashbytes, vp.Digest.Digest) + + return VerifiableProject{ + LockedProject: gps.NewLockedProject(vp.Ident(), vp.Version(), pkglist), + PruneOpts: vp.PruneOpts, + Digest: VersionedDigest{ + HashVersion: vp.Digest.HashVersion, + Digest: hashbytes, + }, + } +} + // simpleRootManifest exists so that we have a safe value to swap into solver // params when a nil Manifest is provided. type simpleRootManifest struct { @@ -76,3 +107,13 @@ func (m simpleRootManifest) dup() simpleRootManifest { return m2 } + +func newVerifiableProject(id gps.ProjectIdentifier, v gps.Version, pkgs []string) VerifiableProject { + return VerifiableProject{ + LockedProject: gps.NewLockedProject(id, v, pkgs), + Digest: VersionedDigest{ + HashVersion: HashVersion, + Digest: []byte("something"), + }, + } +} diff --git a/gps/verify/lockdiff.go b/gps/verify/lockdiff.go index c85c89722a..60928d0876 100644 --- a/gps/verify/lockdiff.go +++ b/gps/verify/lockdiff.go @@ -1,4 +1,4 @@ -// Copyright 2017 The Go Authors. All rights reserved. +// Copyright 2018 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -12,22 +12,25 @@ import ( "github.com/golang/dep/gps" ) -// sortLockedProjects returns a sorted copy of lps, or itself if already sorted. -func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { - if len(lps) <= 1 || sort.SliceIsSorted(lps, func(i, j int) bool { - return lps[i].Ident().Less(lps[j].Ident()) - }) { - return lps - } - - cp := make([]gps.LockedProject, len(lps)) - copy(cp, lps) +// DeltaDimension defines a bitset enumerating all of the different dimensions +// along which a Lock, and its constitutent components, can change. +type DeltaDimension uint32 - sort.Slice(cp, func(i, j int) bool { - return cp[i].Ident().Less(cp[j].Ident()) - }) - return cp -} +// Each flag represents an ortohgonal dimension along which Locks can vary with +// respect to each other. +const ( + InputImportsChanged DeltaDimension = 1 << iota + ProjectAdded + ProjectRemoved + SourceChanged + VersionChanged + RevisionChanged + PackagesChanged + PruneOptsChanged + HashVersionChanged + HashChanged + AnyChanged = (1 << iota) - 1 +) // LockDelta represents all possible differences between two Locks. type LockDelta struct { @@ -61,9 +64,10 @@ type LockedProjectPropertiesDelta struct { // DiffLocks compares two locks and computes a semantically rich delta between // them. func DiffLocks(l1, l2 gps.Lock) LockDelta { - // Default nil locks to empty locks, so that we can still generate a diff + // Default nil locks to empty locks, so that we can still generate a diff. if l1 == nil { if l2 == nil { + // But both locks being nil results in an empty delta. return LockDelta{} } l1 = gps.SimpleLock{} @@ -131,8 +135,8 @@ func DiffLocks(l1, l2 gps.Lock) LockDelta { } func findAddedAndRemoved(l1, l2 []string) (add, remove []string) { - // Computing package add/removes could probably be optimized to O(n), but - // it's not critical path for any known case, so not worth the effort right now. + // Computing package add/removes might be optimizable to O(n) (?), but it's + // not critical path for any known case, so not worth the effort right now. 
p1, p2 := make(map[string]bool, len(l1)), make(map[string]bool, len(l2)) for _, pkg := range l1 { @@ -216,26 +220,6 @@ func DiffLockedProjectProperties(lp1, lp2 gps.LockedProject) LockedProjectProper return ld } -// DeltaDimension defines a bitset enumerating all of the different dimensions -// along which a Lock, and its constitutent components, can change. -type DeltaDimension uint32 - -// Each flag represents an ortohgonal dimension along which Locks can vary with -// respect to each other. -const ( - InputImportsChanged DeltaDimension = 1 << iota - ProjectAdded - ProjectRemoved - SourceChanged - VersionChanged - RevisionChanged - PackagesChanged - PruneOptsChanged - HashVersionChanged - HashChanged - AnyChanged = (1 << iota) - 1 -) - // Changed indicates whether the delta contains a change along the dimensions // with their corresponding bits set. // @@ -388,8 +372,9 @@ func (ld LockedProjectPropertiesDelta) SourceChanged() bool { // VersionChanged returns true if the version property differed between the // first and second locks. In addition to simple changes (e.g. 1.0.1 -> 1.0.2), -// this also includes all the possible type changes covered by -// VersionTypeCHanged(), as those necessarily also are version changes. +// this also includes all possible version type changes either going from a +// paired version to a plain revision, or the reverse direction, or the type of +// unpaired version changing (e.g. branch -> semver). func (ld LockedProjectPropertiesDelta) VersionChanged() bool { if ld.VersionBefore == nil && ld.VersionAfter == nil { return false @@ -402,20 +387,6 @@ func (ld LockedProjectPropertiesDelta) VersionChanged() bool { return false } -// VersionTypeChanged returns true if the type of version differed between the -// first and second locks. This includes either going from a paired version to a -// plain revision, or the reverse direction, or the type of unpaired version -// changing (e.g. branch -> semver). -func (ld LockedProjectPropertiesDelta) VersionTypeChanged() bool { - if ld.VersionBefore == nil && ld.VersionAfter == nil { - return false - } else if (ld.VersionBefore == nil || ld.VersionAfter == nil) || (ld.VersionBefore.Type() != ld.VersionAfter.Type()) { - return true - } - - return false -} - // RevisionChanged returns true if the revision property differed between the // first and second locks. func (ld LockedProjectPropertiesDelta) RevisionChanged() bool { @@ -433,3 +404,20 @@ func (ld LockedProjectPropertiesDelta) PackagesChanged() bool { func (ld LockedProjectPropertiesDelta) PruneOptsChanged() bool { return ld.PruneOptsBefore != ld.PruneOptsAfter } + +// sortLockedProjects returns a sorted copy of lps, or itself if already sorted. +func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject { + if len(lps) <= 1 || sort.SliceIsSorted(lps, func(i, j int) bool { + return lps[i].Ident().Less(lps[j].Ident()) + }) { + return lps + } + + cp := make([]gps.LockedProject, len(lps)) + copy(cp, lps) + + sort.Slice(cp, func(i, j int) bool { + return cp[i].Ident().Less(cp[j].Ident()) + }) + return cp +} diff --git a/gps/verify/lockdiff_test.go b/gps/verify/lockdiff_test.go new file mode 100644 index 0000000000..52955a392d --- /dev/null +++ b/gps/verify/lockdiff_test.go @@ -0,0 +1,483 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package verify + +import ( + "fmt" + "math/bits" + "strings" + "testing" + + "github.com/golang/dep/gps" +) + +func contains(haystack []string, needle string) bool { + for _, str := range haystack { + if str == needle { + return true + } + } + return false +} + +func (dd DeltaDimension) String() string { + var parts []string + + for dd != 0 { + index := bits.TrailingZeros32(uint32(dd)) + dd &= ^(1 << uint(index)) + + switch DeltaDimension(1 << uint(index)) { + case InputImportsChanged: + parts = append(parts, "input imports") + case ProjectAdded: + parts = append(parts, "project added") + case ProjectRemoved: + parts = append(parts, "project removed") + case SourceChanged: + parts = append(parts, "source changed") + case VersionChanged: + parts = append(parts, "version changed") + case RevisionChanged: + parts = append(parts, "revision changed") + case PackagesChanged: + parts = append(parts, "packages changed") + case PruneOptsChanged: + parts = append(parts, "pruneopts changed") + case HashVersionChanged: + parts = append(parts, "hash version changed") + case HashChanged: + parts = append(parts, "hash digest changed") + } + } + + return strings.Join(parts, ", ") +} + +func TestLockDelta(t *testing.T) { + fooversion := gps.NewVersion("v1.0.0").Pair("foorev1") + bazversion := gps.NewVersion("v2.0.0").Pair("bazrev1") + transver := gps.NewVersion("v0.5.0").Pair("transrev1") + l := safeLock{ + i: []string{"foo.com/bar", "baz.com/qux"}, + p: []gps.LockedProject{ + newVerifiableProject(mkPI("foo.com/bar"), fooversion, []string{".", "subpkg"}), + newVerifiableProject(mkPI("baz.com/qux"), bazversion, []string{".", "other"}), + newVerifiableProject(mkPI("transitive.com/dependency"), transver, []string{"."}), + }, + } + + var dup lockTransformer = func(l safeLock) safeLock { + return l.dup() + } + + tt := map[string]struct { + lt lockTransformer + delta DeltaDimension + checkfn func(*testing.T, LockDelta) + }{ + "ident": { + lt: dup, + }, + "added import": { + lt: dup.addII("other.org"), + delta: InputImportsChanged, + }, + "added import 2x": { + lt: dup.addII("other.org").addII("andsomethingelse.com/wowie"), + delta: InputImportsChanged, + checkfn: func(t *testing.T, ld LockDelta) { + if !contains(ld.AddedImportInputs, "other.org") { + t.Error("first added input import missing") + } + if !contains(ld.AddedImportInputs, "andsomethingelse.com/wowie") { + t.Error("first added input import missing") + } + }, + }, + "removed import": { + lt: dup.rmII("baz.com/qux"), + delta: InputImportsChanged, + checkfn: func(t *testing.T, ld LockDelta) { + if !contains(ld.RemovedImportInputs, "baz.com/qux") { + t.Error("removed input import missing") + } + }, + }, + "add project": { + lt: dup.addDumbProject("madeup.org"), + delta: ProjectAdded, + }, + "remove project": { + lt: dup.rmProject("foo.com/bar"), + delta: ProjectRemoved, + }, + "all": { + lt: dup.addII("other.org").rmII("baz.com/qux").addDumbProject("zebrafun.org").rmProject("foo.com/bar"), + delta: InputImportsChanged | ProjectRemoved | ProjectAdded, + }, + } + + for name, fix := range tt { + fix := fix + t.Run(name, func(t *testing.T) { + fixl := fix.lt(l) + ld := DiffLocks(l, fixl) + + if !ld.Changed(AnyChanged) && fix.delta != 0 { + t.Errorf("Changed() reported false when expecting some dimensions to be changed: %s", fix.delta) + } else if ld.Changed(AnyChanged) && fix.delta == 0 { + t.Error("Changed() reported true when expecting no changes") + } + if ld.Changed(AnyChanged & ^fix.delta) { + t.Errorf("Changed() reported true when checking along 
not-expected dimensions: %s", ld.Changes() & ^fix.delta) + } + + gotdelta := ld.Changes() + if fix.delta & ^gotdelta != 0 { + t.Errorf("wanted change in some dimensions that were unchanged: %s", fix.delta & ^gotdelta) + } + if gotdelta & ^fix.delta != 0 { + t.Errorf("did not want change in some dimensions that were changed: %s", gotdelta & ^fix.delta) + } + + if fix.checkfn != nil { + fix.checkfn(t, ld) + } + }) + } +} + +func TestLockedProjectPropertiesDelta(t *testing.T) { + fooversion, foorev := gps.NewVersion("v1.0.0"), gps.Revision("foorev1") + foopair := fooversion.Pair(foorev) + foovp := VerifiableProject{ + LockedProject: gps.NewLockedProject(mkPI("foo.com/project"), foopair, []string{".", "subpkg"}), + PruneOpts: gps.PruneNestedVendorDirs, + Digest: VersionedDigest{ + HashVersion: HashVersion, + Digest: []byte("foobytes"), + }, + } + var dup lockedProjectTransformer = func(lp gps.LockedProject) gps.LockedProject { + return lp.(VerifiableProject).dup() + } + + tt := map[string]struct { + lt1, lt2 lockedProjectTransformer + delta DeltaDimension + checkfn func(*testing.T, LockedProjectPropertiesDelta) + }{ + "ident": { + lt1: dup, + }, + "add pkg": { + lt1: dup.addPkg("whatev"), + delta: PackagesChanged, + }, + "rm pkg": { + lt1: dup.rmPkg("subpkg"), + delta: PackagesChanged, + }, + "add and rm pkg": { + lt1: dup.rmPkg("subpkg").addPkg("whatev"), + delta: PackagesChanged, + checkfn: func(t *testing.T, ld LockedProjectPropertiesDelta) { + if !contains(ld.PackagesAdded, "whatev") { + t.Error("added pkg missing from list") + } + if !contains(ld.PackagesRemoved, "subpkg") { + t.Error("removed pkg missing from list") + } + }, + }, + "add source": { + lt1: dup.setSource("somethingelse"), + delta: SourceChanged, + }, + "remove source": { + lt1: dup.setSource("somethingelse"), + lt2: dup, + delta: SourceChanged, + }, + "to rev only": { + lt1: dup.setVersion(foorev), + delta: VersionChanged, + }, + "from rev only": { + lt1: dup.setVersion(foorev), + lt2: dup, + delta: VersionChanged, + }, + "to new rev only": { + lt1: dup.setVersion(gps.Revision("newrev")), + delta: VersionChanged | RevisionChanged, + }, + "from new rev only": { + lt1: dup.setVersion(gps.Revision("newrev")), + lt2: dup, + delta: VersionChanged | RevisionChanged, + }, + "version change": { + lt1: dup.setVersion(gps.NewVersion("v0.5.0").Pair(foorev)), + delta: VersionChanged, + }, + "version change to norev": { + lt1: dup.setVersion(gps.NewVersion("v0.5.0")), + delta: VersionChanged | RevisionChanged, + }, + "version change from norev": { + lt1: dup.setVersion(gps.NewVersion("v0.5.0")), + lt2: dup.setVersion(gps.NewVersion("v0.5.0").Pair(foorev)), + delta: RevisionChanged, + }, + "to branch": { + lt1: dup.setVersion(gps.NewBranch("master").Pair(foorev)), + delta: VersionChanged, + }, + "to branch new rev": { + lt1: dup.setVersion(gps.NewBranch("master").Pair(gps.Revision("newrev"))), + delta: VersionChanged | RevisionChanged, + }, + "to empty prune opts": { + lt1: dup.setPruneOpts(0), + delta: PruneOptsChanged, + }, + "from empty prune opts": { + lt1: dup.setPruneOpts(0), + lt2: dup, + delta: PruneOptsChanged, + }, + "prune opts change": { + lt1: dup.setPruneOpts(gps.PruneNestedVendorDirs | gps.PruneNonGoFiles), + delta: PruneOptsChanged, + }, + "empty digest": { + lt1: dup.setDigest(VersionedDigest{}), + delta: HashVersionChanged | HashChanged, + }, + "to empty digest": { + lt1: dup.setDigest(VersionedDigest{}), + lt2: dup, + delta: HashVersionChanged | HashChanged, + }, + "hash version changed": { + lt1: 
dup.setDigest(VersionedDigest{HashVersion: HashVersion + 1, Digest: []byte("foobytes")}), + delta: HashVersionChanged, + }, + "hash contents changed": { + lt1: dup.setDigest(VersionedDigest{HashVersion: HashVersion, Digest: []byte("barbytes")}), + delta: HashChanged, + }, + "to plain locked project": { + lt1: dup.toPlainLP(), + delta: PruneOptsChanged | HashChanged | HashVersionChanged, + }, + "from plain locked project": { + lt1: dup.toPlainLP(), + lt2: dup, + delta: PruneOptsChanged | HashChanged | HashVersionChanged, + }, + "all": { + lt1: dup.setDigest(VersionedDigest{}).setVersion(gps.NewBranch("master").Pair(gps.Revision("newrev"))).setPruneOpts(gps.PruneNestedVendorDirs | gps.PruneNonGoFiles).setSource("whatever"), + delta: SourceChanged | VersionChanged | RevisionChanged | PruneOptsChanged | HashChanged | HashVersionChanged, + }, + } + + for name, fix := range tt { + fix := fix + t.Run(name, func(t *testing.T) { + // Use two patterns for constructing locks to compare: if only lt1 + // is set, use foovp as the first lp and compare with the lt1 + // transforms applied. If lt2 is set, transform foovp with lt1 for + // the first lp, then transform foovp with lt2 for the second lp. + var lp1, lp2 gps.LockedProject + if fix.lt2 == nil { + lp1 = foovp + lp2 = fix.lt1(foovp) + } else { + lp1 = fix.lt1(foovp) + lp2 = fix.lt2(foovp) + } + + lppd := DiffLockedProjectProperties(lp1, lp2) + if !lppd.Changed(AnyChanged) && fix.delta != 0 { + t.Errorf("Changed() reporting false when expecting some dimensions to be changed: %s", fix.delta) + } else if lppd.Changed(AnyChanged) && fix.delta == 0 { + t.Error("Changed() reporting true when expecting no changes") + } + if lppd.Changed(AnyChanged & ^fix.delta) { + t.Errorf("Changed() reported true when checking along not-expected dimensions: %s", lppd.Changes() & ^fix.delta) + } + + gotdelta := lppd.Changes() + if fix.delta & ^gotdelta != 0 { + t.Errorf("wanted change in some dimensions that were unchanged: %s", fix.delta & ^gotdelta) + } + if gotdelta & ^fix.delta != 0 { + t.Errorf("did not want change in some dimensions that were changed: %s", gotdelta & ^fix.delta) + } + + if fix.checkfn != nil { + fix.checkfn(t, lppd) + } + }) + } +} + +type lockTransformer func(safeLock) safeLock + +func (lt lockTransformer) compose(lt2 lockTransformer) lockTransformer { + if lt == nil { + return lt2 + } + return func(l safeLock) safeLock { + return lt2(lt(l)) + } +} + +func (lt lockTransformer) addDumbProject(root string) lockTransformer { + vp := newVerifiableProject(mkPI(root), gps.NewVersion("whatever").Pair("addedrev"), []string{"."}) + return lt.compose(func(l safeLock) safeLock { + for _, lp := range l.p { + if lp.Ident().ProjectRoot == vp.Ident().ProjectRoot { + panic(fmt.Sprintf("%q already in lock", vp.Ident().ProjectRoot)) + } + } + l.p = append(l.p, vp) + return l + }) +} + +func (lt lockTransformer) rmProject(pr string) lockTransformer { + return lt.compose(func(l safeLock) safeLock { + for k, lp := range l.p { + if lp.Ident().ProjectRoot == gps.ProjectRoot(pr) { + l.p = l.p[:k+copy(l.p[k:], l.p[k+1:])] + return l + } + } + panic(fmt.Sprintf("%q not in lock", pr)) + }) +} + +func (lt lockTransformer) addII(path string) lockTransformer { + return lt.compose(func(l safeLock) safeLock { + for _, impath := range l.i { + if path == impath { + panic(fmt.Sprintf("%q already in input imports", impath)) + } + } + l.i = append(l.i, path) + return l + }) +} + +func (lt lockTransformer) rmII(path string) lockTransformer { + return lt.compose(func(l safeLock) 
safeLock { + for k, impath := range l.i { + if path == impath { + l.i = l.i[:k+copy(l.i[k:], l.i[k+1:])] + return l + } + } + panic(fmt.Sprintf("%q not in input imports", path)) + }) +} + +type lockedProjectTransformer func(gps.LockedProject) gps.LockedProject + +func (lpt lockedProjectTransformer) compose(lpt2 lockedProjectTransformer) lockedProjectTransformer { + if lpt == nil { + return lpt2 + } + return func(lp gps.LockedProject) gps.LockedProject { + return lpt2(lpt(lp)) + } +} + +func (lpt lockedProjectTransformer) addPkg(path string) lockedProjectTransformer { + return lpt.compose(func(lp gps.LockedProject) gps.LockedProject { + for _, pkg := range lp.Packages() { + if path == pkg { + panic(fmt.Sprintf("%q already in pkg list", path)) + } + } + + nlp := gps.NewLockedProject(lp.Ident(), lp.Version(), append(lp.Packages(), path)) + if vp, ok := lp.(VerifiableProject); ok { + vp.LockedProject = nlp + return vp + } + return nlp + }) +} + +func (lpt lockedProjectTransformer) rmPkg(path string) lockedProjectTransformer { + return lpt.compose(func(lp gps.LockedProject) gps.LockedProject { + pkglist := lp.Packages() + for k, pkg := range pkglist { + if path == pkg { + pkglist = pkglist[:k+copy(pkglist[k:], pkglist[k+1:])] + nlp := gps.NewLockedProject(lp.Ident(), lp.Version(), pkglist) + if vp, ok := lp.(VerifiableProject); ok { + vp.LockedProject = nlp + return vp + } + return nlp + } + } + panic(fmt.Sprintf("%q not in pkg list", path)) + }) +} + +func (lpt lockedProjectTransformer) setSource(source string) lockedProjectTransformer { + return lpt.compose(func(lp gps.LockedProject) gps.LockedProject { + ident := lp.Ident() + ident.Source = source + nlp := gps.NewLockedProject(ident, lp.Version(), lp.Packages()) + if vp, ok := lp.(VerifiableProject); ok { + vp.LockedProject = nlp + return vp + } + return nlp + }) +} + +func (lpt lockedProjectTransformer) setVersion(v gps.Version) lockedProjectTransformer { + return lpt.compose(func(lp gps.LockedProject) gps.LockedProject { + nlp := gps.NewLockedProject(lp.Ident(), v, lp.Packages()) + if vp, ok := lp.(VerifiableProject); ok { + vp.LockedProject = nlp + return vp + } + return nlp + }) +} + +func (lpt lockedProjectTransformer) setPruneOpts(po gps.PruneOptions) lockedProjectTransformer { + return lpt.compose(func(lp gps.LockedProject) gps.LockedProject { + vp := lp.(VerifiableProject) + vp.PruneOpts = po + return vp + }) +} + +func (lpt lockedProjectTransformer) setDigest(vd VersionedDigest) lockedProjectTransformer { + return lpt.compose(func(lp gps.LockedProject) gps.LockedProject { + vp := lp.(VerifiableProject) + vp.Digest = vd + return vp + }) +} + +func (lpt lockedProjectTransformer) toPlainLP() lockedProjectTransformer { + return lpt.compose(func(lp gps.LockedProject) gps.LockedProject { + if vp, ok := lp.(VerifiableProject); ok { + return vp.LockedProject + } + return lp + }) +} From 69991c72bcf5626e903f6752c737a4a460681a15 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Mon, 9 Jul 2018 01:38:02 -0400 Subject: [PATCH 21/25] dep: Fix -vendor-only path's invocation ordering --- cmd/dep/ensure.go | 10 +++++----- txn_writer.go | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 1f6f6948d1..5196be9e44 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -180,10 +180,6 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { params.TraceLogger = ctx.Err } - if cmd.vendorOnly { - return cmd.runVendorOnly(ctx, args, p, sm, params) - } - statchan := make(chan 
map[string]verify.VendorStatus) var lps []gps.LockedProject if p.Lock != nil { @@ -211,6 +207,10 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { statchan <- status }(filepath.Join(p.AbsRoot, "vendor"), lps) + if cmd.vendorOnly { + return cmd.runVendorOnly(ctx, args, p, sm, params, statchan) + } + if fatal, err := checkErrors(params.RootPackageTree.Packages, p.Manifest.IgnoredPackages()); err != nil { if fatal { return err @@ -348,7 +348,7 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj // Pass the same lock as old and new so that the writer will observe no // difference, and write out only ncessary vendor/ changes. - dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), VendorAlways) + dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), dep.VendorAlways) if err != nil { return err } diff --git a/txn_writer.go b/txn_writer.go index 9dd1bca6a8..4e0994b729 100644 --- a/txn_writer.go +++ b/txn_writer.go @@ -693,7 +693,7 @@ func (dw *DeltaWriter) PrintPreparedActions(output *log.Logger, verbose bool) er lpd := dw.lockDiff.ProjectDeltas[pr] v, id := projs[pr].Version(), projs[pr].Ident() if reason == projectRemoved { - output.Printf("(%d/%d) Would have removed %s", i, tot, id, v, changeExplanation(reason, lpd)) + output.Printf("(%d/%d) Would have removed %s", i, tot, id) } else { output.Printf("(%d/%d) Would hae written %s@%s: %s", i, tot, id, v, changeExplanation(reason, lpd)) } From d7a412f32e390e49d94dccd503bceb69c7c67c1f Mon Sep 17 00:00:00 2001 From: sam boyer Date: Mon, 9 Jul 2018 03:25:43 -0400 Subject: [PATCH 22/25] dep: Update docs to reflect vendor verification --- docs/Gopkg.lock.md | 41 +++++++++++++++++++++++++++++----------- docs/daily-dep.md | 2 +- docs/ensure-mechanics.md | 2 -- docs/env-vars.md | 6 +----- docs/glossary.md | 2 +- 5 files changed, 33 insertions(+), 20 deletions(-) diff --git a/docs/Gopkg.lock.md b/docs/Gopkg.lock.md index 7b7876e210..1a930fcccb 100644 --- a/docs/Gopkg.lock.md +++ b/docs/Gopkg.lock.md @@ -1,5 +1,5 @@ --- -title: Gopkg.lock + title: Gopkg.lock --- The `Gopkg.lock` file is generated by `dep ensure` and `dep init`. It is the output of [the solving function](ensure-mechanics.md#functional-flow): a transitively complete snapshot of a project's dependency graph, expressed as a series of `[[project]]` stanzas. That means: @@ -8,7 +8,7 @@ The `Gopkg.lock` file is generated by `dep ensure` and `dep init`. It is the out * Plus any [`required`](Gopkg.toml.md#required) packages * Less any [`ignored`](Gopkg.toml.md#ignored) packages -`Gopkg.lock` also contains some metadata about the algorithm used to arrive at the final graph, under `[solve-meta]`. +`Gopkg.lock` also contains some metadata about the algorithm and inputs used to arrive at the final graph, under `[solve-meta]`. `Gopkg.lock` always includes a `revision` for all listed dependencies, as the semantics of `revision` guarantee them to be immutable. Thus, the `Gopkg.lock` acts as a reproducible build list - as long as the upstream remains available, all dependencies can be precisely reproduced. 
@@ -28,6 +28,8 @@ These are all the properties that can appear in a `[[projects]]` stanza, and whe | `revision` | Y | | `version` | N | | `branch` | N | +| `pruneopts` | Y | +| `digest` | Y | ### `name` @@ -47,6 +49,27 @@ In general, this is the set of packages that were found to be participants in th * Being imported by a package from either the current project or a different dependency * Being imported by a package from within this project that, directly or transitively, is imported by a package from a different project +### `pruneopts` + +A compactly-encoded form of the [prune options designated in `Gopkg.toml`](Gopkg.toml.md#prune) . Each character represents one of the three possible rules: + +| Character | Pruning Rule in `Gopkg.toml` | +| --------- | ---------------------------- | +| `N` | `non-go` | +| `U` | `unused-packages` | +| `T` | `go-tests` | + +If the character is present in `pruneopts`, the pruning rule is enabled for that project. Thus, `NUT` indicates that all three pruning rules are active. + +### `digest` + +The hash digest of the contents of `vendor/` for this project, _after_ pruning rules have been applied. The digest is versioned, by way of a colon-delimited prefix; the string is of the form `:` . The hashing algorithm corresponding to version 1 is SHA256, as implemented in the stdlib package `crypto/sha256`. + +There are some tweaks that differentiate the hasher apart from a naive filesystem tree hashing implementation: + +* Symlinks are ignored. +* Line endings are normalized to LF (using an algorithm similar to git's) in order to ensure digests do not vary across platforms. + ### Version information: `revision`, `version`, and `branch` In order to provide reproducible builds, it is an absolute requirement that every project stanza contain a `revision`, no matter what kinds of constraints were encountered in `Gopkg.toml` files. It is further possible that exactly one of either `version` or `branch` will _additionally_ be present. @@ -61,6 +84,10 @@ More details on "analyzer" and "solver" follow, but the versioning principle is By bumping versions only on solution set contractions, but not expansions, it allows us to avoid having to bump constantly (which could make using dep across teams awkward), while still making it likely that when the solver and version numbers match between `Gopkg.lock` and a running version of dep, what's recorded in the file is acceptable by the running version's rules. +### `input-imports` + +A sorted list of all the import inputs that were present at the time the `Gopkg.lock` was computed. This list includes both actual `import` statements from the project, as well as any `required` import paths listed in `Gopkg.toml`. + ### `analyzer-name` and `analyzer-version` The analyzer is an internal dep component responsible for interpreting the contents of `Gopkg.toml` files, as well as metadata files from any tools dep knows about: `glide.yaml`, `vendor.json`, etc. @@ -75,12 +102,4 @@ The solver is the algorithm behind [the solving function](ensure-mechanics.md#fu The solver is named because, like the analyzer, it is pluggable; an alternative algorithm could be written that applies different rules to achieve the same goal. The one dep uses, "gps-cdcl", is named after [the general class of SAT solving algorithm it most resembles](https://en.wikipedia.org/wiki/Conflict-Driven_Clause_Learning), though the algorithm is actually a specialized, domain-specific [SMT solver](https://en.wikipedia.org/wiki/Satisfiability_modulo_theories). 
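To make the documented `pruneopts` and `digest` properties (and the `input-imports` list under `[solve-meta]`) concrete, here is an illustrative sketch of the resulting stanza shape. The project name, revision, digest, and import list are adapted from the test fixtures elsewhere in this patch series rather than produced by a real solve, so the exact digest shown would not correspond to the `pruneopts` value in practice:

```
[[projects]]
  # Versioned digest of the pruned vendor/ tree, in the form "<hash-version>:<hex digest>";
  # version 1 means SHA256. The value here is illustrative only.
  digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135"
  name = "github.com/sdboyer/deptest"
  packages = ["."]
  # "NUT" = non-go, unused-packages, and go-tests pruning all enabled.
  pruneopts = "NUT"
  revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf"
  version = "v1.0.0"

[solve-meta]
  analyzer-name = "dep"
  analyzer-version = 1
  input-imports = ["github.com/sdboyer/deptest"]
  solver-name = "gps-cdcl"
  solver-version = 1
```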
-The same general principles of version-bumping apply to the solver version: if the solver starts enforcing [Go 1.4 import path comments](https://golang.org/cmd/go/#hdr-Import_path_checking), that entails a bump, because it can only narrow the solution set. If it were to later relax that requirement, it would not require a bump, as that can only expand the solution set. - -### `inputs-digest` - -A SHA256 hash digest of all the [inputs to the solving function](ensure-mechanics.md#functional-flow). Those inputs can be shown directly with the hidden command `dep hash-inputs`, allowing this value to be generated directly: - -``` -dep hash-inputs | tr -d ā€œ\nā€ | shasum -a256 -``` +The same general principles of version-bumping apply to the solver version: if the solver starts enforcing [Go 1.4 import path comments](https://golang.org/cmd/go/#hdr-Import_path_checking), that entails a bump, because it can only narrow the solution set. If it were to later relax that requirement, it would not require a bump, as that can only expand the solution set. \ No newline at end of file diff --git a/docs/daily-dep.md b/docs/daily-dep.md index 7ef4432d01..7bf0066e37 100644 --- a/docs/daily-dep.md +++ b/docs/daily-dep.md @@ -83,7 +83,7 @@ $ dep ensure -update ### Adding and removing `import` statements -As noted in [the section on adding dependencies](#adding-a-new-dependency), dep relies on the import statements in your code to figure out which dependencies your project actually needs. Thus, when you add or remove import statements, dep might need to care about it. +As noted in [the section on adding dependencies](#adding-a-new-dependency), dep relies on the `import` statements in your code to figure out which dependencies your project actually needs. Thus, when you add or remove import statements, dep might need to care about it. It's only "might," though, because most of the time, adding or removing imports doesn't matter to dep. Only if one of the following has occurred will a `dep ensure` be necessary to bring the project back in sync: diff --git a/docs/ensure-mechanics.md b/docs/ensure-mechanics.md index 7d813f82a8..78b80b6359 100644 --- a/docs/ensure-mechanics.md +++ b/docs/ensure-mechanics.md @@ -51,9 +51,7 @@ The four state system, and these functional flows through it, are the foundation One of dep's design goals is that both of its "functions" minimize both the work they do, and the change they induce in their respective results. (Note: "minimize" is not currently formally defined with respect to a cost function.) Consequently, both functions peek ahead at the pre-existing result to understand what work actually needs to be done: * The solving function checks the existing `Gopkg.lock` to determine if all of its inputs (project import statements + `Gopkg.toml` rules) are satisfied. If they are, the solving function can be bypassed entirely. If not, the solving function proceeds, but attempts to change as few of the selections in `Gopkg.lock` as possible. - * WIP: The current implementation's check relies on a coarse heuristic check that can be wrong in some cases. There is a [plan to fix this](https://github.com/golang/dep/issues/1496). * The vendoring function hashes each discrete project already in `vendor/` to see if the code present on disk is what `Gopkg.lock` indicates it should be. Only projects that deviate from expectations are written out. - * WIP: the hashing check is generally referred to as "vendor verification," and [is not yet complete](https://github.com/golang/dep/issues/121). 
Without this verification, dep is blind to whether code in `vendor/` is correct or not; as such, dep must defensively re-write all projects to ensure the state of `vendor/` is correct. Of course, it's possible that, in peeking ahead, either function might discover that the pre-existing result is already correct - so no work need be done at all. Either way, when each function completes, we can be sure that the output, changed or not, is correct with respect to the inputs. In other words, the inputs and outputs are "in sync." Indeed, being in sync is the "known good state" of dep; `dep ensure` (without flags) guarantees that if it exits 0, all four states in the project are in sync. diff --git a/docs/env-vars.md b/docs/env-vars.md index 83aeed9218..7f38fef194 100644 --- a/docs/env-vars.md +++ b/docs/env-vars.md @@ -25,8 +25,4 @@ This is primarily useful if you're not using the standard `go` toolchain as a co ### `DEPNOLOCK` -By default, dep creates an `sm.lock` file at `$DEPCACHEDIR/sm.lock` in order to -prevent multiple dep processes from interacting with the [local -cache](glossary.md#local-cache) simultaneously. Setting this variable will -bypass that protection; no file will be created. This can be useful on certain -filesystems; VirtualBox shares in particular are known to misbehave. +By default, dep creates an `sm.lock` file at `$DEPCACHEDIR/sm.lock` in order to prevent multiple dep processes from interacting with the [local cache](glossary.md#local-cache) simultaneously. Setting this variable will bypass that protection; no file will be created. This can be useful on certain filesystems; VirtualBox shares in particular are known to misbehave. diff --git a/docs/glossary.md b/docs/glossary.md index 9c491aa211..5b78e980d3 100644 --- a/docs/glossary.md +++ b/docs/glossary.md @@ -69,7 +69,7 @@ An `import` statement that points to a package in a project other than the one i ### GPS -Stands for "Go packaging solver", it is [a subtree of library-style packages within dep](https://godoc.org/github.com/golang/dep/gps), and is the engine around which dep is built. Most commonly referred to as "gps." +Acronym for "Go packaging solver", it is [a subtree of library-style packages within dep](https://godoc.org/github.com/golang/dep/gps), and is the engine around which dep is built. Most commonly referred to as "gps." ### Local cache From fe299f7324ba91f58118ebf360066c792d98d459 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Mon, 9 Jul 2018 03:29:22 -0400 Subject: [PATCH 23/25] dep: Switch back to SafeWriter for -vendor-only --- cmd/dep/ensure.go | 3 ++- .../ensure/default/hashneq-vendoronly/final/Gopkg.lock | 5 ++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index 5196be9e44..d64bcd71a1 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -348,7 +348,8 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj // Pass the same lock as old and new so that the writer will observe no // difference, and write out only ncessary vendor/ changes. 
- dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), dep.VendorAlways) + dw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorAlways, p.Manifest.PruneOptions) + //dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), dep.VendorAlways) if err != nil { return err } diff --git a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock index 188ece4f77..11cb12c378 100644 --- a/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock +++ b/cmd/dep/testdata/harness_tests/ensure/default/hashneq-vendoronly/final/Gopkg.lock @@ -2,16 +2,15 @@ [[projects]] - digest = "1:ddbbbe7f7a81c86d54e89fa388b532f4c144d666a14e8e483ba04fa58265b135" name = "github.com/sdboyer/deptest" packages = ["."] - pruneopts = "" revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf" version = "v1.0.0" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - input-imports = ["github.com/sdboyer/deptest"] + # manually modified hash digest, it will not match any known inputs + inputs-digest = "94b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c" solver-name = "gps-cdcl" solver-version = 1 From a34a48d6bc4912150cd76bc19d6663a9586b90a3 Mon Sep 17 00:00:00 2001 From: sam boyer Date: Tue, 10 Jul 2018 00:57:26 -0400 Subject: [PATCH 24/25] dep: Encapsulate vendor verification into method Some tests were complaining of a data race when writing to ctx.Out on a vendor verification failure. It's not clear why those tests were failing, but the complaint became a forcing function to refactor the previously-sloppy use of a channel and os.Exit(1) within a goroutine to encapsulation in a method on dep.Project. 
--- cmd/dep/ensure.go | 67 ++++++++++++++++++------------------------ cmd/dep/ensure_test.go | 4 +-- project.go | 42 ++++++++++++++++++++++++++ txn_writer.go | 3 +- 4 files changed, 73 insertions(+), 43 deletions(-) diff --git a/cmd/dep/ensure.go b/cmd/dep/ensure.go index d64bcd71a1..573fb6cd26 100644 --- a/cmd/dep/ensure.go +++ b/cmd/dep/ensure.go @@ -180,35 +180,8 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { params.TraceLogger = ctx.Err } - statchan := make(chan map[string]verify.VendorStatus) - var lps []gps.LockedProject - if p.Lock != nil { - lps = p.Lock.Projects() - } - go func(vendorDir string, p []gps.LockedProject) { - // Make sure vendor dir exists - err := os.MkdirAll(vendorDir, os.FileMode(0777)) - if err != nil { - ctx.Err.Printf("Error creating vendor directory: %q", err.Error()) - // TODO(sdboyer) handle these better - os.Exit(1) - } - - sums := make(map[string]verify.VersionedDigest) - for _, lp := range p { - sums[string(lp.Ident().ProjectRoot)] = lp.(verify.VerifiableProject).Digest - } - - status, err := verify.CheckDepTree(vendorDir, sums) - if err != nil { - ctx.Err.Printf("Error while verifying vendor directory: %q", err.Error()) - os.Exit(1) - } - statchan <- status - }(filepath.Join(p.AbsRoot, "vendor"), lps) - if cmd.vendorOnly { - return cmd.runVendorOnly(ctx, args, p, sm, params, statchan) + return cmd.runVendorOnly(ctx, args, p, sm, params) } if fatal, err := checkErrors(params.RootPackageTree.Packages, p.Manifest.IgnoredPackages()); err != nil { @@ -233,12 +206,16 @@ func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error { ctx.Err.Printf("on these projects, if they happen to be transitive dependencies.\n\n") } + // Kick off vendor verification in the background. All of the remaining + // paths from here will need it, whether or not they end up solving. + go p.VerifyVendor() + if cmd.add { - return cmd.runAdd(ctx, args, p, sm, params, statchan) + return cmd.runAdd(ctx, args, p, sm, params) } else if cmd.update { - return cmd.runUpdate(ctx, args, p, sm, params, statchan) + return cmd.runUpdate(ctx, args, p, sm, params) } - return cmd.runDefault(ctx, args, p, sm, params, statchan) + return cmd.runDefault(ctx, args, p, sm, params) } func (cmd *ensureCommand) validateFlags() error { @@ -268,7 +245,7 @@ func (cmd *ensureCommand) vendorBehavior() dep.VendorBehavior { return dep.VendorOnChanged } -func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { +func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { // Bare ensure doesn't take any args. 
if len(args) != 0 { return errors.New("dep ensure only takes spec arguments with -add or -update") @@ -321,7 +298,11 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project lock = dep.LockFromSolution(solution, p.Manifest.PruneOptions) } - dw, err := dep.NewDeltaWriter(p.Lock, lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior()) + status, err := p.VerifyVendor() + if err != nil { + return errors.Wrap(err, "error while verifying vendor directory") + } + dw, err := dep.NewDeltaWriter(p.Lock, lock, status, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior()) if err != nil { return err } @@ -337,7 +318,7 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project return errors.WithMessage(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } -func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { +func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { if len(args) != 0 { return errors.Errorf("dep ensure -vendor-only only populates vendor/ from %s; it takes no spec arguments", dep.LockName) } @@ -349,7 +330,7 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj // Pass the same lock as old and new so that the writer will observe no // difference, and write out only ncessary vendor/ changes. dw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorAlways, p.Manifest.PruneOptions) - //dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), dep.VendorAlways) + //dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), dep.VendorAlways) if err != nil { return err } @@ -365,7 +346,7 @@ func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Proj return errors.WithMessage(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor") } -func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error { +func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error { if p.Lock == nil { return errors.Errorf("-update works by updating the versions recorded in %s, but %s does not exist", dep.LockName, dep.LockName) } @@ -397,7 +378,11 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return handleAllTheFailuresOfTheWorld(err) } - dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior()) + status, err := p.VerifyVendor() + if err != nil { + return errors.Wrap(err, "error while verifying vendor directory") + } + dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), status, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior()) if err != nil { return err } @@ -412,7 +397,7 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, return errors.Wrap(dw.Write(p.AbsRoot, sm, false, logger), "grouped 
 }
 
-func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters, statchan chan map[string]verify.VendorStatus) error {
+func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error {
 	if len(args) == 0 {
 		return errors.New("must specify at least one project or package to -add")
 	}
@@ -671,7 +656,11 @@ func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm
 	}
 	sort.Strings(reqlist)
 
-	dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), <-statchan, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior())
+	status, err := p.VerifyVendor()
+	if err != nil {
+		return errors.Wrap(err, "error while verifying vendor directory")
+	}
+	dw, err := dep.NewDeltaWriter(p.Lock, dep.LockFromSolution(solution, p.Manifest.PruneOptions), status, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), cmd.vendorBehavior())
 	if err != nil {
 		return err
 	}
diff --git a/cmd/dep/ensure_test.go b/cmd/dep/ensure_test.go
index 0b101c170d..0b75d3bfb8 100644
--- a/cmd/dep/ensure_test.go
+++ b/cmd/dep/ensure_test.go
@@ -50,11 +50,11 @@ func TestInvalidEnsureFlagCombinations(t *testing.T) {
 	// anything other than the error being non-nil. For now, it works well
 	// because a panic will quickly result if the initial arg length validation
 	// checks are incorrectly handled.
-	if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}, nil); err == nil {
+	if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}); err == nil {
 		t.Errorf("no args to plain ensure with -vendor-only")
 	}
 	ec.vendorOnly = false
-	if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}, nil); err == nil {
+	if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}); err == nil {
 		t.Errorf("no args to plain ensure")
 	}
 }
diff --git a/project.go b/project.go
index 3214a3c64d..913744e1ec 100644
--- a/project.go
+++ b/project.go
@@ -9,9 +9,11 @@ import (
 	"os"
 	"path/filepath"
 	"sort"
+	"sync"
 
 	"github.com/golang/dep/gps"
 	"github.com/golang/dep/gps/pkgtree"
+	"github.com/golang/dep/gps/verify"
 	"github.com/golang/dep/internal/fs"
 	"github.com/pkg/errors"
 )
@@ -113,6 +115,46 @@ type Project struct {
 	// The PackageTree representing the project, with hidden and ignored
 	// packages already trimmed.
 	RootPackageTree pkgtree.PackageTree
+	// Oncer to manage access to initial check of vendor.
+	CheckVendor sync.Once
+	// The result of calling verify.CheckDepTree against the current lock and
+	// vendor dir.
+	VendorStatus map[string]verify.VendorStatus
+	// The error, if any, from checking vendor.
+	CheckVendorErr error
+}
+
+// VerifyVendor checks the vendor directory against the hash digests in
+// Gopkg.lock.
+//
+// This operation is overseen by the sync.Once in CheckVendor. This is intended
+// to facilitate running verification in the background while solving, then
+// having the results ready later.
+func (p *Project) VerifyVendor() (map[string]verify.VendorStatus, error) {
+	p.CheckVendor.Do(func() {
+		p.VendorStatus = make(map[string]verify.VendorStatus)
+		vendorDir := filepath.Join(p.AbsRoot, "vendor")
+
+		var lps []gps.LockedProject
+		if p.Lock != nil {
+			lps = p.Lock.Projects()
+		}
+
+		err := os.MkdirAll(vendorDir, os.FileMode(0777))
+		if err != nil {
+			p.CheckVendorErr = err
+			return
+		}
+
+		sums := make(map[string]verify.VersionedDigest)
+		for _, lp := range lps {
+			sums[string(lp.Ident().ProjectRoot)] = lp.(verify.VerifiableProject).Digest
+		}
+
+		p.VendorStatus, p.CheckVendorErr = verify.CheckDepTree(vendorDir, sums)
+	})
+
+	return p.VendorStatus, p.CheckVendorErr
 }
 
 // SetRoot sets the project AbsRoot and ResolvedAbsRoot. If root is not a symlink, ResolvedAbsRoot will be set to root.
diff --git a/txn_writer.go b/txn_writer.go
index 4e0994b729..c93182f7a1 100644
--- a/txn_writer.go
+++ b/txn_writer.go
@@ -408,8 +408,7 @@ type DeltaWriter struct {
 type changeType uint8
 
 const (
-	noChange changeType = iota
-	solveChanged
+	solveChanged changeType = iota + 1
 	hashMismatch
 	hashVersionMismatch
 	hashAbsent

From fc9484ab2eded8964d36066e1f38eeda8184602d Mon Sep 17 00:00:00 2001
From: sam boyer
Date: Tue, 10 Jul 2018 01:08:11 -0400
Subject: [PATCH 25/25] Add missing license header

---
 internal/feedback/lockdiff.go | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/internal/feedback/lockdiff.go b/internal/feedback/lockdiff.go
index d7fe535241..f17dd05b52 100644
--- a/internal/feedback/lockdiff.go
+++ b/internal/feedback/lockdiff.go
@@ -1,3 +1,7 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package feedback
 
 import (