Skip to content
This repository has been archived by the owner on Sep 9, 2020. It is now read-only.

Commit

Permalink
NEEDS REVIEW: DO NOT MERGE
Browse files Browse the repository at this point in the history
This is a prototype for keeping track of the path through the selection
process to a project. It is used to help dep ignore "stale" transitive
constraints: constraints that, when created, applied to a descendant but
should no longer apply now that the project has moved to another
location in the dependency graph.

Questions:
* I put bmi on atomWithPackages because it already had a bmi method to
recreate the original bmi. Not sure if it's safe to live there
considering the comments on the original bmi function about avoiding
copies of the package list. So it may need to shift elsewhere, or path
should be split out from the bmi struct so that it can be attached
directly to an atom.
* Is a non-bimodal solve ever a possibility in dep (outside of the unit
tests)? I think we could drop "dep.isTransitive" in favor of using
bmi.path exclusively but the unit tests have a non-bimodal set of tests
that cause this to fail.
  • Loading branch information
carolynvs authored and carolynvs-msft committed Jan 25, 2018
1 parent 5b8fdcb commit 0b71dd2
Show file tree
Hide file tree
Showing 9 changed files with 121 additions and 100 deletions.
20 changes: 5 additions & 15 deletions gps/identifier.go
Original file line number Diff line number Diff line change
Expand Up @@ -177,6 +177,8 @@ type bimodalIdentifier struct {
prefv Version
// Indicates that the bmi came from the root project originally
fromRoot bool
// The path to the atom in the graph, e.g. root -> foo -> bar
path []atom
}

type atom struct {
Expand All @@ -190,20 +192,8 @@ var nilpa = atom{
}

type atomWithPackages struct {
a atom
pl []string
}

// bmi converts an atomWithPackages into a bimodalIdentifier.
//
// This is mostly intended for (read-only) trace use, so the package list slice
// is not copied. It is the callers responsibility to not modify the pl slice,
// lest that backpropagate and cause inconsistencies.
func (awp atomWithPackages) bmi() bimodalIdentifier {
return bimodalIdentifier{
id: awp.a.id,
pl: awp.pl,
}
a atom
bmi bimodalIdentifier
}

// completeDep (name hopefully to change) provides the whole picture of a
Expand All @@ -223,6 +213,6 @@ type completeDep struct {
// fully-realized atom as the depender (the tail/source of the edge), and a set
// of requirements that any atom to be attached at the head/target must satisfy.
type dependency struct {
depender atom
depender atomWithPackages
dep completeDep
}
4 changes: 2 additions & 2 deletions gps/rootdata.go
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ func (rd rootdata) rootAtom() atomWithPackages {
sort.Strings(list)

return atomWithPackages{
a: a,
pl: list,
a: a,
bmi: bimodalIdentifier{id: a.id, pl: list},
}
}
43 changes: 21 additions & 22 deletions gps/satisfy.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@ package gps
// The goal is to determine whether selecting the atom would result in a state
// where all the solver requirements are still satisfied.
func (s *solver) check(a atomWithPackages, pkgonly bool) error {
pa := a.a
if nilpa == pa {
if nilpa == a.a {
// This shouldn't be able to happen, but if it does, it unequivocally
// indicates a logical bug somewhere, so blowing up is preferable
panic("canary - checking version of empty ProjectAtom")
Expand All @@ -30,7 +29,7 @@ func (s *solver) check(a atomWithPackages, pkgonly bool) error {
// If we're pkgonly, then base atom was already determined to be allowable,
// so we can skip the checkAtomAllowable step.
if !pkgonly {
if err = s.checkAtomAllowable(pa); err != nil {
if err = s.checkAtomAllowable(a); err != nil {
return err
}
}
Expand Down Expand Up @@ -78,24 +77,24 @@ func (s *solver) check(a atomWithPackages, pkgonly bool) error {

// checkAtomAllowable ensures that an atom itself is acceptable with respect to
// the constraints established by the current solution.
func (s *solver) checkAtomAllowable(pa atom) error {
constraint := s.sel.getConstraint(pa.id)
if s.vUnify.matches(pa.id, constraint, pa.v) {
func (s *solver) checkAtomAllowable(awp atomWithPackages) error {
constraint := s.sel.getConstraint(awp.a.id, awp.bmi)
if s.vUnify.matches(awp.a.id, constraint, awp.a.v) {
return nil
}
// TODO(sdboyer) collect constraint failure reason (wait...aren't we, below?)

deps := s.sel.getDependenciesOn(pa.id)
deps := s.sel.getDependenciesOn(awp.a.id)
var failparent []dependency
for _, dep := range deps {
if !s.vUnify.matches(pa.id, dep.dep.Constraint, pa.v) {
s.fail(dep.depender.id)
if !s.vUnify.matches(awp.a.id, dep.dep.Constraint, awp.a.v) {
s.fail(dep.depender.a.id)
failparent = append(failparent, dep)
}
}

err := &versionNotAllowedFailure{
goal: pa,
goal: awp.a,
failparent: failparent,
c: constraint,
}
Expand All @@ -120,14 +119,14 @@ func (s *solver) checkRequiredPackagesExist(a atomWithPackages) error {
for _, dep := range deps {
for _, pkg := range dep.dep.pl {
if errdep, seen := fp[pkg]; seen {
errdep.deppers = append(errdep.deppers, dep.depender)
errdep.deppers = append(errdep.deppers, dep.depender.a)
fp[pkg] = errdep
} else {
perr, has := ptree.Packages[pkg]
if !has || perr.Err != nil {
fp[pkg] = errDeppers{
err: perr.Err,
deppers: []atom{dep.depender},
deppers: []atom{dep.depender.a},
}
}
}
Expand All @@ -147,7 +146,7 @@ func (s *solver) checkRequiredPackagesExist(a atomWithPackages) error {
// given dep are valid with respect to existing constraints.
func (s *solver) checkDepsConstraintsAllowable(a atomWithPackages, cdep completeDep) error {
dep := cdep.workingConstraint
constraint := s.sel.getConstraint(dep.Ident)
constraint := s.sel.getConstraint(dep.Ident, a.bmi)
// Ensure the constraint expressed by the dep has at least some possible
// intersection with the intersection of existing constraints.
if s.vUnify.matchesAny(dep.Ident, constraint, dep.Constraint) {
Expand All @@ -160,15 +159,15 @@ func (s *solver) checkDepsConstraintsAllowable(a atomWithPackages, cdep complete
var nofailsib []dependency
for _, sibling := range siblings {
if !s.vUnify.matchesAny(dep.Ident, sibling.dep.Constraint, dep.Constraint) {
s.fail(sibling.depender.id)
s.fail(sibling.depender.a.id)
failsib = append(failsib, sibling)
} else {
nofailsib = append(nofailsib, sibling)
}
}

return &disjointConstraintFailure{
goal: dependency{depender: a.a, dep: cdep},
goal: dependency{depender: a, dep: cdep},
failsib: failsib,
nofailsib: nofailsib,
c: constraint,
Expand All @@ -185,7 +184,7 @@ func (s *solver) checkDepsDisallowsSelected(a atomWithPackages, cdep completeDep
s.fail(dep.Ident)

return &constraintNotAllowedFailure{
goal: dependency{depender: a.a, dep: cdep},
goal: dependency{depender: a, dep: cdep},
v: selected.a.v,
}
}
Expand All @@ -206,7 +205,7 @@ func (s *solver) checkIdentMatches(a atomWithPackages, cdep completeDep) error {
// Fail all the other deps, as there's no way atom can ever be
// compatible with them
for _, d := range deps {
s.fail(d.depender.id)
s.fail(d.depender.a.id)
}

return &sourceMismatchFailure{
Expand Down Expand Up @@ -236,7 +235,7 @@ func (s *solver) checkRootCaseConflicts(a atomWithPackages, cdep completeDep) er
curid, _ := s.sel.getIdentFor(current)
deps := s.sel.getDependenciesOn(curid)
for _, d := range deps {
s.fail(d.depender.id)
s.fail(d.depender.a.id)
}

// If a project has multiple packages that import each other, we treat that
Expand All @@ -260,13 +259,13 @@ func (s *solver) checkRootCaseConflicts(a atomWithPackages, cdep completeDep) er
if current == a.a.id.ProjectRoot {
return &wrongCaseFailure{
correct: pr,
goal: dependency{depender: a.a, dep: cdep},
goal: dependency{depender: a, dep: cdep},
badcase: deps,
}
}

return &caseMismatchFailure{
goal: dependency{depender: a.a, dep: cdep},
goal: dependency{depender: a, dep: cdep},
current: current,
failsib: deps,
}
Expand All @@ -289,7 +288,7 @@ func (s *solver) checkPackageImportsFromDepExist(a atomWithPackages, cdep comple

e := &depHasProblemPackagesFailure{
goal: dependency{
depender: a.a,
depender: a,
dep: cdep,
},
v: sel.a.v,
Expand Down Expand Up @@ -329,7 +328,7 @@ func (s *solver) checkRevisionExists(a atomWithPackages, cdep completeDep) error

return &nonexistentRevisionFailure{
goal: dependency{
depender: a.a,
depender: a,
dep: cdep,
},
r: r,
Expand Down
27 changes: 25 additions & 2 deletions gps/selection.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@

package gps

import (
"fmt"
)

type selection struct {
// projects is a stack of the atoms that have currently been selected by the
// solver. It can also be thought of as the vertex set of the current
Expand Down Expand Up @@ -134,7 +138,7 @@ func (s *selection) getSelectedPackagesIn(id ProjectIdentifier) map[string]int {
uniq := make(map[string]int)
for _, p := range s.projects {
if p.a.a.id.eq(id) {
for _, pkg := range p.a.pl {
for _, pkg := range p.a.bmi.pl {
uniq[pkg] = uniq[pkg] + 1
}
}
Expand All @@ -143,12 +147,18 @@ func (s *selection) getSelectedPackagesIn(id ProjectIdentifier) map[string]int {
return uniq
}

func (s *selection) getConstraint(id ProjectIdentifier) Constraint {
func (s *selection) getConstraint(id ProjectIdentifier, bmi bimodalIdentifier) Constraint {
deps, exists := s.deps[id.ProjectRoot]
if !exists || len(deps) == 0 {
return any
}

// Enable quick lookup of where in the depgraph a constraint was defined
ancestors := map[ProjectRoot]bool{}
for _, ancestor := range bmi.path {
ancestors[ancestor.id.ProjectRoot] = true
}

// TODO(sdboyer) recomputing this sucks and is quite wasteful. Precompute/cache it
// on changes to the constraint set, instead.

Expand All @@ -159,6 +169,19 @@ func (s *selection) getConstraint(id ProjectIdentifier) Constraint {
// Start with the open set
var ret Constraint = any
for _, dep := range deps {
if dep.dep.isTransitive {
// TODO(carolynvs): Remove print statements. It's just to help debug the transitive constraint prototype.
path := ""
for ancestor := range ancestors {
path += "/" + string(ancestor)
}
if _, isAncestor := ancestors[dep.depender.a.id.ProjectRoot]; !isAncestor {
fmt.Printf("*** Ignoring unreachable constraint %s on %s (path: %s) from %s ***\n", dep.dep.Constraint.String(), dep.dep.workingConstraint.Ident.ProjectRoot, path, dep.depender.a.id.ProjectRoot)
continue
}
fmt.Printf("*** Applying transitive constraint %s on %s (path: %s) from %s ***\n", dep.dep.Constraint.String(), dep.dep.workingConstraint.Ident.ProjectRoot, path, dep.depender.a.id.ProjectRoot)
}

ret = s.vu.intersect(id, ret, dep.dep.Constraint)
}

Expand Down
5 changes: 3 additions & 2 deletions gps/solve_basic_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -226,14 +226,15 @@ func mkDepspec(pi string, deps ...string) depspec {

func mkDep(atom, pdep string, pl ...string) dependency {
return dependency{
depender: mkAtom(atom),
depender: atomWithPackages{a: mkAtom(atom)},
dep: mkCDep(pdep, pl...),
}
}

func mkADep(atom, pdep string, c Constraint, pl ...string) dependency {
return dependency{
depender: mkAtom(atom),
// TODO(carolynvs): I don't think we need to set the bmi (specifically the path to the atom) in the fixture data.
depender: atomWithPackages{a: mkAtom(atom)},
dep: completeDep{
workingConstraint: workingConstraint{
Ident: ProjectIdentifier{
Expand Down
2 changes: 1 addition & 1 deletion gps/solve_bimodal_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ var bimodalFixtures = map[string]bimodalFixture{
"b 1.0.0",
),
},
"transitive constraint": {
"used transitive constraint": {
ds: []depspec{
dsp(mkDepspec("root 1.0.0", "foo 1.0.0"),
pkg("root", "foo"),
Expand Down
Loading

0 comments on commit 0b71dd2

Please sign in to comment.