Refactor the ranges computation for comp indexes
jhchabran committed Apr 5, 2021
1 parent c40a004 commit ebfd78c
Showing 2 changed files with 119 additions and 42 deletions.
93 changes: 51 additions & 42 deletions planner/optimizer.go
@@ -533,7 +533,7 @@ outer:
// - given a query SELECT ... WHERE a = 1 AND c > 2
// - the paths a and c are not contiguous in the index definition, this index cannot be used
var fops []*stream.FilterOperator
var rranges []stream.Ranges
var usableFilterNodes []*filterNode
contiguous := true
for i, fno := range found {
if contiguous {
@@ -584,13 +584,7 @@ outer:
}
}

op := fno.f.E.(expr.Operator)
ranges, err := getRangesFromOp(op, fno.v)
if err != nil {
return nil, err
}

rranges = append(rranges, ranges)
usableFilterNodes = append(usableFilterNodes, fno)
fops = append(fops, fno.f)
}

@@ -611,13 +605,9 @@ outer:
cd.priority = 1
}

// merges the ranges inferred from each filter op into a single one
var ranges stream.Ranges
if idx.IsComposite() {
rng := compactCompIndexRanges(rranges, idx.Arity())
ranges = ranges.Append(rng)
} else {
ranges = rranges[0]
ranges, err := getRangesFromFilterNodes(usableFilterNodes, idx.Arity())
if err != nil {
return nil, err
}

cd.newOp = stream.IndexScan(idx.Info.IndexName, ranges...)
@@ -682,33 +672,6 @@ outer:
return s, nil
}

func compactCompIndexRanges(rangesList []stream.Ranges, indexArity int) stream.Range {
var rng stream.Range
for _, rs := range rangesList {
if rs[0].Min.V != nil {
if rng.Min.V == nil {
rng.Min = document.NewArrayValue(document.NewValueBuffer())
}

rng.Min.V.(*document.ValueBuffer).Append(rs[0].Min)
}

if rs[0].Max.V != nil {
if rng.Max.V == nil {
rng.Max = document.NewArrayValue(document.NewValueBuffer())
}

rng.Max.V.(*document.ValueBuffer).Append(rs[0].Max)
}
}

rng.Exact = rangesList[len(rangesList)-1][0].Exact
rng.Exclusive = rangesList[len(rangesList)-1][0].Exclusive
rng.Arity = indexArity

return rng
}

type candidate struct {
// filter operators to remove and replace by either an indexScan
// or pkScan operators.
@@ -781,6 +744,52 @@ func operandCanUseIndex(indexType document.ValueType, path document.Path, fc dat
return converted, indexType == converted.Type, nil
}

func getRangesFromFilterNodes(fnodes []*filterNode, indexArity int) (stream.Ranges, error) {
if indexArity <= 1 {
op := fnodes[0].f.E.(expr.Operator)
return getRangesFromOp(op, fnodes[0].v)
}

vb := document.NewValueBuffer()
for _, fno := range fnodes {
op := fno.f.E.(expr.Operator)
v := fno.v

switch op.(type) {
case *expr.EqOperator, *expr.GtOperator, *expr.GteOperator, *expr.LtOperator, *expr.LteOperator:
vb = vb.Append(v)
case *expr.InOperator:
// an index like idx_foo_a_b on (a,b) and a query like
// WHERE a IN [1, 1] and b IN [2, 2]
// would lead to [1, 1] x [2, 2] = [[1,1], [1,2], [2,1], [2,2]]
// support for which could eventually be added later.
panic("unsupported operator IN for composite indexes")
default:
panic(stringutil.Sprintf("unknown operator %#v", op))
}
}

rng := stream.Range{
Min: document.NewArrayValue(vb),
}

// the last node is the only one that can be a comparison operator, so
// it's the one setting the range behaviour
last := fnodes[len(fnodes)-1]
op := last.f.E.(expr.Operator)

switch op.(type) {
case *expr.EqOperator:
rng.Exact = true
case *expr.GtOperator:
rng.Exclusive = true
case *expr.LtOperator:
rng.Exclusive = true
}

return stream.Ranges{rng}, nil
}

func getRangesFromOp(op expr.Operator, v document.Value) (stream.Ranges, error) {
var ranges stream.Ranges

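To make the new logic above easier to follow, here is a minimal, self-contained sketch of what getRangesFromFilterNodes does for a composite index. The types and names below (filterNode with a plain op string, compositeRange, buildCompositeRange) are simplified assumptions for illustration and are not genji's actual planner or stream APIs: each usable filter contributes one operand to the array used as the range's Min, and only the last filter's operator decides whether the range is Exact (=) or Exclusive (> or <).

package main

import "fmt"

// Simplified stand-ins for the planner types; all names here are illustrative
// and do not exist in genji itself.
type filterNode struct {
	op string      // "=", ">", ">=", "<", "<="
	v  interface{} // operand value of the filter expression
}

type compositeRange struct {
	min       []interface{} // array value used as the scan's lower bound
	exact     bool          // set when every filter is an equality
	exclusive bool          // set when the last filter is a strict > or <
}

// buildCompositeRange mirrors the shape of getRangesFromFilterNodes for a
// composite index: append one operand per indexed path, then let the last
// operator set the range behaviour.
func buildCompositeRange(fnodes []filterNode) compositeRange {
	var rng compositeRange
	for _, fn := range fnodes {
		rng.min = append(rng.min, fn.v)
	}
	switch fnodes[len(fnodes)-1].op {
	case "=":
		rng.exact = true
	case ">", "<":
		rng.exclusive = true
	}
	return rng
}

func main() {
	// WHERE a = 1 AND b > 2 against an index on (a, b)
	fmt.Printf("%+v\n", buildCompositeRange([]filterNode{{"=", 1}, {">", 2}}))
	// prints {min:[1 2] exact:false exclusive:true}
}

With filters a = 1 AND b = 2 instead, the same sketch yields min [1 2] with exact set, matching the Exact ranges asserted in the tests below.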
68 changes: 68 additions & 0 deletions planner/optimizer_test.go
@@ -580,6 +580,13 @@ func TestUseIndexBasedOnSelectionNodeRule_Composite(t *testing.T) {
Pipe(st.Filter(parser.MustParseExpr("b = 2"))),
st.New(st.IndexScan("idx_foo_a_b_c", st.Range{Min: testutil.MakeArrayValue(t, 1, 2), Exact: true})),
},
{
"FROM foo WHERE a = 1 AND b > 2", // c is omitted, but it can still use idx_foo_a_b_c, with > b
st.New(st.SeqScan("foo")).
Pipe(st.Filter(parser.MustParseExpr("a = 1"))).
Pipe(st.Filter(parser.MustParseExpr("b > 2"))),
st.New(st.IndexScan("idx_foo_a_b_c", st.Range{Min: testutil.MakeArrayValue(t, 1, 2), Exclusive: true})),
},
{
"FROM foo WHERE a = 1 AND b = 2 and k = 3", // c is omitted, but it can still use idx_foo_a_b_c
st.New(st.SeqScan("foo")).
@@ -648,4 +655,65 @@ func TestUseIndexBasedOnSelectionNodeRule_Composite(t *testing.T) {
require.Equal(t, test.expected.String(), res.String())
})
}

t.Run("array indexes", func(t *testing.T) {
tests := []struct {
name string
root, expected *st.Stream
}{
{
"FROM foo WHERE a = [1, 1] AND b = [2, 2]",
st.New(st.SeqScan("foo")).
Pipe(st.Filter(parser.MustParseExpr("a = [1, 1]"))).
Pipe(st.Filter(parser.MustParseExpr("b = [2, 2]"))),
st.New(st.IndexScan("idx_foo_a_b", st.Range{
Min: document.NewArrayValue(
testutil.MakeArray(t, `[[1, 1], [2, 2]]`)),
Exact: true})),
},
{
"FROM foo WHERE a = [1, 1] AND b > [2, 2]",
st.New(st.SeqScan("foo")).
Pipe(st.Filter(parser.MustParseExpr("a = [1, 1]"))).
Pipe(st.Filter(parser.MustParseExpr("b > [2, 2]"))),
st.New(st.IndexScan("idx_foo_a_b", st.Range{
Min: document.NewArrayValue(
testutil.MakeArray(t, `[[1, 1], [2, 2]]`)),
Exclusive: true})),
},
}

for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
db, err := genji.Open(":memory:")
require.NoError(t, err)
defer db.Close()

tx, err := db.Begin(true)
require.NoError(t, err)
defer tx.Rollback()

err = tx.Exec(`
CREATE TABLE foo (
k ARRAY PRIMARY KEY,
a ARRAY
);
CREATE INDEX idx_foo_a_b ON foo(a, b);
CREATE INDEX idx_foo_a0 ON foo(a[0]);
INSERT INTO foo (k, a, b) VALUES
([1, 1], [1, 1], [1, 1]),
([2, 2], [2, 2], [2, 2]),
([3, 3], [3, 3], [3, 3])
`)
require.NoError(t, err)

res, err := planner.PrecalculateExprRule(test.root, tx.Transaction, nil)
require.NoError(t, err)

res, err = planner.UseIndexBasedOnFilterNodeRule(res, tx.Transaction, nil)
require.NoError(t, err)
require.Equal(t, test.expected.String(), res.String())
})
}
})
}
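A side note on the IN branch above, which currently panics for composite indexes: the code comment hints that supporting it would require taking the cartesian product of each column's candidate values, with one composite key per combination. A rough, hypothetical sketch of that expansion, using plain int slices instead of genji's document values:

package main

import "fmt"

// cartesian expands per-column candidate values into every composite key
// combination, e.g. a IN (1, 2) AND b IN (3, 4) -> [1 3] [1 4] [2 3] [2 4].
func cartesian(columns [][]int) [][]int {
	keys := [][]int{{}}
	for _, candidates := range columns {
		var next [][]int
		for _, key := range keys {
			for _, v := range candidates {
				combo := append(append([]int{}, key...), v)
				next = append(next, combo)
			}
		}
		keys = next
	}
	return keys
}

func main() {
	for _, key := range cartesian([][]int{{1, 2}, {3, 4}}) {
		fmt.Println(key)
	}
}

Each produced key would correspond to one exact range over the composite index; the commit leaves this path unsupported for now.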
