Skip to content

Commit

Permalink
Treat all NULL values as distinct in UNIQUE indexes
Browse files Browse the repository at this point in the history
The SQL standard can be interpreted in different ways on this point:
either all NULL values are distinct from one another (SQLite, PostgreSQL, ... and now Genji),
or they are considered equal to each other (SQL Server, ...).
  • Loading branch information
asdine committed Dec 13, 2021
1 parent 1da209b commit 2d4df65
Show file tree
Hide file tree
Showing 7 changed files with 87 additions and 38 deletions.
1 change: 0 additions & 1 deletion document/array.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,6 @@ func ArrayContains(a types.Array, v types.Value) (bool, error) {
// ValueBuffer is an array that holds values in memory.
type ValueBuffer struct {
Values []types.Value
err error
}

// NewValueBuffer creates a buffer of values.
Expand Down
2 changes: 1 addition & 1 deletion document/encoding/encodingtest/testing.go
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ func testDocumentGetByField(t *testing.T, codecBuilder func() encoding.Codec) {
assert.NoError(t, err)
require.Equal(t, types.NewTextValue("john"), v)

v, err = d.GetByField("d")
_, err = d.GetByField("d")
assert.ErrorIs(t, err, document.ErrFieldNotFound)
}

Expand Down
21 changes: 0 additions & 21 deletions internal/expr/expr_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,29 +23,8 @@ var doc types.Document = func() types.Document {

var envWithDoc = environment.New(doc)

var envWithDocAndKey *environment.Environment = func() *environment.Environment {
env := environment.New(doc)
env.Set(environment.TableKey, types.NewTextValue("string"))
env.Set(environment.DocPKKey, types.NewBlobValue([]byte("foo")))
return env
}()

var nullLiteral = types.NewNullValue()

func testExpr(t testing.TB, exprStr string, env *environment.Environment, want types.Value, fails bool) {
t.Helper()

e, err := parser.NewParser(strings.NewReader(exprStr)).ParseExpr()
assert.NoError(t, err)
res, err := e.Eval(env)
if fails {
assert.Error(t, err)
} else {
assert.NoError(t, err)
require.Equal(t, want, res)
}
}

func TestString(t *testing.T) {
var operands = []string{
`10.4`,
Expand Down
2 changes: 1 addition & 1 deletion internal/sql/scanner/scanner.go
Original file line number Diff line number Diff line change
Expand Up @@ -636,7 +636,7 @@ func scanBareIdent(r io.RuneScanner) string {
if err != nil {
break
} else if !isIdentChar(ch) {
r.UnreadRune()
_ = r.UnreadRune()
break
} else {
_, _ = buf.WriteRune(ch)
Expand Down
33 changes: 19 additions & 14 deletions internal/stream/index.go
Original file line number Diff line number Diff line change
Expand Up @@ -150,40 +150,45 @@ func (op *IndexValidateOperator) Iterate(in *environment.Environment, fn func(ou
return err
}

var newEnv environment.Environment

return op.Prev.Iterate(in, func(out *environment.Environment) error {
newEnv.SetOuter(out)

doc, ok := out.GetDocument()
if !ok {
return errors.New("missing document")
}

vs := make([]types.Value, 0, len(info.Paths))

// if the indexes values contain NULL somewhere,
// we don't check for unicity.
// cf: https://sqlite.org/lang_createindex.html#unique_indexes
var hasNull bool
for _, path := range info.Paths {
v, err := path.GetValueFromDocument(doc)
if err != nil {
hasNull = true
v = types.NewNullValue()
} else if v.Type() == types.NullValue {
hasNull = true
}

vs = append(vs, v)
}

duplicate, key, err := idx.Exists(vs)
if err != nil {
return err
}
if duplicate {
return &errs.ConstraintViolationError{
Constraint: "UNIQUE",
Paths: info.Paths,
Key: key,
if !hasNull {
duplicate, key, err := idx.Exists(vs)
if err != nil {
return err
}
if duplicate {
return &errs.ConstraintViolationError{
Constraint: "UNIQUE",
Paths: info.Paths,
Key: key,
}
}
}

return fn(&newEnv)
return fn(out)
})
}

Expand Down
28 changes: 28 additions & 0 deletions sqltests/INSERT/unique.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
-- Verifies the UNIQUE constraint on a single column, including the rule
-- that NULL values are all considered distinct from one another
-- (SQLite-style), so several rows may carry NULL in a UNIQUE column.
-- NOTE(review): assumes the test runner ignores plain `--` comments that
-- are not directives (`-- setup:` / `-- test:` / `-- error:`) — confirm.
-- setup:
CREATE TABLE test (a int unique, b int);

-- test: same value
INSERT INTO test (a, b) VALUES (1, 1);
INSERT INTO test (a, b) VALUES (1, 1); -- second insert duplicates a=1: must fail
-- error:

-- test: same value, same statement
INSERT INTO test (a, b) VALUES (1, 1), (1, 1); -- duplicate within one statement: must fail
-- error:

-- test: different values
INSERT INTO test (a, b) VALUES (1, 1), (2, 2); -- distinct values: both rows accepted
/* result:
{a: 1, b: 1}
{a: 2, b: 2}
*/

-- test: NULL
INSERT INTO test (b) VALUES (1), (2); -- a is NULL for both rows (see expected result)
INSERT INTO test (a, b) VALUES (NULL, 3); -- explicit NULL: also accepted, NULLs are distinct
SELECT a, b FROM test;
/* result:
{a: NULL, b: 1}
{a: NULL, b: 2}
{a: NULL, b: 3}
*/
38 changes: 38 additions & 0 deletions sqltests/INSERT/unique_composite.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
-- Verifies the composite UNIQUE constraint on (a, b, c): uniqueness is
-- only enforced when none of the indexed columns is NULL, since NULL
-- values are all considered distinct from one another (SQLite-style).
-- NOTE(review): assumes the test runner ignores plain `--` comments that
-- are not directives (`-- setup:` / `-- test:` / `-- error:`) — confirm.
-- setup:
CREATE TABLE test (a int, b int, c int, d int, UNIQUE (a, b, c));

-- test: same value
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1);
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1); -- duplicate (1,1,1): must fail
-- error:

-- test: same value, same statement
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1), (1, 1, 1, 1); -- duplicate within one statement: must fail
-- error:

-- test: different values
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1), (1, 2, 1, 1); -- (1,1,1) vs (1,2,1): both accepted
/* result:
{a: 1, b: 1, c: 1, d: 1}
{a: 1, b: 2, c: 1, d: 1}
*/

-- test: NULL
INSERT INTO test (d) VALUES (1), (2); -- a, b, c all NULL: no uniqueness check
INSERT INTO test (c, d) VALUES (3, 3); -- a, b NULL: duplicate (NULL,NULL,3) allowed below
INSERT INTO test (c, d) VALUES (3, 3);
INSERT INTO test (b, c, d) VALUES (4, 4, 4); -- a NULL: duplicate (NULL,4,4) allowed below
INSERT INTO test (b, c, d) VALUES (4, 4, 4);
INSERT INTO test (a, b, c, d) VALUES (5, null, 5, 5); -- b NULL (explicit): allowed
INSERT INTO test (a, c, d) VALUES (5, 5, 5); -- b NULL (omitted): same key (5,NULL,5), still allowed
SELECT a, b, c, d FROM test;
/* result:
{a: NULL, b: NULL, c: NULL, d: 1}
{a: NULL, b: NULL, c: NULL, d: 2}
{a: NULL, b: NULL, c: 3, d: 3}
{a: NULL, b: NULL, c: 3, d: 3}
{a: NULL, b: 4, c: 4, d: 4}
{a: NULL, b: 4, c: 4, d: 4}
{a: 5, b: NULL, c: 5, d: 5}
{a: 5, b: NULL, c: 5, d: 5}
*/

0 comments on commit 2d4df65

Please sign in to comment.