Treat all NULL values as distinct in UNIQUE indexes

The SQL standard can be interpreted in two ways:
either NULL values are all distinct from one another (SQLite, PostgreSQL, ... and now Genji),
or they are all considered equal to each other (SQL Server, ...).
Asdine El Hrychy
2021-12-13 09:32:22 +05:30
parent 1da209b2ac
commit 2d4df658e4
7 changed files with 87 additions and 38 deletions
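As a quick illustration of the behavior described in the commit message, here is a minimal sketch (the table and column names are placeholders; the new testdata files at the bottom of this diff are the authoritative cases):

-- With NULLs treated as distinct (SQLite, PostgreSQL, and now Genji),
-- a UNIQUE column accepts any number of NULLs, while duplicate
-- non-NULL values are still rejected.
CREATE TABLE example (a int UNIQUE, b int);

INSERT INTO example (a, b) VALUES (NULL, 1);
INSERT INTO example (a, b) VALUES (NULL, 2); -- accepted: each NULL counts as distinct
INSERT INTO example (a, b) VALUES (1, 3);
INSERT INTO example (a, b) VALUES (1, 4);    -- rejected: duplicate non-NULL value

-- Under the other reading of the standard (e.g. SQL Server), the second
-- NULL insert above would already violate the UNIQUE constraint.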

View File

@@ -55,7 +55,6 @@ func ArrayContains(a types.Array, v types.Value) (bool, error) {
 // ValueBuffer is an array that holds values in memory.
 type ValueBuffer struct {
 	Values []types.Value
-	err    error
 }
 // NewValueBuffer creates a buffer of values.

View File

@@ -134,7 +134,7 @@ func testDocumentGetByField(t *testing.T, codecBuilder func() encoding.Codec) {
 	assert.NoError(t, err)
 	require.Equal(t, types.NewTextValue("john"), v)
-	v, err = d.GetByField("d")
+	_, err = d.GetByField("d")
 	assert.ErrorIs(t, err, document.ErrFieldNotFound)
 }

View File

@@ -23,29 +23,8 @@ var doc types.Document = func() types.Document {
 var envWithDoc = environment.New(doc)
 
-var envWithDocAndKey *environment.Environment = func() *environment.Environment {
-	env := environment.New(doc)
-	env.Set(environment.TableKey, types.NewTextValue("string"))
-	env.Set(environment.DocPKKey, types.NewBlobValue([]byte("foo")))
-	return env
-}()
-
-var nullLiteral = types.NewNullValue()
-
-func testExpr(t testing.TB, exprStr string, env *environment.Environment, want types.Value, fails bool) {
-	t.Helper()
-
-	e, err := parser.NewParser(strings.NewReader(exprStr)).ParseExpr()
-	assert.NoError(t, err)
-	res, err := e.Eval(env)
-	if fails {
-		assert.Error(t, err)
-	} else {
-		assert.NoError(t, err)
-		require.Equal(t, want, res)
-	}
-}
-
 func TestString(t *testing.T) {
 	var operands = []string{
 		`10.4`,

View File

@@ -636,7 +636,7 @@ func scanBareIdent(r io.RuneScanner) string {
 		if err != nil {
 			break
 		} else if !isIdentChar(ch) {
-			r.UnreadRune()
+			_ = r.UnreadRune()
 			break
 		} else {
 			_, _ = buf.WriteRune(ch)

View File

@@ -150,11 +150,7 @@ func (op *IndexValidateOperator) Iterate(in *environment.Environment, fn func(ou
 		return err
 	}
 
-	var newEnv environment.Environment
-
 	return op.Prev.Iterate(in, func(out *environment.Environment) error {
-		newEnv.SetOuter(out)
-
 		doc, ok := out.GetDocument()
 		if !ok {
 			return errors.New("missing document")
@@ -162,15 +158,23 @@ func (op *IndexValidateOperator) Iterate(in *environment.Environment, fn func(ou
 		vs := make([]types.Value, 0, len(info.Paths))
 
+		// if the indexes values contain NULL somewhere,
+		// we don't check for unicity.
+		// cf: https://sqlite.org/lang_createindex.html#unique_indexes
+		var hasNull bool
 		for _, path := range info.Paths {
 			v, err := path.GetValueFromDocument(doc)
 			if err != nil {
+				hasNull = true
 				v = types.NewNullValue()
+			} else if v.Type() == types.NullValue {
+				hasNull = true
 			}
 			vs = append(vs, v)
 		}
 
+		if !hasNull {
 			duplicate, key, err := idx.Exists(vs)
 			if err != nil {
 				return err
@@ -182,8 +186,9 @@ func (op *IndexValidateOperator) Iterate(in *environment.Environment, fn func(ou
 					Key: key,
 				}
 			}
+		}
 
-		return fn(&newEnv)
+		return fn(out)
 	})
 }
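In SQL terms, the rule encoded by the comment above is that a row only participates in the uniqueness check when every indexed value is non-NULL; as soon as any component of the index key is NULL, the whole tuple is exempt, as in SQLite. A minimal sketch with a hypothetical table (the new testdata files below exercise the same cases):

CREATE TABLE t (a int, b int, UNIQUE (a, b));

INSERT INTO t (a, b) VALUES (1, 1);
INSERT INTO t (a, b) VALUES (1, 1);    -- rejected: full key duplicate, no NULL component
INSERT INTO t (a, b) VALUES (1, NULL);
INSERT INTO t (a, b) VALUES (1, NULL); -- accepted: key contains NULL, unicity is not checked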

View File

@@ -0,0 +1,28 @@
-- setup:
CREATE TABLE test (a int unique, b int);
-- test: same value
INSERT INTO test (a, b) VALUES (1, 1);
INSERT INTO test (a, b) VALUES (1, 1);
-- error:
-- test: same value, same statement
INSERT INTO test (a, b) VALUES (1, 1), (1, 1);
-- error:
-- test: different values
INSERT INTO test (a, b) VALUES (1, 1), (2, 2);
/* result:
{a: 1, b: 1}
{a: 2, b: 2}
*/
-- test: NULL
INSERT INTO test (b) VALUES (1), (2);
INSERT INTO test (a, b) VALUES (NULL, 3);
SELECT a, b FROM test;
/* result:
{a: NULL, b: 1}
{a: NULL, b: 2}
{a: NULL, b: 3}
*/

View File

@@ -0,0 +1,38 @@
-- setup:
CREATE TABLE test (a int, b int, c int, d int, UNIQUE (a, b, c));
-- test: same value
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1);
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1);
-- error:
-- test: same value, same statement
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1), (1, 1, 1, 1);
-- error:
-- test: different values
INSERT INTO test (a, b, c, d) VALUES (1, 1, 1, 1), (1, 2, 1, 1);
/* result:
{a: 1, b: 1, c: 1, d: 1}
{a: 1, b: 2, c: 1, d: 1}
*/
-- test: NULL
INSERT INTO test (d) VALUES (1), (2);
INSERT INTO test (c, d) VALUES (3, 3);
INSERT INTO test (c, d) VALUES (3, 3);
INSERT INTO test (b, c, d) VALUES (4, 4, 4);
INSERT INTO test (b, c, d) VALUES (4, 4, 4);
INSERT INTO test (a, b, c, d) VALUES (5, null, 5, 5);
INSERT INTO test (a, c, d) VALUES (5, 5, 5);
SELECT a, b, c, d FROM test;
/* result:
{a: NULL, b: NULL, c: NULL, d: 1}
{a: NULL, b: NULL, c: NULL, d: 2}
{a: NULL, b: NULL, c: 3, d: 3}
{a: NULL, b: NULL, c: 3, d: 3}
{a: NULL, b: 4, c: 4, d: 4}
{a: NULL, b: 4, c: 4, d: 4}
{a: 5, b: NULL, c: 5, d: 5}
{a: 5, b: NULL, c: 5, d: 5}
*/