diff --git a/document/document.go b/document/document.go index a0eb4300d..c696f9cb5 100644 --- a/document/document.go +++ b/document/document.go @@ -549,3 +549,33 @@ func (p Path) getValueFromValue(v types.Value) (types.Value, error) { return nil, ErrFieldNotFound } + +type Paths []Path + +func (p Paths) String() string { + var sb strings.Builder + + for i, pt := range p { + if i > 0 { + sb.WriteString(", ") + } + sb.WriteString(pt.String()) + } + + return sb.String() +} + +// IsEqual returns whether other is equal to p. +func (p Paths) IsEqual(other Paths) bool { + if len(other) != len(p) { + return false + } + + for i := range p { + if !other[i].IsEqual(p[i]) { + return false + } + } + + return true +} diff --git a/document/document_test.go b/document/document_test.go index 0889e70b0..3120954f8 100644 --- a/document/document_test.go +++ b/document/document_test.go @@ -8,6 +8,7 @@ import ( "github.com/genjidb/genji/document" "github.com/genjidb/genji/internal/errors" "github.com/genjidb/genji/internal/sql/parser" + "github.com/genjidb/genji/internal/testutil" "github.com/genjidb/genji/internal/testutil/assert" "github.com/genjidb/genji/types" "github.com/stretchr/testify/require" @@ -15,12 +16,6 @@ import ( var _ types.Document = new(document.FieldBuffer) -func parsePath(t testing.TB, p string) document.Path { - path, err := parser.ParsePath(p) - assert.NoError(t, err) - return path -} - func TestFieldBuffer(t *testing.T) { var buf document.FieldBuffer buf.Add("a", types.NewIntegerValue(10)) @@ -169,7 +164,7 @@ func TestFieldBuffer(t *testing.T) { err := json.Unmarshal([]byte(test.document), &buf) assert.NoError(t, err) - path := parsePath(t, test.deletePath) + path := testutil.ParseDocumentPath(t, test.deletePath) err = buf.Delete(path) if test.fails { diff --git a/internal/database/catalog.go b/internal/database/catalog.go index c480c0250..89c53c53b 100644 --- a/internal/database/catalog.go +++ b/internal/database/catalog.go @@ -293,8 +293,8 @@ func (c *Catalog) 
DropIndex(tx *Transaction, name string) error { } // check if the index has been created by a table constraint - if info.Owner.Path != nil { - return stringutil.Errorf("cannot drop index %s because constraint on %s(%s) requires it", info.IndexName, info.TableName, info.Owner.Path) + if len(info.Owner.Paths) > 0 { + return stringutil.Errorf("cannot drop index %s because constraint on %s(%s) requires it", info.IndexName, info.TableName, info.Owner.Paths) } _, err = c.Cache.Delete(tx, RelationIndexType, name) @@ -697,10 +697,8 @@ func newCatalogStore() *CatalogStore { TableConstraints: []*TableConstraint{ { PrimaryKey: true, - Path: document.Path{ - document.PathFragment{ - FieldName: "name", - }, + Paths: []document.Path{ + document.NewPath("name"), }, }, }, @@ -862,12 +860,7 @@ func sequenceInfoToDocument(seq *SequenceInfo) types.Document { buf.Add("sql", types.NewTextValue(seq.String())) if seq.Owner.TableName != "" { - owner := document.NewFieldBuffer().Add("table_name", types.NewTextValue(seq.Owner.TableName)) - if seq.Owner.Path != nil { - owner.Add("path", types.NewTextValue(seq.Owner.Path.String())) - } - - buf.Add("owner", types.NewDocumentValue(owner)) + buf.Add("owner", types.NewDocumentValue(ownerToDocument(&seq.Owner))) } return buf @@ -875,8 +868,12 @@ func sequenceInfoToDocument(seq *SequenceInfo) types.Document { func ownerToDocument(owner *Owner) types.Document { buf := document.NewFieldBuffer().Add("table_name", types.NewTextValue(owner.TableName)) - if owner.Path != nil { - buf.Add("path", types.NewTextValue(owner.Path.String())) + if owner.Paths != nil { + vb := document.NewValueBuffer() + for _, p := range owner.Paths { + vb.Append(types.NewTextValue(p.String())) + } + buf.Add("paths", types.NewArrayValue(vb)) } return buf diff --git a/internal/database/catalog_test.go b/internal/database/catalog_test.go index 12654ba16..b1ae7e9fc 100644 --- a/internal/database/catalog_test.go +++ b/internal/database/catalog_test.go @@ -119,7 +119,7 @@ func 
TestCatalogTable(t *testing.T) { {Path: testutil.ParseDocumentPath(t, "gender"), Type: types.TextValue}, {Path: testutil.ParseDocumentPath(t, "city"), Type: types.TextValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: testutil.ParseDocumentPath(t, "age"), PrimaryKey: true}, + {Paths: []document.Path{testutil.ParseDocumentPath(t, "age")}, PrimaryKey: true}, }} updateCatalog(t, db, func(tx *database.Transaction, catalog *database.Catalog) error { @@ -201,7 +201,7 @@ func TestCatalogTable(t *testing.T) { {Path: testutil.ParseDocumentPath(t, "gender"), Type: types.TextValue}, {Path: testutil.ParseDocumentPath(t, "city"), Type: types.TextValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: testutil.ParseDocumentPath(t, "age"), PrimaryKey: true}, + {Paths: []document.Path{testutil.ParseDocumentPath(t, "age")}, PrimaryKey: true}, }} updateCatalog(t, db, func(tx *database.Transaction, catalog *database.Catalog) error { @@ -242,7 +242,7 @@ func TestCatalogTable(t *testing.T) { // Adding a second primary key should return an error tcs = nil - tcs.AddPrimaryKey("foo", testutil.ParseDocumentPath(t, "address")) + tcs.AddPrimaryKey("foo", []document.Path{testutil.ParseDocumentPath(t, "address")}) err = catalog.AddFieldConstraint(tx, "foo", nil, tcs) assert.Error(t, err) @@ -320,7 +320,7 @@ func TestCatalogCreateIndex(t *testing.T) { updateCatalog(t, db, func(tx *database.Transaction, catalog *database.Catalog) error { return catalog.CreateTable(tx, "test", &database.TableInfo{ TableConstraints: []*database.TableConstraint{ - {Path: testutil.ParseDocumentPath(t, "a"), PrimaryKey: true}, + {Paths: []document.Path{testutil.ParseDocumentPath(t, "a")}, PrimaryKey: true}, }, }) }) @@ -484,7 +484,7 @@ func TestReadOnlyTables(t *testing.T) { case 2: testutil.RequireDocJSONEq(t, d, `{"name":"foo", "docid_sequence_name":"foo_seq", "sql":"CREATE TABLE foo (a INTEGER, b[3].c DOUBLE, UNIQUE (b[3].c))", "store_name":"AQ==", "type":"table"}`) case 3: - 
testutil.RequireDocJSONEq(t, d, `{"name":"foo_b[3].c_idx", "owner":{"table_name":"foo", "path":"b[3].c"}, "sql":"CREATE UNIQUE INDEX `+"`foo_b[3].c_idx`"+` ON foo (b[3].c)", "store_name":"Ag==", "table_name":"foo", "type":"index"}`) + testutil.RequireDocJSONEq(t, d, `{"name":"foo_b[3].c_idx", "owner":{"table_name":"foo", "paths":["b[3].c"]}, "sql":"CREATE UNIQUE INDEX `+"`foo_b[3].c_idx`"+` ON foo (b[3].c)", "store_name":"Ag==", "table_name":"foo", "type":"index"}`) case 4: testutil.RequireDocJSONEq(t, d, `{"name":"foo_seq", "owner":{"table_name":"foo"}, "sql":"CREATE SEQUENCE foo_seq CACHE 64", "type":"sequence"}`) case 5: diff --git a/internal/database/catalogstore/store.go b/internal/database/catalogstore/store.go index aa180fa84..9c1d77add 100644 --- a/internal/database/catalogstore/store.go +++ b/internal/database/catalogstore/store.go @@ -94,7 +94,7 @@ func loadSequences(tx *database.Transaction, c *database.Catalog, info []databas func loadCatalogStore(tx *database.Transaction, s *database.CatalogStore) (tables []database.TableInfo, indexes []database.IndexInfo, sequences []database.SequenceInfo, err error) { tb := s.Table(tx) - err = tb.Iterate(nil, false, func(key tree.Key, d types.Document) error { + err = tb.IterateOnRange(nil, false, func(key tree.Key, d types.Document) error { tp, err := d.GetByField("type") if err != nil { return err } @@ -234,7 +234,15 @@ func ownerFromDocument(d types.Document) (*database.Owner, error) { return nil, err } if err == nil { - owner.Path, err = parser.ParsePath(v.V().(string)) + err = v.V().(types.Array).Iterate(func(i int, value types.Value) error { + pp, err := parser.ParsePath(value.V().(string)) + if err != nil { + return err + } + + owner.Paths = append(owner.Paths, pp) + return nil + }) if err != nil { return nil, err } diff --git a/internal/database/constraint.go b/internal/database/constraint.go index f72b5d437..e81e0a5a6 100644 --- a/internal/database/constraint.go +++ b/internal/database/constraint.go @@ -467,34 
+467,23 @@ func (t *inferredTableExpression) String() string { // and not necessarily to a single field path. type TableConstraint struct { Name string - Path document.Path + Paths document.Paths Check TableExpression Unique bool PrimaryKey bool } -// IsEqual compares t with other member by member. -func (t *TableConstraint) IsEqual(other *TableConstraint) bool { - if t == nil { - return other == nil - } - if other == nil { - return false - } - return t.Name == other.Name && t.Path.IsEqual(other.Path) && t.Check.IsEqual(other.Check) && t.Unique == other.Unique && t.PrimaryKey == other.PrimaryKey -} - func (t *TableConstraint) String() string { if t.Check != nil { return stringutil.Sprintf("CHECK (%s)", t.Check) } if t.PrimaryKey { - return stringutil.Sprintf("PRIMARY KEY (%s)", t.Path) + return stringutil.Sprintf("PRIMARY KEY (%s)", t.Paths) } if t.Unique { - return stringutil.Sprintf("UNIQUE (%s)", t.Path) + return stringutil.Sprintf("UNIQUE (%s)", t.Paths) } return "" @@ -553,7 +542,7 @@ func (t *TableConstraints) AddCheck(tableName string, e TableExpression) { }) } -func (t *TableConstraints) AddPrimaryKey(tableName string, p document.Path) error { +func (t *TableConstraints) AddPrimaryKey(tableName string, p document.Paths) error { for _, tc := range *t { if tc.PrimaryKey { return stringutil.Errorf("multiple primary keys for table %q are not allowed", tableName) @@ -561,7 +550,7 @@ func (t *TableConstraints) AddPrimaryKey(tableName string, p document.Path) erro } *t = append(*t, &TableConstraint{ - Path: p, + Paths: p, PrimaryKey: true, }) @@ -570,15 +559,15 @@ func (t *TableConstraints) AddPrimaryKey(tableName string, p document.Path) erro // AddUnique adds a unique constraint to the table. // If the constraint is already present, it is ignored. 
-func (t *TableConstraints) AddUnique(p document.Path) { +func (t *TableConstraints) AddUnique(p document.Paths) { for _, tc := range *t { - if tc.Unique && tc.Path.IsEqual(p) { + if tc.Unique && tc.Paths.IsEqual(p) { return } } *t = append(*t, &TableConstraint{ - Path: p, + Paths: p, Unique: true, }) } @@ -586,11 +575,11 @@ func (t *TableConstraints) AddUnique(p document.Path) { func (t *TableConstraints) Merge(other TableConstraints) error { for _, tc := range other { if tc.PrimaryKey { - if err := t.AddPrimaryKey(tc.Name, tc.Path); err != nil { + if err := t.AddPrimaryKey(tc.Name, tc.Paths); err != nil { return err } } else if tc.Unique { - t.AddUnique(tc.Path) + t.AddUnique(tc.Paths) } else if tc.Check != nil { t.AddCheck(tc.Name, tc.Check) } diff --git a/internal/database/index.go b/internal/database/index.go index 34845e422..c8c006df1 100644 --- a/internal/database/index.go +++ b/internal/database/index.go @@ -14,10 +14,6 @@ import ( var ( // ErrIndexDuplicateValue is returned when a value is already associated with a key ErrIndexDuplicateValue = errors.New("duplicate value") - - // ErrIndexWrongArity is returned when trying to index more values that what an - // index supports. - ErrIndexWrongArity = errors.New("wrong index arity") ) // An Index associates encoded values with keys. @@ -28,17 +24,15 @@ var ( type Index struct { // How many values the index is operating on. // For example, an index created with `CREATE INDEX idx_a_b ON foo (a, b)` has an arity of 2. - Arity int - Unique bool - Tree *tree.Tree + Arity int + Tree *tree.Tree } // NewIndex creates an index that associates values with a list of keys. 
func NewIndex(tr *tree.Tree, opts IndexInfo) *Index { return &Index{ - Tree: tr, - Arity: len(opts.Paths), - Unique: opts.Unique, + Tree: tr, + Arity: len(opts.Paths), } } @@ -74,17 +68,6 @@ func (idx *Index) Set(vs []types.Value, key tree.Key) error { return err } - // if the index is unique, we need to check if the value is already associated with the key - if idx.Unique { - found, _, err := idx.Exists(vs) - if err != nil { - return err - } - if found { - return errors.Wrap(ErrIndexDuplicateValue) - } - } - return idx.Tree.Put(treeKey, nil) } @@ -160,12 +143,6 @@ func (idx *Index) Delete(vs []types.Value, key tree.Key) error { return engine.ErrKeyNotFound } -func (idx *Index) Iterate(pivot tree.Key, reverse bool, fn func(key tree.Key) error) error { - return idx.Tree.Iterate(pivot, reverse, idx.iterateFn(func(itmKey, key tree.Key) error { - return fn(key) - })) -} - // IterateOnRange seeks for the pivot and then goes through all the subsequent key value pairs in increasing or decreasing order and calls the given function for each pair. // If the given function returns an error, the iteration stops and returns that error. // If the pivot(s) is/are empty, starts from the beginning. @@ -217,23 +194,6 @@ func (idx *Index) iterateOnRange(rng *tree.Range, reverse bool, fn func(itmKey t }) } -func (idx *Index) iterateFn(fn func(itmKey tree.Key, key tree.Key) error) func(k tree.Key, v types.Value) error { - return func(k tree.Key, v types.Value) error { - // we don't care about the value, we just want to extract the key - // which is the last element of the encoded array - pos := bytes.LastIndex(k, []byte{encoding.ArrayValueDelim}) - - kv, err := encoding.DecodeValue(k[pos+1:]) - if err != nil { - return err - } - - pk := tree.Key(kv.V().([]byte)) - - return fn(k, pk) - } -} - // Truncate deletes all the index data. 
func (idx *Index) Truncate() error { return idx.Tree.Truncate() diff --git a/internal/database/index_test.go b/internal/database/index_test.go index 629e67b26..0096a233f 100644 --- a/internal/database/index_test.go +++ b/internal/database/index_test.go @@ -24,7 +24,7 @@ func values(vs ...types.Value) []types.Value { return vs } -func getIndex(t testing.TB, unique bool, arity int) (*database.Index, func()) { +func getIndex(t testing.TB, arity int) (*database.Index, func()) { ng := memoryengine.NewEngine() tx, err := ng.Begin(context.Background(), engine.TxOptions{ Writable: true, @@ -41,7 +41,7 @@ func getIndex(t testing.TB, unique bool, arity int) (*database.Index, func()) { for i := 0; i < arity; i++ { paths = append(paths, document.NewPath(fmt.Sprintf("[%d]", i))) } - idx := database.NewIndex(tr, database.IndexInfo{Unique: unique, Paths: paths}) + idx := database.NewIndex(tr, database.IndexInfo{Paths: paths}) return idx, func() { tx.Rollback() @@ -49,91 +49,46 @@ func getIndex(t testing.TB, unique bool, arity int) (*database.Index, func()) { } func TestIndexSet(t *testing.T) { - for _, unique := range []bool{true, false} { - text := fmt.Sprintf("Unique: %v, ", unique) - - t.Run(text+"Set nil key falls (arity=1)", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 1) - defer cleanup() - assert.Error(t, idx.Set(values(types.NewBoolValue(true)), nil)) - }) - - t.Run(text+"Set value and key succeeds (arity=1)", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 1) - defer cleanup() - assert.NoError(t, idx.Set(values(types.NewBoolValue(true)), []byte("key"))) - }) - - t.Run(text+"Set two values and key succeeds (arity=2)", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 2) - defer cleanup() - assert.NoError(t, idx.Set(values(types.NewBoolValue(true), types.NewBoolValue(true)), []byte("key"))) - }) - - t.Run(text+"Set one value fails (arity=1)", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 2) - defer cleanup() - assert.Error(t, 
idx.Set(values(types.NewBoolValue(true)), []byte("key"))) - }) - - t.Run(text+"Set two values fails (arity=1)", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 1) - defer cleanup() - assert.Error(t, idx.Set(values(types.NewBoolValue(true), types.NewBoolValue(true)), []byte("key"))) - }) - - t.Run(text+"Set three values fails (arity=2)", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 2) - defer cleanup() - assert.Error(t, idx.Set(values(types.NewBoolValue(true), types.NewBoolValue(true), types.NewBoolValue(true)), []byte("key"))) - }) - } - - t.Run("Unique: true, Duplicate", func(t *testing.T) { - idx, cleanup := getIndex(t, true, 1) + t.Run("Set nil key falls (arity=1)", func(t *testing.T) { + idx, cleanup := getIndex(t, 1) defer cleanup() - - assert.NoError(t, idx.Set(values(types.NewIntegerValue(10)), []byte("key"))) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(11)), []byte("key"))) - err := idx.Set(values(types.NewIntegerValue(10)), []byte("key")) - assert.ErrorIs(t, err, database.ErrIndexDuplicateValue) + assert.Error(t, idx.Set(values(types.NewBoolValue(true)), nil)) }) - t.Run("Unique: true, Type: integer Duplicate", func(t *testing.T) { - idx, cleanup := getIndex(t, true, 1) + t.Run("Set value and key succeeds (arity=1)", func(t *testing.T) { + idx, cleanup := getIndex(t, 1) defer cleanup() - - assert.NoError(t, idx.Set(values(types.NewIntegerValue(10)), []byte("key"))) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(11)), []byte("key"))) - err := idx.Set(values(types.NewIntegerValue(10)), []byte("key")) - assert.ErrorIs(t, err, database.ErrIndexDuplicateValue) + assert.NoError(t, idx.Set(values(types.NewBoolValue(true)), []byte("key"))) }) - t.Run("Unique: true, Type: (integer, integer) Duplicate,", func(t *testing.T) { - idx, cleanup := getIndex(t, true, 2) + t.Run("Set two values and key succeeds (arity=2)", func(t *testing.T) { + idx, cleanup := getIndex(t, 2) defer cleanup() + assert.NoError(t, 
idx.Set(values(types.NewBoolValue(true), types.NewBoolValue(true)), []byte("key"))) + }) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(10), types.NewIntegerValue(10)), []byte("key"))) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(10), types.NewIntegerValue(11)), []byte("key"))) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(11), types.NewIntegerValue(11)), []byte("key"))) - err := idx.Set(values(types.NewIntegerValue(10), types.NewIntegerValue(10)), []byte("key")) - assert.ErrorIs(t, err, database.ErrIndexDuplicateValue) + t.Run("Set one value fails (arity=1)", func(t *testing.T) { + idx, cleanup := getIndex(t, 2) + defer cleanup() + assert.Error(t, idx.Set(values(types.NewBoolValue(true)), []byte("key"))) }) - t.Run("Unique: true, Type: (integer, text) Duplicate,", func(t *testing.T) { - idx, cleanup := getIndex(t, true, 2) + t.Run("Set two values fails (arity=1)", func(t *testing.T) { + idx, cleanup := getIndex(t, 1) defer cleanup() + assert.Error(t, idx.Set(values(types.NewBoolValue(true), types.NewBoolValue(true)), []byte("key"))) + }) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(10), types.NewTextValue("foo")), []byte("key"))) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(11), types.NewTextValue("foo")), []byte("key"))) - err := idx.Set(values(types.NewIntegerValue(10), types.NewTextValue("foo")), []byte("key")) - assert.ErrorIs(t, err, database.ErrIndexDuplicateValue) + t.Run("Set three values fails (arity=2)", func(t *testing.T) { + idx, cleanup := getIndex(t, 2) + defer cleanup() + assert.Error(t, idx.Set(values(types.NewBoolValue(true), types.NewBoolValue(true), types.NewBoolValue(true)), []byte("key"))) }) } func TestIndexDelete(t *testing.T) { - t.Run("Unique: false, Delete valid key succeeds", func(t *testing.T) { - idx, cleanup := getIndex(t, false, 1) + t.Run("Delete valid key succeeds", func(t *testing.T) { + idx, cleanup := getIndex(t, 1) defer cleanup() assert.NoError(t, 
idx.Set(values(types.NewDoubleValue(10)), []byte("key"))) @@ -160,8 +115,8 @@ func TestIndexDelete(t *testing.T) { require.Equal(t, 2, i) }) - t.Run("Unique: false, Delete valid key succeeds (arity=2)", func(t *testing.T) { - idx, cleanup := getIndex(t, false, 2) + t.Run("Delete valid key succeeds (arity=2)", func(t *testing.T) { + idx, cleanup := getIndex(t, 2) defer cleanup() assert.NoError(t, idx.Set(values(types.NewDoubleValue(10), types.NewDoubleValue(10)), []byte("key"))) @@ -188,77 +143,16 @@ func TestIndexDelete(t *testing.T) { require.Equal(t, 2, i) }) - t.Run("Unique: true, Delete valid key succeeds", func(t *testing.T) { - idx, cleanup := getIndex(t, true, 1) - defer cleanup() - - assert.NoError(t, idx.Set(values(types.NewIntegerValue(10)), []byte("key1"))) - assert.NoError(t, idx.Set(values(types.NewDoubleValue(11)), []byte("key2"))) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(12)), []byte("key3"))) - assert.NoError(t, idx.Delete(values(types.NewDoubleValue(11)), []byte("key2"))) - - i := 0 - err := idx.IterateOnRange(&tree.Range{Min: testutil.NewKey(t, types.NewIntegerValue(0))}, false, func(key tree.Key) error { - switch i { - case 0: - require.Equal(t, "key1", string(key)) - case 1: - require.Equal(t, "key3", string(key)) - default: - return errors.New("should not reach this point") - } - - i++ - return nil - }) - assert.NoError(t, err) - require.Equal(t, 2, i) - }) - - t.Run("Unique: true, Delete valid key succeeds (arity=2)", func(t *testing.T) { - idx, cleanup := getIndex(t, true, 2) + t.Run("Delete non existing key fails", func(t *testing.T) { + idx, cleanup := getIndex(t, 1) defer cleanup() - assert.NoError(t, idx.Set(values(types.NewIntegerValue(10), types.NewIntegerValue(10)), []byte("key1"))) - assert.NoError(t, idx.Set(values(types.NewDoubleValue(11), types.NewDoubleValue(11)), []byte("key2"))) - assert.NoError(t, idx.Set(values(types.NewIntegerValue(12), types.NewIntegerValue(12)), []byte("key3"))) - assert.NoError(t, 
idx.Delete(values(types.NewDoubleValue(11), types.NewDoubleValue(11)), []byte("key2"))) - - i := 0 - // this will break until the [v, int] case is supported - // pivot := values(types.NewIntegerValue(0), types.NewIntegerValue(0)) - pivot := values(types.NewIntegerValue(0), types.NewIntegerValue(0)) - err := idx.IterateOnRange(&tree.Range{Min: testutil.NewKey(t, pivot...)}, false, func(key tree.Key) error { - switch i { - case 0: - require.Equal(t, "key1", string(key)) - case 1: - require.Equal(t, "key3", string(key)) - default: - return errors.New("should not reach this point") - } - - i++ - return nil - }) - assert.NoError(t, err) - require.Equal(t, 2, i) + assert.Error(t, idx.Delete(values(types.NewTextValue("foo")), []byte("foo"))) }) - - for _, unique := range []bool{true, false} { - text := fmt.Sprintf("Unique: %v, ", unique) - - t.Run(text+"Delete non existing key fails", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 1) - defer cleanup() - - assert.Error(t, idx.Delete(values(types.NewTextValue("foo")), []byte("foo"))) - }) - } } func TestIndexExists(t *testing.T) { - idx, cleanup := getIndex(t, false, 2) + idx, cleanup := getIndex(t, 2) defer cleanup() assert.NoError(t, idx.Set(values(types.NewDoubleValue(10), types.NewIntegerValue(11)), []byte("key1"))) @@ -275,575 +169,558 @@ func TestIndexExists(t *testing.T) { } func TestIndexAscendGreaterThan(t *testing.T) { - for _, unique := range []bool{true, false} { - text := fmt.Sprintf("Unique: %v, ", unique) - - t.Run(text+"Should not iterate if index is empty", func(t *testing.T) { - idx, cleanup := getIndex(t, unique, 1) - defer cleanup() + t.Run("Should not iterate if index is empty", func(t *testing.T) { + idx, cleanup := getIndex(t, 1) + defer cleanup() - i := 0 - err := idx.IterateOnRange(&tree.Range{Min: testutil.NewKey(t, types.NewIntegerValue(0))}, false, func(key tree.Key) error { - i++ - return errors.New("should not iterate") - }) - assert.NoError(t, err) - require.Equal(t, 0, i) + i := 0 
+ err := idx.IterateOnRange(&tree.Range{Min: testutil.NewKey(t, types.NewIntegerValue(0))}, false, func(key tree.Key) error { + i++ + return errors.New("should not iterate") }) + assert.NoError(t, err) + require.Equal(t, 0, i) + }) - t.Run(text+"Should iterate through documents in order, ", func(t *testing.T) { - noiseBlob := func(i int) []types.Value { - t.Helper() - return []types.Value{types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))} - } - noiseInts := func(i int) []types.Value { - t.Helper() - return []types.Value{types.NewIntegerValue(int64(i))} - } - - noCallEq := func(t *testing.T, i uint8, key tree.Key) { - require.Fail(t, "equality test should not be called here") - } - - // the following tests will use that constant to determine how many values needs to be inserted - // with the value and noise generators. - total := 5 + t.Run("Should iterate through documents in order, ", func(t *testing.T) { + noiseBlob := func(i int) []types.Value { + t.Helper() + return []types.Value{types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))} + } + noiseInts := func(i int) []types.Value { + t.Helper() + return []types.Value{types.NewIntegerValue(int64(i))} + } + + noCallEq := func(t *testing.T, i uint8, key tree.Key) { + require.Fail(t, "equality test should not be called here") + } + + // the following tests will use that constant to determine how many values needs to be inserted + // with the value and noise generators. 
+ total := 5 + + tests := []struct { + name string + // the index type(s) that is being used + arity int + // the pivot, typed or not used to iterate + pivot database.Pivot + // the generator for the values that are being indexed + val func(i int) []types.Value + // the generator for the noise values that are being indexed + noise func(i int) []types.Value + // the function to compare the key/value that the iteration yields + expectedEq func(t *testing.T, i uint8, key tree.Key) + // the total count of iteration that should happen + expectedCount int + mustPanic bool + }{ + // integers --------------------------------------------------- + {name: "vals=integers, pivot=integer", + arity: 1, + pivot: values(types.NewIntegerValue(0)), + val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, + noise: noiseBlob, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "index=integer, vals=integers, pivot=integer", + arity: 1, + pivot: values(types.NewIntegerValue(0)), + val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=integers, pivot=integer:2", + arity: 1, + pivot: values(types.NewIntegerValue(2)), + val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, + noise: noiseBlob, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "vals=integers, pivot=integer:10", + arity: 1, + pivot: values(types.NewIntegerValue(10)), + val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, + noise: noiseBlob, + expectedEq: noCallEq, + expectedCount: 0, + }, + {name: "index=integer, vals=integers, pivot=integer:2", + arity: 1, + pivot: 
values(types.NewIntegerValue(2)), + val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "index=integer, vals=integers, pivot=double", + arity: 1, + pivot: values(types.NewDoubleValue(0)), + val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, + expectedEq: noCallEq, + expectedCount: 0, + }, + + // doubles ---------------------------------------------------- + {name: "vals=doubles, pivot=double", + arity: 1, + pivot: values(types.NewDoubleValue(0)), + val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=doubles, pivot=double:1.8", + arity: 1, + pivot: values(types.NewDoubleValue(1.8)), + val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "index=double, vals=doubles, pivot=double:1.8", + arity: 1, + pivot: values(types.NewDoubleValue(1.8)), + val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "vals=doubles, pivot=double:10.8", + arity: 1, + pivot: values(types.NewDoubleValue(10.8)), + val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, + expectedEq: noCallEq, + expectedCount: 0, + }, + + // text ------------------------------------------------------- + {name: "vals=text pivot=text", + arity: 1, + pivot: 
values(types.NewTextValue("")), + val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, + noise: noiseInts, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=text, pivot=text('2')", + arity: 1, + pivot: values(types.NewTextValue("2")), + val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, + noise: noiseInts, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "vals=text, pivot=text('')", + arity: 1, + pivot: values(types.NewTextValue("")), + val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, + noise: noiseInts, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=text, pivot=text('foo')", + arity: 1, + pivot: values(types.NewTextValue("foo")), + val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, + noise: noiseInts, + expectedEq: noCallEq, + expectedCount: 0, + }, + {name: "index=text, vals=text, pivot=text('2')", + arity: 1, + pivot: values(types.NewTextValue("2")), + val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + // composite -------------------------------------------------- + {name: "vals=[int, int], noise=[blob, blob], pivot=[int, int]", + arity: 2, + pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, 
[]byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=[int, int], noise=[blob, blob], pivot=[int]", + arity: 2, + pivot: values(types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=[int, int], noise=[blob, blob], pivot=[0, int, 0]", + arity: 3, + pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=[int, int], noise=[blob, blob], pivot=[int, 0]", + arity: 2, + pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=[int, int], noise=[blob, blob], pivot=[0, 0]", + arity: 2, + pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + noise: func(i int) []types.Value { + return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=[int, int], noise=[blob, blob], pivot=[2, 0]", + arity: 2, + pivot: values(types.NewIntegerValue(2), 
types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + noise: func(i int) []types.Value { + return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "vals=[int, int], noise=[blob, blob], pivot=[2, int]", + arity: 2, + pivot: values(types.NewIntegerValue(2), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + noise: func(i int) []types.Value { + return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + // pivot [2, int] should filter out [2, not(int)] + {name: "vals=[int, int], noise=[int, blob], pivot=[2, int]", + arity: 2, + pivot: values(types.NewIntegerValue(2), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + noise: func(i int) []types.Value { + // only [3, not(int)] is greater than [2, int], so it will appear anyway if we don't skip it + if i < 3 { + return values(types.NewIntegerValue(int64(i)), types.NewBoolValue(true)) + } - tests := []struct { - name string - // the index type(s) that is being used - arity int - // the pivot, typed or not used to iterate - pivot database.Pivot - // the generator for the values that are being indexed - val func(i int) []types.Value - // the generator for the noise values that are being indexed - noise func(i int) []types.Value - // the function to compare the key/value that the 
iteration yields - expectedEq func(t *testing.T, i uint8, key tree.Key) - // the total count of iteration that should happen - expectedCount int - mustPanic bool - }{ - // integers --------------------------------------------------- - {name: "vals=integers, pivot=integer", - arity: 1, - pivot: values(types.NewIntegerValue(0)), - val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, - noise: noiseBlob, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "index=integer, vals=integers, pivot=integer", - arity: 1, - pivot: values(types.NewIntegerValue(0)), - val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=integers, pivot=integer:2", - arity: 1, - pivot: values(types.NewIntegerValue(2)), - val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, - noise: noiseBlob, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "vals=integers, pivot=integer:10", - arity: 1, - pivot: values(types.NewIntegerValue(10)), - val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, - noise: noiseBlob, - expectedEq: noCallEq, - expectedCount: 0, + return nil }, - {name: "index=integer, vals=integers, pivot=integer:2", - arity: 1, - pivot: values(types.NewIntegerValue(2)), - val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + // a more subtle case + {name: "vals=[int, blob], noise=[blob, 
blob], pivot=[2, 'a']", // pivot is [2, a] but value is [2, c] but that must work anyway + arity: 2, + pivot: values(types.NewIntegerValue(2), types.NewBlobValue([]byte{byte('a')})), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewBlobValue([]byte{byte('a' + uint8(i))})) + }, + noise: func(i int) []types.Value { + return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + // partial pivot + {name: "vals=[int, int], noise=[blob, blob], pivot=[0]", + arity: 2, + pivot: values(types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + noise: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + // let's not try to match, it's not important + }, + expectedCount: 10, + }, + {name: "vals=[int, int], noise=[blob, blob], pivot=[2]", + arity: 2, + pivot: values(types.NewIntegerValue(2)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + noise: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + // let's not try to match, it's not important + }, + expectedCount: 6, // total * 2 - (noise + val = 2) * 2 + }, + // this is a tricky test, when we have multiple values but they share the first pivot element; + // this is by definition a very implementation dependent test. 
+ {name: "vals=[int, int], noise=int, bool], pivot=[int:0, int:0]", + arity: 2, + pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + noise: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewBoolValue(true)) + }, + // [0, 0] > [0, true] but [1, true] > [0, 0] so we will see some bools in the results + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + if i%2 == 0 { + i = i / 2 require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "index=integer, vals=integers, pivot=double", - arity: 1, - pivot: values(types.NewDoubleValue(0)), - val: func(i int) []types.Value { return values(types.NewIntegerValue(int64(i))) }, - expectedEq: noCallEq, - expectedCount: 0, + } }, + expectedCount: 9, // 10 elements, but pivot skipped the initial [0, true] + }, + // index typed + {name: "index=[int, int], vals=[int, int], pivot=[0, 0]", + arity: 2, + pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "index=[int, int], vals=[int, int], pivot=[2, 0]", + arity: 2, + pivot: values(types.NewIntegerValue(2), types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + // a more subtle case + {name: "vals=[int, blob], pivot=[2, 'a']", // pivot is [2, a] but value is [2, c] but that must work anyway + arity: 2, + pivot: values(types.NewIntegerValue(2), 
types.NewBlobValue([]byte{byte('a')})), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewBlobValue([]byte{byte('a' + uint8(i))})) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + // partial pivot + {name: "vals=[int, int], pivot=[0]", + arity: 2, + pivot: values(types.NewIntegerValue(0)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 5, + }, + {name: "vals=[int, int], pivot=[2]", + arity: 2, + pivot: values(types.NewIntegerValue(2)), + val: func(i int) []types.Value { + return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + + // documents -------------------------------------------------- + {name: "vals=[doc, int], pivot=[{a:2}, 3]", + arity: 2, + pivot: values( + types.NewDocumentValue(testutil.MakeDocument(t, `{"a":2}`)), + types.NewIntegerValue(int64(3)), + ), + val: func(i int) []types.Value { + return values( + types.NewDocumentValue(testutil.MakeDocument(t, `{"a":`+strconv.Itoa(i)+`}`)), + types.NewIntegerValue(int64(i+1)), + ) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + + // arrays ----------------------------------------------------- + {name: "vals=[int[], int], pivot=[[2,2], 3]", + arity: 2, + pivot: values( + testutil.MakeArrayValue(t, 2, 2), + types.NewIntegerValue(int64(3)), + ), + val: func(i int) []types.Value { + return values( + testutil.MakeArrayValue(t, i, i), + types.NewIntegerValue(int64(i+1)), + ) + }, + 
expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "vals=[int[], int[]], pivot=[[2,2], [3,3]]", + arity: 2, + pivot: values( + testutil.MakeArrayValue(t, 2, 2), + testutil.MakeArrayValue(t, 3, 3), + ), + val: func(i int) []types.Value { + return values( + testutil.MakeArrayValue(t, i, i), + testutil.MakeArrayValue(t, i+1, i+1), + ) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + {name: "vals=[int[], int], pivot=[[2,2], 3]", + arity: 2, + pivot: values( + testutil.MakeArrayValue(t, 2, 2), + types.NewIntegerValue(int64(3)), + ), + val: func(i int) []types.Value { + return values( + testutil.MakeArrayValue(t, i, i), + types.NewIntegerValue(int64(i+1)), + ) + }, + expectedEq: func(t *testing.T, i uint8, key tree.Key) { + i += 2 + require.Equal(t, []byte{'a' + i}, []byte(key)) + }, + expectedCount: 3, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + idx, cleanup := getIndex(t, test.arity) + defer cleanup() + + for i := 0; i < total; i++ { + assert.NoError(t, idx.Set(test.val(i), []byte{'a' + byte(i)})) + if test.noise != nil { + v := test.noise(i) + if v != nil { + assert.NoError(t, idx.Set(test.noise(i), []byte{'a' + byte(i)})) + } + } + } - // doubles ---------------------------------------------------- - {name: "vals=doubles, pivot=double", - arity: 1, - pivot: values(types.NewDoubleValue(0)), - val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=doubles, pivot=double:1.8", - arity: 1, - pivot: values(types.NewDoubleValue(1.8)), - val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, - 
expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "index=double, vals=doubles, pivot=double:1.8", - arity: 1, - pivot: values(types.NewDoubleValue(1.8)), - val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "vals=doubles, pivot=double:10.8", - arity: 1, - pivot: values(types.NewDoubleValue(10.8)), - val: func(i int) []types.Value { return values(types.NewDoubleValue(float64(i) + float64(i)/2)) }, - expectedEq: noCallEq, - expectedCount: 0, - }, - - // text ------------------------------------------------------- - {name: "vals=text pivot=text", - arity: 1, - pivot: values(types.NewTextValue("")), - val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, - noise: noiseInts, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=text, pivot=text('2')", - arity: 1, - pivot: values(types.NewTextValue("2")), - val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, - noise: noiseInts, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "vals=text, pivot=text('')", - arity: 1, - pivot: values(types.NewTextValue("")), - val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, - noise: noiseInts, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=text, pivot=text('foo')", - arity: 1, - pivot: values(types.NewTextValue("foo")), - val: func(i int) []types.Value { return 
values(types.NewTextValue(strconv.Itoa(i))) }, - noise: noiseInts, - expectedEq: noCallEq, - expectedCount: 0, - }, - {name: "index=text, vals=text, pivot=text('2')", - arity: 1, - pivot: values(types.NewTextValue("2")), - val: func(i int) []types.Value { return values(types.NewTextValue(strconv.Itoa(i))) }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - // composite -------------------------------------------------- - {name: "vals=[int, int], noise=[blob, blob], pivot=[int, int]", - arity: 2, - pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=[int, int], noise=[blob, blob], pivot=[int]", - arity: 2, - pivot: values(types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=[int, int], noise=[blob, blob], pivot=[0, int, 0]", - arity: 3, - pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=[int, int], noise=[blob, blob], pivot=[int, 0]", - arity: 2, - pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return 
values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=[int, int], noise=[blob, blob], pivot=[0, 0]", - arity: 2, - pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - noise: func(i int) []types.Value { - return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=[int, int], noise=[blob, blob], pivot=[2, 0]", - arity: 2, - pivot: values(types.NewIntegerValue(2), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - noise: func(i int) []types.Value { - return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "vals=[int, int], noise=[blob, blob], pivot=[2, int]", - arity: 2, - pivot: values(types.NewIntegerValue(2), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - noise: func(i int) []types.Value { - return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - // pivot [2, int] should 
filter out [2, not(int)] - {name: "vals=[int, int], noise=[int, blob], pivot=[2, int]", - arity: 2, - pivot: values(types.NewIntegerValue(2), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - noise: func(i int) []types.Value { - // only [3, not(int)] is greater than [2, int], so it will appear anyway if we don't skip it - if i < 3 { - return values(types.NewIntegerValue(int64(i)), types.NewBoolValue(true)) - } - - return nil - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - // a more subtle case - {name: "vals=[int, blob], noise=[blob, blob], pivot=[2, 'a']", // pivot is [2, a] but value is [2, c] but that must work anyway - arity: 2, - pivot: values(types.NewIntegerValue(2), types.NewBlobValue([]byte{byte('a')})), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewBlobValue([]byte{byte('a' + uint8(i))})) - }, - noise: func(i int) []types.Value { - return values(types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - // partial pivot - {name: "vals=[int, int], noise=[blob, blob], pivot=[0]", - arity: 2, - pivot: values(types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - noise: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - // let's not try to match, it's not important - }, - expectedCount: 10, - }, - {name: "vals=[int, int], noise=[blob, blob], pivot=[2]", - 
arity: 2, - pivot: values(types.NewIntegerValue(2)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - noise: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewBlobValue(strconv.AppendInt(nil, int64(i), 10))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - // let's not try to match, it's not important - }, - expectedCount: 6, // total * 2 - (noise + val = 2) * 2 - }, - // this is a tricky test, when we have multiple values but they share the first pivot element; - // this is by definition a very implementation dependent test. - {name: "vals=[int, int], noise=int, bool], pivot=[int:0, int:0]", - arity: 2, - pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - noise: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewBoolValue(true)) - }, - // [0, 0] > [0, true] but [1, true] > [0, 0] so we will see some bools in the results - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - if i%2 == 0 { - i = i / 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - } - }, - expectedCount: 9, // 10 elements, but pivot skipped the initial [0, true] - }, - // index typed - {name: "index=[int, int], vals=[int, int], pivot=[0, 0]", - arity: 2, - pivot: values(types.NewIntegerValue(0), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "index=[int, int], vals=[int, int], pivot=[2, 0]", - arity: 2, - pivot: values(types.NewIntegerValue(2), types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return 
values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - // a more subtle case - {name: "vals=[int, blob], pivot=[2, 'a']", // pivot is [2, a] but value is [2, c] but that must work anyway - arity: 2, - pivot: values(types.NewIntegerValue(2), types.NewBlobValue([]byte{byte('a')})), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewBlobValue([]byte{byte('a' + uint8(i))})) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - // partial pivot - {name: "vals=[int, int], pivot=[0]", - arity: 2, - pivot: values(types.NewIntegerValue(0)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 5, - }, - {name: "vals=[int, int], pivot=[2]", - arity: 2, - pivot: values(types.NewIntegerValue(2)), - val: func(i int) []types.Value { - return values(types.NewIntegerValue(int64(i)), types.NewIntegerValue(int64(i+1))) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - - // documents -------------------------------------------------- - {name: "vals=[doc, int], pivot=[{a:2}, 3]", - arity: 2, - pivot: values( - types.NewDocumentValue(testutil.MakeDocument(t, `{"a":2}`)), - types.NewIntegerValue(int64(3)), - ), - val: func(i int) []types.Value { - return values( - types.NewDocumentValue(testutil.MakeDocument(t, `{"a":`+strconv.Itoa(i)+`}`)), - types.NewIntegerValue(int64(i+1)), - ) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' 
+ i}, []byte(key)) - }, - expectedCount: 3, - }, - - // arrays ----------------------------------------------------- - {name: "vals=[int[], int], pivot=[[2,2], 3]", - arity: 2, - pivot: values( - testutil.MakeArrayValue(t, 2, 2), - types.NewIntegerValue(int64(3)), - ), - val: func(i int) []types.Value { - return values( - testutil.MakeArrayValue(t, i, i), - types.NewIntegerValue(int64(i+1)), - ) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "vals=[int[], int[]], pivot=[[2,2], [3,3]]", - arity: 2, - pivot: values( - testutil.MakeArrayValue(t, 2, 2), - testutil.MakeArrayValue(t, 3, 3), - ), - val: func(i int) []types.Value { - return values( - testutil.MakeArrayValue(t, i, i), - testutil.MakeArrayValue(t, i+1, i+1), - ) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - {name: "vals=[int[], int], pivot=[[2,2], 3]", - arity: 2, - pivot: values( - testutil.MakeArrayValue(t, 2, 2), - types.NewIntegerValue(int64(3)), - ), - val: func(i int) []types.Value { - return values( - testutil.MakeArrayValue(t, i, i), - types.NewIntegerValue(int64(i+1)), - ) - }, - expectedEq: func(t *testing.T, i uint8, key tree.Key) { - i += 2 - require.Equal(t, []byte{'a' + i}, []byte(key)) - }, - expectedCount: 3, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - idx, cleanup := getIndex(t, unique, test.arity) - defer cleanup() - - for i := 0; i < total; i++ { - assert.NoError(t, idx.Set(test.val(i), []byte{'a' + byte(i)})) - if test.noise != nil { - v := test.noise(i) - if v != nil { - assert.NoError(t, idx.Set(test.noise(i), []byte{'a' + byte(i)})) - } - } - } - - var i uint8 - var count int - fn := func() error { - return idx.IterateOnRange(&tree.Range{Min: testutil.NewKey(t, test.pivot...)}, false, func(key tree.Key) error { - 
test.expectedEq(t, i, key) - i++ - count++ - return nil - }) - } - if test.mustPanic { - // let's avoid panicking because expectedEq wasn't defined, which would - // be a false positive. - if test.expectedEq == nil { - test.expectedEq = func(t *testing.T, i uint8, key tree.Key) {} - } - require.Panics(t, func() { _ = fn() }) - } else { - err := fn() - assert.NoError(t, err) - require.Equal(t, test.expectedCount, count) - } - }) - } - }) - } -} + var i uint8 + var count int + fn := func() error { + return idx.IterateOnRange(&tree.Range{Min: testutil.NewKey(t, test.pivot...)}, false, func(key tree.Key) error { + test.expectedEq(t, i, key) + i++ + count++ + return nil + }) + } + if test.mustPanic { + // let's avoid panicking because expectedEq wasn't defined, which would + // be a false positive. + if test.expectedEq == nil { + test.expectedEq = func(t *testing.T, i uint8, key tree.Key) {} + } + require.Panics(t, func() { _ = fn() }) + } else { + err := fn() + assert.NoError(t, err) + require.Equal(t, test.expectedCount, count) + } + }) + } + }) +} func TestIndexDescendLessOrEqual(t *testing.T) { for _, unique := range []bool{true, false} { text := fmt.Sprintf("Unique: %v, ", unique) - // t.Run(text+"Should not iterate if index is empty", func(t *testing.T) { - // idx, cleanup := getIndex(t, unique, 1) - // defer cleanup() - - // i := 0 - // err := idx.IterateOnRange(&tree.Range{Min: testutil.NewKey(t, types.NewIntegerValue(0))}, false, func(key tree.Key) error { - // i++ - // return errors.New("should not iterate") - // }) - // assert.NoError(t, err) - // require.Equal(t, 0, i) - // }) - t.Run(text+"Should iterate through documents in order, ", func(t *testing.T) { noiseBlob := func(i int) []types.Value { t.Helper() @@ -1310,7 +1187,7 @@ func TestIndexDescendLessOrEqual(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - idx, cleanup := getIndex(t, unique, test.arity) + idx, cleanup := getIndex(t, test.arity) defer cleanup() for i := 
0; i < total; i++ { @@ -1362,7 +1239,7 @@ func BenchmarkIndexSet(b *testing.B) { b.ResetTimer() b.StopTimer() for i := 0; i < b.N; i++ { - idx, cleanup := getIndex(b, false, 1) + idx, cleanup := getIndex(b, 1) b.StartTimer() for j := 0; j < size; j++ { @@ -1380,7 +1257,7 @@ func BenchmarkIndexSet(b *testing.B) { func BenchmarkIndexIteration(b *testing.B) { for size := 10; size <= 10000; size *= 10 { b.Run(fmt.Sprintf("%.05d", size), func(b *testing.B) { - idx, cleanup := getIndex(b, false, 1) + idx, cleanup := getIndex(b, 1) defer cleanup() for i := 0; i < size; i++ { @@ -1406,7 +1283,7 @@ func BenchmarkCompositeIndexSet(b *testing.B) { b.ResetTimer() b.StopTimer() for i := 0; i < b.N; i++ { - idx, cleanup := getIndex(b, false, 2) + idx, cleanup := getIndex(b, 2) b.StartTimer() for j := 0; j < size; j++ { @@ -1424,7 +1301,7 @@ func BenchmarkCompositeIndexSet(b *testing.B) { func BenchmarkCompositeIndexIteration(b *testing.B) { for size := 10; size <= 10000; size *= 10 { b.Run(fmt.Sprintf("%.05d", size), func(b *testing.B) { - idx, cleanup := getIndex(b, false, 2) + idx, cleanup := getIndex(b, 2) defer cleanup() for i := 0; i < size; i++ { diff --git a/internal/database/info.go b/internal/database/info.go index aa035073f..bd2334cd6 100644 --- a/internal/database/info.go +++ b/internal/database/info.go @@ -61,20 +61,26 @@ func (ti *TableInfo) ValidateDocument(tx *Transaction, d types.Document) (*docum return fb, nil } -func (ti *TableInfo) GetPrimaryKey() *FieldConstraint { +func (ti *TableInfo) GetPrimaryKey() *PrimaryKey { + var pk PrimaryKey + for _, tc := range ti.TableConstraints { if !tc.PrimaryKey { continue } - fc := ti.GetFieldConstraintForPath(tc.Path) - if fc == nil { - return &FieldConstraint{ - Path: tc.Path, + pk.Paths = tc.Paths + + for _, pp := range tc.Paths { + fc := ti.GetFieldConstraintForPath(pp) + if fc != nil { + pk.Types = append(pk.Types, fc.Type) + } else { + pk.Types = append(pk.Types, 0) } } - return fc + return &pk } return nil @@ -139,6 
+145,11 @@ func (ti *TableInfo) Clone() *TableInfo { return &cp } +type PrimaryKey struct { + Paths document.Paths + Types []types.ValueType +} + // IndexInfo holds the configuration of an index. type IndexInfo struct { TableName string @@ -282,5 +293,5 @@ func (s SequenceInfo) Clone() *SequenceInfo { // path must also be filled. type Owner struct { TableName string - Path document.Path + Paths document.Paths } diff --git a/internal/database/sequence.go b/internal/database/sequence.go index 8529ed6c3..2bcaf0a2e 100644 --- a/internal/database/sequence.go +++ b/internal/database/sequence.go @@ -38,10 +38,8 @@ var sequenceTableInfo = &TableInfo{ }, TableConstraints: []*TableConstraint{ { - Path: document.Path{ - document.PathFragment{ - FieldName: "name", - }, + Paths: []document.Path{ + document.NewPath("name"), }, PrimaryKey: true, }, @@ -228,9 +226,9 @@ func (s *Sequence) SetName(name string) { func (s *Sequence) GenerateBaseName() string { var sb strings.Builder sb.WriteString(s.Info.Owner.TableName) - if s.Info.Owner.Path != nil { + if len(s.Info.Owner.Paths) > 0 { sb.WriteString("_") - sb.WriteString(s.Info.Owner.Path.String()) + sb.WriteString(s.Info.Owner.Paths.String()) } sb.WriteString("_seq") return sb.String() diff --git a/internal/database/table.go b/internal/database/table.go index 75a16931f..7445c9964 100644 --- a/internal/database/table.go +++ b/internal/database/table.go @@ -49,7 +49,7 @@ func (t *Table) Insert(d types.Document) (tree.Key, types.Document, error) { if err == nil { return nil, nil, &errs.ConstraintViolationError{ Constraint: "PRIMARY KEY", - Paths: []document.Path{t.Info.GetPrimaryKey().Path}, + Paths: t.Info.GetPrimaryKey().Paths, Key: key, } } @@ -120,55 +120,19 @@ func (d *lazilyDecodedDocument) Iterate(fn func(field string, value types.Value) return doc.Iterate(fn) } -// ValueToKey encodes a value following primary key constraints. 
-// It can be used to manually add a new entry to the store or to compare -// with other keys while iterating on the table. -// TODO(asdine): Change Table methods to receive values and build keys from them? -func (t *Table) ValueToKey(v types.Value) (tree.Key, error) { - var err error +func (t *Table) IterateOnRange(rng *Range, reverse bool, fn func(key tree.Key, d types.Document) error) error { + var paths []document.Path pk := t.Info.GetPrimaryKey() - if pk == nil { - // if no primary key was defined, cast the value as integer - v, err = document.CastAsInteger(v) - if err != nil { - return nil, err - } - - return tree.NewKey(v) - } - - // if a primary key was defined and the primary is typed, convert the value to the right type. - if !pk.Type.IsAny() { - v, err = document.CastAs(v, pk.Type) - if err != nil { - return nil, err - } - // it no primary key type is specified, - // and the value to encode is an integer - // convert it to a double. - } else if v.Type() == types.IntegerValue { - v, err = document.CastAsDouble(v) - if err != nil { - return nil, err - } + if pk != nil { + paths = pk.Paths } - return tree.NewKey(v) -} + var r *tree.Range + var err error -// Iterate goes through all the documents of the table and calls the given function by passing each one of them. -// If the given function returns an error, the iteration stops. -// If a pivot is specified and reverse is false, the iteration will return all documents greater -// than or equal to the pivot in ascending order. -// If a pivot is specified and reverse is true, the iteration will return all documents less -// than or equal to the pivot, in descending order. -// Prior to iteration, The pivot is converted to the type of the primary key, if any. 
-func (t *Table) Iterate(pivot Pivot, reverse bool, fn func(key tree.Key, d types.Document) error) error { - var key tree.Key - if len(pivot) > 0 { - var err error - key, err = t.ValueToKey(pivot[0]) + if rng != nil { + r, err = rng.ToTreeRange(&t.Info.FieldConstraints, paths) if err != nil { return err } @@ -176,27 +140,6 @@ func (t *Table) Iterate(pivot Pivot, reverse bool, fn func(key tree.Key, d types var d lazilyDecodedDocument - return t.Tree.Iterate(key, reverse, func(k tree.Key, v types.Value) error { - d.Value = v - return fn(k, &d) - }) -} - -func (t *Table) IterateOnRange(rng *Range, reverse bool, fn func(key tree.Key, d types.Document) error) error { - var paths []document.Path - - pk := t.Info.GetPrimaryKey() - if pk != nil { - paths = append(paths, pk.Path) - } - - r, err := rng.ToTreeRange(&t.Info.FieldConstraints, paths) - if err != nil { - return err - } - - var d lazilyDecodedDocument - return t.Tree.IterateOnRange(r, reverse, func(k tree.Key, v types.Value) error { d.Value = v return fn(k, &d) @@ -224,15 +167,20 @@ func (t *Table) GetDocument(key tree.Key) (types.Document, error) { // key is generated, called the docid. func (t *Table) generateKey(info *TableInfo, d types.Document) (tree.Key, error) { if pk := t.Info.GetPrimaryKey(); pk != nil { - v, err := pk.Path.GetValueFromDocument(d) - if errors.Is(err, document.ErrFieldNotFound) { - return nil, stringutil.Errorf("missing primary key at path %q", pk.Path) - } - if err != nil { - return nil, err + vs := make([]types.Value, 0, len(pk.Paths)) + for _, p := range pk.Paths { + v, err := p.GetValueFromDocument(d) + if errors.Is(err, document.ErrFieldNotFound) { + return nil, stringutil.Errorf("missing primary key at path %q", p) + } + if err != nil { + return nil, err + } + + vs = append(vs, v) } - return tree.NewKey(v) + return tree.NewKey(vs...) 
} seq, err := t.Catalog.GetSequence(t.Info.DocidSequenceName) diff --git a/internal/database/table_test.go b/internal/database/table_test.go index 5ede53949..de03973b8 100644 --- a/internal/database/table_test.go +++ b/internal/database/table_test.go @@ -191,7 +191,7 @@ func TestTableInsert(t *testing.T) { {Path: testutil.ParseDocumentPath(t, "foo.a[1]"), Type: types.IntegerValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: testutil.ParseDocumentPath(t, "foo.a[1]"), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo.a[1]"), PrimaryKey: true}, }, }) assert.NoError(t, err) @@ -256,7 +256,7 @@ func TestTableInsert(t *testing.T) { {Path: testutil.ParseDocumentPath(t, "foo"), Type: types.IntegerValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: testutil.ParseDocumentPath(t, "foo"), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), PrimaryKey: true}, }, }) assert.NoError(t, err) @@ -496,7 +496,7 @@ func TestTableInsert(t *testing.T) { {Path: testutil.ParseDocumentPath(t, "foo"), IsNotNull: true}, }, TableConstraints: []*database.TableConstraint{ - {Path: testutil.ParseDocumentPath(t, "foo"), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), PrimaryKey: true}, }, }) @@ -631,7 +631,7 @@ func TestTableTruncate(t *testing.T) { err = tb.Truncate() assert.NoError(t, err) - err = tb.Iterate(nil, false, func(key tree.Key, _ types.Document) error { + err = tb.IterateOnRange(nil, false, func(key tree.Key, _ types.Document) error { return errors.New("should not iterate") }) @@ -685,7 +685,7 @@ func BenchmarkTableScan(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - tb.Iterate(nil, false, func(tree.Key, types.Document) error { + tb.IterateOnRange(nil, false, func(tree.Key, types.Document) error { return nil }) } diff --git a/internal/expr/functions/builtins.go b/internal/expr/functions/builtins.go index a16ae9140..562d73c3e 100644 --- a/internal/expr/functions/builtins.go +++ 
b/internal/expr/functions/builtins.go @@ -1,8 +1,6 @@ package functions import ( - "fmt" - "github.com/genjidb/genji/document" "github.com/genjidb/genji/internal/environment" "github.com/genjidb/genji/internal/errors" @@ -130,13 +128,24 @@ func (k *PK) Eval(env *environment.Environment) (types.Value, error) { } pk := info.GetPrimaryKey() - if pk == nil { - fmt.Println("pk() -> ", vs[0]) - // decode as docid - return vs[0], nil + if pk != nil { + for i, tp := range pk.Types { + if !tp.IsAny() { + vs[i], err = document.CastAs(vs[i], tp) + if err != nil { + return nil, err + } + } + } + } + + vb := document.NewValueBuffer() + + for _, v := range vs { + vb.Append(v) } - return document.CastAs(vs[0], pk.Type) + return types.NewArrayValue(vb), nil } func (*PK) Params() []expr.Expr { return nil } diff --git a/internal/planner/index_selection.go b/internal/planner/index_selection.go index f6280cf50..d0eec099f 100644 --- a/internal/planner/index_selection.go +++ b/internal/planner/index_selection.go @@ -133,9 +133,12 @@ func (i *indexSelector) selectIndex(s *stream.Stream, filters []*stream.FilterOp if err != nil { return nil, err } - selected = i.associatePkWithNodes(tb, nodes) - if selected != nil { - cost = selected.Cost() + pk := tb.GetPrimaryKey() + if pk != nil { + selected = i.associateIndexWithNodes(tb.TableName, false, false, pk.Paths, nodes) + if selected != nil { + cost = selected.Cost() + } } // get all the indexes for this table and associate them @@ -146,7 +149,7 @@ func (i *indexSelector) selectIndex(s *stream.Stream, filters []*stream.FilterOp return nil, err } - candidate := i.associateIndexWithNodes(idxInfo, nodes) + candidate := i.associateIndexWithNodes(idxInfo.IndexName, true, idxInfo.Unique, idxInfo.Paths, nodes) if candidate == nil { continue @@ -216,37 +219,6 @@ func (i *indexSelector) isFilterIndexable(f *stream.FilterOperator) *filterNode return &node } -func (i *indexSelector) associatePkWithNodes(tb *database.TableInfo, nodes filterNodes) 
*candidate { - // TODO: add support for the pk() function - pk := tb.GetPrimaryKey() - - if pk == nil { - return nil - } - - n := nodes.getByPath(pk.Path) - if n == nil { - return nil - } - - var ranges stream.Ranges - if n.operator == scanner.IN { - for _, e := range n.operand.(expr.LiteralExprList) { - ranges = append(ranges, i.buildRangeFromOperator(scanner.EQ, []document.Path{pk.Path}, e)) - } - } else { - ranges = append(ranges, i.buildRangeFromOperator(n.operator, []document.Path{pk.Path}, n.operand)) - } - - return &candidate{ - nodes: filterNodes{n}, - rangesCost: ranges.Cost(), - replaceRootBy: []stream.Operator{ - stream.PkScan(tb.TableName, ranges...), - }, - } -} - // for a given index, select all filter nodes that match according to the following rules: // - from left to right, associate each indexed path to a filter node and stop when there is no // node available or the node is not compatible @@ -261,11 +233,11 @@ func (i *indexSelector) associatePkWithNodes(tb *database.TableInfo, nodes filte // -> range = {min: [3], exact: true} // filter(a IN (1, 2)) // -> ranges = [1], [2] -func (i *indexSelector) associateIndexWithNodes(idx *database.IndexInfo, nodes filterNodes) *candidate { - found := make([]*filterNode, 0, len(idx.Paths)) +func (i *indexSelector) associateIndexWithNodes(treeName string, isIndex bool, isUnique bool, paths []document.Path, nodes filterNodes) *candidate { + found := make([]*filterNode, 0, len(paths)) var hasIn bool - for _, p := range idx.Paths { + for _, p := range paths { n := nodes.getByPath(p) if n == nil { break @@ -297,23 +269,32 @@ func (i *indexSelector) associateIndexWithNodes(idx *database.IndexInfo, nodes f var ranges stream.Ranges if !hasIn { - ranges = stream.Ranges{i.buildRangeFromFilterNodes(idx, found...)} + ranges = stream.Ranges{i.buildRangeFromFilterNodes(found...)} } else { - ranges = i.buildRangesFromFilterNodes(idx, found) + ranges = i.buildRangesFromFilterNodes(paths, found) } - return &candidate{ + c := 
candidate{ nodes: found, rangesCost: ranges.Cost(), - isIndex: true, - isUnique: idx.Unique, - replaceRootBy: []stream.Operator{ - stream.IndexScan(idx.IndexName, ranges...), - }, + isIndex: isIndex, + isUnique: isUnique, } + + if !isIndex { + c.replaceRootBy = []stream.Operator{ + stream.PkScan(treeName, ranges...), + } + } else { + c.replaceRootBy = []stream.Operator{ + stream.IndexScan(treeName, ranges...), + } + } + + return &c } -func (i *indexSelector) buildRangesFromFilterNodes(idx *database.IndexInfo, filters []*filterNode) stream.Ranges { +func (i *indexSelector) buildRangesFromFilterNodes(paths []document.Path, filters []*filterNode) stream.Ranges { // build a 2 dimentional list of all expressions // so that: filter(a IN (10, 11)) | filter(b = 20) | filter(c IN (30, 31)) // becomes: @@ -344,7 +325,7 @@ func (i *indexSelector) buildRangesFromFilterNodes(idx *database.IndexInfo, filt var ranges stream.Ranges i.walkExpr(l, func(row []expr.Expr) { - ranges = append(ranges, i.buildRangeFromOperator(scanner.EQ, idx.Paths[:len(row)], row...)) + ranges = append(ranges, i.buildRangeFromOperator(scanner.EQ, paths[:len(row)], row...)) }) return ranges @@ -372,7 +353,7 @@ func (i *indexSelector) walkExpr(l [][]expr.Expr, fn func(row []expr.Expr)) { } } -func (i *indexSelector) buildRangeFromFilterNodes(idx *database.IndexInfo, filters ...*filterNode) stream.Range { +func (i *indexSelector) buildRangeFromFilterNodes(filters ...*filterNode) stream.Range { // first, generate a list of paths and a list of expressions paths := make([]document.Path, 0, len(filters)) el := make(expr.LiteralExprList, 0, len(filters)) diff --git a/internal/query/statement/create.go b/internal/query/statement/create.go index abb90ae33..fc3d617f5 100644 --- a/internal/query/statement/create.go +++ b/internal/query/statement/create.go @@ -3,7 +3,6 @@ package statement import ( "math" - "github.com/genjidb/genji/document" errs "github.com/genjidb/genji/errors" 
"github.com/genjidb/genji/internal/database" "github.com/genjidb/genji/internal/stream" @@ -56,11 +55,11 @@ func (stmt *CreateTableStmt) Run(ctx *Context) (Result, error) { if tc.Unique { err = ctx.Catalog.CreateIndex(ctx.Tx, &database.IndexInfo{ TableName: stmt.Info.TableName, - Paths: []document.Path{tc.Path}, + Paths: tc.Paths, Unique: true, Owner: database.Owner{ TableName: stmt.Info.TableName, - Path: tc.Path, + Paths: tc.Paths, }, }) if err != nil { diff --git a/internal/query/statement/create_test.go b/internal/query/statement/create_test.go index 9a644fe69..4bf749ea1 100644 --- a/internal/query/statement/create_test.go +++ b/internal/query/statement/create_test.go @@ -14,12 +14,21 @@ import ( "github.com/stretchr/testify/require" ) -func parsePath(t testing.TB, str string) document.Path { +func ParseDocumentPath(t testing.TB, str string) document.Path { vp, err := parser.ParsePath(str) assert.NoError(t, err) return vp } +func ParseDocumentPaths(t testing.TB, str ...string) []document.Path { + var paths []document.Path + for _, s := range str { + paths = append(paths, ParseDocumentPath(t, s)) + } + + return paths +} + func TestCreateTable(t *testing.T) { tests := []struct { name string @@ -73,8 +82,8 @@ func TestCreateTable(t *testing.T) { assert.NoError(t, err) require.Equal(t, database.FieldConstraints{ - {Path: parsePath(t, "d"), Type: types.DoubleValue}, - {Path: parsePath(t, "b"), Type: types.BoolValue}, + {Path: testutil.ParseDocumentPath(t, "d"), Type: types.DoubleValue}, + {Path: testutil.ParseDocumentPath(t, "b"), Type: types.BoolValue}, }, tb.Info.FieldConstraints) assert.NoError(t, err) }) @@ -93,46 +102,46 @@ func TestCreateTable(t *testing.T) { assert.NoError(t, err) require.Equal(t, database.FieldConstraints{ - {Path: parsePath(t, "foo"), Type: types.DocumentValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo"), Type: types.DocumentValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.bar[1].hello"), - 
parsePath(t, "foo.a[1][2]"), + testutil.ParseDocumentPath(t, "foo.bar[1].hello"), + testutil.ParseDocumentPath(t, "foo.a[1][2]"), }}, - {Path: parsePath(t, "foo.bar"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.bar"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.bar[1].hello"), + testutil.ParseDocumentPath(t, "foo.bar[1].hello"), }}, - {Path: parsePath(t, "foo.bar[1]"), Type: types.DocumentValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.bar[1]"), Type: types.DocumentValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.bar[1].hello"), + testutil.ParseDocumentPath(t, "foo.bar[1].hello"), }}, - {Path: parsePath(t, "foo.bar[1].hello"), Type: types.BlobValue}, - {Path: parsePath(t, "foo.a"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.bar[1].hello"), Type: types.BlobValue}, + {Path: testutil.ParseDocumentPath(t, "foo.a"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.a[1][2]"), + testutil.ParseDocumentPath(t, "foo.a[1][2]"), }}, - {Path: parsePath(t, "foo.a[1]"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.a[1]"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.a[1][2]"), + testutil.ParseDocumentPath(t, "foo.a[1][2]"), }}, - {Path: parsePath(t, "foo.a[1][2]"), Type: types.TextValue, IsNotNull: true}, - {Path: parsePath(t, "bar"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.a[1][2]"), Type: types.TextValue, IsNotNull: true}, + {Path: testutil.ParseDocumentPath(t, "bar"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "bar[4][0].bat"), + testutil.ParseDocumentPath(t, "bar[4][0].bat"), }}, - {Path: parsePath(t, "bar[4]"), Type: types.ArrayValue, IsInferred: true, + {Path: 
testutil.ParseDocumentPath(t, "bar[4]"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "bar[4][0].bat"), + testutil.ParseDocumentPath(t, "bar[4][0].bat"), }}, - {Path: parsePath(t, "bar[4][0]"), Type: types.DocumentValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "bar[4][0]"), Type: types.DocumentValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "bar[4][0].bat"), + testutil.ParseDocumentPath(t, "bar[4][0].bat"), }}, - {Path: parsePath(t, "bar[4][0].bat"), Type: types.IntegerValue}, - {Path: parsePath(t, "b"), Type: types.BlobValue}, - {Path: parsePath(t, "t"), Type: types.TextValue}, - {Path: parsePath(t, "a"), Type: types.ArrayValue}, - {Path: parsePath(t, "d"), Type: types.DocumentValue}, + {Path: testutil.ParseDocumentPath(t, "bar[4][0].bat"), Type: types.IntegerValue}, + {Path: testutil.ParseDocumentPath(t, "b"), Type: types.BlobValue}, + {Path: testutil.ParseDocumentPath(t, "t"), Type: types.TextValue}, + {Path: testutil.ParseDocumentPath(t, "a"), Type: types.ArrayValue}, + {Path: testutil.ParseDocumentPath(t, "d"), Type: types.DocumentValue}, }, tb.Info.FieldConstraints) assert.NoError(t, err) }) @@ -151,49 +160,49 @@ func TestCreateTable(t *testing.T) { tb, err := db.Catalog.GetTable(tx, "test2") assert.NoError(t, err) - require.Equal(t, database.FieldConstraints{{Path: parsePath(t, "foo"), Type: types.DocumentValue, IsInferred: true, + require.Equal(t, database.FieldConstraints{{Path: testutil.ParseDocumentPath(t, "foo"), Type: types.DocumentValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.bar[1].hello"), - parsePath(t, "foo.a[1][2]"), + testutil.ParseDocumentPath(t, "foo.bar[1].hello"), + testutil.ParseDocumentPath(t, "foo.a[1][2]"), }}, - {Path: parsePath(t, "foo.bar"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.bar"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, 
"foo.bar[1].hello"), + testutil.ParseDocumentPath(t, "foo.bar[1].hello"), }}, - {Path: parsePath(t, "foo.bar[1]"), Type: types.DocumentValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.bar[1]"), Type: types.DocumentValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.bar[1].hello"), + testutil.ParseDocumentPath(t, "foo.bar[1].hello"), }}, - {Path: parsePath(t, "foo.bar[1].hello"), Type: types.BlobValue}, - {Path: parsePath(t, "foo.a"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.bar[1].hello"), Type: types.BlobValue}, + {Path: testutil.ParseDocumentPath(t, "foo.a"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.a[1][2]"), + testutil.ParseDocumentPath(t, "foo.a[1][2]"), }}, - {Path: parsePath(t, "foo.a[1]"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.a[1]"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "foo.a[1][2]"), + testutil.ParseDocumentPath(t, "foo.a[1][2]"), }}, - {Path: parsePath(t, "foo.a[1][2]"), Type: types.TextValue, IsNotNull: true}, - {Path: parsePath(t, "bar"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "foo.a[1][2]"), Type: types.TextValue, IsNotNull: true}, + {Path: testutil.ParseDocumentPath(t, "bar"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "bar[4][0].bat"), + testutil.ParseDocumentPath(t, "bar[4][0].bat"), }}, - {Path: parsePath(t, "bar[4]"), Type: types.ArrayValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "bar[4]"), Type: types.ArrayValue, IsInferred: true, InferredBy: []document.Path{ - parsePath(t, "bar[4][0].bat"), + testutil.ParseDocumentPath(t, "bar[4][0].bat"), }}, - {Path: parsePath(t, "bar[4][0]"), Type: types.DocumentValue, IsInferred: true, + {Path: testutil.ParseDocumentPath(t, "bar[4][0]"), Type: types.DocumentValue, IsInferred: 
true, InferredBy: []document.Path{ - parsePath(t, "bar[4][0].bat"), + testutil.ParseDocumentPath(t, "bar[4][0].bat"), }}, - {Path: parsePath(t, "bar[4][0].bat"), Type: types.IntegerValue}, - {Path: parsePath(t, "dp"), Type: types.DoubleValue}, - {Path: parsePath(t, "r"), Type: types.DoubleValue}, - {Path: parsePath(t, "b"), Type: types.IntegerValue}, - {Path: parsePath(t, "m"), Type: types.IntegerValue}, - {Path: parsePath(t, "eight"), Type: types.IntegerValue}, - {Path: parsePath(t, "ii"), Type: types.IntegerValue}, - {Path: parsePath(t, "c"), Type: types.TextValue}, + {Path: testutil.ParseDocumentPath(t, "bar[4][0].bat"), Type: types.IntegerValue}, + {Path: testutil.ParseDocumentPath(t, "dp"), Type: types.DoubleValue}, + {Path: testutil.ParseDocumentPath(t, "r"), Type: types.DoubleValue}, + {Path: testutil.ParseDocumentPath(t, "b"), Type: types.IntegerValue}, + {Path: testutil.ParseDocumentPath(t, "m"), Type: types.IntegerValue}, + {Path: testutil.ParseDocumentPath(t, "eight"), Type: types.IntegerValue}, + {Path: testutil.ParseDocumentPath(t, "ii"), Type: types.IntegerValue}, + {Path: testutil.ParseDocumentPath(t, "c"), Type: types.TextValue}, }, tb.Info.FieldConstraints) assert.NoError(t, err) }) @@ -205,9 +214,9 @@ func TestCreateTable(t *testing.T) { constraints database.FieldConstraints fails bool }{ - {"With default, no type and integer default", "CREATE TABLE test(foo DEFAULT 10)", database.FieldConstraints{{Path: parsePath(t, "foo"), DefaultValue: expr.Constraint(testutil.IntegerValue(10))}}, false}, - {"With default, double type and integer default", "CREATE TABLE test(foo DOUBLE DEFAULT 10)", database.FieldConstraints{{Path: parsePath(t, "foo"), Type: types.DoubleValue, DefaultValue: expr.Constraint(testutil.IntegerValue(10))}}, false}, - {"With default, some type and compatible default", "CREATE TABLE test(foo BOOL DEFAULT 10)", database.FieldConstraints{{Path: parsePath(t, "foo"), Type: types.BoolValue, DefaultValue: 
expr.Constraint(testutil.IntegerValue(10))}}, false}, + {"With default, no type and integer default", "CREATE TABLE test(foo DEFAULT 10)", database.FieldConstraints{{Path: testutil.ParseDocumentPath(t, "foo"), DefaultValue: expr.Constraint(testutil.IntegerValue(10))}}, false}, + {"With default, double type and integer default", "CREATE TABLE test(foo DOUBLE DEFAULT 10)", database.FieldConstraints{{Path: testutil.ParseDocumentPath(t, "foo"), Type: types.DoubleValue, DefaultValue: expr.Constraint(testutil.IntegerValue(10))}}, false}, + {"With default, some type and compatible default", "CREATE TABLE test(foo BOOL DEFAULT 10)", database.FieldConstraints{{Path: testutil.ParseDocumentPath(t, "foo"), Type: types.BoolValue, DefaultValue: expr.Constraint(testutil.IntegerValue(10))}}, false}, {"With default, some type and incompatible default", "CREATE TABLE test(foo BOOL DEFAULT 10.5)", nil, true}, } @@ -248,27 +257,27 @@ func TestCreateTable(t *testing.T) { require.Len(t, tb.Info.TableConstraints, 3) require.Equal(t, &database.FieldConstraint{ - Path: parsePath(t, "a"), + Path: testutil.ParseDocumentPath(t, "a"), Type: types.IntegerValue, }, tb.Info.FieldConstraints[0]) require.Equal(t, &database.FieldConstraint{ - Path: parsePath(t, "b"), + Path: testutil.ParseDocumentPath(t, "b"), Type: types.DoubleValue, }, tb.Info.FieldConstraints[1]) require.Equal(t, &database.TableConstraint{ - Path: parsePath(t, "a"), + Paths: testutil.ParseDocumentPaths(t, "a"), Unique: true, }, tb.Info.TableConstraints[0]) require.Equal(t, &database.TableConstraint{ - Path: parsePath(t, "b"), + Paths: testutil.ParseDocumentPaths(t, "b"), Unique: true, }, tb.Info.TableConstraints[1]) require.Equal(t, &database.TableConstraint{ - Path: parsePath(t, "c"), + Paths: testutil.ParseDocumentPaths(t, "c"), Unique: true, }, tb.Info.TableConstraints[2]) diff --git a/internal/query/statement/reindex.go b/internal/query/statement/reindex.go index b586a6ef2..fdf07d2c9 100644 --- 
a/internal/query/statement/reindex.go +++ b/internal/query/statement/reindex.go @@ -1,8 +1,6 @@ package statement import ( - "fmt" - errs "github.com/genjidb/genji/errors" "github.com/genjidb/genji/internal/database" "github.com/genjidb/genji/internal/stream" @@ -43,7 +41,6 @@ func (stmt ReIndexStmt) Prepare(ctx *Context) (Statement, error) { var streams []*stream.Stream for _, indexName := range indexNames { - fmt.Println("indexName", indexName) idx, err := ctx.Catalog.GetIndex(ctx.Tx, indexName) if err != nil { return nil, err diff --git a/internal/query/statement/reindex_test.go b/internal/query/statement/reindex_test.go index 6039187be..fe11bdf75 100644 --- a/internal/query/statement/reindex_test.go +++ b/internal/query/statement/reindex_test.go @@ -1,12 +1,12 @@ package statement_test import ( - "fmt" "testing" "github.com/genjidb/genji/internal/testutil" "github.com/genjidb/genji/internal/testutil/assert" "github.com/genjidb/genji/internal/tree" + "github.com/genjidb/genji/types" "github.com/stretchr/testify/require" ) @@ -45,7 +45,6 @@ func TestReIndex(t *testing.T) { // truncate all indexes c := db.Catalog for _, idxName := range c.ListIndexes("") { - fmt.Println("truncating", idxName) idx, err := c.GetIndex(tx, idxName) assert.NoError(t, err) err = idx.Truncate() @@ -74,7 +73,7 @@ func TestReIndex(t *testing.T) { } i := 0 - err = idx.Iterate(nil, false, func(key tree.Key) error { + err = idx.Tree.Iterate(nil, false, func(tree.Key, types.Value) error { i++ return nil }) diff --git a/internal/query/statement/select_test.go b/internal/query/statement/select_test.go index 8f0dbaf66..a17f8a4f0 100644 --- a/internal/query/statement/select_test.go +++ b/internal/query/statement/select_test.go @@ -30,7 +30,7 @@ func TestSelectStmt(t *testing.T) { // {"No table, BitwiseAnd", "SELECT 10 & 6", false, `[{"10 & 6":2}]`, nil}, // {"No table, BitwiseOr", "SELECT 10 | 6", false, `[{"10 | 6":14}]`, nil}, // {"No table, BitwiseXor", "SELECT 10 ^ 6", false, `[{"10 ^ 
6":12}]`, nil}, - // {"No table, function pk()", "SELECT pk()", false, `[{"pk()":null}]`, nil}, + // {"No table, function pk()", "SELECT pk()", false, `[{"pk()":[null]}]`, nil}, - // {"No table, field", "SELECT a", true, ``, nil}, - // {"No table, wildcard", "SELECT *", true, ``, nil}, - // {"No table, document", "SELECT {a: 1, b: 2 + 1}", false, `[{"{a: 1, b: 2 + 1}":{"a":1,"b":3}}]`, nil}, @@ -78,12 +78,12 @@ func TestSelectStmt(t *testing.T) { {"With order by pk desc", "SELECT * FROM test ORDER BY k DESC", false, `[{"k":3,"height":100,"weight":200},{"k":2,"color":"blue","size":10,"weight":100},{"k":1,"color":"red","size":10,"shape":"square"}]`, nil}, {"With order by and where", "SELECT * FROM test WHERE color != 'blue' ORDER BY color DESC LIMIT 1", false, `[{"k":1,"color":"red","size":10,"shape":"square"}]`, nil}, {"With limit", "SELECT * FROM test WHERE size = 10 LIMIT 1", false, `[{"k":1,"color":"red","size":10,"shape":"square"}]`, nil}, - {"With offset", "SELECT *, pk() FROM test WHERE size = 10 OFFSET 1", false, `[{"pk()":2,"color":"blue","size":10,"weight":100,"k":2}]`, nil}, + {"With offset", "SELECT *, pk() FROM test WHERE size = 10 OFFSET 1", false, `[{"pk()":[2],"color":"blue","size":10,"weight":100,"k":2}]`, nil}, {"With limit then offset", "SELECT * FROM test WHERE size = 10 LIMIT 1 OFFSET 1", false, `[{"k":2,"color":"blue","size":10,"weight":100,"k":2}]`, nil}, {"With offset then limit", "SELECT * FROM test WHERE size = 10 OFFSET 1 LIMIT 1", true, "", nil}, {"With positional params", "SELECT * FROM test WHERE color = ? 
OR height = ?", false, `[{"k":1,"color":"red","size":10,"shape":"square"},{"k":3,"height":100,"weight":200}]`, []interface{}{"red", 100}}, {"With named params", "SELECT * FROM test WHERE color = $a OR height = $d", false, `[{"k":1,"color":"red","size":10,"shape":"square"},{"k":3,"height":100,"weight":200}]`, []interface{}{sql.Named("a", "red"), sql.Named("d", 100)}}, - {"With pk()", "SELECT pk(), color FROM test", false, `[{"pk()":1,"color":"red"},{"pk()":2,"color":"blue"},{"pk()":3,"color":null}]`, []interface{}{sql.Named("a", "red"), sql.Named("d", 100)}}, + {"With pk()", "SELECT pk(), color FROM test", false, `[{"pk()":[1],"color":"red"},{"pk()":[2],"color":"blue"},{"pk()":[3],"color":null}]`, []interface{}{sql.Named("a", "red"), sql.Named("d", 100)}}, {"With pk in cond, gt", "SELECT * FROM test WHERE k > 0 AND weight = 100", false, `[{"k":2,"color":"blue","size":10,"weight":100,"k":2}]`, nil}, {"With pk in cond, =", "SELECT * FROM test WHERE k = 2.0 AND weight = 100", false, `[{"k":2,"color":"blue","size":10,"weight":100,"k":2}]`, nil}, {"With count", "SELECT COUNT(k) FROM test", false, `[{"COUNT(k)": 3}]`, nil}, diff --git a/internal/sql/parser/create.go b/internal/sql/parser/create.go index da00c9c5a..e4b211d8f 100644 --- a/internal/sql/parser/create.go +++ b/internal/sql/parser/create.go @@ -3,6 +3,7 @@ package parser import ( "math" + "github.com/genjidb/genji/document" "github.com/genjidb/genji/internal/database" "github.com/genjidb/genji/internal/expr" "github.com/genjidb/genji/internal/query/statement" @@ -145,7 +146,7 @@ LOOP: return err } - err = info.TableConstraints.AddPrimaryKey(info.TableName, fc.Path) + err = info.TableConstraints.AddPrimaryKey(info.TableName, []document.Path{fc.Path}) if err != nil { return err } @@ -214,7 +215,7 @@ LOOP: } } case scanner.UNIQUE: - info.TableConstraints.AddUnique(fc.Path) + info.TableConstraints.AddUnique([]document.Path{fc.Path}) addedTc++ case scanner.CHECK: // Parse "(" @@ -259,46 +260,36 @@ func (p *Parser) 
parseTableConstraint(stmt *statement.CreateTableStmt) (bool, er switch tok { case scanner.PRIMARY: // Parse "KEY (" - err = p.parseTokens(scanner.KEY, scanner.LPAREN) + err = p.parseTokens(scanner.KEY) if err != nil { return false, err } - primaryKeyPath, err := p.parsePath() + paths, err := p.parsePathList() if err != nil { return false, err } - - // Parse ")" - err = p.parseTokens(scanner.RPAREN) - if err != nil { - return false, err + if len(paths) == 0 { + tok, pos, lit := p.ScanIgnoreWhitespace() + return false, newParseError(scanner.Tokstr(tok, lit), []string{"PATHS"}, pos) } - if err := stmt.Info.TableConstraints.AddPrimaryKey(stmt.Info.TableName, primaryKeyPath); err != nil { + if err := stmt.Info.TableConstraints.AddPrimaryKey(stmt.Info.TableName, paths); err != nil { return false, err } return true, nil case scanner.UNIQUE: - // Parse "(" - err = p.parseTokens(scanner.LPAREN) - if err != nil { - return false, err - } - - uniquePath, err := p.parsePath() + paths, err := p.parsePathList() if err != nil { return false, err } - - // Parse ")" - err = p.parseTokens(scanner.RPAREN) - if err != nil { - return false, err + if len(paths) == 0 { + tok, pos, lit := p.ScanIgnoreWhitespace() + return false, newParseError(scanner.Tokstr(tok, lit), []string{"PATHS"}, pos) } - stmt.Info.TableConstraints.AddUnique(uniquePath) + stmt.Info.TableConstraints.AddUnique(paths) return true, nil case scanner.CHECK: // Parse "(" diff --git a/internal/sql/parser/create_test.go b/internal/sql/parser/create_test.go index ad2fea2e2..cd7a33ae7 100644 --- a/internal/sql/parser/create_test.go +++ b/internal/sql/parser/create_test.go @@ -30,10 +30,10 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, }, TableConstraints: 
[]*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), PrimaryKey: true}, }, }, }, false}, @@ -43,7 +43,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, }, }, }, false}, @@ -52,7 +52,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), IsNotNull: true}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), IsNotNull: true}, }, }, }, false}, @@ -61,7 +61,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), DefaultValue: expr.Constraint(expr.LiteralValue{Value: types.NewTextValue("10")})}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), DefaultValue: expr.Constraint(expr.LiteralValue{Value: types.NewTextValue("10")})}, }, }, }, false}, @@ -70,7 +70,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), DefaultValue: expr.Constraint(expr.LiteralValue{Value: types.NewTextValue("10")})}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), DefaultValue: expr.Constraint(expr.LiteralValue{Value: types.NewTextValue("10")})}, }, }, }, false}, @@ -83,7 +83,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Unique: true}, + {Paths: 
testutil.ParseDocumentPaths(t, "foo"), Unique: true}, }, }, }, false}, @@ -93,7 +93,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Unique: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), Unique: true}, }, }, }, false}, @@ -102,7 +102,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "b")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "b")), Type: types.IntegerValue}, }, TableConstraints: []*database.TableConstraint{ {Name: "test_check", Check: expr.Constraint(testutil.ParseExpr(t, "a > 10"))}, @@ -118,7 +118,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue, IsNotNull: true}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue, IsNotNull: true}, }, }, }, false}, @@ -127,10 +127,10 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue, IsNotNull: true}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue, IsNotNull: true}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), PrimaryKey: true}, }, }, }, false}, @@ -139,10 +139,10 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: 
types.IntegerValue, IsNotNull: true}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue, IsNotNull: true}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), PrimaryKey: true}, }, }, }, false}, @@ -151,12 +151,12 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "bar")), Type: types.IntegerValue, IsNotNull: true}, - {Path: document.Path(testutil.ParsePath(t, "baz[4][1].bat")), Type: types.TextValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "bar")), Type: types.IntegerValue, IsNotNull: true}, + {Path: document.Path(testutil.ParseDocumentPath(t, "baz[4][1].bat")), Type: types.TextValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), PrimaryKey: true}, }, }, }, false}, @@ -165,11 +165,11 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "bar")), IsNotNull: true}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "bar")), IsNotNull: true}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), PrimaryKey: true}, }, }, }, false}, @@ -178,10 +178,10 @@ func 
TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "bar")), PrimaryKey: true}, + {Paths: testutil.ParseDocumentPaths(t, "bar"), PrimaryKey: true}, }, }, }, false}, @@ -193,11 +193,11 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "bar")), IsNotNull: true}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "bar")), IsNotNull: true}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Unique: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), Unique: true}, }, }, }, false}, @@ -206,10 +206,10 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "bar")), Unique: true}, + {Paths: testutil.ParseDocumentPaths(t, "bar"), Unique: true}, }, }, }, false}, @@ -218,10 +218,10 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Type: types.IntegerValue}, + {Path: 
document.Path(testutil.ParseDocumentPath(t, "foo")), Type: types.IntegerValue}, }, TableConstraints: []*database.TableConstraint{ - {Path: document.Path(testutil.ParsePath(t, "foo")), Unique: true}, + {Paths: testutil.ParseDocumentPaths(t, "foo"), Unique: true}, }, }, }, false}, @@ -233,8 +233,8 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "d")), Type: types.DoubleValue}, - {Path: document.Path(testutil.ParsePath(t, "b")), Type: types.BoolValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "d")), Type: types.DoubleValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "b")), Type: types.BoolValue}, }, }, }, false}, @@ -244,12 +244,12 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "i")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "b")), Type: types.BlobValue}, - {Path: document.Path(testutil.ParsePath(t, "byt")), Type: types.BlobValue}, - {Path: document.Path(testutil.ParsePath(t, "t")), Type: types.TextValue}, - {Path: document.Path(testutil.ParsePath(t, "a")), Type: types.ArrayValue}, - {Path: document.Path(testutil.ParsePath(t, "d")), Type: types.DocumentValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "i")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "b")), Type: types.BlobValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "byt")), Type: types.BlobValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "t")), Type: types.TextValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "a")), Type: types.ArrayValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "d")), Type: types.DocumentValue}, }, }, }, false}, @@ -259,13 +259,13 @@ func TestParserCreateTable(t *testing.T) { 
Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "i")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "ii")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "ei")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "m")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "s")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "b")), Type: types.IntegerValue}, - {Path: document.Path(testutil.ParsePath(t, "t")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "i")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "ii")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "ei")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "m")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "s")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "b")), Type: types.IntegerValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "t")), Type: types.IntegerValue}, }, }, }, false}, @@ -275,9 +275,9 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "dp")), Type: types.DoubleValue}, - {Path: document.Path(testutil.ParsePath(t, "r")), Type: types.DoubleValue}, - {Path: document.Path(testutil.ParsePath(t, "d")), Type: types.DoubleValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "dp")), Type: types.DoubleValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "r")), Type: types.DoubleValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "d")), Type: types.DoubleValue}, }, }, }, false}, @@ -287,9 +287,9 @@ func TestParserCreateTable(t 
*testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "v")), Type: types.TextValue}, - {Path: document.Path(testutil.ParsePath(t, "c")), Type: types.TextValue}, - {Path: document.Path(testutil.ParsePath(t, "t")), Type: types.TextValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "v")), Type: types.TextValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "c")), Type: types.TextValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "t")), Type: types.TextValue}, }, }, }, false}, @@ -299,7 +299,7 @@ func TestParserCreateTable(t *testing.T) { Info: database.TableInfo{ TableName: "test", FieldConstraints: []*database.FieldConstraint{ - {Path: document.Path(testutil.ParsePath(t, "v")), Type: types.TextValue}, + {Path: document.Path(testutil.ParseDocumentPath(t, "v")), Type: types.TextValue}, }, }, }, true}, @@ -328,18 +328,18 @@ func TestParserCreateIndex(t *testing.T) { }{ {"Basic", "CREATE INDEX idx ON test (foo)", &statement.CreateIndexStmt{ Info: database.IndexInfo{ - IndexName: "idx", TableName: "test", Paths: []document.Path{document.Path(testutil.ParsePath(t, "foo"))}, + IndexName: "idx", TableName: "test", Paths: []document.Path{document.Path(testutil.ParseDocumentPath(t, "foo"))}, }}, false}, {"If not exists", "CREATE INDEX IF NOT EXISTS idx ON test (foo.bar[1])", &statement.CreateIndexStmt{ Info: database.IndexInfo{ - IndexName: "idx", TableName: "test", Paths: []document.Path{document.Path(testutil.ParsePath(t, "foo.bar[1]"))}, + IndexName: "idx", TableName: "test", Paths: []document.Path{document.Path(testutil.ParseDocumentPath(t, "foo.bar[1]"))}, }, IfNotExists: true}, false}, {"Unique", "CREATE UNIQUE INDEX IF NOT EXISTS idx ON test (foo[3].baz)", &statement.CreateIndexStmt{ Info: database.IndexInfo{ - IndexName: "idx", TableName: "test", Paths: []document.Path{document.Path(testutil.ParsePath(t, "foo[3].baz"))}, Unique: true, + 
IndexName: "idx", TableName: "test", Paths: []document.Path{document.Path(testutil.ParseDocumentPath(t, "foo[3].baz"))}, Unique: true, }, IfNotExists: true}, false}, {"No name", "CREATE UNIQUE INDEX ON test (foo[3].baz)", &statement.CreateIndexStmt{ - Info: database.IndexInfo{TableName: "test", Paths: []document.Path{document.Path(testutil.ParsePath(t, "foo[3].baz"))}, Unique: true}}, false}, + Info: database.IndexInfo{TableName: "test", Paths: []document.Path{document.Path(testutil.ParseDocumentPath(t, "foo[3].baz"))}, Unique: true}}, false}, {"No name with IF NOT EXISTS", "CREATE UNIQUE INDEX IF NOT EXISTS ON test (foo[3].baz)", nil, true}, {"More than 1 path", "CREATE INDEX idx ON test (foo, bar)", &statement.CreateIndexStmt{ @@ -347,8 +347,8 @@ func TestParserCreateIndex(t *testing.T) { IndexName: "idx", TableName: "test", Paths: []document.Path{ - document.Path(testutil.ParsePath(t, "foo")), - document.Path(testutil.ParsePath(t, "bar")), + document.Path(testutil.ParseDocumentPath(t, "foo")), + document.Path(testutil.ParseDocumentPath(t, "bar")), }, }, }, diff --git a/internal/stream/aggregate.go b/internal/stream/aggregate.go index 71ab1085e..1d275e4ce 100644 --- a/internal/stream/aggregate.go +++ b/internal/stream/aggregate.go @@ -31,17 +31,7 @@ func (op *GroupAggregateOperator) Iterate(in *environment.Environment, f func(ou groupExpr = stringutil.Sprintf("%s", op.E) } - // var tableName string - err := op.Prev.Iterate(in, func(out *environment.Environment) error { - // if tableName == "" { - // v, ok := out.Get(environment.TableKey) - // if !ok { - // return errors.New("missing table name") - // } - // tableName = v.String() - // } - if op.E == nil { if ga == nil { ga = newGroupAggregator(nil, groupExpr, op.Builders) diff --git a/internal/stream/index.go b/internal/stream/index.go index f414e7d97..b51c87da7 100644 --- a/internal/stream/index.go +++ b/internal/stream/index.go @@ -1,6 +1,9 @@ package stream import ( + "strconv" + "strings" + errs 
"github.com/genjidb/genji/errors" "github.com/genjidb/genji/internal/environment" "github.com/genjidb/genji/internal/errors" @@ -212,3 +215,109 @@ func (op *IndexDeleteOperator) Iterate(in *environment.Environment, fn func(out func (op *IndexDeleteOperator) String() string { return stringutil.Sprintf("indexDelete(%q)", op.indexName) } + +// A IndexScanOperator iterates over the documents of an index. +type IndexScanOperator struct { + baseOperator + + // IndexName references the index that will be used to perform the scan + IndexName string + // Ranges defines the boundaries of the scan, each corresponding to one value of the group of values + // being indexed in the case of a composite index. + Ranges Ranges + // Reverse indicates the direction used to traverse the index. + Reverse bool +} + +// IndexScan creates an iterator that iterates over each document of the given table. +func IndexScan(name string, ranges ...Range) *IndexScanOperator { + if len(ranges) == 0 { + panic("IndexScan: no ranges specified") + } + return &IndexScanOperator{IndexName: name, Ranges: ranges} +} + +// IndexScanReverse creates an iterator that iterates over each document of the given table in reverse order. +func IndexScanReverse(name string, ranges ...Range) *IndexScanOperator { + return &IndexScanOperator{IndexName: name, Ranges: ranges, Reverse: true} +} + +func (it *IndexScanOperator) String() string { + var s strings.Builder + + s.WriteString("indexScan") + if it.Reverse { + s.WriteString("Reverse") + } + + s.WriteRune('(') + + s.WriteString(strconv.Quote(it.IndexName)) + if len(it.Ranges) > 0 { + s.WriteString(", ") + s.WriteString(it.Ranges.String()) + } + + s.WriteString(")") + + return s.String() +} + +// Iterate over the documents of the table. Each document is stored in the environment +// that is passed to the fn function, using SetCurrentValue. 
+func (it *IndexScanOperator) Iterate(in *environment.Environment, fn func(out *environment.Environment) error) error { + catalog := in.GetCatalog() + tx := in.GetTx() + + index, err := catalog.GetIndex(tx, it.IndexName) + if err != nil { + return err + } + + info, err := catalog.GetIndexInfo(it.IndexName) + if err != nil { + return err + } + + table, err := catalog.GetTable(tx, info.TableName) + if err != nil { + return err + } + + var newEnv environment.Environment + newEnv.SetOuter(in) + newEnv.Set(environment.TableKey, types.NewTextValue(table.Info.Name())) + + ranges, err := it.Ranges.Eval(in) + if err != nil || len(ranges) != len(it.Ranges) { + return err + } + + ptr := DocumentPointer{ + Table: table, + } + newEnv.SetDocument(&ptr) + + for _, rng := range ranges { + r, err := rng.ToTreeRange(&table.Info.FieldConstraints, info.Paths) + if err != nil { + return err + } + + err = index.IterateOnRange(r, it.Reverse, func(key tree.Key) error { + ptr.key = key + ptr.Doc = nil + newEnv.Set(environment.DocPKKey, types.NewBlobValue(key)) + + return fn(&newEnv) + }) + if errors.Is(err, ErrStreamClosed) { + err = nil + } + if err != nil { + return err + } + } + + return nil +} diff --git a/internal/stream/scan_test.go b/internal/stream/index_test.go similarity index 70% rename from internal/stream/scan_test.go rename to internal/stream/index_test.go index 0aa3ee9a8..d439d8795 100644 --- a/internal/stream/scan_test.go +++ b/internal/stream/index_test.go @@ -6,8 +6,6 @@ import ( "github.com/genjidb/genji/document" "github.com/genjidb/genji/internal/database" "github.com/genjidb/genji/internal/environment" - "github.com/genjidb/genji/internal/expr" - "github.com/genjidb/genji/internal/sql/parser" "github.com/genjidb/genji/internal/stream" "github.com/genjidb/genji/internal/testutil" "github.com/genjidb/genji/internal/testutil/assert" @@ -15,268 +13,6 @@ import ( "github.com/stretchr/testify/require" ) -func TestExpressions(t *testing.T) { - tests := []struct { - e 
expr.Expr - output types.Document - fails bool - }{ - {parser.MustParseExpr("3 + 4"), nil, true}, - {parser.MustParseExpr("{a: 3 + 4}"), testutil.MakeDocument(t, `{"a": 7}`), false}, - } - - for _, test := range tests { - t.Run(test.e.String(), func(t *testing.T) { - s := stream.New(stream.Expressions(test.e)) - - err := s.Iterate(new(environment.Environment), func(env *environment.Environment) error { - d, ok := env.GetDocument() - require.True(t, ok) - require.Equal(t, d, test.output) - return nil - }) - if test.fails { - assert.Error(t, err) - } else { - assert.NoError(t, err) - } - }) - } - - t.Run("String", func(t *testing.T) { - require.Equal(t, stream.Expressions(parser.MustParseExpr("1 + 1"), parser.MustParseExpr("pk()")).String(), "exprs(1 + 1, pk())") - }) -} - -func TestSeqScan(t *testing.T) { - tests := []struct { - name string - docsInTable, expected testutil.Docs - reverse bool - fails bool - }{ - {name: "empty"}, - { - "ok", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - false, - false, - }, - { - "reverse", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), - true, - false, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - db, tx, cleanup := testutil.NewTestTx(t) - defer cleanup() - - testutil.MustExec(t, db, tx, "CREATE TABLE test (a INTEGER)") - - for _, doc := range test.docsInTable { - testutil.MustExec(t, db, tx, "INSERT INTO test VALUES ?", environment.Param{Value: doc}) - } - - op := stream.SeqScan("test") - op.Reverse = test.reverse - var in environment.Environment - in.Tx = tx - in.Catalog = db.Catalog - - var i int - var got testutil.Docs - err := op.Iterate(&in, func(env *environment.Environment) error { - d, ok := env.GetDocument() - require.True(t, ok) - var fb document.FieldBuffer - err := fb.Copy(d) - assert.NoError(t, err) - got = append(got, &fb) - i++ - return nil - }) - if test.fails 
{ - assert.Error(t, err) - } else { - assert.NoError(t, err) - require.Equal(t, len(test.expected), i) - test.expected.RequireEqual(t, got) - } - }) - } - - t.Run("String", func(t *testing.T) { - require.Equal(t, `seqScan(test)`, stream.SeqScan("test").String()) - }) -} - -func TestPkScan(t *testing.T) { - tests := []struct { - name string - docsInTable, expected testutil.Docs - ranges stream.Ranges - reverse bool - fails bool - }{ - { - "max:2", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - stream.Ranges{ - {Max: testutil.ExprList(t, `[2]`)}, - }, - false, false, - }, - { - "max:1", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`), - stream.Ranges{ - {Max: testutil.ExprList(t, `[1]`)}, - }, - false, false, - }, - { - "max:1.1", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`), - stream.Ranges{ - {Max: testutil.ExprList(t, `[1.1]`)}, - }, - false, false, - }, - { - "min", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - stream.Ranges{ - {Min: testutil.ExprList(t, `[1]`)}, - }, - false, false, - }, - { - "min:0.5", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - stream.Ranges{ - {Min: testutil.ExprList(t, `[0.5]`)}, - }, - false, false, - }, - { - "min/max", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - stream.Ranges{ - {Min: testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`)}, - }, - false, false, - }, - { - "min/max:0.5/1.5", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - stream.Ranges{ - {Min: testutil.ExprList(t, `[0.5]`), Max: testutil.ExprList(t, `[1.5]`)}, - }, - false, false, - }, - { - "reverse/max", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - 
testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), - stream.Ranges{ - {Max: testutil.ExprList(t, `[2]`)}, - }, - true, false, - }, - { - "reverse/min", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), - stream.Ranges{ - {Min: testutil.ExprList(t, `[1]`)}, - }, - true, false, - }, - { - "reverse/min/max", - testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), - testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), - stream.Ranges{ - {Min: testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`)}, - }, - true, false, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - db, tx, cleanup := testutil.NewTestTx(t) - defer cleanup() - - testutil.MustExec(t, db, tx, "CREATE TABLE test (a INTEGER NOT NULL PRIMARY KEY)") - - for _, doc := range test.docsInTable { - testutil.MustExec(t, db, tx, "INSERT INTO test VALUES ?", environment.Param{Value: doc}) - } - - op := stream.PkScan("test", test.ranges...) - op.Reverse = test.reverse - var env environment.Environment - env.Tx = tx - env.Catalog = db.Catalog - env.Params = []environment.Param{{Name: "foo", Value: 1}} - - var i int - var got testutil.Docs - err := op.Iterate(&env, func(env *environment.Environment) error { - d, ok := env.GetDocument() - require.True(t, ok) - var fb document.FieldBuffer - - err := fb.Copy(d) - assert.NoError(t, err) - - got = append(got, &fb) - v, err := env.GetParamByName("foo") - assert.NoError(t, err) - require.Equal(t, types.NewIntegerValue(1), v) - i++ - return nil - }) - if test.fails { - assert.Error(t, err) - } else { - assert.NoError(t, err) - require.Equal(t, len(test.expected), i) - test.expected.RequireEqual(t, got) - } - }) - } - - t.Run("String", func(t *testing.T) { - require.Equal(t, `pkScan("test", [1, 2])`, stream.PkScan("test", stream.Range{ - Min: testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`), - }).String()) - - op := stream.PkScan("test", - stream.Range{Min: 
testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`), Exclusive: true}, - stream.Range{Min: testutil.ExprList(t, `[10]`), Exact: true}, - stream.Range{Min: testutil.ExprList(t, `[100]`)}, - ) - op.Reverse = true - - require.Equal(t, `pkScanReverse("test", [1, 2, true], 10, [100, -1])`, op.String()) - }) -} - func TestIndexScan(t *testing.T) { testIndexScan(t, func(db *database.Database, tx *database.Transaction, name string, indexOn string, reverse bool, ranges ...stream.Range) stream.Operator { t.Helper() diff --git a/internal/stream/project.go b/internal/stream/project.go index ba6b53830..0a99493af 100644 --- a/internal/stream/project.go +++ b/internal/stream/project.go @@ -38,9 +38,6 @@ func (op *ProjectOperator) Iterate(in *environment.Environment, f func(out *envi return op.Prev.Iterate(in, func(env *environment.Environment) error { mask.Env = env mask.Exprs = op.Exprs - // if d, ok := env.GetDocument(); ok { - // newEnv.Set(environment.OriginalDocumentKey, types.NewDocumentValue(d)) - // } newEnv.SetDocument(&mask) newEnv.SetOuter(env) return f(&newEnv) diff --git a/internal/stream/scan.go b/internal/stream/scan.go index 8bc7da7f7..553d7de49 100644 --- a/internal/stream/scan.go +++ b/internal/stream/scan.go @@ -1,7 +1,6 @@ package stream import ( - "strconv" "strings" "github.com/genjidb/genji/internal/database" @@ -9,7 +8,6 @@ import ( "github.com/genjidb/genji/internal/errors" "github.com/genjidb/genji/internal/expr" "github.com/genjidb/genji/internal/stringutil" - "github.com/genjidb/genji/internal/tree" "github.com/genjidb/genji/types" ) @@ -147,229 +145,3 @@ func (op *ExprsOperator) String() string { return sb.String() } - -// A SeqScanOperator iterates over the documents of a table. -type SeqScanOperator struct { - baseOperator - TableName string - Reverse bool -} - -// SeqScan creates an iterator that iterates over each document of the given table. 
-func SeqScan(tableName string) *SeqScanOperator { - return &SeqScanOperator{TableName: tableName} -} - -// SeqScanReverse creates an iterator that iterates over each document of the given table in reverse order. -func SeqScanReverse(tableName string) *SeqScanOperator { - return &SeqScanOperator{TableName: tableName, Reverse: true} -} - -func (it *SeqScanOperator) Iterate(in *environment.Environment, fn func(out *environment.Environment) error) error { - table, err := in.GetCatalog().GetTable(in.GetTx(), it.TableName) - if err != nil { - return err - } - - var newEnv environment.Environment - newEnv.SetOuter(in) - newEnv.Set(environment.TableKey, types.NewTextValue(it.TableName)) - - return table.Iterate(nil, it.Reverse, func(key tree.Key, d types.Document) error { - newEnv.Set(environment.DocPKKey, types.NewBlobValue(key)) - newEnv.SetDocument(d) - return fn(&newEnv) - }) -} - -func (it *SeqScanOperator) String() string { - if !it.Reverse { - return stringutil.Sprintf("seqScan(%s)", it.TableName) - } - return stringutil.Sprintf("seqScanReverse(%s)", it.TableName) -} - -// A PkScanOperator iterates over the documents of a table. -type PkScanOperator struct { - baseOperator - TableName string - Ranges Ranges - Reverse bool -} - -// PkScan creates an iterator that iterates over each document of the given table. -func PkScan(tableName string, ranges ...Range) *PkScanOperator { - return &PkScanOperator{TableName: tableName, Ranges: ranges} -} - -// PkScanReverse creates an iterator that iterates over each document of the given table in reverse order. 
-func PkScanReverse(tableName string, ranges ...Range) *PkScanOperator { - return &PkScanOperator{TableName: tableName, Ranges: ranges, Reverse: true} -} - -func (it *PkScanOperator) String() string { - var s strings.Builder - - s.WriteString("pkScan") - if it.Reverse { - s.WriteString("Reverse") - } - - s.WriteRune('(') - - s.WriteString(strconv.Quote(it.TableName)) - if len(it.Ranges) > 0 { - s.WriteString(", ") - for i, r := range it.Ranges { - s.WriteString(r.String()) - if i+1 < len(it.Ranges) { - s.WriteString(", ") - } - } - } - - s.WriteString(")") - - return s.String() -} - -// Iterate over the documents of the table. Each document is stored in the environment -// that is passed to the fn function, using SetCurrentValue. -func (it *PkScanOperator) Iterate(in *environment.Environment, fn func(out *environment.Environment) error) error { - var newEnv environment.Environment - newEnv.SetOuter(in) - newEnv.Set(environment.TableKey, types.NewTextValue(it.TableName)) - - table, err := in.GetCatalog().GetTable(in.GetTx(), it.TableName) - if err != nil { - return err - } - - ranges, err := it.Ranges.Eval(in) - if err != nil { - return err - } - - for _, rng := range ranges { - err = table.IterateOnRange(rng, it.Reverse, func(key tree.Key, d types.Document) error { - newEnv.Set(environment.DocPKKey, types.NewBlobValue(key)) - newEnv.SetDocument(d) - - return fn(&newEnv) - }) - if errors.Is(err, ErrStreamClosed) { - err = nil - } - if err != nil { - return err - } - } - - return nil -} - -// A IndexScanOperator iterates over the documents of an index. -type IndexScanOperator struct { - baseOperator - - // IndexName references the index that will be used to perform the scan - IndexName string - // Ranges defines the boundaries of the scan, each corresponding to one value of the group of values - // being indexed in the case of a composite index. - Ranges Ranges - // Reverse indicates the direction used to traverse the index. 
- Reverse bool -} - -// IndexScan creates an iterator that iterates over each document of the given table. -func IndexScan(name string, ranges ...Range) *IndexScanOperator { - if len(ranges) == 0 { - panic("IndexScan: no ranges specified") - } - return &IndexScanOperator{IndexName: name, Ranges: ranges} -} - -// IndexScanReverse creates an iterator that iterates over each document of the given table in reverse order. -func IndexScanReverse(name string, ranges ...Range) *IndexScanOperator { - return &IndexScanOperator{IndexName: name, Ranges: ranges, Reverse: true} -} - -func (it *IndexScanOperator) String() string { - var s strings.Builder - - s.WriteString("indexScan") - if it.Reverse { - s.WriteString("Reverse") - } - - s.WriteRune('(') - - s.WriteString(strconv.Quote(it.IndexName)) - if len(it.Ranges) > 0 { - s.WriteString(", ") - s.WriteString(it.Ranges.String()) - } - - s.WriteString(")") - - return s.String() -} - -// Iterate over the documents of the table. Each document is stored in the environment -// that is passed to the fn function, using SetCurrentValue. 
-func (it *IndexScanOperator) Iterate(in *environment.Environment, fn func(out *environment.Environment) error) error { - catalog := in.GetCatalog() - tx := in.GetTx() - - index, err := catalog.GetIndex(tx, it.IndexName) - if err != nil { - return err - } - - info, err := catalog.GetIndexInfo(it.IndexName) - if err != nil { - return err - } - - table, err := catalog.GetTable(tx, info.TableName) - if err != nil { - return err - } - - var newEnv environment.Environment - newEnv.SetOuter(in) - newEnv.Set(environment.TableKey, types.NewTextValue(table.Info.Name())) - - ranges, err := it.Ranges.Eval(in) - if err != nil || len(ranges) != len(it.Ranges) { - return err - } - - ptr := DocumentPointer{ - Table: table, - } - newEnv.SetDocument(&ptr) - - for _, rng := range ranges { - r, err := rng.ToTreeRange(&table.Info.FieldConstraints, info.Paths) - if err != nil { - return err - } - - err = index.IterateOnRange(r, it.Reverse, func(key tree.Key) error { - ptr.key = key - ptr.Doc = nil - newEnv.Set(environment.DocPKKey, types.NewBlobValue(key)) - - return fn(&newEnv) - }) - if errors.Is(err, ErrStreamClosed) { - err = nil - } - if err != nil { - return err - } - } - - return nil -} diff --git a/internal/stream/table.go b/internal/stream/table.go index 0f40b459c..9ebf85be4 100644 --- a/internal/stream/table.go +++ b/internal/stream/table.go @@ -1,10 +1,14 @@ package stream import ( + "strconv" + "strings" + "github.com/genjidb/genji/internal/database" "github.com/genjidb/genji/internal/environment" "github.com/genjidb/genji/internal/errors" "github.com/genjidb/genji/internal/stringutil" + "github.com/genjidb/genji/internal/tree" "github.com/genjidb/genji/types" ) @@ -192,3 +196,123 @@ func (op *TableDeleteOperator) Iterate(in *environment.Environment, f func(out * func (op *TableDeleteOperator) String() string { return stringutil.Sprintf("tableDelete('%s')", op.Name) } + +// A SeqScanOperator iterates over the documents of a table. 
+type SeqScanOperator struct { + baseOperator + TableName string + Reverse bool +} + +// SeqScan creates an iterator that iterates over each document of the given table. +func SeqScan(tableName string) *SeqScanOperator { + return &SeqScanOperator{TableName: tableName} +} + +// SeqScanReverse creates an iterator that iterates over each document of the given table in reverse order. +func SeqScanReverse(tableName string) *SeqScanOperator { + return &SeqScanOperator{TableName: tableName, Reverse: true} +} + +func (it *SeqScanOperator) Iterate(in *environment.Environment, fn func(out *environment.Environment) error) error { + table, err := in.GetCatalog().GetTable(in.GetTx(), it.TableName) + if err != nil { + return err + } + + var newEnv environment.Environment + newEnv.SetOuter(in) + newEnv.Set(environment.TableKey, types.NewTextValue(it.TableName)) + + return table.IterateOnRange(nil, it.Reverse, func(key tree.Key, d types.Document) error { + newEnv.Set(environment.DocPKKey, types.NewBlobValue(key)) + newEnv.SetDocument(d) + return fn(&newEnv) + }) +} + +func (it *SeqScanOperator) String() string { + if !it.Reverse { + return stringutil.Sprintf("seqScan(%s)", it.TableName) + } + return stringutil.Sprintf("seqScanReverse(%s)", it.TableName) +} + +// A PkScanOperator iterates over the documents of a table. +type PkScanOperator struct { + baseOperator + TableName string + Ranges Ranges + Reverse bool +} + +// PkScan creates an iterator that iterates over each document of the given table. +func PkScan(tableName string, ranges ...Range) *PkScanOperator { + return &PkScanOperator{TableName: tableName, Ranges: ranges} +} + +// PkScanReverse creates an iterator that iterates over each document of the given table in reverse order. 
+func PkScanReverse(tableName string, ranges ...Range) *PkScanOperator { + return &PkScanOperator{TableName: tableName, Ranges: ranges, Reverse: true} +} + +func (it *PkScanOperator) String() string { + var s strings.Builder + + s.WriteString("pkScan") + if it.Reverse { + s.WriteString("Reverse") + } + + s.WriteRune('(') + + s.WriteString(strconv.Quote(it.TableName)) + if len(it.Ranges) > 0 { + s.WriteString(", ") + for i, r := range it.Ranges { + s.WriteString(r.String()) + if i+1 < len(it.Ranges) { + s.WriteString(", ") + } + } + } + + s.WriteString(")") + + return s.String() +} + +// Iterate over the documents of the table. Each document is stored in the environment +// that is passed to the fn function, using SetCurrentValue. +func (it *PkScanOperator) Iterate(in *environment.Environment, fn func(out *environment.Environment) error) error { + var newEnv environment.Environment + newEnv.SetOuter(in) + newEnv.Set(environment.TableKey, types.NewTextValue(it.TableName)) + + table, err := in.GetCatalog().GetTable(in.GetTx(), it.TableName) + if err != nil { + return err + } + + ranges, err := it.Ranges.Eval(in) + if err != nil { + return err + } + + for _, rng := range ranges { + err = table.IterateOnRange(rng, it.Reverse, func(key tree.Key, d types.Document) error { + newEnv.Set(environment.DocPKKey, types.NewBlobValue(key)) + newEnv.SetDocument(d) + + return fn(&newEnv) + }) + if errors.Is(err, ErrStreamClosed) { + err = nil + } + if err != nil { + return err + } + } + + return nil +} diff --git a/internal/stream/table_test.go b/internal/stream/table_test.go new file mode 100644 index 000000000..db0a359ba --- /dev/null +++ b/internal/stream/table_test.go @@ -0,0 +1,277 @@ +package stream_test + +import ( + "testing" + + "github.com/genjidb/genji/document" + "github.com/genjidb/genji/internal/environment" + "github.com/genjidb/genji/internal/expr" + "github.com/genjidb/genji/internal/sql/parser" + "github.com/genjidb/genji/internal/stream" + 
"github.com/genjidb/genji/internal/testutil" + "github.com/genjidb/genji/internal/testutil/assert" + "github.com/genjidb/genji/types" + "github.com/stretchr/testify/require" +) + +func TestExpressions(t *testing.T) { + tests := []struct { + e expr.Expr + output types.Document + fails bool + }{ + {parser.MustParseExpr("3 + 4"), nil, true}, + {parser.MustParseExpr("{a: 3 + 4}"), testutil.MakeDocument(t, `{"a": 7}`), false}, + } + + for _, test := range tests { + t.Run(test.e.String(), func(t *testing.T) { + s := stream.New(stream.Expressions(test.e)) + + err := s.Iterate(new(environment.Environment), func(env *environment.Environment) error { + d, ok := env.GetDocument() + require.True(t, ok) + require.Equal(t, d, test.output) + return nil + }) + if test.fails { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } + + t.Run("String", func(t *testing.T) { + require.Equal(t, stream.Expressions(parser.MustParseExpr("1 + 1"), parser.MustParseExpr("pk()")).String(), "exprs(1 + 1, pk())") + }) +} + +func TestSeqScan(t *testing.T) { + tests := []struct { + name string + docsInTable, expected testutil.Docs + reverse bool + fails bool + }{ + {name: "empty"}, + { + "ok", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + false, + false, + }, + { + "reverse", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), + true, + false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + db, tx, cleanup := testutil.NewTestTx(t) + defer cleanup() + + testutil.MustExec(t, db, tx, "CREATE TABLE test (a INTEGER)") + + for _, doc := range test.docsInTable { + testutil.MustExec(t, db, tx, "INSERT INTO test VALUES ?", environment.Param{Value: doc}) + } + + op := stream.SeqScan("test") + op.Reverse = test.reverse + var in environment.Environment + in.Tx = tx + in.Catalog = db.Catalog + + var i int + var got testutil.Docs + err := 
op.Iterate(&in, func(env *environment.Environment) error { + d, ok := env.GetDocument() + require.True(t, ok) + var fb document.FieldBuffer + err := fb.Copy(d) + assert.NoError(t, err) + got = append(got, &fb) + i++ + return nil + }) + if test.fails { + assert.Error(t, err) + } else { + assert.NoError(t, err) + require.Equal(t, len(test.expected), i) + test.expected.RequireEqual(t, got) + } + }) + } + + t.Run("String", func(t *testing.T) { + require.Equal(t, `seqScan(test)`, stream.SeqScan("test").String()) + }) +} + +func TestPkScan(t *testing.T) { + tests := []struct { + name string + docsInTable, expected testutil.Docs + ranges stream.Ranges + reverse bool + fails bool + }{ + { + "max:2", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + stream.Ranges{ + {Max: testutil.ExprList(t, `[2]`)}, + }, + false, false, + }, + { + "max:1", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 1}`), + stream.Ranges{ + {Max: testutil.ExprList(t, `[1]`)}, + }, + false, false, + }, + { + "max:1.1", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 1}`), + stream.Ranges{ + {Max: testutil.ExprList(t, `[1.1]`)}, + }, + false, false, + }, + { + "min", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + stream.Ranges{ + {Min: testutil.ExprList(t, `[1]`)}, + }, + false, false, + }, + { + "min:0.5", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + stream.Ranges{ + {Min: testutil.ExprList(t, `[0.5]`)}, + }, + false, false, + }, + { + "min/max", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + stream.Ranges{ + {Min: testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`)}, + }, + false, false, + }, + { + "min/max:0.5/1.5", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + 
testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + stream.Ranges{ + {Min: testutil.ExprList(t, `[0.5]`), Max: testutil.ExprList(t, `[1.5]`)}, + }, + false, false, + }, + { + "reverse/max", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), + stream.Ranges{ + {Max: testutil.ExprList(t, `[2]`)}, + }, + true, false, + }, + { + "reverse/min", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), + stream.Ranges{ + {Min: testutil.ExprList(t, `[1]`)}, + }, + true, false, + }, + { + "reverse/min/max", + testutil.MakeDocuments(t, `{"a": 1}`, `{"a": 2}`), + testutil.MakeDocuments(t, `{"a": 2}`, `{"a": 1}`), + stream.Ranges{ + {Min: testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`)}, + }, + true, false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + db, tx, cleanup := testutil.NewTestTx(t) + defer cleanup() + + testutil.MustExec(t, db, tx, "CREATE TABLE test (a INTEGER NOT NULL PRIMARY KEY)") + + for _, doc := range test.docsInTable { + testutil.MustExec(t, db, tx, "INSERT INTO test VALUES ?", environment.Param{Value: doc}) + } + + op := stream.PkScan("test", test.ranges...) 
+ op.Reverse = test.reverse + var env environment.Environment + env.Tx = tx + env.Catalog = db.Catalog + env.Params = []environment.Param{{Name: "foo", Value: 1}} + + var i int + var got testutil.Docs + err := op.Iterate(&env, func(env *environment.Environment) error { + d, ok := env.GetDocument() + require.True(t, ok) + var fb document.FieldBuffer + + err := fb.Copy(d) + assert.NoError(t, err) + + got = append(got, &fb) + v, err := env.GetParamByName("foo") + assert.NoError(t, err) + require.Equal(t, types.NewIntegerValue(1), v) + i++ + return nil + }) + if test.fails { + assert.Error(t, err) + } else { + assert.NoError(t, err) + require.Equal(t, len(test.expected), i) + test.expected.RequireEqual(t, got) + } + }) + } + + t.Run("String", func(t *testing.T) { + require.Equal(t, `pkScan("test", [1, 2])`, stream.PkScan("test", stream.Range{ + Min: testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`), + }).String()) + + op := stream.PkScan("test", + stream.Range{Min: testutil.ExprList(t, `[1]`), Max: testutil.ExprList(t, `[2]`), Exclusive: true}, + stream.Range{Min: testutil.ExprList(t, `[10]`), Exact: true}, + stream.Range{Min: testutil.ExprList(t, `[100]`)}, + ) + op.Reverse = true + + require.Equal(t, `pkScanReverse("test", [1, 2, true], 10, [100, -1])`, op.String()) + }) +} diff --git a/internal/testutil/expr.go b/internal/testutil/expr.go index 8ad4ec9bf..ff5124948 100644 --- a/internal/testutil/expr.go +++ b/internal/testutil/expr.go @@ -82,6 +82,15 @@ func ParseDocumentPath(t testing.TB, p string) document.Path { return vp } +func ParseDocumentPaths(t testing.TB, str ...string) []document.Path { + var paths []document.Path + for _, s := range str { + paths = append(paths, ParseDocumentPath(t, s)) + } + + return paths +} + func ParseNamedExpr(t testing.TB, s string, name ...string) expr.Expr { t.Helper() diff --git a/internal/testutil/index.go b/internal/testutil/index.go index 54173145a..39ffe3624 100644 --- a/internal/testutil/index.go +++ 
b/internal/testutil/index.go @@ -3,31 +3,11 @@ package testutil import ( "testing" - "github.com/genjidb/genji/internal/database" "github.com/genjidb/genji/internal/testutil/assert" "github.com/genjidb/genji/internal/tree" "github.com/genjidb/genji/types" ) -// GetIndexContent iterates over the entire index and returns all the key-value pairs in order. -func GetIndexContent(t testing.TB, tx *database.Transaction, catalog *database.Catalog, indexName string) []KV { - t.Helper() - - idx, err := catalog.GetIndex(tx, indexName) - assert.NoError(t, err) - - var content []KV - err = idx.Iterate(nil, false, func(key tree.Key) error { - content = append(content, KV{ - Key: append([]byte{}, key...), - }) - return nil - }) - assert.NoError(t, err) - - return content -} - func NewKey(t testing.TB, values ...types.Value) tree.Key { t.Helper() diff --git a/internal/tree/tree_test.go b/internal/tree/tree_test.go index 46204fae5..db0b0c3b2 100644 --- a/internal/tree/tree_test.go +++ b/internal/tree/tree_test.go @@ -226,6 +226,7 @@ func TestTreeIterateOnRange(t *testing.T) { reverse bool keys []tree.Key }{ + {"asc/nil-range", nil, false, keys}, {"asc/empty-range", &tree.Range{}, false, keys}, {"asc/ >= [5]", &tree.Range{Min: MustNewKey(t, types.NewIntegerValue(5))}, false, keys[5:]}, {"asc/ > [5]", &tree.Range{Min: MustNewKey(t, types.NewIntegerValue(5)), Exclusive: true}, false, keys[6:]}, diff --git a/open_test.go b/open_test.go index 213143c14..fefcc717f 100644 --- a/open_test.go +++ b/open_test.go @@ -69,7 +69,7 @@ func TestOpen(t *testing.T) { } if count == 5 { - testutil.RequireDocJSONEq(t, d, `{"name":"tableA_a_idx", "owner":{"table_name":"tableA", "path":"a"}, "sql":"CREATE UNIQUE INDEX tableA_a_idx ON tableA (a)", "store_name":"Ag==", "table_name":"tableA", "type":"index"}`) + testutil.RequireDocJSONEq(t, d, `{"name":"tableA_a_idx", "owner":{"table_name":"tableA", "paths":["a"]}, "sql":"CREATE UNIQUE INDEX tableA_a_idx ON tableA (a)", "store_name":"Ag==", 
"table_name":"tableA", "type":"index"}`) return nil } diff --git a/sqltests/INSERT/base.sql b/sqltests/INSERT/base.sql index ea06f7f0f..9818beb15 100644 --- a/sqltests/INSERT/base.sql +++ b/sqltests/INSERT/base.sql @@ -15,7 +15,7 @@ INSERT INTO test (a, b, c) VALUES ('a', 'b', 'c'); SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": "b", "c":"c" @@ -35,7 +35,7 @@ INSERT INTO test (a, `foo bar`) VALUES ('c', 'd'); SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": "c", "foo bar": "d" } @@ -46,7 +46,7 @@ INSERT INTO test (a, b, c) VALUES ("a", 'b', [1, 2, 3]); SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b":"b", "c": [1.0, 2.0, 3.0] @@ -58,7 +58,7 @@ INSERT INTO test (a, b, c) VALUES ("a", 'b', {c: 1, d: c + 1}); SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": "b", "c": { @@ -73,7 +73,7 @@ INSERT INTO test VALUES {a: 'a', b: 2.3, c: 1 = 1}; SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": 2.3, "c": true @@ -85,7 +85,7 @@ INSERT INTO test VALUES {a: [1, 2, 3]}; SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": [ 1.0, 2.0, @@ -99,7 +99,7 @@ INSERT INTO test VALUES {'a': 'a', b: 2.3}; SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": 2.3 } @@ -110,7 +110,7 @@ INSERT INTO test VALUES {"a": "b"}; SELECT pk(), * FROM test; /* result: { - "pk()": 1, + "pk()": [1], "a": "b" } */ @@ -119,7 +119,7 @@ SELECT pk(), * FROM test; INSERT INTO test VALUES {a: 400, b: a * 4}; SELECT pk(), * FROM test; /* result: -{"pk()":1,"a":400.0,"b":1600.0} +{"pk()":[1],"a":400.0,"b":1600.0} */ -- with indexes @@ -132,7 +132,7 @@ INSERT INTO test_idx (a, b, c) VALUES ('a', 'b', 'c'); SELECT pk(), * FROM test_idx; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": "b", "c": "c" @@ -152,7 +152,7 @@ INSERT INTO test_idx (a, `foo bar`) VALUES ('c', 'd'); SELECT pk(), * FROM test_idx; /* 
result: { - "pk()": 1, + "pk()": [1], "a": "c", "foo bar": "d" } @@ -163,7 +163,7 @@ INSERT INTO test_idx (a, b, c) VALUES ("a", 'b', [1, 2, 3]); SELECT pk(), * FROM test_idx; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b":"b", "c": [1.0, 2.0, 3.0] @@ -175,7 +175,7 @@ INSERT INTO test_idx (a, b, c) VALUES ("a", 'b', {c: 1, d: c + 1}); SELECT pk(), * FROM test_idx; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": "b", "c": { @@ -190,7 +190,7 @@ INSERT INTO test_idx VALUES {a: 'a', b: 2.3, c: 1 = 1}; SELECT pk(), * FROM test_idx; /* result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": 2.3, "c": true @@ -202,7 +202,7 @@ INSERT INTO test_idx VALUES {a: [1, 2, 3]}; SELECT pk(), * FROM test_idx; /* result: { - "pk()": 1, + "pk()": [1], "a": [ 1.0, 2.0, @@ -216,7 +216,7 @@ INSERT INTO test_idx VALUES {'a': 'a', b: 2.3}; SELECT pk(), * FROM test_idx; /*result: { - "pk()": 1, + "pk()": [1], "a": "a", "b": 2.3 } @@ -227,7 +227,7 @@ INSERT INTO test_idx VALUES {"a": "b"}; SELECT pk(), * FROM test_idx; /* result: { - "pk()": 1, + "pk()": [1], "a": "b" } */ @@ -236,7 +236,7 @@ SELECT pk(), * FROM test_idx; INSERT INTO test_idx VALUES {a: 400, b: a * 4}; SELECT pk(), * FROM test_idx; /* result: -{"pk()":1,"a":400.0,"b":1600.0} +{"pk()":[1],"a":400.0,"b":1600.0} */ -- test: read-only tables @@ -259,7 +259,7 @@ INSERT INTO test (`pk()`) VALUES (10); SELECT pk() AS pk, `pk()` from test; /* result: { - "pk": 1, + "pk": [1], "pk()": 10.0 } */ diff --git a/sqltests/INSERT/insert_select.sql b/sqltests/INSERT/insert_select.sql index 8ddcef115..549405581 100644 --- a/sqltests/INSERT/insert_select.sql +++ b/sqltests/INSERT/insert_select.sql @@ -11,28 +11,28 @@ INSERT INTO foo SELECT * FROM foo; INSERT INTO foo SELECT * FROM bar; SELECT pk(), * FROM foo; /* result: -{"pk()":1, "a":1.0, "b":10.0} +{"pk()": [1], "a":1.0, "b":10.0} */ -- test: No fields / Projection INSERT INTO foo SELECT a FROM bar; SELECT pk(), * FROM foo; /* result: -{"pk()":1, "a":1.0} +{"pk()": [1], 
"a":1.0} */ -- test: With fields / No Projection INSERT INTO foo (a, b) SELECT * FROM bar; SELECT pk(), * FROM foo; /* result: -{"pk()":1, "a":1.0, "b":10.0} +{"pk()": [1], "a":1.0, "b":10.0} */ -- test: With fields / Projection INSERT INTO foo (c, d) SELECT a, b FROM bar; SELECT pk(), * FROM foo; /* result: -{"pk()":1, "c":1.0, "d":10.0} +{"pk()": [1], "c":1.0, "d":10.0} */ -- test: Too many fields / No Projection diff --git a/sqltests/SELECT/pk.sql b/sqltests/SELECT/pk.sql index 00d2cb5b0..47c19f51a 100644 --- a/sqltests/SELECT/pk.sql +++ b/sqltests/SELECT/pk.sql @@ -5,9 +5,9 @@ INSERT INTO test(a) VALUES (1), (2), (3), (4), (5); -- test: wildcard SELECT pk(), a FROM test; /* result: -{"pk()": 1, "a": 1.0} -{"pk()": 2, "a": 2.0} -{"pk()": 3, "a": 3.0} -{"pk()": 4, "a": 4.0} -{"pk()": 5, "a": 5.0} +{"pk()": [1], "a": 1.0} +{"pk()": [2], "a": 2.0} +{"pk()": [3], "a": 3.0} +{"pk()": [4], "a": 4.0} +{"pk()": [5], "a": 5.0} */ diff --git a/sqltests/planning/where_pk.sql b/sqltests/planning/where_pk.sql new file mode 100644 index 000000000..e1e8de581 --- /dev/null +++ b/sqltests/planning/where_pk.sql @@ -0,0 +1,42 @@ +-- setup: +CREATE TABLE test(a int, b int, c int, PRIMARY KEY (a, b)); +INSERT INTO test (a, b, c) VALUES (1, 1, 1), (2, 2, 2), (3, 3, 3), (4, 4, 4), (5, 5, 5); + +-- test: = +EXPLAIN SELECT * FROM test WHERE a = 10 AND b = 5; +/* result: +{ + "plan": 'pkScan("test", [10, 5])' +} +*/ + +-- test: > vs = +EXPLAIN SELECT * FROM test WHERE a > 10 AND b = 5; +/* result: +{ + "plan": 'pkScan(\"test\", [10, -1, true]) | filter(b = 5)' +} +*/ + +-- test: > +EXPLAIN SELECT * FROM test WHERE a > 10 AND b > 5; +/* result: +{ + "plan": 'pkScan("test", [10, -1, true]) | filter(b > 5)' +} +*/ + +-- test: >= +EXPLAIN SELECT * FROM test WHERE a >= 10 AND b > 5; +/* result: +{ + "plan": 'pkScan("test", [10, -1]) | filter(b > 5)' +} +*/ +-- test: < +EXPLAIN SELECT * FROM test WHERE a < 10 AND b > 5; +/* result: +{ + "plan": 'pkScan("test", [-1, 10, true]) | filter(b > 5)' 
+} +*/