diff --git a/connor/and_test.go b/connor/and_test.go deleted file mode 100644 index 1e6ae02c34..0000000000 --- a/connor/and_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$and", func() { - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("and")) - }) - - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - {"match with a single value", `{ "x": { "$and": [1] } }`, true, false}, - {"not match with a single value", `{ "x": { "$and": [2] } }`, false, false}, - {"match with a single operation", `{ "x": { "$and": [{ "$eq": 1 }] } }`, true, false}, - {"not match with a single operation", `{ "x": { "$and": [{ "$eq": 2 }] } }`, false, false}, - {"not match with multiple values", `{ "x": { "$and": [1, 2] } }`, false, false}, - {"error without an array of values", `{ "x": { "$and": 1 } }`, false, true}, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - {"not match when a nested operator doesn't match", `{ "x": { "$and": [{ "$eq": 1 }] } }`, false, false}, - {"match when nested operators all match", `{ "a.x": { "$and": [{ "$in": [1, 3] }, { "$in": [1, 2] }] } }`, true, false}, - }, - } - - cases.Generate(nil) -}) diff --git a/connor/connor.go b/connor/connor.go index 2ca80aaccd..6e4a818e5c 100644 --- a/connor/connor.go +++ b/connor/connor.go @@ -3,11 +3,13 @@ package connor import ( "fmt" "strings" + + "github.com/sourcenetwork/defradb/core" ) // Match is the default method used in Connor to match some data to a // set of conditions. -func Match(conditions, data map[string]interface{}) (bool, error) { +func Match(conditions map[FilterKey]interface{}, data core.Doc) (bool, error) { return MatchWith("$eq", conditions, data) } diff --git a/connor/connor_test.go b/connor/connor_test.go deleted file mode 100644 index ed75283251..0000000000 --- a/connor/connor_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("Connor", func() { - Describe("with a malformed operator", func() { - _, err := MatchWith("malformed", nil, nil) - - It("should return an error", func() { - Expect(err).ToNot(BeNil()) - }) - - It("should provide a descriptive error", func() { - Expect(err.Error()).To(Equal("operator should have '$' prefix")) - }) - - It("should return a short error", func() { - Expect(len(err.Error()) < 80).To(BeTrue(), "error message should be less than 80 characters long") - }) - }) - - Describe("with an invalid/unknown operator", func() { - _, err := MatchWith("$invalid", nil, nil) - - It("should return an error", func() { - Expect(err).ToNot(BeNil()) - }) - - It("should provide a descriptive error", func() { - Expect(err.Error()).To(Equal("unknown operator 'invalid'")) - }) - - It("should return a short error", func() { - Expect(len(err.Error()) < 80).To(BeTrue(), "error message should be less than 80 characters long") - }) - }) -}) diff --git a/connor/contains_test.go b/connor/contains_test.go deleted file mode 100644 index 6f4bf381c4..0000000000 --- a/connor/contains_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . 
"github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$contains", func() { - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("contains")) - }) - - cases := TestCases{ - `{"x":1}`: []TestCase{ - { - "error if a non-string value is provided", - `{"x":{"$contains":"abc"}}`, - false, - true, - }, - }, - `{"x":"abc"}`: []TestCase{ - { - "match a complete string", - `{"x":{"$contains":"abc"}}`, - true, - false, - }, - { - "match a partial suffix", - `{"x":{"$contains":"bc"}}`, - true, - false, - }, - { - "match a partial prefix", - `{"x":{"$contains":"ab"}}`, - true, - false, - }, - { - "not match a different string", - `{"x":{"$contains":"xyz"}}`, - false, - false, - }, - { - "not match a missing field", - `{"y":{"$contains":"xyz"}}`, - false, - false, - }, - }, - } - - cases.Generate(nil) -}) diff --git a/connor/eq.go b/connor/eq.go index cd417f0509..faa0993041 100644 --- a/connor/eq.go +++ b/connor/eq.go @@ -2,10 +2,9 @@ package connor import ( "reflect" - "strings" - "github.com/sourcenetwork/defradb/connor/fields" "github.com/sourcenetwork/defradb/connor/numbers" + "github.com/sourcenetwork/defradb/core" ) func init() { @@ -34,7 +33,8 @@ func (o *EqualOperator) Evaluate(condition, data interface{}) (bool, error) { return true, nil } } - case []map[string]interface{}: + return false, nil + case []core.Doc: for _, item := range arr { m, err := MatchWith("$eq", condition, item) if err != nil { @@ -45,6 +45,7 @@ func (o *EqualOperator) Evaluate(condition, data interface{}) (bool, error) { return true, nil } } + return false, nil } switch cn := condition.(type) { @@ -65,7 +66,7 @@ func (o *EqualOperator) Evaluate(condition, data interface{}) (bool, error) { return numbers.Equal(cn, data), nil case float64: return numbers.Equal(cn, data), nil - case map[string]interface{}: + case map[FilterKey]interface{}: m := true for prop, cond := range cn { if !m { @@ -73,23 +74,12 @@ func (o *EqualOperator) Evaluate(condition, data interface{}) (bool, error) { continue } - if strings.HasPrefix(prop, "$") { - mm, err := MatchWith(prop, cond, data) - if err != nil { - return false, err - } - - m = m && mm - } else if d, ok := data.(map[string]interface{}); ok { - mm, err := MatchWith("$eq", cond, fields.TryGet(d, prop)) - if err != nil { - return false, err - } - - m = m && mm - } else { - return reflect.DeepEqual(condition, data), nil + mm, err := MatchWith(prop.GetOperatorOrDefault("$eq"), cond, prop.GetProp(data)) + if err != nil { + return false, err } + + m = m && mm } return m, nil diff --git a/connor/eq_test.go b/connor/eq_test.go deleted file mode 100644 index 9925c618af..0000000000 --- a/connor/eq_test.go +++ /dev/null @@ -1,218 +0,0 @@ -package connor_test - -import ( - "fmt" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . 
"github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$eq", func() { - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("eq")) - }) - - Describe("Complex Objects", func() { - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "match a field with the same value", - `{ "x": { "$eq": 1 } }`, - true, - false, - }, - { - "not match a field with a different value", - `{ "x": { "$eq": 2 } }`, - false, - false, - }, - { - "not match a missing field", - `{ "a": { "$eq": 1 } }`, - false, - false, - }, - { - "not match a field with a different value type", - `{ "x": { "$eq": "1" } }`, - false, - false, - }, - }, - `{ "x": [1] }`: []TestCase{ - { - "match fields for deep equality", - `{ "x": [1] }`, - true, - false, - }, - }, - `{ "x": [1, 2, 3] }`: []TestCase{ - { - "match values which exist within an array", - `{ "x": 1 }`, - true, - false, - }, - }, - `{ "x": "1", "y": 2 }`: []TestCase{ - { - "not match a field with a different value type", - `{ "x": { "$eq": 1 } }`, - false, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "match a deep field", - `{ "a.x": { "$eq": 1 } }`, - true, - false, - }, - { - "match an object", - `{ "a": { "x": 1 } }`, - true, - false, - }, - { - "not match a deep value without a full path to it", - `{ "a": { "$eq": 1 } }`, - false, - false, - }, - }, - `{ "x": null, "y": 2 }`: []TestCase{ - { - "match an explicitly null field if null is searched for", - `{ "x": null }`, - true, - false, - }, - { - "not match a field which is explicitly null", - `{ "x": 1 }`, - false, - false, - }, - }, - `{ "x": { "y": 1, "z": 1 } }`: []TestCase{ - { - "match a deep object explicitly", - `{ "x": { "$eq": { "y": 1, "z": 1 } } }`, - true, - false, - }, - { - "not match a deep object explicitly if the values of its fields differ", - `{ "x": { "$eq": { "y": 2, "z": 2 } } }`, - false, - false, - }, - }, - `{ "x": { "y": [1], "z": 1 } }`: []TestCase{ - { - "match an object if it has complex properties explicitly searched for", - `{ "x": { "$eq": { "y": [1] } } }`, - true, - false, - }, - { - "not match an object if it has complex properties explicitly searched for but values differ", - `{ "x": { "$eq": { "y": [2] } } }`, - false, - false, - }, - }, - `{ "a": [{ "x": 1 }, { "x": 2 }, { "x": 3 }] }`: []TestCase{ - { - "match objects which exist within an array", - `{ "a": { "x": 1 } }`, - true, - false, - }, - }, - } - - cases.Generate(nil) - }) - - Describe("Different Types", func() { - cases := []struct { - con interface{} - data interface{} - - match bool - hasErr bool - }{ - { - "test", "test", - true, false, - }, - { - "test", 1, - false, false, - }, - { - int8(10), 10, - true, false, - }, - { - int16(10), 10, - true, false, - }, - { - int32(10), 10, - true, false, - }, - { - int64(10), 10, - true, false, - }, - { - float32(10), 10, - true, false, - }, - } - - for _, c := range cases { - conds := c.con - data := c.data - match := c.match - hasErr := c.hasErr - - Describe(fmt.Sprintf("%T(%v) == %T(%v)", c.con, c.con, c.data, c.data), func() { - m, err := Match(map[string]interface{}{ - "x": map[string]interface{}{"$eq": conds}, - }, map[string]interface{}{ - "x": data, - }) - - if hasErr { - It("should return an error", func() { - Expect(err).ToNot(Succeed()) - }) - } else { - It("should not return an error", func() { - Expect(err).To(Succeed()) - }) - } - - if match { - It("should match", func() { - Expect(m).To(BeTrue()) - }) - } else { - It("should not match", func() { - Expect(m).To(BeFalse()) - 
- }) - } - }) - } - }) -}) diff --git a/connor/ge_test.go b/connor/ge_test.go deleted file mode 100644 index a7b91730fb..0000000000 --- a/connor/ge_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package connor_test - -import ( - "fmt" - "time" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$ge", func() { - now := time.Now() - - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("ge")) - }) - - Describe("Basic Cases", func() { - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "match numbers which are greater", - `{ "x": { "$ge": 0 } }`, - true, - false, - }, - { - "match numbers which are equal", - `{ "x": { "$ge": 1 } }`, - true, - false, - }, - { - "not match numbers which are less", - `{ "x": { "$ge": 2 } }`, - false, - false, - }, - { - "match numbers by up-casting them as necessary", - `{ "x": { "$ge": 0.5 } }`, - true, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "match nested object properties", - `{ "a.x": { "$ge": 0 } }`, - true, - false, - }, - { - "not match nested object properties which are less", - `{ "a": { "$ge": 2 } }`, - false, - false, - }, - }, - `{ "x": "5", "y": 2 }`: []TestCase{ - { - "not match strings logically when they are lexicographically less", - `{ "x": { "$ge": "6" } }`, - false, - false, - }, - { - "match strings logically when they are lexicographically equal", - `{ "x": { "$ge": "5" } }`, - true, - false, - }, - { - "not match across different value types", - `{ "x": { "$ge": 0 } }`, - false, - false, - }, - }, - `{ "x": "b", "y": 2 }`: []TestCase{ - { - "match strings which are lexicographically larger", - `{ "x": { "$ge": "a" } }`, - true, - false, - }, - { - "not match strings which are lexicographically smaller", - `{ "x": { "$ge": "c" } }`, - false, - false, - }, - }, - } - - cases.Generate(nil) - }) - - Describe("Different Types", func() { - cases := []struct { - con interface{} - data interface{} - - match bool - hasErr bool - }{ - { - "abc", "def", - true, false, - }, - { - "abc", "abc", - true, false, - }, - { - "abc", "aaa", - false, false, - }, - { - "test", 1, - false, false, - }, - { - int8(10), 10, - true, false, - }, - { - int16(10), 10, - true, false, - }, - { - int32(10), 10, - true, false, - }, - { - int64(10), 10, - true, false, - }, - { - float32(10), 11, - true, false, - }, - { - int64(10), float32(10), - true, false, - }, - { - int64(10), "test", - false, false, - }, - { - now, now, - true, false, - }, - { - now, now.Add(time.Second), - true, false, - }, - { - now, now.Add(-time.Second), - false, false, - }, - { - now, 10, - false, false, - }, - { - []int{10}, []int{12}, - false, true, - }, - } - - for _, c := range cases { - conds := c.con - data := c.data - match := c.match - hasErr := c.hasErr - - Describe(fmt.Sprintf("%T(%v) == %T(%v)", c.con, c.con, c.data, c.data), func() { - m, err := Match(map[string]interface{}{ - "x": map[string]interface{}{"$ge": conds}, - }, map[string]interface{}{ - "x": data, - }) - - if hasErr { - It("should return an error", func() { - Expect(err).ToNot(Succeed()) - }) - } else { - It("should not return an error", func() { - Expect(err).To(Succeed()) - }) - } - - if match { - It("should match", func() { - Expect(m).To(BeTrue()) - }) - } else { - It("should not match", func() { - Expect(m).To(BeFalse()) - - }) - } - }) - } - }) -}) diff --git a/connor/gt_test.go b/connor/gt_test.go deleted file mode 100644 index 
3d72bb12b4..0000000000 --- a/connor/gt_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package connor_test - -import ( - "fmt" - "time" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$gt", func() { - now := time.Now() - - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("gt")) - }) - - Describe("Basic Cases", func() { - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "match numbers which are greater", - `{ "x": { "$gt": 0 } }`, - true, - false, - }, - { - "not match numbers which are equal", - `{ "x": { "$gt": 1 } }`, - false, - false, - }, - { - "not match numbers which are less", - `{ "x": { "$gt": 2 } }`, - false, - false, - }, - { - "match numbers by up-casting them as necessary", - `{ "x": { "$gt": 0.5 } }`, - true, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "match nested object properties", - `{ "a.x": { "$gt": 0 } }`, - true, - false, - }, - { - "not match nested object properties which are less", - `{ "a": { "$gt": 2 } }`, - false, - false, - }, - }, - `{ "x": "5", "y": 2 }`: []TestCase{ - { - "not match strings logically when they are lexicographically less", - `{ "x": { "$gt": "7" } }`, - false, - false, - }, - { - "not match strings logically when they are lexicographically equal", - `{ "x": { "$gt": "5" } }`, - false, - false, - }, - { - "not match across different value types", - `{ "x": { "$gt": 0 } }`, - false, - false, - }, - }, - `{ "x": "b", "y": 2 }`: []TestCase{ - { - "match strings which are lexicographically larger", - `{ "x": { "$gt": "a" } }`, - true, - false, - }, - { - "not match strings which are lexicographically smaller", - `{ "x": { "$gt": "c" } }`, - false, - false, - }, - }, - } - - cases.Generate(nil) - }) - - Describe("Different Types", func() { - cases := []struct { - con interface{} - data interface{} - - match bool - hasErr bool - }{ - { - "abc", "def", - true, false, - }, - { - "abc", "abc", - false, false, - }, - { - "abc", "aaa", - false, false, - }, - { - "test", 1, - false, false, - }, - { - int8(10), 100, - true, false, - }, - { - int16(10), 106, - true, false, - }, - { - int32(10), 15, - true, false, - }, - { - int64(10), 12, - true, false, - }, - { - float32(10), 11, - true, false, - }, - { - int64(10), float32(10), - false, false, - }, - { - int64(10), "test", - false, false, - }, - { - now, now, - false, false, - }, - { - now, now.Add(time.Second), - true, false, - }, - { - now, now.Add(-time.Second), - false, false, - }, - { - now, 10, - false, false, - }, - { - []int{10}, []int{12}, - false, true, - }, - } - - for _, c := range cases { - conds := c.con - data := c.data - match := c.match - hasErr := c.hasErr - - Describe(fmt.Sprintf("%T(%v) == %T(%v)", c.con, c.con, c.data, c.data), func() { - m, err := Match(map[string]interface{}{ - "x": map[string]interface{}{"$gt": conds}, - }, map[string]interface{}{ - "x": data, - }) - - if hasErr { - It("should return an error", func() { - Expect(err).ToNot(Succeed()) - }) - } else { - It("should not return an error", func() { - Expect(err).To(Succeed()) - }) - } - - if match { - It("should match", func() { - Expect(m).To(BeTrue()) - }) - } else { - It("should not match", func() { - Expect(m).To(BeFalse()) - - }) - } - }) - } - }) -}) diff --git a/connor/helpers_test.go b/connor/helpers_test.go deleted file mode 100644 index 9a39547cf2..0000000000 --- a/connor/helpers_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package 
connor_test - -import ( - "encoding/json" - "fmt" - "strings" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -type TestCases map[string][]TestCase - -type TestCase struct { - Name string - Conditions interface{} - ShouldMatch bool - ShouldHaveError bool -} - -func (t TestCases) Generate(test func(conditions, data map[string]interface{}) (bool, error)) { - if test == nil { - test = Match - } - - for d, casesl := range t { - d := d - cases := casesl - - var data map[string]interface{} - BeforeEach(func() { - Expect(json.NewDecoder(strings.NewReader(d)).Decode(&data)).To(Succeed()) - }) - - Describe(fmt.Sprintf("with %s as data", d), func() { - for _, tc := range cases { - tc := tc - - var conditions map[string]interface{} - BeforeEach(func() { - switch c := tc.Conditions.(type) { - case string: - Expect(json.NewDecoder(strings.NewReader(c)).Decode(&conditions)).To(Succeed()) - case map[string]interface{}: - conditions = c - default: - Expect(tc.Conditions).To(Or(BeAssignableToTypeOf(string("")), BeAssignableToTypeOf(map[string]interface{}{}))) - } - }) - - var ( - match bool - err error - ) - JustBeforeEach(func() { - match, err = test(conditions, data) - }) - - Context(fmt.Sprintf("and %s as a condition", tc.Conditions), func() { - Describe(fmt.Sprintf("should %s", tc.Name), func() { - if tc.ShouldHaveError { - It("should return an error", func() { - Expect(err).ToNot(Succeed()) - Expect(match).To(BeFalse()) - }) - } else { - It("should not return an error", func() { - Expect(err).To(Succeed()) - }) - - if tc.ShouldMatch { - It("should match", func() { - Expect(match).To(BeTrue()) - }) - } else { - It("shouldn't match", func() { - Expect(match).To(BeFalse()) - }) - } - } - }) - }) - } - }) - } -} diff --git a/connor/in_test.go b/connor/in_test.go deleted file mode 100644 index 52a55fa5b4..0000000000 --- a/connor/in_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$in", func() { - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("in")) - }) - - cases := TestCases{ - - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "match values which are in the query list", - `{ "x": { "$in": [1] } }`, - true, - false, - }, - { - "not match values which are not in the query list", - `{ "x": { "$in": [2] } }`, - false, - false, - }, - { - "match values which are in the query list with many options", - `{ "x": { "$in": [1, 2, 3] } }`, - true, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "match nested object properties", - `{ "a.x": { "$in": [1] } }`, - true, - false, - }, - { - "not match nested properties if their full key path is not provided", - `{ "a": { "$in": [1] } }`, - false, - false, - }, - { - "return an error if a query which is not a list is provided", - `{ "a": { "$in": 1 } }`, - false, - true, - }, - }, - } - - cases.Generate(nil) -}) diff --git a/connor/key.go b/connor/key.go new file mode 100644 index 0000000000..c1a6768988 --- /dev/null +++ b/connor/key.go @@ -0,0 +1,12 @@ +package connor + +// FilterKey represents a type that may be used as a map key +// in a filter. +type FilterKey interface { + // GetProp returns the data that should be used with this key + // from the given data. 
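+	// For example, a key identifying a document field would return
+	// that field's value from the given data.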
+ GetProp(data interface{}) interface{} + // GetOperatorOrDefault returns either the operator that corresponds + // to this key, or the given default. + GetOperatorOrDefault(defaultOp string) string +} diff --git a/connor/le_test.go b/connor/le_test.go deleted file mode 100644 index a683b8be76..0000000000 --- a/connor/le_test.go +++ /dev/null @@ -1,209 +0,0 @@ -package connor_test - -import ( - "fmt" - "time" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$le", func() { - now := time.Now() - - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("le")) - }) - - Describe("Basic Cases", func() { - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "not match numbers which are greater", - `{ "x": { "$le": 0 } }`, - false, - false, - }, - { - "match numbers which are equal", - `{ "x": { "$le": 1 } }`, - true, - false, - }, - { - "match numbers which are less", - `{ "x": { "$le": 2 } }`, - true, - false, - }, - { - "match numbers by up-casting them as necessary", - `{ "x": { "$le": 1.3 } }`, - true, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "match nested object properties", - `{ "a.x": { "$le": 2 } }`, - true, - false, - }, - { - "not match nested object properties which are less", - `{ "a": { "$le": 0 } }`, - false, - false, - }, - }, - `{ "x": "5", "y": 2 }`: []TestCase{ - { - "not match strings logically when they are lexicographically less", - `{ "x": { "$le": "3" } }`, - false, - false, - }, - { - "not match across different value types", - `{ "x": { "$le": 10 } }`, - false, - false, - }, - }, - `{ "x": "b", "y": 2 }`: []TestCase{ - { - "match strings which are lexicographically larger", - `{ "x": { "$le": "c" } }`, - true, - false, - }, - { - "not match strings which are lexicographically smaller", - `{ "x": { "$le": "a" } }`, - false, - false, - }, - }, - } - - cases.Generate(nil) - }) - - Describe("Different Types", func() { - cases := []struct { - con interface{} - data interface{} - - match bool - hasErr bool - }{ - { - "abc", "def", - false, false, - }, - { - "abc", "abc", - true, false, - }, - { - "abc", "aaa", - true, false, - }, - { - "test", 1, - false, false, - }, - { - int8(10), 10, - true, false, - }, - { - int16(10), 10, - true, false, - }, - { - int32(10), 10, - true, false, - }, - { - int64(10), 10, - true, false, - }, - { - int64(10), 12, - false, false, - }, - { - float32(10), 9, - true, false, - }, - { - int64(10), float32(10), - true, false, - }, - { - int64(10), "test", - false, false, - }, - { - now, now, - true, false, - }, - { - now, now.Add(time.Second), - false, false, - }, - { - now, now.Add(-time.Second), - true, false, - }, - { - now, 10, - false, false, - }, - { - []int{10}, []int{12}, - false, true, - }, - } - - for _, c := range cases { - conds := c.con - data := c.data - match := c.match - hasErr := c.hasErr - - Describe(fmt.Sprintf("%T(%v) == %T(%v)", c.con, c.con, c.data, c.data), func() { - m, err := Match(map[string]interface{}{ - "x": map[string]interface{}{"$le": conds}, - }, map[string]interface{}{ - "x": data, - }) - - if hasErr { - It("should return an error", func() { - Expect(err).ToNot(Succeed()) - }) - } else { - It("should not return an error", func() { - Expect(err).To(Succeed()) - }) - } - - if match { - It("should match", func() { - Expect(m).To(BeTrue()) - }) - } else { - It("should not match", func() { - Expect(m).To(BeFalse()) - - }) - } - }) - } - }) -}) 
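For readers following the FilterKey refactor: the interface added in connor/key.go above pairs with the eq.go change, which calls GetOperatorOrDefault and GetProp on each key. Below is a minimal sketch of two plausible implementations; the type names operatorKey and fieldIndexKey are assumed for illustration and are not taken from this diff.

package connor

import "github.com/sourcenetwork/defradb/core"

// operatorKey is a hypothetical FilterKey wrapping an operator such as "$eq".
type operatorKey struct {
	operator string
}

// GetProp returns the data unchanged; an operator key targets the whole value.
func (k operatorKey) GetProp(data interface{}) interface{} {
	return data
}

// GetOperatorOrDefault returns the wrapped operator, ignoring the default.
func (k operatorKey) GetOperatorOrDefault(defaultOp string) string {
	return k.operator
}

// fieldIndexKey is a hypothetical FilterKey addressing a field by its mapped index.
type fieldIndexKey struct {
	index int
}

// GetProp returns the field value at the mapped index when given a core.Doc.
func (k fieldIndexKey) GetProp(data interface{}) interface{} {
	if doc, ok := data.(core.Doc); ok {
		return doc.Fields[k.index]
	}
	return nil
}

// GetOperatorOrDefault falls back to the supplied default operator.
func (k fieldIndexKey) GetOperatorOrDefault(defaultOp string) string {
	return defaultOp
}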
diff --git a/connor/lt_test.go b/connor/lt_test.go deleted file mode 100644 index 3cc0f9852a..0000000000 --- a/connor/lt_test.go +++ /dev/null @@ -1,213 +0,0 @@ -package connor_test - -import ( - "fmt" - "time" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$lt", func() { - now := time.Now() - - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("lt")) - }) - - Describe("Basic Cases", func() { - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "not match numbers which are greater", - `{ "x": { "$lt": 0 } }`, - false, - false, - }, - { - "not match numbers which are equal", - `{ "x": { "$lt": 1 } }`, - false, - false, - }, - { - "match numbers which are less", - `{ "x": { "$lt": 2 } }`, - true, - false, - }, - { - "match numbers by up-casting them as necessary", - `{ "x": { "$lt": 1.3 } }`, - true, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "match nested object properties", - `{ "a.x": { "$lt": 2 } }`, - true, - false, - }, - { - "not match nested object properties which are less", - `{ "a": { "$lt": 0 } }`, - false, - false, - }, - }, - `{ "x": "5", "y": 2 }`: []TestCase{ - { - "not match strings logically when they are lexicographically less", - `{ "x": { "$lt": "3" } }`, - false, - false, - }, - { - "not match across different value types", - `{ "x": { "$lt": 10 } }`, - false, - false, - }, - }, - `{ "x": "b", "y": 2 }`: []TestCase{ - { - "match strings which are lexicographically larger", - `{ "x": { "$lt": "c" } }`, - true, - false, - }, - { - "not match strings which are lexicographically smaller", - `{ "x": { "$lt": "a" } }`, - false, - false, - }, - }, - } - - cases.Generate(nil) - }) - - Describe("Different Types", func() { - cases := []struct { - con interface{} - data interface{} - - match bool - hasErr bool - }{ - { - "abc", "def", - false, false, - }, - { - "abc", "abc", - false, false, - }, - { - "abc", "aaa", - true, false, - }, - { - "test", 1, - false, false, - }, - { - int8(10), 5, - true, false, - }, - { - int16(10), 1, - true, false, - }, - { - int32(10), 3, - true, false, - }, - { - int64(10), 9, - true, false, - }, - { - int64(10), 10, - false, false, - }, - { - int64(10), 12, - false, false, - }, - { - float32(10), 9, - true, false, - }, - { - int64(10), float32(9), - true, false, - }, - { - int64(10), "test", - false, false, - }, - { - now, now, - false, false, - }, - { - now, now.Add(time.Second), - false, false, - }, - { - now, now.Add(-time.Second), - true, false, - }, - { - now, 10, - false, false, - }, - { - []int{10}, []int{12}, - false, true, - }, - } - - for _, c := range cases { - conds := c.con - data := c.data - match := c.match - hasErr := c.hasErr - - Describe(fmt.Sprintf("%T(%v) == %T(%v)", c.con, c.con, c.data, c.data), func() { - m, err := Match(map[string]interface{}{ - "x": map[string]interface{}{"$lt": conds}, - }, map[string]interface{}{ - "x": data, - }) - - if hasErr { - It("should return an error", func() { - Expect(err).ToNot(Succeed()) - }) - } else { - It("should not return an error", func() { - Expect(err).To(Succeed()) - }) - } - - if match { - It("should match", func() { - Expect(m).To(BeTrue()) - }) - } else { - It("should not match", func() { - Expect(m).To(BeFalse()) - - }) - } - }) - } - }) -}) diff --git a/connor/ne_test.go b/connor/ne_test.go deleted file mode 100644 index 2391e359e6..0000000000 --- a/connor/ne_test.go +++ /dev/null @@ -1,65 +0,0 
@@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$ne", func() { - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("ne")) - }) - - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "not match when the values are equal", - `{ "x": { "$ne": 1 } }`, - false, - false, - }, - { - "match when the values are different", - `{ "x": { "$ne": 2 } }`, - true, - false, - }, - { - "match when the field is not present", - `{ "a": { "$ne": 1 } }`, - true, - false, - }, - { - "match when the types are different", - `{ "x": { "$ne": "1" } }`, - true, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "not match when a nested property has the same value", - `{ "a.x": { "$ne": 1 } }`, - false, - false, - }, - { - "match when a nested property has a different value", - `{ "a": { "$ne": 1 } }`, - true, - false, - }, - { - "match when a missing nested property is tested for value equality", - `{ "a": { "$ne": { "z": 1 } } }`, - true, - false, - }, - }, - } - - cases.Generate(nil) -}) diff --git a/connor/nin_test.go b/connor/nin_test.go deleted file mode 100644 index ba8cf6deb3..0000000000 --- a/connor/nin_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$nin", func() { - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("nin")) - }) - - cases := TestCases{ - - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "not match values which are in the query list", - `{ "x": { "$nin": [1] } }`, - false, - false, - }, - { - "match values which are not in the query list", - `{ "x": { "$nin": [2] } }`, - true, - false, - }, - { - "not match values which are in the query list with many options", - `{ "x": { "$nin": [1, 2, 3] } }`, - false, - false, - }, - }, - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "not match nested object properties", - `{ "a.x": { "$nin": [1] } }`, - false, - false, - }, - { - "match nested properties if they are not deep-equal", - `{ "a": { "$nin": [1] } }`, - true, - false, - }, - { - "return an error if a query which is not a list is provided", - `{ "a": { "$nin": 1 } }`, - false, - true, - }, - }, - } - - cases.Generate(nil) -}) diff --git a/connor/or_test.go b/connor/or_test.go deleted file mode 100644 index fb49ebeaa3..0000000000 --- a/connor/or_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . 
"github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("$or", func() { - It("should be registered as an operator", func() { - Expect(Operators()).To(ContainElement("or")) - }) - - cases := TestCases{ - `{ "x": 1, "y": 2 }`: []TestCase{ - { - "match equality of values implicitly", - `{ "x": { "$or": [1] } }`, - true, - false, - }, - { - "not match inequality of values implicitly", - `{ "x": { "$or": [2] } }`, - false, - false, - }, - { - "match using explicit value comparison operators", - `{ "x": { "$or": [{ "$eq": 1 }] } }`, - true, - false, - }, - { - "return an error if you do not provide a list of options", - `{ "x": { "$or": 2 } }`, - false, - true, - }, - }, - - `{ "a": { "x": 1 }, "y": 2 }`: []TestCase{ - { - "not match if values are not deep-equal with an explicit operator", - `{ "x": { "$or": [{ "$eq": 1 }] } }`, - false, - false, - }, - { - "match if a complex value comparison is performed", - `{ "a": { "$or": [{ "x": { "$in": [1] } }] } }`, - true, - false, - }, - }, - } - - cases.Generate(nil) - -}) diff --git a/connor/realworld_test.go b/connor/realworld_test.go deleted file mode 100644 index 5535c9575c..0000000000 --- a/connor/realworld_test.go +++ /dev/null @@ -1,71 +0,0 @@ -package connor_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - . "github.com/sourcenetwork/defradb/connor" -) - -var _ = Describe("Real World Cases", func() { - Describe("Sensu Check", func() { - conds := map[string]interface{}{ - "check.status": 0, - } - data := map[string]interface{}{ - "client": "razz-base-stackstorm", - "check": map[string]interface{}{ - "command": "/opt/sensu/embedded/bin/check-cpu.rb", - "handlers": []interface{}{"default", "st2"}, - "name": "CPU", - "issued": 1487492532, - "interval": 60, - "subscribers": []interface{}{"generic"}, - "executed": 1487492532, - "duration": 1.115, - "output": "This is a quick test", - "status": 0, - "remediations": []interface{}{ - map[string]interface{}{ - "name": "all good", - "command": "echo 'OK' > check_handler.dat", - "conditions": map[string]interface{}{ - "check.status": 0, - }, - }, - map[string]interface{}{ - "name": "so-so", - "command": "echo 'WARN' > check_handler.dat", - "conditions": map[string]interface{}{ - "check.status": 1, - "occurrences": 2, - }, - }, - map[string]interface{}{ - "name": "it's on fire!", - "command": "echo 'CRIT' > check_handler.dat", - "conditions": map[string]interface{}{ - "check.status": 2, - "occurrences": 2, - }, - }, - }, - }, - } - - var matches bool - var err error - - BeforeEach(func() { - matches, err = Match(conds, data) - }) - - It("should not return an error", func() { - Expect(err).To(Succeed()) - }) - - It("should match", func() { - Expect(matches).To(BeTrue()) - }) - }) -}) diff --git a/core/doc.go b/core/doc.go new file mode 100644 index 0000000000..9dc5f590f4 --- /dev/null +++ b/core/doc.go @@ -0,0 +1,222 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package core + +const DocKeyFieldIndex int = 0 + +type DocFields []interface{} +type Doc struct { + // If true, this Doc will not be rendered, but will still be passed through + // the plan graph just like any other document. 
+	Hidden bool
+
+	Fields DocFields
+}
+
+// GetKey returns the DocKey for this document.
+//
+// Will panic if the document is empty.
+func (d *Doc) GetKey() string {
+	key, _ := d.Fields[DocKeyFieldIndex].(string)
+	return key
+}
+
+// SetKey sets the DocKey for this document.
+//
+// Will panic if the document has not been initialised with fields.
+func (d *Doc) SetKey(key string) {
+	d.Fields[DocKeyFieldIndex] = key
+}
+
+// Clone returns a deep copy of this document.
+func (d *Doc) Clone() Doc {
+	cp := Doc{
+		Fields: make(DocFields, len(d.Fields)),
+	}
+
+	for i, v := range d.Fields {
+		switch typedFieldValue := v.(type) {
+		case Doc:
+			cp.Fields[i] = typedFieldValue.Clone()
+		case []Doc:
+			innerMaps := make([]Doc, len(typedFieldValue))
+			for j, innerDoc := range typedFieldValue {
+				innerMaps[j] = innerDoc.Clone()
+			}
+			cp.Fields[i] = innerMaps
+		default:
+			cp.Fields[i] = v
+		}
+	}
+
+	return cp
+}
+
+type RenderKey struct {
+	// The field index to be rendered.
+	Index int
+
+	// The key under which the field contents should be rendered.
+	Key string
+}
+
+type DocumentMapping struct {
+	// The set of fields that should be rendered.
+	//
+	// Fields not in this collection will not be rendered to the consumer.
+	// Collection-item indexes do not have to pair up with field indexes and
+	// items should not be accessed this way.
+	RenderKeys []RenderKey
+
+	// The set of fields available using this mapping.
+	//
+	// If a field-name is not in this collection, it essentially doesn't exist.
+	// The collection should include fields that are not rendered to the consumer.
+	// Multiple fields may exist for any given name (for example if a property
+	// exists under different aliases/filters).
+	IndexesByName map[string][]int
+
+	// The next index available for use.
+	//
+	// Also useful for identifying how many fields a document should have.
+	nextIndex int
+
+	// The collection of child mappings for this object.
+	//
+	// Indexes correspond exactly to field indexes; however, entries may be
+	// default if the field is unmappable (e.g. integer fields).
+	ChildMappings []DocumentMapping
+}
+
+// NewDocumentMapping instantiates a new DocumentMapping instance.
+func NewDocumentMapping() *DocumentMapping {
+	return &DocumentMapping{
+		IndexesByName: map[string][]int{},
+	}
+}
+
+// CloneWithoutRender deep copies the source mapping, skipping over the
+// RenderKeys.
+func (source *DocumentMapping) CloneWithoutRender() *DocumentMapping {
+	result := DocumentMapping{
+		IndexesByName: make(map[string][]int, len(source.IndexesByName)),
+		nextIndex:     source.nextIndex,
+		ChildMappings: make([]DocumentMapping, len(source.ChildMappings)),
+	}
+
+	for externalName, sourceIndexes := range source.IndexesByName {
+		indexes := make([]int, len(sourceIndexes))
+		copy(indexes, sourceIndexes)
+		result.IndexesByName[externalName] = indexes
+	}
+
+	for i, childMapping := range source.ChildMappings {
+		result.ChildMappings[i] = *childMapping.CloneWithoutRender()
+	}
+
+	return &result
+}
+
+// GetNextIndex returns the next index available for use.
+//
+// Also useful for identifying how many fields a document should have.
+func (mapping *DocumentMapping) GetNextIndex() int {
+	return mapping.nextIndex
+}
+
+// NewDoc instantiates a new Doc from this mapping, ensuring that the Fields
+// collection is constructed with the required length/indexes.
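+//
+// A brief usage sketch (the "_key" and "name" entries are assumed for
+// illustration):
+//
+//	mapping := NewDocumentMapping()
+//	mapping.Add(DocKeyFieldIndex, "_key")
+//	mapping.Add(1, "name")
+//	doc := mapping.NewDoc() // len(doc.Fields) == mapping.GetNextIndex() == 2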
+func (mapping *DocumentMapping) NewDoc() Doc {
+	return Doc{
+		Fields: make(DocFields, mapping.nextIndex),
+	}
+}
+
+// SetFirstOfName overwrites the first field of this name with the given value.
+//
+// Will panic if the field does not exist.
+func (mapping *DocumentMapping) SetFirstOfName(d *Doc, name string, value interface{}) {
+	d.Fields[mapping.IndexesByName[name][0]] = value
+}
+
+// FirstOfName returns the value of the first field of the given name.
+//
+// Will panic if the field does not exist (but not if its value is default).
+func (mapping *DocumentMapping) FirstOfName(d Doc, name string) interface{} {
+	return d.Fields[mapping.FirstIndexOfName(name)]
+}
+
+// FirstIndexOfName returns the first field index of the given name.
+//
+// Will panic if the field does not exist.
+func (mapping *DocumentMapping) FirstIndexOfName(name string) int {
+	return mapping.IndexesByName[name][0]
+}
+
+// ToMap renders the given document to map[string]interface{} format using
+// the given mapping.
+//
+// Will not return fields without a render key, or any child documents
+// marked as Hidden.
+func (mapping *DocumentMapping) ToMap(doc Doc) map[string]interface{} {
+	mappedDoc := make(map[string]interface{}, len(mapping.RenderKeys))
+	for _, renderKey := range mapping.RenderKeys {
+		value := doc.Fields[renderKey.Index]
+		var renderValue interface{}
+		switch innerV := value.(type) {
+		case []Doc:
+			innerMapping := mapping.ChildMappings[renderKey.Index]
+			innerArray := []map[string]interface{}{}
+			for _, innerDoc := range innerV {
+				if innerDoc.Hidden {
+					continue
+				}
+				innerArray = append(innerArray, innerMapping.ToMap(innerDoc))
+			}
+			renderValue = innerArray
+		case Doc:
+			innerMapping := mapping.ChildMappings[renderKey.Index]
+			renderValue = innerMapping.ToMap(innerV)
+		default:
+			renderValue = innerV
+		}
+		mappedDoc[renderKey.Key] = renderValue
+	}
+	return mappedDoc
+}
+
+// Add appends the given index and name to the mapping.
+func (mapping *DocumentMapping) Add(index int, name string) {
+	inner := mapping.IndexesByName[name]
+	inner = append(inner, index)
+	mapping.IndexesByName[name] = inner
+
+	if index >= mapping.nextIndex {
+		mapping.nextIndex = index + 1
+	}
+}
+
+// SetChildAt sets the given child mapping at the given index.
+//
+// If the index is greater than the ChildMappings length, the collection will
+// grow.
+func (m *DocumentMapping) SetChildAt(index int, childMapping DocumentMapping) {
+	var newMappings []DocumentMapping
+	if index >= len(m.ChildMappings)-1 {
+		newMappings = make([]DocumentMapping, index+1)
+		copy(newMappings, m.ChildMappings)
+	} else {
+		newMappings = m.ChildMappings
+	}
+
+	newMappings[index] = childMapping
+	m.ChildMappings = newMappings
+}
diff --git a/db/collection.go b/db/collection.go
index f9f0ba6061..e6b96c0c8a 100644
--- a/db/collection.go
+++ b/db/collection.go
@@ -483,6 +483,20 @@ func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client.
 	}
 	// write data to DB via MerkleClock/CRDT
 	_, err = c.save(ctx, txn, doc)
+
+	// If this is a Batch masked as a Transaction,
+	// commit our writes so that we can see them.
+	// Batches don't maintain serializability,
+	// linearizability, or any other transaction
+	// semantics, which the user already knows;
+	// otherwise they wouldn't use a datastore
+	// that doesn't support proper transactions.
+	// So let's just commit and keep going.
+ if txn.IsBatch() { + if err := txn.Commit(ctx); err != nil { + return err + } + } return err } diff --git a/db/collection_delete.go b/db/collection_delete.go index 3ccdd8a2f1..3ac0173020 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -21,7 +21,6 @@ import ( query "github.com/ipfs/go-datastore/query" ipld "github.com/ipfs/go-ipld-format" dag "github.com/ipfs/go-merkledag" - parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" @@ -221,8 +220,9 @@ func (c *collection) deleteWithFilter( break } + doc := query.Value() // Extract the dockey in the string format from the document value. - docKey := query.Value()[parserTypes.DocKeyFieldName].(string) + docKey := doc.GetKey() // Convert from string to client.DocKey. key := core.PrimaryDataStoreKey{ diff --git a/db/collection_update.go b/db/collection_update.go index 4d1be433c2..ac8ddf87fc 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -21,9 +21,12 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/query/graphql/mapper" "github.com/sourcenetwork/defradb/query/graphql/parser" "github.com/sourcenetwork/defradb/query/graphql/planner" + parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" + cbor "github.com/fxamacker/cbor/v2" ) @@ -256,6 +259,8 @@ func (c *collection) updateWithFilter( DocKeys: make([]string, 0), } + docMap := query.DocumentMap() + // loop while we still have results from the filter query for { next, nextErr := query.Next() @@ -268,7 +273,7 @@ func (c *collection) updateWithFilter( } // Get the document, and apply the patch - doc := query.Value() + doc := docMap.ToMap(query.Value()) if isPatch { err = c.applyPatch(txn, doc, patch.([]map[string]interface{})) } else if isMerge { // else is fine here @@ -279,7 +284,7 @@ func (c *collection) updateWithFilter( } // add successful updated doc to results - results.DocKeys = append(results.DocKeys, doc["_key"].(string)) + results.DocKeys = append(results.DocKeys, doc[parserTypes.DocKeyFieldName].(string)) results.Count++ } @@ -557,18 +562,21 @@ func (c *collection) makeSelectionQuery( txn datastore.Txn, filter interface{}, ) (planner.Query, error) { - var f *parser.Filter + mapping := c.createMapping() + var f *mapper.Filter var err error switch fval := filter.(type) { case string: if fval == "" { return nil, errors.New("Invalid filter") } - f, err = parser.NewFilterFromString(fval) + var p *parser.Filter + p, err = parser.NewFilterFromString(fval) if err != nil { return nil, err } - case *parser.Filter: + f = mapper.ToFilter(p, mapping) + case *mapper.Filter: f = fval default: return nil, errors.New("Invalid filter") @@ -576,7 +584,7 @@ func (c *collection) makeSelectionQuery( if filter == "" { return nil, errors.New("Invalid filter") } - slct, err := c.makeSelectLocal(f) + slct, err := c.makeSelectLocal(f, mapping) if err != nil { return nil, err } @@ -584,23 +592,51 @@ func (c *collection) makeSelectionQuery( return c.db.queryExecutor.MakeSelectQuery(ctx, c.db, txn, slct) } -func (c *collection) makeSelectLocal(filter *parser.Filter) (*parser.Select, error) { - slct := &parser.Select{ - Name: c.Name(), - Filter: filter, - Fields: make([]parser.Selection, len(c.desc.Schema.Fields)), +func (c 
*collection) makeSelectLocal(filter *mapper.Filter, mapping *core.DocumentMapping) (*mapper.Select, error) {
+	slct := &mapper.Select{
+		Targetable: mapper.Targetable{
+			Field: mapper.Field{
+				Name: c.Name(),
+			},
+			Filter: filter,
+		},
+		Fields:          make([]mapper.Requestable, len(c.desc.Schema.Fields)),
+		DocumentMapping: *mapping,
 	}
 
-	for i, fd := range c.Schema().Fields {
+	for _, fd := range c.Schema().Fields {
 		if fd.IsObject() {
 			continue
 		}
-		slct.Fields[i] = parser.Field{Name: fd.Name}
+		index := int(fd.ID)
+		slct.Fields = append(slct.Fields, &mapper.Field{
+			Index: index,
+			Name:  fd.Name,
+		})
 	}
 
 	return slct, nil
 }
 
+func (c *collection) createMapping() *core.DocumentMapping {
+	mapping := core.NewDocumentMapping()
+	mapping.Add(core.DocKeyFieldIndex, parserTypes.DocKeyFieldName)
+	for _, fd := range c.Schema().Fields {
+		if fd.IsObject() {
+			continue
+		}
+		index := int(fd.ID)
+		mapping.Add(index, fd.Name)
+		mapping.RenderKeys = append(mapping.RenderKeys,
+			core.RenderKey{
+				Index: index,
+				Key:   fd.Name,
+			},
+		)
+	}
+	return mapping
+}
+
 // getTypeAndCollectionForPatch parses the Patch op path values
 // and compares it against the collection schema.
 // If it's within the schema, then patchIsSubType is false
diff --git a/db/container/container.go b/db/container/container.go
index d3b6918707..c6a82755e4 100644
--- a/db/container/container.go
+++ b/db/container/container.go
@@ -10,6 +10,10 @@
 
 package container
 
+import (
+	"github.com/sourcenetwork/defradb/core"
+)
+
 // DocumentContainer is a specialized buffer to store potentially
 // thousands of document value maps. Its used by the Planner system
 // to store documents that need to have logic applied to all of them.
 //
@@ -19,7 +23,7 @@ package container
 // Close() is called if you want to free all the memory associated
 // with the container
 type DocumentContainer struct {
-	docs    []map[string]interface{}
+	docs    []core.Doc
 	numDocs int
 }
 
@@ -28,13 +32,13 @@ type DocumentContainer struct {
 // A capacity of 0 ignores any initial pre-allocation.
 func NewDocumentContainer(capacity int) *DocumentContainer {
 	return &DocumentContainer{
-		docs:    make([]map[string]interface{}, capacity),
+		docs:    make([]core.Doc, capacity),
 		numDocs: 0,
 	}
 }
 
 // At returns the document at the specified index.
-func (c *DocumentContainer) At(index int) map[string]interface{} {
+func (c *DocumentContainer) At(index int) core.Doc {
 	if index < 0 || index >= c.numDocs {
 		panic("Invalid index for document container")
 	}
@@ -46,13 +50,12 @@ func (c *DocumentContainer) Len() int {
 }
 
 // AddDoc adds a new document to the DocumentContainer.
-// It makes a deep copy before its added
-func (c *DocumentContainer) AddDoc(doc map[string]interface{}) error {
-	if doc == nil {
-		return nil
-	}
-	// append to docs slice
-	c.docs = append(c.docs, copyMap(doc))
+//
+// It makes a deep copy before it's added to allow for independent mutation of
+// the added clone.
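+//
+// For example (illustrative only):
+//
+//	original := core.Doc{Fields: core.DocFields{"a"}}
+//	container.AddDoc(original)
+//	original.Fields[0] = "b" // the clone stored in the container still holds "a"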
+func (c *DocumentContainer) AddDoc(doc core.Doc) error {
+	copyDoc := doc.Clone()
+	c.docs = append(c.docs, copyDoc)
 	c.numDocs++
 	return nil
 }
@@ -74,17 +77,3 @@
 	c.numDocs = 0
 	return nil
 }
-
-func copyMap(m map[string]interface{}) map[string]interface{} {
-	cp := make(map[string]interface{})
-	for k, v := range m {
-		vm, ok := v.(map[string]interface{})
-		if ok {
-			cp[k] = copyMap(vm)
-		} else {
-			cp[k] = v
-		}
-	}
-
-	return cp
-}
diff --git a/db/fetcher/encoded_doc.go b/db/fetcher/encoded_doc.go
index f7040b344f..f1c60fdcb3 100644
--- a/db/fetcher/encoded_doc.go
+++ b/db/fetcher/encoded_doc.go
@@ -15,6 +15,7 @@ import (
 	"github.com/fxamacker/cbor/v2"
 
 	"github.com/sourcenetwork/defradb/client"
+	"github.com/sourcenetwork/defradb/core"
 )
 
 type EPTuple []encProperty
@@ -152,17 +153,18 @@ func (encdoc *encodedDocument) Decode() (*client.Document, error) {
 	return doc, nil
 }
 
-// DecodeToMap returns a decoded document as a
-// map of field/value pairs
+// DecodeToDoc returns a decoded document as a core.Doc, with each
+// field value placed at the index given by its field description.
+func (encdoc *encodedDocument) DecodeToDoc(mapping *core.DocumentMapping) (core.Doc, error) {
+	doc := mapping.NewDoc()
+
+	doc.SetKey(string(encdoc.Key))
 	for fieldDesc, prop := range encdoc.Properties {
 		_, val, err := prop.Decode()
 		if err != nil {
-			return nil, err
+			return core.Doc{}, err
 		}
-		doc[fieldDesc.Name] = val
+		doc.Fields[fieldDesc.ID] = val
 	}
 	return doc, nil
 }
diff --git a/db/fetcher/fetcher.go b/db/fetcher/fetcher.go
index 34d1169fde..522cd6949b 100644
--- a/db/fetcher/fetcher.go
+++ b/db/fetcher/fetcher.go
@@ -33,7 +33,7 @@ type Fetcher interface {
 	Start(ctx context.Context, txn datastore.Txn, spans core.Spans) error
 	FetchNext(ctx context.Context) (*encodedDocument, error)
 	FetchNextDecoded(ctx context.Context) (*client.Document, error)
-	FetchNextMap(ctx context.Context) ([]byte, map[string]interface{}, error)
+	FetchNextDoc(ctx context.Context, mapping *core.DocumentMapping) ([]byte, core.Doc, error)
 	Close() error
 }
 
@@ -367,22 +367,23 @@ func (df *DocumentFetcher) FetchNextDecoded(ctx context.Context) (*client.Docume
 	return df.decodedDoc, nil
 }
 
-// FetchNextMap returns the next document as a map[string]interface{}
+// FetchNextDoc returns the next document as a core.Doc
 // The first return value is the parsed document key
-func (df *DocumentFetcher) FetchNextMap(
+func (df *DocumentFetcher) FetchNextDoc(
 	ctx context.Context,
-) ([]byte, map[string]interface{}, error) {
+	mapping *core.DocumentMapping,
+) ([]byte, core.Doc, error) {
 	encdoc, err := df.FetchNext(ctx)
 	if err != nil {
-		return nil, nil, err
+		return nil, core.Doc{}, err
 	}
 	if encdoc == nil {
-		return nil, nil, nil
+		return nil, core.Doc{}, nil
 	}
 
-	doc, err := encdoc.DecodeToMap()
+	doc, err := encdoc.DecodeToDoc(mapping)
 	if err != nil {
-		return nil, nil, err
+		return nil, core.Doc{}, err
 	}
 	return encdoc.Key, doc, err
 }
diff --git a/db/fetcher/versioned_test.go b/db/fetcher/versioned_test.go
deleted file mode 100644
index 7647dcacb7..0000000000
--- a/db/fetcher/versioned_test.go
+++ /dev/null
@@ -1,489 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package fetcher_test - -import ( - "context" - "encoding/json" - "errors" - "testing" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/db" - "github.com/sourcenetwork/defradb/db/fetcher" - - "github.com/ipfs/go-cid" - ds "github.com/ipfs/go-datastore" - "github.com/stretchr/testify/assert" -) - -type update struct { - payload []byte - diffOps map[string]interface{} - cid string -} - -var ( - testStates = []update{ - { - payload: []byte(`{ - "name": "Alice", - "age": 31, - "points": 100, - "verified": true - }`), - // cid: "Qmcv2iU3myUBwuFCHe3w97sBMMER2FTY2rpbNBP6cqWb4S", - cid: "bafybeigxazren4cw3fla22hvs3773udxynn53mk4hwezobv3fbirusgxnq", - }, - { - payload: []byte(`{ - "name": "Pete", - "age": 31, - "points": 99.9, - "verified": true - }`), - diffOps: map[string]interface{}{ - "name": "Pete", - "points": 99.9, - }, - // cid: "QmPgnQvhPuLGwVU4ZEcbRy7RNCxSkeS72eKwXusUrAEEXR", - cid: "bafybeies2gdj2xzswz4jdxev3bouuefo6q5f377ur7a3ly2jursh5kgkyu", - }, - { - payload: []byte(`{ - "name": "Pete", - "age": 22, - "points": 99.9, - "verified": false - }`), - diffOps: map[string]interface{}{ - "verified": false, - "age": 22, - }, - // cid: "QmRpMfTzExGrXat5W9uCAEtnSpRTvWBcd1hBYNWVPdN9Xh", - cid: "bafybeia2gx47ypcpwd3bt4xl26det6mw7pxevcw7sh4njnjcfh4z7wh5fa", - }, - { - payload: []byte(`{ - "name": "Pete", - "age": 22, - "points": 129.99, - "verified": false - }`), - diffOps: map[string]interface{}{ - "points": 129.99, - }, - // cid: "QmRWYwKadjWqHLrzPKd7MdS4EoQuT2RzWVTaBxxVkeSjFH", - cid: "bafybeif45vokvqg47ahuvgfbhuayw54pmjidwjefonzrlakesbi2v4aeki", - }, - } -) - -func newMemoryDB(ctx context.Context) (client.DB, error) { - rootstore := ds.NewMapDatastore() - return db.NewDB(ctx, rootstore) -} - -func TestVersionedFetcherInit(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(db) - assert.NoError(t, err) - - vf := &fetcher.VersionedFetcher{} - desc := col.Description() - err = vf.Init(&desc, nil, false) - assert.NoError(t, err) -} - -func TestVersionedFetcherStart(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(db) - assert.NoError(t, err) - - err = createDocUpdates(col) - assert.NoError(t, err) - - vf := &fetcher.VersionedFetcher{} - desc := col.Description() - err = vf.Init(&desc, nil, false) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, false) - assert.NoError(t, err) - - key := core.DataStoreKey{DocKey: "bae-ed7f0bd5-3f5b-5e93-9310-4b2e71ac460d"} - version, err := cid.Decode(testStates[3].cid) - assert.NoError(t, err) - - span := fetcher.NewVersionedSpan(key, version) - err = vf.Start(ctx, txn, span) - assert.NoError(t, err) -} - -func TestVersionedFetcherNextMap(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(db) - assert.NoError(t, err) - - err = createDocUpdates(col) - assert.NoError(t, err) - - vf := &fetcher.VersionedFetcher{} - desc := col.Description() - err = vf.Init(&desc, nil, false) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, 
false) - assert.NoError(t, err) - - key := core.DataStoreKey{DocKey: "bae-ed7f0bd5-3f5b-5e93-9310-4b2e71ac460d"} - version, err := cid.Decode(testStates[0].cid) - assert.NoError(t, err) - - span := fetcher.NewVersionedSpan(key, version) - err = vf.Start(ctx, txn, span) - assert.NoError(t, err) - - _, doc, err := vf.FetchNextMap(ctx) - assert.NoError(t, err) - - var state map[string]interface{} - err = json.Unmarshal(testStates[0].payload, &state) - assert.NoError(t, err) - - compareVersionedDocs(t, doc, state) -} - -func TestVersionedFetcherNextMapV1(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(db) - assert.NoError(t, err) - - err = createDocUpdates(col) - assert.NoError(t, err) - - // assert.True(t, false) // force printing dump - - vf := &fetcher.VersionedFetcher{} - desc := col.Description() - err = vf.Init(&desc, nil, false) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, false) - assert.NoError(t, err) - - key := core.DataStoreKey{DocKey: "bae-ed7f0bd5-3f5b-5e93-9310-4b2e71ac460d"} - version, err := cid.Decode(testStates[1].cid) - assert.NoError(t, err) - - span := fetcher.NewVersionedSpan(key, version) - err = vf.Start(ctx, txn, span) - assert.NoError(t, err) - - _, doc, err := vf.FetchNextMap(ctx) - assert.NoError(t, err) - - var state map[string]interface{} - err = json.Unmarshal(testStates[1].payload, &state) - assert.NoError(t, err) - - compareVersionedDocs(t, doc, state) -} - -func TestVersionedFetcherNextMapV2(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(db) - assert.NoError(t, err) - - err = createDocUpdates(col) - assert.NoError(t, err) - - // assert.True(t, false) // force printing dump - - vf := &fetcher.VersionedFetcher{} - desc := col.Description() - err = vf.Init(&desc, nil, false) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, false) - assert.NoError(t, err) - - key := core.DataStoreKey{DocKey: "bae-ed7f0bd5-3f5b-5e93-9310-4b2e71ac460d"} - version, err := cid.Decode(testStates[2].cid) - assert.NoError(t, err) - - span := fetcher.NewVersionedSpan(key, version) - err = vf.Start(ctx, txn, span) - assert.NoError(t, err) - - _, doc, err := vf.FetchNextMap(ctx) - assert.NoError(t, err) - - var state map[string]interface{} - err = json.Unmarshal(testStates[2].payload, &state) - assert.NoError(t, err) - - compareVersionedDocs(t, doc, state) -} - -func TestVersionedFetcherNextMapV3(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(db) - assert.NoError(t, err) - - err = createDocUpdates(col) - assert.NoError(t, err) - - vf := &fetcher.VersionedFetcher{} - desc := col.Description() - err = vf.Init(&desc, nil, false) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, false) - assert.NoError(t, err) - - key := core.DataStoreKey{DocKey: "bae-ed7f0bd5-3f5b-5e93-9310-4b2e71ac460d"} - version, err := cid.Decode(testStates[3].cid) - assert.NoError(t, err) - - span := fetcher.NewVersionedSpan(key, version) - err = vf.Start(ctx, txn, span) - assert.NoError(t, err) - - _, doc, err := vf.FetchNextMap(ctx) - assert.NoError(t, err) - - var state map[string]interface{} - err = json.Unmarshal(testStates[3].payload, &state) - assert.NoError(t, err) - - compareVersionedDocs(t, doc, state) -} - -func TestVersionedFetcherIncrementalSeekTo(t *testing.T) { - ctx := context.Background() - 
db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(db) - assert.NoError(t, err) - - err = createDocUpdates(col) - assert.NoError(t, err) - - vf := &fetcher.VersionedFetcher{} - desc := col.Description() - err = vf.Init(&desc, nil, false) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, false) - assert.NoError(t, err) - - key := core.DataStoreKey{DocKey: "bae-ed7f0bd5-3f5b-5e93-9310-4b2e71ac460d"} - version, err := cid.Decode(testStates[0].cid) - assert.NoError(t, err) - - span := fetcher.NewVersionedSpan(key, version) - err = vf.Start(ctx, txn, span) - assert.NoError(t, err) - - // loop over updates so we can seek to them - // skip first (create) - for _, update := range testStates[1:] { - c, err := cid.Decode(update.cid) - assert.NoError(t, err) - - err = vf.SeekTo(ctx, c) - assert.NoError(t, err) - - _, doc, err := vf.FetchNextMap(ctx) - assert.NoError(t, err) - - var state map[string]interface{} - err = json.Unmarshal(update.payload, &state) - assert.NoError(t, err) - - compareVersionedDocs(t, doc, state) - } -} - -// func buildTestState() (*db.DB, *db.Collection, error) { - -// } - -func compareVersionedDocs(t *testing.T, doc, expected map[string]interface{}) { - for k, v := range doc { - if k == "_key" { - continue - } - // make sure our floats are converted - if f, ok := expected[k].(float64); ok { - if f == float64(int64(f)) { - expected[k] = float64(f) - } - - if u, ok := v.(uint64); ok { - v = float64(u) - } - } - - if i, ok := v.(uint64); ok { - if i == uint64(int64(i)) { - v = int64(i) - } - } - - assert.Equal(t, expected[k], v) - } -} - -func createDocUpdates(col client.Collection) error { - // col, err := newTestCollectionWithSchema(db) - // if err != ni - - // dockey: bae-ed7f0bd5-3f5b-5e93-9310-4b2e71ac460d - // cid: Qmcv2iU3myUBwuFCHe3w97sBMMER2FTY2rpbNBP6cqWb4S - // sub: - // -age: QmSom35RYVzYTE7nGsudvomv1pi9ffjEfSFsPZgQRM92v1 - // -name: QmeKjH2iuNjbWqZ5Lx9hSCiZDeCQvb4tHNyGm99dvB69M9 - // -points: Qmd7mvZJkL9uQoC2YZsQE3ijmyGAaHgSnZMvLY4H71Vmaz - // -verified: QmNRQwWjTBTDfAFUHkG8yuKmtbprYQtGs4jYxGJ5fCfXtn - // testJSONObj := []byte(`{ - // "name": "Alice", - // "age": 31, - // "points": 100, - // "verified": true - // }`) - - // doc, err := document.NewFromJSON(testJSONObj) - // if err != nil { - // return err - // } - - // if err := col.Save(doc); err != nil { - // return err - // } - - // // update #1 - // // cid: QmPgnQvhPuLGwVU4ZEcbRy7RNCxSkeS72eKwXusUrAEEXR - // // sub: - // // - name: QmZzL7AUq1L9whhHvVfbBJho6uAJQnAZWEFWYsTD2PgCKM - // // - points: Qmejouu71QPjTue2P1gLnrzqApa8cU6NPdBoGrCQdpSC1Q - // doc.Set("name", "Pete") - // doc.Set("points", 99.9) - // if err := col.Update(doc); err != nil { - // return err - // } - - // // update #2 - // // cid: QmRpMfTzExGrXat5W9uCAEtnSpRTvWBcd1hBYNWVPdN9Xh - // // sub: - // // - verified: QmNTLb5ChDx3HjeAMuWVm7wmgjbXPzDRdPNnzwRqG71T2Q - // // - age: QmfJTRSXy1x4VxaVDqSa35b3sXQkCAppPSwfhwKGkV2zez - // doc.Set("verified", false) - // doc.Set("age", 22) - // if err := col.Update(doc); err != nil { - // return err - // } - - // // update #3 - // // cid: QmRWYwKadjWqHLrzPKd7MdS4EoQuT2RzWVTaBxxVkeSjFH - // // sub: - // // - points: QmQGkkF1xpLkMFWtG5fNTGs6VwbNXESrtG2Mj35epLU8do - // doc.Set("points", 129.99) - // err = col.Update(doc) - - var doc *client.Document - var err error - ctx := context.Background() - for i, update := range testStates { - if i == 0 { // create - doc, err = client.NewDocFromJSON(update.payload) - if err != nil { - return err - } - if err := col.Save(ctx, 
doc); err != nil {
- return err
- }
- } else {
- if update.diffOps == nil {
- return errors.New("Expecting diffOp for update")
- }
-
- for k, v := range update.diffOps {
- doc.Set(k, v)
- }
- err = col.Update(ctx, doc)
- if err != nil {
- return err
- }
- }
- }
-
- return err
-}
-
-func newTestCollectionWithSchema(d client.DB) (client.Collection, error) {
- desc := client.CollectionDescription{
- Name: "users",
- Schema: client.SchemaDescription{
- Fields: []client.FieldDescription{
- {
- Name: "_key",
- Kind: client.FieldKind_DocKey,
- },
- {
- Name: "name",
- Kind: client.FieldKind_STRING,
- Typ: client.LWW_REGISTER,
- },
- {
- Name: "age",
- Kind: client.FieldKind_INT,
- Typ: client.LWW_REGISTER,
- },
- {
- Name: "verified",
- Kind: client.FieldKind_BOOL,
- Typ: client.LWW_REGISTER,
- },
- {
- Name: "points",
- Kind: client.FieldKind_FLOAT,
- Typ: client.LWW_REGISTER,
- },
- },
- },
- }
-
- ctx := context.Background()
- col, err := d.CreateCollection(ctx, desc)
- return col, err
-}
diff --git a/query/graphql/mapper/aggregate.go b/query/graphql/mapper/aggregate.go
new file mode 100644
index 0000000000..4d7671914e
--- /dev/null
+++ b/query/graphql/mapper/aggregate.go
@@ -0,0 +1,69 @@
+// Copyright 2022 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package mapper
+
+import "github.com/sourcenetwork/defradb/core"
+
+// An optional child target.
+type OptionalChildTarget struct {
+ // The field index of this target.
+ Index int
+
+ // The name of the target, for example '_sum' or 'Age'.
+ Name string
+
+ // If true this child target exists and has been requested.
+ //
+ // If false, this property is empty and in its default state.
+ HasValue bool
+}
+
+// The relative target/path from the object hosting an aggregate, to the property to
+// be aggregated.
+type AggregateTarget struct {
+ Targetable
+
+ // The property on the `HostIndex` that this aggregate targets.
+ //
+ // This may be empty if the aggregate targets a whole collection (e.g. Count),
+ // or if `HostIndex` is an inline array.
+ ChildTarget OptionalChildTarget
+}
+
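Editorial aside, not part of the diff: to make the two types above concrete, here is a rough sketch of how a mapped target for a request like `_sum(books: {field: rating})` might be populated. The index values and names are hypothetical.

```go
// Hypothetical sketch only: index values are invented for illustration.
target := AggregateTarget{
	Targetable: Targetable{
		// The host: the object being traversed, e.g. `books`.
		Field: Field{Index: 3, Name: "books"},
	},
	ChildTarget: OptionalChildTarget{
		Index:    1,        // where `rating` sits on the child mapping
		Name:     "rating", // the property being aggregated
		HasValue: true,     // a child property was explicitly requested
	},
}
_ = target
```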
+// Aggregate represents an aggregate operation definition.
+//
+// E.g. count, or average. This may have been requested by a consumer, or it may be
+// an internal dependency (of, for example, another aggregate).
+type Aggregate struct {
+ Field
+ // The mapping of this aggregate's parent/host.
+ core.DocumentMapping
+
+ // The collection of targets that this aggregate will aggregate.
+ AggregateTargets []AggregateTarget
+
+ // Any aggregates that this aggregate may depend on.
+ //
+ // For example, Average is dependent on a Sum and Count field.
+ Dependencies []*Aggregate
+}
+
+func (a *Aggregate) CloneTo(index int) Requestable {
+ return a.cloneTo(index)
+}
+
+func (a *Aggregate) cloneTo(index int) *Aggregate {
+ return &Aggregate{
+ Field: *a.Field.cloneTo(index),
+ DocumentMapping: a.DocumentMapping,
+ AggregateTargets: a.AggregateTargets,
+ }
+}
diff --git a/query/graphql/mapper/commitSelect.go b/query/graphql/mapper/commitSelect.go
new file mode 100644
index 0000000000..e77c11f451
--- /dev/null
+++ b/query/graphql/mapper/commitSelect.go
@@ -0,0 +1,55 @@
+// Copyright 2022 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package mapper
+
+// CommitType represents a type of [CommitSelect].
+type CommitType int
+
+const (
+ NoneCommitType = CommitType(iota)
+ LatestCommits
+ AllCommits
+ OneCommit
+)
+
+// CommitSelect represents a commit request from a consumer.
+//
+// E.g. allCommits, or latestCommits.
+type CommitSelect struct {
+ // The underlying Select, defining the information requested.
+ Select
+
+ // The type of commit select request.
+ Type CommitType
+
+ // The key of the target document for which to get commits.
+ DocKey string
+
+ // The field for which commits have been requested.
+ FieldName string
+
+ // The parent Cid for which commit information has been requested.
+ Cid string
+}
+
+func (s *CommitSelect) CloneTo(index int) Requestable {
+ return s.cloneTo(index)
+}
+
+func (s *CommitSelect) cloneTo(index int) *CommitSelect {
+ return &CommitSelect{
+ Select: *s.Select.cloneTo(index),
+ DocKey: s.DocKey,
+ Type: s.Type,
+ FieldName: s.FieldName,
+ Cid: s.Cid,
+ }
+}
diff --git a/query/graphql/mapper/descriptions.go b/query/graphql/mapper/descriptions.go
new file mode 100644
index 0000000000..527f121162
--- /dev/null
+++ b/query/graphql/mapper/descriptions.go
@@ -0,0 +1,64 @@
+// Copyright 2022 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package mapper
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+
+ "github.com/sourcenetwork/defradb/client"
+ "github.com/sourcenetwork/defradb/core"
+ "github.com/sourcenetwork/defradb/datastore"
+)
+
+// DescriptionsRepo is a cache of previously requested collection descriptions
+// that can be used to reduce multiple reads of the same collection description.
+type DescriptionsRepo struct {
+ ctx context.Context
+ txn datastore.Txn
+
+ collectionDescriptionsByName map[string]client.CollectionDescription
+}
+
+// NewDescriptionsRepo instantiates a new DescriptionsRepo with the given context and transaction.
+func NewDescriptionsRepo(ctx context.Context, txn datastore.Txn) *DescriptionsRepo {
+ return &DescriptionsRepo{
+ ctx: ctx,
+ txn: txn,
+ collectionDescriptionsByName: map[string]client.CollectionDescription{},
+ }
+}
+
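An illustrative usage sketch (assuming a `ctx` and `txn` are in scope): the repo reads through to the systemstore once per name, then serves repeat lookups from its in-memory map.

```go
repo := NewDescriptionsRepo(ctx, txn)

// First lookup hits the systemstore and populates the cache.
usersDesc, err := repo.getCollectionDesc("users")
if err != nil {
	// handle the missing-description error
}

// Subsequent lookups for the same name are served from the cache.
usersDescAgain, _ := repo.getCollectionDesc("users")
_, _ = usersDesc, usersDescAgain
```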
+// getCollectionDesc returns the description of the collection with the given name.
+//
+// Will return an empty description and an error if a description of the given name
+// is not found. Will first look in the repo's cache for the description before
+// querying the datastore.
+func (r *DescriptionsRepo) getCollectionDesc(name string) (client.CollectionDescription, error) {
+ if desc, hasDesc := r.collectionDescriptionsByName[name]; hasDesc {
+ return desc, nil
+ }
+
+ key := core.NewCollectionKey(name)
+ buf, err := r.txn.Systemstore().Get(r.ctx, key.ToDS())
+ if err != nil {
+ return client.CollectionDescription{}, fmt.Errorf("Failed to get collection description: %w", err)
+ }
+
+ desc := client.CollectionDescription{}
+ err = json.Unmarshal(buf, &desc)
+ if err != nil {
+ return client.CollectionDescription{}, err
+ }
+
+ r.collectionDescriptionsByName[name] = desc
+ return desc, nil
+}
diff --git a/query/graphql/mapper/field.go b/query/graphql/mapper/field.go
new file mode 100644
index 0000000000..440208783d
--- /dev/null
+++ b/query/graphql/mapper/field.go
@@ -0,0 +1,47 @@
+// Copyright 2022 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package mapper
+
+// Field contains the most basic information about a requestable.
+type Field struct {
+ // The location of this field within its parent.
+ Index int
+
+ // The name of this field. For example 'Age', or '_group'.
+ Name string
+}
+
+func (f *Field) GetIndex() int {
+ return f.Index
+}
+
+func (f *Field) GetName() string {
+ return f.Name
+}
+
+func (f *Field) AsTargetable() (*Targetable, bool) {
+ return nil, false
+}
+
+func (f *Field) AsSelect() (*Select, bool) {
+ return nil, false
+}
+
+func (f *Field) CloneTo(index int) Requestable {
+ return f.cloneTo(index)
+}
+
+func (f *Field) cloneTo(index int) *Field {
+ return &Field{
+ Index: index,
+ Name: f.Name,
+ }
+}
diff --git a/query/graphql/mapper/mapper.go b/query/graphql/mapper/mapper.go
new file mode 100644
index 0000000000..6470a7d13f
--- /dev/null
+++ b/query/graphql/mapper/mapper.go
@@ -0,0 +1,1004 @@
+// Copyright 2022 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package mapper
+
+import (
+ "context"
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/graphql-go/graphql/language/ast"
+ "github.com/sourcenetwork/defradb/client"
+ "github.com/sourcenetwork/defradb/connor"
+ "github.com/sourcenetwork/defradb/core"
+ "github.com/sourcenetwork/defradb/datastore"
+ "github.com/sourcenetwork/defradb/query/graphql/parser"
+
+ parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types"
+)
+
+// ToSelect converts the given [parser.Select] into a [Select].
+//
+// In the process of doing so it will construct the document map required to access the data
+// yielded by the [Select].
+func ToSelect(ctx context.Context, txn datastore.Txn, parsed *parser.Select) (*Select, error) { + descriptionsRepo := NewDescriptionsRepo(ctx, txn) + // the top-level select will always have index=0, and no parent collection name + return toSelect(descriptionsRepo, 0, parsed, "") +} + +// toSelect converts the given [parser.Select] into a [Select]. +// +// In the process of doing so it will construct the document map required to access the data +// yielded by the [Select]. +func toSelect( + descriptionsRepo *DescriptionsRepo, + thisIndex int, + parsed *parser.Select, + parentCollectionName string, +) (*Select, error) { + collectionName, err := getCollectionName(descriptionsRepo, parsed, parentCollectionName) + if err != nil { + return nil, err + } + + mapping, desc, err := getTopLevelInfo(descriptionsRepo, parsed, collectionName) + if err != nil { + return nil, err + } + + fields, aggregates, err := getRequestables(parsed, mapping, desc, descriptionsRepo) + if err != nil { + return nil, err + } + + // Needs to be done before resolving aggregates, else filter conversion may fail there + filterDependencies, err := resolveFilterDependencies(descriptionsRepo, collectionName, parsed.Filter, mapping) + if err != nil { + return nil, err + } + fields = append(fields, filterDependencies...) + + aggregates = appendUnderlyingAggregates(aggregates, mapping) + fields, err = resolveAggregates( + parsed, + aggregates, + fields, + mapping, + desc, + descriptionsRepo, + ) + if err != nil { + return nil, err + } + + // If there is a groupby, and no inner group has been requested, we need to map the property here + if parsed.GroupBy != nil { + if _, isGroupFieldMapped := mapping.IndexesByName[parserTypes.GroupFieldName]; !isGroupFieldMapped { + index := mapping.GetNextIndex() + mapping.Add(index, parserTypes.GroupFieldName) + } + } + + return &Select{ + Targetable: toTargetable(thisIndex, parsed, mapping), + DocumentMapping: *mapping, + Cid: parsed.CID, + CollectionName: desc.Name, + Fields: fields, + }, nil +} + +// resolveAggregates figures out which fields the given aggregates are targeting +// and converts the aggregateRequest into an Aggregate, appending it onto the given +// fields slice. +// +// If an aggregate targets a field that doesn't yet exist, it will create it and +// append the new target field as well as the aggregate. The mapping will also be +// updated with any new fields/aggregates. +func resolveAggregates( + parsed *parser.Select, + aggregates []*aggregateRequest, + inputFields []Requestable, + mapping *core.DocumentMapping, + desc *client.CollectionDescription, + descriptionsRepo *DescriptionsRepo, +) ([]Requestable, error) { + fields := inputFields + dependenciesByParentId := map[int][]int{} + + for _, aggregate := range aggregates { + aggregateTargets := make([]AggregateTarget, len(aggregate.targets)) + + for i, target := range aggregate.targets { + var host Requestable + var hostTarget *Targetable + var childTarget OptionalChildTarget + + // If the host has not been requested the child mapping may not yet exist and + // we must create it before we can convert the filter. + childIsMapped := len(mapping.IndexesByName[target.hostExternalName]) != 0 + + var hasHost bool + var convertedFilter *Filter + if childIsMapped { + fieldDesc, isField := desc.GetField(target.hostExternalName) + if isField && !fieldDesc.IsObject() { + // If the hostExternalName matches a non-object field + // we can just take it as a field-requestable as only + // objects are targetable-requestables. 
+ hasHost = true + host = &Field{ + Index: int(fieldDesc.ID), + Name: target.hostExternalName, + } + } else { + childObjectIndex := mapping.FirstIndexOfName(target.hostExternalName) + convertedFilter = ToFilter(target.filter, &mapping.ChildMappings[childObjectIndex]) + + host, hasHost = tryGetTarget(target.hostExternalName, convertedFilter, fields) + } + } + + if !hasHost { + // If a matching host is not found, we need to construct and add it. + index := mapping.GetNextIndex() + + dummyParsed := &parser.Select{ + Root: parsed.Root, + Name: target.hostExternalName, + } + + childCollectionName, err := getCollectionName(descriptionsRepo, dummyParsed, desc.Name) + if err != nil { + return nil, err + } + + childMapping, _, err := getTopLevelInfo(descriptionsRepo, dummyParsed, childCollectionName) + if err != nil { + return nil, err + } + childMapping = childMapping.CloneWithoutRender() + mapping.SetChildAt(index, *childMapping) + + if !childIsMapped { + // If the child was not mapped, the filter will not have been converted yet + // so we must do that now. + convertedFilter = ToFilter(target.filter, &mapping.ChildMappings[index]) + } + + dummyJoin := &Select{ + Targetable: Targetable{ + Field: Field{ + Index: index, + Name: target.hostExternalName, + }, + Filter: convertedFilter, + }, + CollectionName: childCollectionName, + DocumentMapping: *childMapping, + } + + fields = append(fields, dummyJoin) + mapping.Add(index, target.hostExternalName) + + host = dummyJoin + hostTarget = &dummyJoin.Targetable + } else { + var isTargetable bool + hostTarget, isTargetable = host.AsTargetable() + if !isTargetable { + // If the host is not targetable, such as when it is an inline-array field, + // we don't need to worry about preserving the targetable information and + // can just take the field properties. + hostTarget = &Targetable{ + Field: Field{ + Index: host.GetIndex(), + Name: host.GetName(), + }, + } + } + } + + if target.childExternalName != "" { + hostSelect, isHostSelectable := host.AsSelect() + if !isHostSelectable { + // I believe this is dead code as the gql library should always catch this error first + return nil, fmt.Errorf( + "Aggregate target host must be selectable, but was not", + ) + } + + if len(hostSelect.IndexesByName[target.childExternalName]) == 0 { + // I believe this is dead code as the gql library should always catch this error first + return nil, fmt.Errorf( + "Unable to identify aggregate child: %s", target.childExternalName, + ) + } + + childTarget = OptionalChildTarget{ + // If there are multiple children of the same name there is no way + // for us (or the consumer) to identify which one they are hoping for + // so we take the first. 
+ Index: hostSelect.IndexesByName[target.childExternalName][0],
+ Name: target.childExternalName,
+ HasValue: true,
+ }
+ }
+
+ aggregateTargets[i] = AggregateTarget{
+ Targetable: *hostTarget,
+ ChildTarget: childTarget,
+ }
+ }
+
+ newAggregate := Aggregate{
+ Field: aggregate.field,
+ DocumentMapping: *mapping,
+ AggregateTargets: aggregateTargets,
+ }
+ fields = append(fields, &newAggregate)
+ dependenciesByParentId[aggregate.field.Index] = aggregate.dependencyIndexes
+ }
+
+ // Once aggregates have been resolved we pair up their dependencies
+ for aggregateId, dependencyIds := range dependenciesByParentId {
+ aggregate := fieldAt(fields, aggregateId).(*Aggregate)
+ for _, dependencyId := range dependencyIds {
+ aggregate.Dependencies = append(aggregate.Dependencies, fieldAt(fields, dependencyId).(*Aggregate))
+ }
+ }
+
+ return fields, nil
+}
+
+func fieldAt(fields []Requestable, index int) Requestable {
+ for _, f := range fields {
+ if f.GetIndex() == index {
+ return f
+ }
+ }
+ return nil
+}
+
+// aggregateDependencies maps aggregate names to the names of any aggregates
+// that they may be dependent on.
+var aggregateDependencies = map[string][]string{
+ parserTypes.AverageFieldName: {
+ parserTypes.CountFieldName,
+ parserTypes.SumFieldName,
+ },
+}
+
+// appendUnderlyingAggregates scans the given inputAggregates for any composite aggregates
+// (e.g. average), and appends any missing dependencies to the collection and mapping.
+//
+// It will try to make use of existing aggregates that match the targeting parameters
+// before creating new ones. It will also adjust the target filters if required (e.g.
+// average skips nil items).
+func appendUnderlyingAggregates(
+ inputAggregates []*aggregateRequest,
+ mapping *core.DocumentMapping,
+) []*aggregateRequest {
+ aggregates := inputAggregates
+
+ // Loop through the aggregates slice, including items that may have been appended
+ // to the slice whilst looping.
+ for i := 0; i < len(aggregates); i++ {
+ aggregate := aggregates[i]
+
+ dependencies, hasDependencies := aggregateDependencies[aggregate.field.Name]
+ // If the aggregate has no dependencies, then we don't need to do anything and we continue.
+ if !hasDependencies {
+ continue
+ }
+
+ for _, target := range aggregate.targets {
+ if target.childExternalName != "" {
+ if _, isAggregate := parserTypes.Aggregates[target.childExternalName]; !isAggregate {
+ // Append a not-nil filter if the target is not an aggregate.
+ // Aggregate-targets are excluded here as they are assumed to always have a value and
+ // amending the filter introduces significant complexity for both machine and developer.
+ appendNotNilFilter(target, target.childExternalName)
+ }
+ }
+ }
+
+ for _, dependencyName := range dependencies {
+ var newAggregate *aggregateRequest
+ aggregates, newAggregate = appendIfNotExists(
+ dependencyName,
+ aggregate.targets,
+ aggregates,
+ mapping,
+ )
+ aggregate.dependencyIndexes = append(aggregate.dependencyIndexes, newAggregate.field.Index)
+ }
+ }
+ return aggregates
+}
+
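To make the dependency expansion concrete: per the aggregateDependencies map above, a consumer-requested `_avg` gains hidden `_count` and `_sum` aggregates over the same targets, and the average is later derived from their results. A minimal sketch of that derivation (the combination step itself lives elsewhere in the query pipeline):

```go
// Sketch only: an average is derived from its Sum and Count dependencies
// rather than being computed directly from the underlying items.
func averageFromDependencies(sum float64, count int) float64 {
	if count == 0 {
		return 0 // nothing matched; avoid dividing by zero
	}
	return sum / float64(count)
}
```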
+// appendIfNotExists attempts to match the given name and targets against existing
+// aggregates; if a match is not found, it will append a new aggregate.
+func appendIfNotExists(
+ name string,
+ targets []*aggregateRequestTarget,
+ aggregates []*aggregateRequest,
+ mapping *core.DocumentMapping,
+) ([]*aggregateRequest, *aggregateRequest) {
+ field, exists := tryGetMatchingAggregate(name, targets, aggregates)
+ if exists {
+ // If a match is found, there is nothing to do so we return the aggregates slice
+ // unchanged.
+ return aggregates, field
+ }
+
+ // If a match is not found, create, map and append the
+ // dependency to the aggregates collection.
+ index := mapping.GetNextIndex()
+
+ field = &aggregateRequest{
+ field: Field{
+ Index: index,
+ Name: name,
+ },
+ targets: targets,
+ }
+
+ mapping.Add(index, field.field.Name)
+ return append(aggregates, field), field
+}
+
+// getRequestables returns a converted slice of consumer-requested Requestables
+// and aggregateRequests from the given parsed.Fields slice.
+func getRequestables(
+ parsed *parser.Select,
+ mapping *core.DocumentMapping,
+ desc *client.CollectionDescription,
+ descriptionsRepo *DescriptionsRepo,
+) (fields []Requestable, aggregates []*aggregateRequest, err error) {
+ for _, field := range parsed.Fields {
+ switch f := field.(type) {
+ case *parser.Field:
+ // We can map all fields to the first (and only) index
+ // as they support no value modifiers (such as filters/limits/etc).
+ // All fields should have already been mapped by getTopLevelInfo.
+ index := mapping.FirstIndexOfName(f.Name)
+
+ fields = append(fields, &Field{
+ Index: index,
+ Name: f.Name,
+ })
+
+ mapping.RenderKeys = append(mapping.RenderKeys, core.RenderKey{
+ Index: index,
+ Key: f.Alias,
+ })
+ case *parser.Select:
+ index := mapping.GetNextIndex()
+
+ // Aggregate targets are not known at this point, and must be evaluated
+ // after all requested fields have been evaluated - so we note which
+ // aggregates have been requested and their targets here, before finalizing
+ // their evaluation later.
+ if _, isAggregate := parserTypes.Aggregates[f.Name]; isAggregate {
+ aggregateTargets, err := getAggregateSources(f)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if len(aggregateTargets) == 0 {
+ return nil, nil, fmt.Errorf(
+ "Aggregate must be provided with a property to aggregate.",
+ )
+ }
+
+ aggregates = append(aggregates, &aggregateRequest{
+ field: Field{
+ Index: index,
+ Name: f.Name,
+ },
+ targets: aggregateTargets,
+ })
+ } else {
+ innerSelect, err := toSelect(descriptionsRepo, index, f, desc.Name)
+ if err != nil {
+ return nil, nil, err
+ }
+ fields = append(fields, innerSelect)
+ mapping.SetChildAt(index, innerSelect.DocumentMapping)
+ }
+
+ mapping.RenderKeys = append(mapping.RenderKeys, core.RenderKey{
+ Index: index,
+ Key: f.Alias,
+ })
+
+ mapping.Add(index, f.Name)
+ default:
+ return nil, nil, fmt.Errorf(
+ "Unexpected field type: %T",
+ field,
+ )
+ }
+ }
+ return
+}
+
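For orientation, a rough sketch of the core.DocumentMapping calls used throughout this file (behavior inferred from their call sites here): names are registered against integer indexes, and later resolved back, taking the first index when a name is ambiguous.

```go
mapping := core.NewDocumentMapping()

// New requestables claim the next free index and register their name.
index := mapping.GetNextIndex()
mapping.Add(index, "name")

// Filters, group-bys, and order-bys resolve names back to indexes,
// taking the first match when the same name was mapped more than once.
first := mapping.FirstIndexOfName("name")
_ = first
```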
+// getCollectionName returns the name of the parsed collection. This may be empty
+// if this is a commit request.
+func getCollectionName(
+ descriptionsRepo *DescriptionsRepo,
+ parsed *parser.Select,
+ parentCollectionName string,
+) (string, error) {
+ if parsed.Name == parserTypes.GroupFieldName {
+ return parentCollectionName, nil
+ } else if parsed.Root == parserTypes.CommitSelection {
+ return parentCollectionName, nil
+ }
+
+ if parentCollectionName != "" {
+ parentDescription, err := descriptionsRepo.getCollectionDesc(parentCollectionName)
+ if err != nil {
+ return "", err
+ }
+
+ hostFieldDesc, parentHasField := parentDescription.GetField(parsed.Name)
+ if parentHasField && hostFieldDesc.RelationType != 0 {
+ // If this field exists on the parent, and it is a child object
+ // then this collection name is the collection name of the child.
+ return hostFieldDesc.Schema, nil
+ }
+ }
+
+ return parsed.Name, nil
+}
+
+// getTopLevelInfo returns the collection description and maps the fields directly
+// on the object.
+func getTopLevelInfo(
+ descriptionsRepo *DescriptionsRepo,
+ parsed *parser.Select,
+ collectionName string,
+) (*core.DocumentMapping, *client.CollectionDescription, error) {
+ mapping := core.NewDocumentMapping()
+
+ if parsed.Root != parserTypes.CommitSelection {
+ mapping.Add(core.DocKeyFieldIndex, parserTypes.DocKeyFieldName)
+
+ desc, err := descriptionsRepo.getCollectionDesc(collectionName)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // Map all fields from schema into the map as they are fetched automatically
+ for _, f := range desc.Schema.Fields {
+ if f.IsObject() {
+ // Objects are skipped, as they are not fetched by default and
+ // have to be requested via selects.
+ continue
+ }
+ mapping.Add(int(f.ID), f.Name)
+ }
+
+ return mapping, &desc, nil
+ }
+
+ if parsed.Name == parserTypes.LinksFieldName {
+ for f := range parserTypes.LinksFields {
+ mapping.Add(mapping.GetNextIndex(), f)
+ }
+ } else {
+ for f := range parserTypes.VersionFields {
+ mapping.Add(mapping.GetNextIndex(), f)
+ }
+ }
+
+ return mapping, &client.CollectionDescription{}, nil
+}
+
+func resolveFilterDependencies(
+ descriptionsRepo *DescriptionsRepo,
+ parentCollectionName string,
+ source *parser.Filter,
+ mapping *core.DocumentMapping,
+) ([]Requestable, error) {
+ if source == nil {
+ return nil, nil
+ }
+
+ return resolveInnerFilterDependencies(
+ descriptionsRepo,
+ parentCollectionName,
+ source.Conditions,
+ mapping,
+ )
+}
+
+func resolveInnerFilterDependencies(
+ descriptionsRepo *DescriptionsRepo,
+ parentCollectionName string,
+ source map[string]interface{},
+ mapping *core.DocumentMapping,
+) ([]Requestable, error) {
+ newFields := []Requestable{}
+
+ for key := range source {
+ if strings.HasPrefix(key, "$") {
+ continue
+ }
+
+ propertyMapped := len(mapping.IndexesByName[key]) != 0
+
+ if propertyMapped {
+ // Inner properties should be recursively checked here; however, at the moment
+ // filters do not support querying any deeper anyway.
+ // https://github.com/sourcenetwork/defradb/issues/509
+ continue
+ }
+
+ index := mapping.GetNextIndex()
+
+ dummyParsed := &parser.Select{
+ Name: key,
+ }
+
+ childCollectionName, err := getCollectionName(descriptionsRepo, dummyParsed, parentCollectionName)
+ if err != nil {
+ return nil, err
+ }
+
+ childMapping, _, err := getTopLevelInfo(descriptionsRepo, dummyParsed, childCollectionName)
+ if err != nil {
+ return nil, err
+ }
+ childMapping = childMapping.CloneWithoutRender()
+ mapping.SetChildAt(index, *childMapping)
+
+ dummyJoin := &Select{
+ Targetable: Targetable{
+ Field: Field{
+ Index: index,
+ Name: key,
+ },
+ },
+ CollectionName: childCollectionName,
+ DocumentMapping: *childMapping,
+ }
+
+ newFields = append(newFields, dummyJoin)
+ mapping.Add(index, key)
+ }
+
+ return newFields, nil
+}
+
+// ToCommitSelect converts the given [parser.CommitSelect] into a [CommitSelect].
+//
+// In the process of doing so it will construct the document map required to access the data
+// yielded by the [Select] embedded in the [CommitSelect].
+func ToCommitSelect(ctx context.Context, txn datastore.Txn, parsed *parser.CommitSelect) (*CommitSelect, error) {
+ underlyingSelect, err := ToSelect(ctx, txn, parsed.ToSelect())
+ if err != nil {
+ return nil, err
+ }
+ return &CommitSelect{
+ Select: *underlyingSelect,
+ DocKey: parsed.DocKey,
+ Type: CommitType(parsed.Type),
+ FieldName: parsed.FieldName,
+ Cid: parsed.Cid,
+ }, nil
+}
+
+// ToMutation converts the given [parser.Mutation] into a [Mutation].
+//
+// In the process of doing so it will construct the document map required to access the data
+// yielded by the [Select] embedded in the [Mutation].
+func ToMutation(ctx context.Context, txn datastore.Txn, parsed *parser.Mutation) (*Mutation, error) {
+ underlyingSelect, err := ToSelect(ctx, txn, parsed.ToSelect())
+ if err != nil {
+ return nil, err
+ }
+
+ return &Mutation{
+ Select: *underlyingSelect,
+ Type: MutationType(parsed.Type),
+ Data: parsed.Data,
+ }, nil
+}
+
+func toTargetable(index int, parsed *parser.Select, docMap *core.DocumentMapping) Targetable {
+ return Targetable{
+ Field: toField(index, parsed),
+ DocKeys: parsed.DocKeys,
+ Filter: ToFilter(parsed.Filter, docMap),
+ Limit: toLimit(parsed.Limit),
+ GroupBy: toGroupBy(parsed.GroupBy, docMap),
+ OrderBy: toOrderBy(parsed.OrderBy, docMap),
+ }
+}
+
+func toField(index int, parsed *parser.Select) Field {
+ return Field{
+ Index: index,
+ Name: parsed.Name,
+ }
+}
+
+// ToFilter converts the given `source` parser filter to a Filter using the given mapping.
+//
+// Any requestables identified by name will be converted to being identified by index instead.
+func ToFilter(source *parser.Filter, mapping *core.DocumentMapping) *Filter {
+ if source == nil {
+ return nil
+ }
+ conditions := make(map[connor.FilterKey]interface{}, len(source.Conditions))
+
+ for sourceKey, sourceClause := range source.Conditions {
+ key, clause := toFilterMap(sourceKey, sourceClause, mapping)
+ conditions[key] = clause
+ }
+
+ return &Filter{
+ Conditions: conditions,
+ ExternalConditions: source.Conditions,
+ }
+}
+
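A worked example of what the conversion below produces (the field index is hypothetical): a name-keyed external condition such as `{"Age": {"$gt": 21}}` becomes an index-keyed condition usable by connor.

```go
// Hypothetical sketch, assuming the field `Age` is mapped to index 2.
converted := map[connor.FilterKey]interface{}{
	&PropertyIndex{Index: 2}: map[connor.FilterKey]interface{}{
		&Operator{Operation: "$gt"}: int64(21),
	},
}
_ = converted
```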
+// toFilterMap converts a consumer-defined filter key-value into a filter clause
+// keyed by field index.
+//
+// The returned key will either be a PropertyIndex (field index), or an Operator.
+func toFilterMap(
+ sourceKey string,
+ sourceClause interface{},
+ mapping *core.DocumentMapping,
+) (connor.FilterKey, interface{}) {
+ if strings.HasPrefix(sourceKey, "$") {
+ key := &Operator{
+ Operation: sourceKey,
+ }
+ switch typedClause := sourceClause.(type) {
+ case []interface{}:
+ // If the clause is an array then we need to convert any inner maps.
+ returnClauses := []interface{}{}
+ for _, innerSourceClause := range typedClause {
+ var returnClause interface{}
+ switch typedInnerSourceClause := innerSourceClause.(type) {
+ case map[string]interface{}:
+ innerMapClause := map[connor.FilterKey]interface{}{}
+ for innerSourceKey, innerSourceValue := range typedInnerSourceClause {
+ rKey, rValue := toFilterMap(innerSourceKey, innerSourceValue, mapping)
+ innerMapClause[rKey] = rValue
+ }
+ returnClause = innerMapClause
+ default:
+ returnClause = innerSourceClause
+ }
+ returnClauses = append(returnClauses, returnClause)
+ }
+ return key, returnClauses
+ default:
+ return key, typedClause
+ }
+ } else {
+ // If there are multiple properties of the same name we can just take the first, as
+ // we have no other reasonable way of identifying which property they mean if multiple
+ // consumer-specified requestables are available. Aggregate dependencies should not
+ // impact this as they are added after selects.
+ index := mapping.FirstIndexOfName(sourceKey)
+ key := &PropertyIndex{
+ Index: index,
+ }
+ switch typedClause := sourceClause.(type) {
+ case map[string]interface{}:
+ returnClause := map[connor.FilterKey]interface{}{}
+ for innerSourceKey, innerSourceValue := range typedClause {
+ var innerMapping *core.DocumentMapping
+ switch innerSourceValue.(type) {
+ case map[string]interface{}:
+ // If the innerSourceValue is also a map, then we should parse the nested clause
+ // using the child mapping, as this key must refer to a host property in a join
+ // and deeper keys must refer to properties on the child items.
+ innerMapping = &mapping.ChildMappings[index]
+ default:
+ innerMapping = mapping
+ }
+ rKey, rValue := toFilterMap(innerSourceKey, innerSourceValue, innerMapping)
+ returnClause[rKey] = rValue
+ }
+ return key, returnClause
+ default:
+ return key, sourceClause
+ }
+ }
+}
+
+func toLimit(source *parserTypes.Limit) *Limit {
+ if source == nil {
+ return nil
+ }
+
+ return &Limit{
+ Limit: source.Limit,
+ Offset: source.Offset,
+ }
+}
+
+func toGroupBy(source *parserTypes.GroupBy, mapping *core.DocumentMapping) *GroupBy {
+ if source == nil {
+ return nil
+ }
+
+ indexes := make([]int, len(source.Fields))
+ for i, fieldName := range source.Fields {
+ // If there are multiple properties of the same name we can just take the first, as
+ // we have no other reasonable way of identifying which property they mean if multiple
+ // consumer-specified requestables are available. Aggregate dependencies should not
+ // impact this as they are added after selects.
+ key := mapping.FirstIndexOfName(fieldName)
+ indexes[i] = key
+ }
+
+ return &GroupBy{
+ FieldIndexes: indexes,
+ }
+}
+
+func toOrderBy(source *parserTypes.OrderBy, mapping *core.DocumentMapping) *OrderBy {
+ if source == nil {
+ return nil
+ }
+
+ conditions := make([]OrderCondition, 0, len(source.Conditions))
+ for _, condition := range source.Conditions {
+ fields := strings.Split(condition.Field, ".")
+ fieldIndexes := make([]int, len(fields))
+ currentMapping := mapping
+ for i, field := range fields {
+ // If there are multiple properties of the same name we can just take the first, as
+ // we have no other reasonable way of identifying which property they mean if multiple
+ // consumer-specified requestables are available. Aggregate dependencies should not
+ // impact this as they are added after selects.
+ fieldIndex := currentMapping.FirstIndexOfName(field)
+ fieldIndexes[i] = fieldIndex
+ if i != len(fields)-1 {
+ // No need to descend for the last field (indexing further would panic).
+ currentMapping = &currentMapping.ChildMappings[fieldIndex]
+ }
+ }
+
+ conditions = append(conditions, OrderCondition{
+ FieldIndexes: fieldIndexes,
+ Direction: SortDirection(condition.Direction),
+ })
+ }
+
+ return &OrderBy{
+ Conditions: conditions,
+ }
+}
+
+// RunFilter runs the given filter expression
+// using the document, and evaluates.
+func RunFilter(doc core.Doc, filter *Filter) (bool, error) {
+ if filter == nil {
+ return true, nil
+ }
+
+ return connor.Match(filter.Conditions, doc)
+}
+
+// equal compares the given Targetables and returns true if they can be considered equal.
+// Note: Currently only compares Index, Name and Filter as that is all that is currently required,
+// but this should be extended in the future.
+func (s Targetable) equal(other Targetable) bool {
+ if s.Index != other.Index &&
+ s.Name != other.Name {
+ return false
+ }
+
+ if s.Filter == nil {
+ return other.Filter == nil
+ }
+
+ if other.Filter == nil {
+ return s.Filter == nil
+ }
+
+ return reflect.DeepEqual(s.Filter.Conditions, other.Filter.Conditions)
+}
+
+// aggregateRequest is an intermediary struct defining a consumer-requested
+// aggregate. These are defined before it can be determined which exact
+// fields they target, and so they only specify the names of the target properties
+// as they are known to the consumer.
+type aggregateRequest struct {
+ // This field.
+ //
+ // The Index and Name of *this* aggregate are known, and are specified here.
+ field Field
+
+ // The targets of this aggregate, as defined by the consumer.
+ targets []*aggregateRequestTarget
+ dependencyIndexes []int
+}
+
+// aggregateRequestTarget contains the user-defined information for an aggregate
+// target before the actual underlying target is identified and/or created.
+type aggregateRequestTarget struct {
+ // The name of the host target as known by the consumer.
+ //
+ // This name may match zero to many field names requested by the consumer.
+ hostExternalName string
+
+ // The name of the child target as known by the consumer. This property is
+ // optional and may be left default depending on the aggregate type and the type of
+ // the host property.
+ //
+ // This name may match zero to many field names requested by the consumer.
+ childExternalName string
+
+ // The aggregate filter specified by the consumer for this target. Optional.
+ filter *parser.Filter +} + +// Returns the source of the aggregate as requested by the consumer +func getAggregateSources(field *parser.Select) ([]*aggregateRequestTarget, error) { + targets := make([]*aggregateRequestTarget, len(field.Statement.Arguments)) + + for i, argument := range field.Statement.Arguments { + switch argumentValue := argument.Value.GetValue().(type) { + case string: + targets[i] = &aggregateRequestTarget{ + hostExternalName: argumentValue, + } + case []*ast.ObjectField: + hostExternalName := argument.Name.Value + var childExternalName string + var filter *parser.Filter + + fieldArg, hasFieldArg := tryGet(argumentValue, parserTypes.Field) + if hasFieldArg { + if innerPathStringValue, isString := fieldArg.Value.GetValue().(string); isString { + childExternalName = innerPathStringValue + } + } + + filterArg, hasFilterArg := tryGet(argumentValue, parserTypes.FilterClause) + if hasFilterArg { + var err error + filter, err = parser.NewFilter(filterArg.Value.(*ast.ObjectValue)) + if err != nil { + return nil, err + } + } + + targets[i] = &aggregateRequestTarget{ + hostExternalName: hostExternalName, + childExternalName: childExternalName, + filter: filter, + } + } + } + + return targets, nil +} + +func tryGet(fields []*ast.ObjectField, name string) (*ast.ObjectField, bool) { + for _, field := range fields { + if field.Name.Value == name { + return field, true + } + } + return nil, false +} + +// tryGetMatchingAggregate scans the given collection for aggregates with the given name and targets. +// +// Will return the matching target and true if one is found, otherwise will return false. +func tryGetMatchingAggregate( + name string, + targets []*aggregateRequestTarget, + collection []*aggregateRequest, +) (*aggregateRequest, bool) { +collectionLoop: + for _, aggregate := range collection { + if aggregate.field.Name != name { + continue + } + if len(aggregate.targets) != len(targets) { + continue + } + + for i, target := range targets { + potentialMatchingTarget := aggregate.targets[i] + + if target.hostExternalName != potentialMatchingTarget.hostExternalName { + continue collectionLoop + } + + if target.childExternalName != potentialMatchingTarget.childExternalName { + continue collectionLoop + } + + if target.filter == nil && potentialMatchingTarget.filter != nil { + continue collectionLoop + } + + if potentialMatchingTarget.filter == nil && target.filter != nil { + continue collectionLoop + } + + if target.filter == nil && potentialMatchingTarget.filter == nil { + // target matches, so continue the `target` loop and check the remaining. + continue + } + + if !reflect.DeepEqual(target.filter.Conditions, potentialMatchingTarget.filter.Conditions) { + continue collectionLoop + } + } + + return aggregate, true + } + return nil, false +} + +// tryGetTarget scans the given collection of Requestables for an item that matches the given +// name and filter. +// +// If a match is found the matching field will be returned along with true. If a match is not +// found, nil and false will be returned. 
+func tryGetTarget(name string, filter *Filter, collection []Requestable) (Requestable, bool) { + dummyTarget := Targetable{ + Field: Field{ + Name: name, + }, + Filter: filter, + } + + for _, field := range collection { + if field == nil { + continue + } + targetable, isTargetable := field.AsTargetable() + if isTargetable && targetable.equal(dummyTarget) { + // Return the original field in order to preserve type specific info + return field, true + } + } + return nil, false +} + +// appendNotNilFilter appends a not nil filter for the given child field +// to the given Select. +func appendNotNilFilter(field *aggregateRequestTarget, childField string) { + if field.filter == nil { + field.filter = &parser.Filter{} + } + + if field.filter.Conditions == nil { + field.filter.Conditions = map[string]interface{}{} + } + + childBlock, hasChildBlock := field.filter.Conditions[childField] + if !hasChildBlock { + childBlock = map[string]interface{}{} + field.filter.Conditions[childField] = childBlock + } + + typedChildBlock := childBlock.(map[string]interface{}) + typedChildBlock["$ne"] = nil +} diff --git a/query/graphql/mapper/mutation.go b/query/graphql/mapper/mutation.go new file mode 100644 index 0000000000..c3c5829294 --- /dev/null +++ b/query/graphql/mapper/mutation.go @@ -0,0 +1,45 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package mapper + +type MutationType int + +const ( + NoneMutationType MutationType = iota + CreateObjects + UpdateObjects + DeleteObjects +) + +// Mutation represents a request to mutate data stored in Defra. +type Mutation struct { + // The underlying Select, defining the information requested upon return. + Select + + // The type of mutation. For example a create request. + Type MutationType + + // The data to be used for the mutation. For example, during a create this + // will be the json representation of the object to be inserted. + Data string +} + +func (m *Mutation) CloneTo(index int) Requestable { + return m.cloneTo(index) +} + +func (m *Mutation) cloneTo(index int) *Mutation { + return &Mutation{ + Select: *m.Select.cloneTo(index), + Type: m.Type, + Data: m.Data, + } +} diff --git a/query/graphql/mapper/requestable.go b/query/graphql/mapper/requestable.go new file mode 100644 index 0000000000..487a736b66 --- /dev/null +++ b/query/graphql/mapper/requestable.go @@ -0,0 +1,49 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package mapper + +// Requestable is the interface shared by all items that may be +// requested by consumers. +// +// For example, integer fields, document mutations, or aggregates. +type Requestable interface { + // GetIndex returns the index at which this item can be found upon + // its parent. + GetIndex() int + + // GetName returns the name of this item. For example 'Age', or + // '_count'. 
+ GetName() string
+
+ // AsTargetable tries to return the targetable component of this
+ // item. If the item-type does not support targeting, it will
+ // return nil and false, otherwise it will return a pointer to
+ // the targetable component and true.
+ AsTargetable() (*Targetable, bool)
+
+ // AsSelect tries to return the select component of this
+ // item. If the item-type does not support selection, it will
+ // return nil and false, otherwise it will return a pointer to
+ // the select component and true.
+ AsSelect() (*Select, bool)
+
+ // CloneTo deep clones this item using the provided index instead
+ // of the index of this item.
+ CloneTo(index int) Requestable
+}
+
+var (
+ _ Requestable = (*Aggregate)(nil)
+ _ Requestable = (*CommitSelect)(nil)
+ _ Requestable = (*Field)(nil)
+ _ Requestable = (*Mutation)(nil)
+ _ Requestable = (*Select)(nil)
+)
diff --git a/query/graphql/mapper/select.go b/query/graphql/mapper/select.go
new file mode 100644
index 0000000000..c95573feac
--- /dev/null
+++ b/query/graphql/mapper/select.go
@@ -0,0 +1,63 @@
+// Copyright 2022 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package mapper
+
+import "github.com/sourcenetwork/defradb/core"
+
+// Select represents a request to return data from Defra.
+//
+// It wraps child Fields belonging to this Select.
+type Select struct {
+ // Targeting information used to restrict or format the result.
+ Targetable
+
+ // The document mapping for this select, describing how items yielded
+ // for this select can be accessed and rendered.
+ core.DocumentMapping
+
+ // A commit identifier that can be specified to request data at a given time.
+ Cid string
+
+ // The name of the collection that this Select selects data from.
+ CollectionName string
+
+ // The fields that are to be selected.
+ //
+ // These can include items such as version information, aggregates, and other
+ // Selects.
+ Fields []Requestable
+}
+
+func (s *Select) AsTargetable() (*Targetable, bool) {
+ return &s.Targetable, true
+}
+
+func (s *Select) AsSelect() (*Select, bool) {
+ return s, true
+}
+
+func (s *Select) CloneTo(index int) Requestable {
+ return s.cloneTo(index)
+}
+
+func (s *Select) cloneTo(index int) *Select {
+ return &Select{
+ Targetable: *s.Targetable.cloneTo(index),
+ DocumentMapping: s.DocumentMapping,
+ Cid: s.Cid,
+ CollectionName: s.CollectionName,
+ Fields: s.Fields,
+ }
+}
+
+func (s *Select) FieldAt(index int) Requestable {
+ return fieldAt(s.Fields, index)
+}
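A small sketch of the cloning contract shared by these implementations: CloneTo returns a copy re-homed at the supplied index, leaving the receiver untouched.

```go
original := &Field{Index: 1, Name: "Age"}

// The clone keeps its name but lives at the new index.
clone := original.CloneTo(5)

_ = clone.GetIndex()    // 5
_ = clone.GetName()     // "Age"
_ = original.GetIndex() // still 1
```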
diff --git a/query/graphql/mapper/targetable.go b/query/graphql/mapper/targetable.go
new file mode 100644
index 0000000000..d5802daebe
--- /dev/null
+++ b/query/graphql/mapper/targetable.go
@@ -0,0 +1,148 @@
+// Copyright 2022 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package mapper
+
+import (
+ "github.com/sourcenetwork/defradb/connor"
+ "github.com/sourcenetwork/defradb/core"
+)
+
+var (
+ _ connor.FilterKey = (*PropertyIndex)(nil)
+ _ connor.FilterKey = (*Operator)(nil)
+)
+
+// PropertyIndex is a FilterKey that represents a property in a document.
+type PropertyIndex struct {
+ // The index at which the target property can be found on its parent.
+ Index int
+}
+
+func (k *PropertyIndex) GetProp(data interface{}) interface{} {
+ if data == nil {
+ return nil
+ }
+
+ return data.(core.Doc).Fields[k.Index]
+}
+
+func (k *PropertyIndex) GetOperatorOrDefault(defaultOp string) string {
+ return defaultOp
+}
+
+// Operator is a FilterKey that represents a filter operator.
+type Operator struct {
+ // The filter operation string that this Operator represents.
+ //
+ // E.g. "$eq", or "$and".
+ Operation string
+}
+
+func (k *Operator) GetProp(data interface{}) interface{} {
+ return data
+}
+
+func (k *Operator) GetOperatorOrDefault(defaultOp string) string {
+ return k.Operation
+}
+
+// Filter represents a series of conditions that may reduce the number of
+// records that a query returns.
+type Filter struct {
+ // The filter conditions that must pass in order for a record to be returned.
+ Conditions map[connor.FilterKey]interface{}
+
+ // The filter conditions in human-readable form.
+ ExternalConditions map[string]interface{}
+}
+
+func NewFilter() *Filter {
+ return &Filter{
+ Conditions: map[connor.FilterKey]interface{}{},
+ }
+}
+
+// Limit represents a limit-offset pairing that controls how many
+// and which records will be returned from a query.
+type Limit struct {
+ // The maximum number of records that can be returned from a query.
+ Limit int64
+
+ // The offset from which counting towards the Limit will begin.
+ // Records before the Offset will not be returned.
+ Offset int64
+}
+
+// GroupBy represents a grouping instruction on a query.
+type GroupBy struct {
+ // The indexes of fields by which documents should be grouped. Ordered.
+ FieldIndexes []int
+}
+
+type SortDirection string
+
+const (
+ ASC SortDirection = "ASC"
+ DESC SortDirection = "DESC"
+)
+
+// OrderCondition represents a single property by which query results should
+// be ordered, and the direction in which they should be ordered.
+type OrderCondition struct {
+ // A chain of field indexes by which the property to sort by may be found.
+ // This is relative to the host/defining object and may traverse through
+ // multiple object layers.
+ FieldIndexes []int
+
+ // The direction in which the sort should be applied.
+ Direction SortDirection
+}
+
+type OrderBy struct {
+ Conditions []OrderCondition
+}
+
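To tie the two FilterKey implementations together (an editorial sketch; the core.Doc literal below is assumed for illustration): PropertyIndex extracts a field from a document by position, while Operator passes the data through and instead selects which connor operator runs.

```go
// Hypothetical document with "Alice" stored at field index 1.
doc := core.Doc{Fields: []interface{}{"some-doc-key", "Alice"}}

nameKey := &PropertyIndex{Index: 1}
_ = nameKey.GetProp(doc)                // "Alice"
_ = nameKey.GetOperatorOrDefault("$eq") // "$eq": properties fall back to the default

eqKey := &Operator{Operation: "$eq"}
_ = eqKey.GetProp(doc)                  // the doc itself, unchanged
_ = eqKey.GetOperatorOrDefault("$eq")   // "$eq": operators return their own operation
```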
+// Targetable represents a targetable property.
+type Targetable struct {
+ // The basic field information of this property.
+ Field
+
+ // An optional collection of docKeys that can be specified to restrict results
+ // to those belonging to this set.
+ DocKeys []string
+
+ // An optional filter, that can be specified to restrict results to documents
+ // that satisfies all of its conditions.
+ Filter *Filter
+
+ // An optional limit, that can be specified to restrict the number and location
+ // of documents returned.
+ Limit *Limit
+
+ // An optional grouping clause, that can be specified to group results by property
+ // value.
+ GroupBy *GroupBy
+
+ // An optional order clause, that can be specified to order results by property
+ // value.
+ OrderBy *OrderBy
+}
+
+func (t *Targetable) cloneTo(index int) *Targetable {
+ return &Targetable{
+ Field: *t.Field.cloneTo(index),
+ DocKeys: t.DocKeys,
+ Filter: t.Filter,
+ Limit: t.Limit,
+ GroupBy: t.GroupBy,
+ OrderBy: t.OrderBy,
+ }
+}
diff --git a/query/graphql/parser/commit.go b/query/graphql/parser/commit.go
index 2a50953b85..18387ff7fd 100644
--- a/query/graphql/parser/commit.go
+++ b/query/graphql/parser/commit.go
@@ -49,49 +49,26 @@ type CommitSelect struct {
 OrderBy *parserTypes.OrderBy
 Fields []Selection
-
- Statement *ast.Field
 }
 
 func (c CommitSelect) GetRoot() parserTypes.SelectionType {
 return parserTypes.CommitSelection
 }
 
-func (c CommitSelect) GetStatement() ast.Node {
- return c.Statement
-}
-
-func (c CommitSelect) GetName() string {
- return c.Name
-}
-
-func (c CommitSelect) GetAlias() string {
- return c.Alias
-}
-
-func (c CommitSelect) GetSelections() []Selection {
- return c.Fields
-}
-
 func (c CommitSelect) ToSelect() *Select {
 return &Select{
- Name: c.Name,
- Alias: c.Alias,
- Limit: c.Limit,
- OrderBy: c.OrderBy,
- Statement: c.Statement,
- Fields: c.Fields,
- Root: parserTypes.CommitSelection,
+ Alias: c.Alias,
+ Limit: c.Limit,
+ OrderBy: c.OrderBy,
+ Fields: c.Fields,
+ Root: parserTypes.CommitSelection,
 }
 }
 
 func parseCommitSelect(field *ast.Field) (*CommitSelect, error) {
 commit := &CommitSelect{
- Statement: field,
- }
- commit.Name = field.Name.Value
- if field.Alias != nil {
- commit.Alias = field.Alias.Value
+ Name: field.Name.Value,
+ Alias: getFieldAlias(field),
 }
 
 var ok bool
diff --git a/query/graphql/parser/filter.go b/query/graphql/parser/filter.go
index f27d0ad8e0..e4ec889ec7 100644
--- a/query/graphql/parser/filter.go
+++ b/query/graphql/parser/filter.go
@@ -11,7 +11,6 @@
 package parser
 
 import (
- "context"
 "errors"
 "fmt"
 "strconv"
@@ -21,14 +20,9 @@ import (
 gqlp "github.com/graphql-go/graphql/language/parser"
 gqls "github.com/graphql-go/graphql/language/source"
 
- "github.com/sourcenetwork/defradb/connor"
 parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types"
 )
 
-type EvalContext struct {
- context.Context
-}
-
 // Filter contains the parsed condition map to be
 // run by the Filter Evaluator.
 // @todo: Cache filter structure for faster condition
@@ -36,9 +30,6 @@ type EvalContext struct {
 type Filter struct {
 // parsed filter conditions
 Conditions map[string]interface{}
-
- // raw graphql statement
- Statement *ast.ObjectValue
 }
 
 // type condition
@@ -51,7 +42,6 @@ func NewFilter(stmt *ast.ObjectValue) (*Filter, error) {
 return nil, err
 }
 return &Filter{
- Statement: stmt,
 Conditions: conditions,
 }, nil
 }
@@ -224,16 +214,6 @@ func parseVal(val ast.Value, recurseFn parseFn) (interface{}, error) {
 return nil, errors.New("Failed to parse condition value from query filter statement")
 }
 
-// RunFilter runs the given filter expression
-// using the document, and evaluates.
-func RunFilter(doc map[string]interface{}, filter *Filter, ctx EvalContext) (bool, error) { - if filter == nil { - return true, nil - } - - return connor.Match(filter.Conditions, doc) -} - /* userCollection := db.getCollection("users") doc := userCollection.NewFromJSON("{ diff --git a/query/graphql/parser/filter_test.go b/query/graphql/parser/filter_test.go deleted file mode 100644 index 74ded728a3..0000000000 --- a/query/graphql/parser/filter_test.go +++ /dev/null @@ -1,830 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package parser - -import ( - "testing" - - gqlp "github.com/graphql-go/graphql/language/parser" - - parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" - - "github.com/graphql-go/graphql/language/ast" - "github.com/graphql-go/graphql/language/source" - "github.com/stretchr/testify/assert" -) - -func getQueryFilterObject(query string) (*Filter, error) { - source := source.NewSource(&source.Source{ - Body: []byte(query), - Name: "", - }) - - doc, err := gqlp.Parse(gqlp.ParseParams{Source: source}) - if err != nil { - return nil, err - } - - filterObj := doc.Definitions[0].(*ast.OperationDefinition).SelectionSet.Selections[0].(*ast.Field).Arguments[0].Value.(*ast.ObjectValue) - return NewFilter(filterObj) -} - -func TestNewFilterFromString(t *testing.T) { - _, err := NewFilterFromString(`name: {_eq: "bob"}`) - assert.NoError(t, err) -} - -func TestParseConditions_Empty(t *testing.T) { - var query = (` - query { - users(filter: {}) - }`) - - filter, err := getQueryFilterObject(query) - assert.NoError(t, err) - - assert.Equal(t, filter.Conditions, map[string]interface{}{}) -} - -func TestParseConditions_Single_Int(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: 1}) - }`), - map[string]interface{}{ - "x": int64(1), - }, - ) -} - -func TestParseConditions_Single_Float(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: 1.1}) - }`), - map[string]interface{}{ - "x": 1.1, - }, - ) -} - -func TestParseConditions_Single_String(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: "test"}) - }`), - map[string]interface{}{ - "x": "test", - }, - ) -} - -func TestParseConditions_Single_Enum(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: ASC}) - }`), - map[string]interface{}{ - "x": "ASC", - }, - ) -} - -func TestParseConditions_Single_Boolean(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: true}) - }`), - map[string]interface{}{ - "x": true, - }, - ) -} - -// Ignoring Null input, see: -// - https://github.com/graphql-go/graphql/issues/178 -// - https://github.com/99designs/gqlgen/issues/1416 -// func TestParseConditions_Single_Null(t *testing.T) { -// runParseConditionTest(t, (` -// query { -// users(filter: {x: null}) -// }`), -// map[string]interface{}{ -// "x": nil, -// }, -// ) -// } - -func TestParseConditions_List_Int(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: [1,2,3]}) - }`), - map[string]interface{}{ - "x": []interface{}{int64(1), int64(2), int64(3)}, - }, - ) -} - -func 
TestParseConditions_List_Float(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: [1.1,2.2,3.3]}) - }`), - map[string]interface{}{ - "x": []interface{}{1.1, 2.2, 3.3}, - }, - ) -} - -func TestParseConditions_List_String(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: ["hello", "world", "bye"]}) - }`), - map[string]interface{}{ - "x": []interface{}{"hello", "world", "bye"}, - }, - ) -} - -func TestParseConditions_Single_Object_Simple(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: {y: 1}}) - }`), - map[string]interface{}{ - "x": map[string]interface{}{ - "y": int64(1), - }, - }, - ) -} - -func TestParseConditions_Single_Object_Multiple(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: {y: 1, z: 2}}) - }`), - map[string]interface{}{ - "x": map[string]interface{}{ - "y": int64(1), - "z": int64(2), - }, - }, - ) -} - -func TestParseConditions_Single_Object_Nested(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {x: {y: 1, z: {a: 2}}}) - }`), - map[string]interface{}{ - "x": map[string]interface{}{ - "y": int64(1), - "z": map[string]interface{}{ - "a": int64(2), - }, - }, - }, - ) -} - -func TestParseConditions_Single_List_Objects(t *testing.T) { - runParseConditionTest(t, (` - query { - users(filter: {_and: [{x: 1}, {y: 1}]}) - }`), - map[string]interface{}{ - "$and": []interface{}{ - map[string]interface{}{ - "x": int64(1), - }, - map[string]interface{}{ - "y": int64(1), - }, - }, - }, - ) -} - -func runParseConditionTest(t *testing.T, query string, target map[string]interface{}) { - filter, err := getQueryFilterObject(query) - assert.NoError(t, err) - - assert.Equal(t, target, filter.Conditions) -} - -// func TestRunFilter_Single_Int_Match(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: 1}) -// }`), -// map[string]interface{}{ -// "x": int64(1), -// }, -// true, -// ) -// } - -// func TestRunFilter_Single_Int_FilterNoMatch(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: 2}) -// }`), -// map[string]interface{}{ -// "x": int64(1), -// }, -// false, -// ) -// } - -// func TestRunFilter_Single_Int_DataNoMatch(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: 1}) -// }`), -// map[string]interface{}{ -// "x": int64(2), -// }, -// false, -// ) -// } - -// func TestRunFilter_Single_Int_Match_MultiData(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: 2}) -// }`), -// map[string]interface{}{ -// "x": int64(1), -// "y": "hello", -// }, -// false, -// ) -// } - -// func TestRunFilter_Single_String_Match(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: "test"}) -// }`), -// map[string]interface{}{ -// "x": "test", -// }, -// true, -// ) -// } - -// func TestRunFilter_Single_String_NoMatch(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: "test"}) -// }`), -// map[string]interface{}{ -// "x": "somethingelse", -// }, -// false, -// ) -// } - -// func TestRunFilter_Single_Float_Match(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: 1.1}) -// }`), -// map[string]interface{}{ -// "x": 1.1, -// }, -// true, -// ) -// } - -// func TestRunFilter_Single_Float_NoMatch(t *testing.T) { -// runRunFilterTest(t, (` -// query { -// users(filter: {x: 1.1}) -// }`), -// map[string]interface{}{ -// "x": 2.2, -// }, -// false, -// ) -// } - -type testCase struct { - description string - query string - 
data map[string]interface{} - shouldPass bool -} - -func TestRunFilter_TestCases(t *testing.T) { - testCases := []testCase{ - { - "Single Int, match", - (` - query { - users(filter: {x: 1}) - }`), - map[string]interface{}{ - "x": uint64(1), - }, - true, - }, - { - "Single Int, no match", - (` - query { - users(filter: {x: 1}) - }`), - map[string]interface{}{ - "x": int64(2), - }, - false, - }, - { - "Single Int, match, multiple data fields", - (` - query { - users(filter: {x: 1}) - }`), - map[string]interface{}{ - "x": int64(1), - "y": int64(2), - }, - true, - }, - { - "Single string, match", - (` - query { - users(filter: {x: "test"}) - }`), - map[string]interface{}{ - "x": "test", - }, - true, - }, - { - "Single string, no match", - (` - query { - users(filter: {x: "test"}) - }`), - map[string]interface{}{ - "x": "nothing", - }, - false, - }, - { - "Single float, match", - (` - query { - users(filter: {x: 1.1}) - }`), - map[string]interface{}{ - "x": 1.1, - }, - true, - }, - { - "Single float, no match", - (` - query { - users(filter: {x: 1.1}) - }`), - map[string]interface{}{ - "x": 2.2, - }, - false, - }, - { - "Single boolean, match", - (` - query { - users(filter: {x: true}) - }`), - map[string]interface{}{ - "x": true, - }, - true, - }, - // @todo: implement list equality filter - // { - // "Single list, match", - // (` - // query { - // users(filter: {x: [1,2,3]}) - // }`), - // map[string]interface{}{ - // "x": []interface{}{ - // int64(1), - // int64(2), - // int64(3), - // }, - // }, - // false, - // }, - { - "Multi condition match", - (` - query { - users(filter: {x: 1, y: 1}) - }`), - map[string]interface{}{ - "x": int64(1), - "y": int64(1), - }, - true, - }, - { - "Object, match", - (` - query { - users(filter: {x: {y: 1}}) - }`), - map[string]interface{}{ - "x": map[string]interface{}{ - "y": int64(1), - }, - }, - true, - }, - { - "Nested object, match", - (` - query { - users(filter: {x: {y: {z: 1}}}) - }`), - map[string]interface{}{ - "x": map[string]interface{}{ - "y": map[string]interface{}{ - "z": int64(1), - }, - }, - }, - true, - }, - { - "Nested object, multi match", - (` - query { - users(filter: {a: 1, x: {y: {z: 1}}}) - }`), - map[string]interface{}{ - "x": map[string]interface{}{ - "y": map[string]interface{}{ - "z": int64(1), - }, - }, - "a": int64(1), - }, - true, - }, - { - "Explicit condition match: Int _eq", - (` - query { - users(filter: {x: {_eq: 1}}) - }`), - map[string]interface{}{ - "x": int64(1), - }, - true, - }, - { - "Explicit condition match: Int _ne", - (` - query { - users(filter: {x: {_ne: 1}}) - }`), - map[string]interface{}{ - "x": int64(2), - }, - true, - }, - { - "Explicit condition match: Float _ne", - (` - query { - users(filter: {x: {_ne: 1.1}}) - }`), - map[string]interface{}{ - "x": int64(2), - }, - true, - }, - { - "Explicit condition match: String _ne", - (` - query { - users(filter: {x: {_ne: "test"}}) - }`), - map[string]interface{}{ - "x": int64(2), - }, - true, - }, - { - "Explicit condition match: Int greater than (_gt)", - (` - query { - users(filter: {x: {_gt: 1}}) - }`), - map[string]interface{}{ - "x": int64(2), - }, - true, - }, - { - "Explicit condition no match: Int greater than (_gt)", - (` - query { - users(filter: {x: {_gt: 1}}) - }`), - map[string]interface{}{ - "x": int64(0), - }, - false, - }, - { - "Explicit muti condition match: Int greater than (_gt) less than (_lt)", - (` - query { - users(filter: {x: {_gt: 1}, y: {_lt: 2}}) - }`), - map[string]interface{}{ - "x": int64(3), - "y": int64(1), - }, - true, - }, - { 
- "Explicit condition match: Int & Float greater than (_gt)", - (` - query { - users(filter: {x: {_gt: 1}}) - }`), - map[string]interface{}{ - "x": 1.1, - }, - true, - }, - { - "Explicit condition match: In set (_in)", - (` - query { - users(filter: {x: {_in: [1,2,3]}}) - }`), - map[string]interface{}{ - "x": int64(1), - }, - true, - }, - { - "Explicit condition match: Not in set (_nin)", - (` - query { - users(filter: {x: {_nin: [1,2,3]}}) - }`), - map[string]interface{}{ - "x": int64(4), - }, - true, - }, - { - "Compound logical condition match: _and", - (` - query { - users(filter: {_and: [ {x: {_lt: 10}}, {x: {_gt: 5}} ]}) - }`), - map[string]interface{}{ - "x": int64(6), - }, - true, - }, - { - "Compound logical condition no match: _and", - (` - query { - users(filter: {_and: [ {x: {_lt: 10}}, {x: {_gt: 5}} ]}) - }`), - map[string]interface{}{ - "x": int64(11), - }, - false, - }, - { - "Compound logical condition match: _or", - (` - query { - users(filter: {_or: [ {x: 10}, {x: 5} ]}) - }`), - map[string]interface{}{ - "x": int64(5), - }, - true, - }, - { - "Compound nested logical condition match: _or #2", - (` - query { - users(filter: {_or: - [ - {_and: [ {x: {_lt: 10}}, {x: {_gt: 5}} ]}, - {_and: [ {y: {_lt: 1}}, {y: {_gt: 0}} ]}, - ]}) - }`), - map[string]interface{}{ - "x": int64(6), - }, - true, - }, - { - "Compound nested logical condition match: _or #2", - (` - query { - users(filter: {_or: - [ - {_and: [ {x: {_lt: 10}}, {x: {_gt: 5}} ]}, - {_and: [ {y: {_lt: 1}}, {y: {_gt: 0}} ]}, - ]}) - }`), - map[string]interface{}{ - "y": 0.1, - }, - true, - }, - { - "Nested object logical condition match: _or", - (` - query { - users(filter: {_or: [ {x: 10}, {x: 5} ]}) - }`), - map[string]interface{}{ - "x": int64(5), - }, - true, - }, - } - for _, test := range testCases { - runRunFilterTest(t, test) - } -} - -func runRunFilterTest(t *testing.T, test testCase) { - filter, err := getQueryFilterObject(test.query) - assert.NoError(t, err) - - passed, err := RunFilter(test.data, filter, EvalContext{}) - assert.NoError(t, err) - assert.True(t, passed == test.shouldPass, "Test Case faild: %s", test.description) -} - -func getQuerySortObject(query string) (*parserTypes.OrderBy, error) { - source := source.NewSource(&source.Source{ - Body: []byte(query), - Name: "", - }) - - doc, err := gqlp.Parse(gqlp.ParseParams{Source: source}) - if err != nil { - return nil, err - } - - sortObj := doc.Definitions[0].(*ast.OperationDefinition).SelectionSet.Selections[0].(*ast.Field).Arguments[0].Value.(*ast.ObjectValue) - conditions, err := ParseConditionsInOrder(sortObj) - if err != nil { - return nil, err - } - return &parserTypes.OrderBy{ - Conditions: conditions, - Statement: sortObj, - }, nil -} - -func TestParseConditionsInOrder_Empty(t *testing.T) { - runParseConditionInOrderTest(t, (` - query { - users(order: {}) - }`), - []parserTypes.SortCondition{}, - ) -} - -func TestParseConditionsInOrder_Simple(t *testing.T) { - runParseConditionInOrderTest(t, (` - query { - users(order: {name: ASC}) - }`), - []parserTypes.SortCondition{ - { - Field: "name", - Direction: parserTypes.ASC, - }, - }, - ) -} - -func TestParseConditionsInOrder_Simple_Multiple(t *testing.T) { - runParseConditionInOrderTest(t, (` - query { - users(order: {name: ASC, date: DESC}) - }`), - []parserTypes.SortCondition{ - { - Field: "name", - Direction: parserTypes.ASC, - }, - { - Field: "date", - Direction: parserTypes.DESC, - }, - }, - ) -} - -func TestParseConditionsInOrder_Embedded(t *testing.T) { - runParseConditionInOrderTest(t, (` 
- query { - users(order: {author: {name: ASC}}) - }`), - []parserTypes.SortCondition{ - { - Field: "author.name", - Direction: parserTypes.ASC, - }, - }, - ) -} - -func TestParseConditionsInOrder_Embedded_Multiple(t *testing.T) { - runParseConditionInOrderTest(t, (` - query { - users(order: {author: {name: ASC, birthday: DESC}}) - }`), - []parserTypes.SortCondition{ - { - Field: "author.name", - Direction: parserTypes.ASC, - }, - { - Field: "author.birthday", - Direction: parserTypes.DESC, - }, - }, - ) -} - -func TestParseConditionsInOrder_Embedded_Nested(t *testing.T) { - runParseConditionInOrderTest(t, (` - query { - users(order: {author: {address: {street_name: DESC}}}) - }`), - []parserTypes.SortCondition{ - { - Field: "author.address.street_name", - Direction: parserTypes.DESC, - }, - }, - ) -} - -func TestParseConditionsInOrder_Complex(t *testing.T) { - runParseConditionInOrderTest(t, (` - query { - users(order: {name: ASC, author: {birthday: ASC, address: {street_name: DESC}}}) - }`), - []parserTypes.SortCondition{ - { - Field: "name", - Direction: parserTypes.ASC, - }, - { - Field: "author.birthday", - Direction: parserTypes.ASC, - }, - { - Field: "author.address.street_name", - Direction: parserTypes.DESC, - }, - }, - ) -} - -func runParseConditionInOrderTest(t *testing.T, query string, target []parserTypes.SortCondition) { - sort, err := getQuerySortObject(query) - assert.NoError(t, err) - - assert.Equal(t, target, sort.Conditions) -} diff --git a/query/graphql/parser/mutation.go b/query/graphql/parser/mutation.go index a8ddfeccd4..0b01bdd149 100644 --- a/query/graphql/parser/mutation.go +++ b/query/graphql/parser/mutation.go @@ -11,7 +11,6 @@ package parser import ( - "encoding/json" "errors" "strings" @@ -41,43 +40,6 @@ var ( ErrEmptyDataPayload = errors.New("given data payload is empty") ) -type ObjectPayload struct { - Object map[string]interface{} - Array []interface{} -} - -// NewObjectPayload parses a given payload string as JSON -// and returns a ObjectPayload struct decoded with either -// a JSON object, or JSON array. -func NewObjectPayload(payload string) (ObjectPayload, error) { - obj := ObjectPayload{} - if payload == "" { - return obj, errors.New("Object payload value cannot be empty") - } - var d interface{} - err := json.Unmarshal([]byte(payload), &d) - if err != nil { - return obj, err - } - - switch v := d.(type) { - // array usually means its a JSON PATCH object, unless its a create, then its - // just multiple documents - case []interface{}: - obj.Array = v - - case map[string]interface{}: - obj.Object = v - - default: - return obj, errors.New( - "Object payload value has unknown structure, must be a JSON object or array", - ) - } - - return obj, nil -} - // Mutation is a field on the MutationType // of a graphql query. It includes all the possible // arguments and all @@ -98,38 +60,22 @@ type Mutation struct { Data string Fields []Selection - - Statement *ast.Field } func (m Mutation) GetRoot() parserTypes.SelectionType { return parserTypes.ObjectSelection } -func (m Mutation) GetStatement() ast.Node { - return m.Statement -} - -func (m Mutation) GetSelections() []Selection { - return m.Fields -} - -func (m Mutation) GetName() string { - return m.Name -} - -func (m Mutation) GetAlias() string { - return m.Alias -} - // ToSelect returns a basic Select object, with the same Name, // Alias, and Fields as the Mutation object. 
Used to create a // Select planNode for the mutation return objects func (m Mutation) ToSelect() *Select { return &Select{ - Name: m.Schema, - Alias: m.Alias, - Fields: m.Fields, + Name: m.Schema, + Alias: m.Alias, + Fields: m.Fields, + DocKeys: m.IDs, + Filter: m.Filter, } } @@ -169,7 +115,7 @@ func parseMutationOperationDefinition(def *ast.OperationDefinition) (*OperationD // which includes sub fields, and may include // filters, IDs, payloads, etc. func parseMutation(field *ast.Field) (*Mutation, error) { - mut := &Mutation{Statement: field} + mut := &Mutation{} mut.Name = field.Name.Value if field.Alias != nil { mut.Alias = field.Alias.Value diff --git a/query/graphql/parser/parser.go b/query/graphql/parser/parser.go deleted file mode 100644 index 32b9b8bcf3..0000000000 --- a/query/graphql/parser/parser.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package parser - -import ( - "github.com/graphql-go/graphql/language/ast" -) - -type Statement interface { - GetStatement() ast.Node -} diff --git a/query/graphql/parser/query.go b/query/graphql/parser/query.go index 2543585276..f573e0ccfc 100644 --- a/query/graphql/parser/query.go +++ b/query/graphql/parser/query.go @@ -12,8 +12,6 @@ package parser import ( "errors" - "fmt" - "reflect" "strconv" "github.com/graphql-go/graphql/language/ast" @@ -33,10 +31,6 @@ type Query struct { Statement *ast.Document } -func (q Query) GetStatement() ast.Node { - return q.Statement -} - type OperationDefinition struct { Name string Selections []Selection @@ -49,10 +43,6 @@ func (q OperationDefinition) GetStatement() ast.Node { } type Selection interface { - Statement - GetName() string - GetAlias() string - GetSelections() []Selection GetRoot() parserTypes.SelectionType } @@ -61,22 +51,11 @@ type Selection interface { // fields, and query arguments like filters, // limits, etc. type Select struct { - // The unique, internal name of the Select - this may differ from that which - // is visible in the query string Name string - // The identifier to be used in the rendered results, typically specified by // the user. Alias string - // The name by which the the consumer refers to the select, e.g. `_group` - ExternalName string - CollectionName string - - // If true, this Select will not be exposed/rendered to the consumer and will - // only be used internally - Hidden bool - DocKeys []string CID string @@ -105,82 +84,18 @@ func (s Select) GetRoot() parserTypes.SelectionType { return s.Root } -func (s Select) GetStatement() ast.Node { - return s.Statement -} - -func (s Select) GetSelections() []Selection { - return s.Fields -} - -func (s Select) GetName() string { - return s.Name -} - -func (s Select) GetAlias() string { - return s.Alias -} - -// Equal compares the given Selects and returns true if they can be considered equal. -// Note: Currently only compares Name, ExternalName and Filter as that is all that is -// currently required, but this should be extended in the future. 
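// A sketch of what the mutation ToSelect change above enables, using stand-in
// types rather than DefraDB's own: the mutation's document keys and filter now
// ride along on the derived select, so the response re-reads exactly the
// mutated documents through the normal select machinery.
type sketchMutation struct {
	Schema string
	IDs    []string
	Filter map[string]interface{}
}

type sketchSelect struct {
	Name    string
	DocKeys []string
	Filter  map[string]interface{}
}

func sketchToSelect(m sketchMutation) sketchSelect {
	return sketchSelect{
		Name:    m.Schema,
		DocKeys: m.IDs, // narrow the read-back to the written documents
		Filter:  m.Filter,
	}
}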
-func (s Select) Equal(other Select) bool { - if s.Name != other.Name && - s.ExternalName != other.ExternalName { - return false - } - - if s.Filter == nil { - return other.Filter == nil - } - - return reflect.DeepEqual(s.Filter.Conditions, other.Filter.Conditions) -} - -// Clone shallow-clones the Select using the provided names. -// Note: Currently only Filter and Statement are taken from the source select, -// this will likely expand in the near future. -func (s Select) Clone(name string, externalName string) *Select { - return &Select{ - Name: name, - ExternalName: externalName, - Filter: s.Filter, - Statement: s.Statement, - } -} - // Field implements Selection type Field struct { Name string Alias string Root parserTypes.SelectionType - - // raw graphql statement - Statement *ast.Field } func (c Field) GetRoot() parserTypes.SelectionType { return c.Root } -// GetSelectionSet implements Selection -func (f Field) GetSelections() []Selection { - return []Selection{} -} - -func (f Field) GetName() string { - return f.Name -} - -func (f Field) GetAlias() string { - return f.Alias -} - -func (f Field) GetStatement() ast.Node { - return f.Statement -} - // ParseQuery parses a root ast.Document, and returns a // formatted Query object. // Requires a non-nil doc, will error if given a nil @@ -285,14 +200,11 @@ func parseQueryOperationDefinition(def *ast.OperationDefinition) (*OperationDefi // which includes sub fields, and may include // filters, limits, orders, etc.. func parseSelect(rootType parserTypes.SelectionType, field *ast.Field, index int) (*Select, error) { - name, alias := getFieldName(field, index) - slct := &Select{ - Name: name, - Alias: alias, - ExternalName: field.Name.Value, - Root: rootType, - Statement: field, + Alias: getFieldAlias(field), + Name: field.Name.Value, + Root: rootType, + Statement: field, } // parse arguments @@ -401,31 +313,11 @@ func getArgumentKeyValue(field *ast.Field, argument *ast.Argument) (string, ast. return argument.Name.Value, argument.Value } -// getFieldName returns the internal name and alias of the given field at the given index. -// The returned name/alias may be different from the values directly on the field in order to -// distinguish between multiple aliases of the same underlying field. -func getFieldName(field *ast.Field, index int) (name string, alias string) { - // Fields that take arguments (e.g. filters) that can be aliased must be renamed internally - // to allow code to distinguish between multiple properties targeting the same underlying field - // that may or may not have different arguments. 
It is hoped that this renaming can be removed - // once we migrate to an array-based document structure as per - // https://github.com/sourcenetwork/defradb/issues/395 - if _, isAggregate := parserTypes.Aggregates[field.Name.Value]; isAggregate || - field.Name.Value == parserTypes.GroupFieldName { - name = fmt.Sprintf("_agg%v", index) - if field.Alias == nil { - alias = field.Name.Value - } else { - alias = field.Alias.Value - } - } else { - name = field.Name.Value - if field.Alias != nil { - alias = field.Alias.Value - } +func getFieldAlias(field *ast.Field) string { + if field.Alias == nil { + return field.Name.Value } - - return name, alias + return field.Alias.Value } func parseSelectFields(root parserTypes.SelectionType, fields *ast.SelectionSet) ([]Selection, error) { @@ -441,11 +333,7 @@ func parseSelectFields(root parserTypes.SelectionType, fields *ast.SelectionSet) } selections[i] = s } else if node.SelectionSet == nil { // regular field - f, err := parseField(root, node) - if err != nil { - return nil, err - } - selections[i] = f + selections[i] = parseField(root, node) } else { // sub type with extra fields subroot := root switch node.Name.Value { @@ -466,21 +354,12 @@ func parseSelectFields(root parserTypes.SelectionType, fields *ast.SelectionSet) // parseField simply parses the Name/Alias // into a Field type -func parseField(root parserTypes.SelectionType, field *ast.Field) (*Field, error) { - var alias string - - name := field.Name.Value - if field.Alias != nil { - alias = field.Alias.Value - } - - f := &Field{ - Root: root, - Name: name, - Statement: field, - Alias: alias, +func parseField(root parserTypes.SelectionType, field *ast.Field) *Field { + return &Field{ + Root: root, + Name: field.Name.Value, + Alias: getFieldAlias(field), } - return f, nil } func parseAPIQuery(field *ast.Field) (Selection, error) { @@ -491,88 +370,3 @@ func parseAPIQuery(field *ast.Field) (Selection, error) { return nil, errors.New("Unknown query") } } - -// The relative target/path from the object hosting an aggregate, to the property to -// be aggregated. -type AggregateTarget struct { - // The property on the object hosting the aggregate. This should never be empty - HostProperty string - // The static name of the target host property as it appears in the aggregate - // query. For example `_group`. - ExternalHostName string - // The property on the `HostProperty` that this aggregate targets. - // - // This may be empty if the aggregate targets a whole collection (e.g. Count), - // or if `HostProperty` is an inline array. 
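// A self-contained sketch of the alias rule encoded by the new getFieldAlias
// helper above (a stand-in struct replaces the graphql-go *ast.Field):
type sketchField struct {
	Name  string
	Alias *string
}

func sketchFieldAlias(f sketchField) string {
	if f.Alias == nil {
		return f.Name // no alias given: the field name doubles as the alias
	}
	return *f.Alias
}

// e.g. sketchFieldAlias(sketchField{Name: "age"}) == "age", while an explicit
// alias wins whenever one is present.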
- ChildProperty string -} - -// Returns the source of the aggregate as requested by the consumer -func (field Select) GetAggregateSource(host Selection) (AggregateTarget, error) { - if len(field.Statement.Arguments) == 0 { - return AggregateTarget{}, fmt.Errorf( - "Aggregate must be provided with a property to aggregate.", - ) - } - - var hostProperty string - var externalHostName string - var childProperty string - switch argumentValue := field.Statement.Arguments[0].Value.GetValue().(type) { - case string: - externalHostName = argumentValue - case []*ast.ObjectField: - externalHostName = field.Statement.Arguments[0].Name.Value - fieldArg, hasFieldArg := tryGet(argumentValue, parserTypes.Field) - if hasFieldArg { - if innerPathStringValue, isString := fieldArg.Value.GetValue().(string); isString { - childProperty = innerPathStringValue - } - } - } - - childFields := host.GetSelections() - targetField := field.Clone(externalHostName, externalHostName) - - // Check for any fields matching the targetField - for _, childField := range childFields { - childSelect, isSelect := childField.(*Select) - if isSelect && childSelect.Equal(*targetField) { - hostProperty = childSelect.Name - break - } - } - - // If we didn't find a field matching the target, we look for something with no filter, - // as it should yield all the items required by the aggregate. - if hostProperty == "" { - for _, childField := range childFields { - if childSelect, isSelect := childField.(*Select); isSelect { - if childSelect.ExternalName == externalHostName && childSelect.Filter == nil { - hostProperty = childSelect.Name - break - } - } - } - } - - if hostProperty == "" { - // child relationships use this currently due to bug https://github.com/sourcenetwork/defradb/issues/390 - hostProperty = externalHostName - } - - return AggregateTarget{ - HostProperty: hostProperty, - ExternalHostName: externalHostName, - ChildProperty: childProperty, - }, nil -} - -func tryGet(fields []*ast.ObjectField, name string) (arg *ast.ObjectField, hasArg bool) { - for _, field := range fields { - if field.Name.Value == name { - return field, true - } - } - return nil, false -} diff --git a/query/graphql/parser/types/types.go b/query/graphql/parser/types/types.go index c61525cc88..5c9718e491 100644 --- a/query/graphql/parser/types/types.go +++ b/query/graphql/parser/types/types.go @@ -66,12 +66,19 @@ const ( CountFieldName = "_count" DocKeyFieldName = "_key" GroupFieldName = "_group" - HiddenFieldName = "_hidden" SumFieldName = "_sum" VersionFieldName = "_version" ExplainLabel = "explain" + LinksFieldName = "links" + HeightFieldName = "height" + CidFieldName = "cid" + DeltaFieldName = "delta" + + LinksNameFieldName = "name" + LinksCidFieldName = "cid" + ASC = SortDirection("ASC") DESC = SortDirection("DESC") ) @@ -99,7 +106,6 @@ var ( CountFieldName: true, SumFieldName: true, AverageFieldName: true, - HiddenFieldName: true, DocKeyFieldName: true, } @@ -108,4 +114,15 @@ var ( SumFieldName: {}, AverageFieldName: {}, } + + VersionFields = map[string]struct{}{ + HeightFieldName: {}, + CidFieldName: {}, + DeltaFieldName: {}, + } + + LinksFields = map[string]struct{}{ + LinksNameFieldName: {}, + LinksCidFieldName: {}, + } ) diff --git a/query/graphql/planner/arbitrary_join.go b/query/graphql/planner/arbitrary_join.go index fd5fbbda2f..3ec2c207d5 100644 --- a/query/graphql/planner/arbitrary_join.go +++ b/query/graphql/planner/arbitrary_join.go @@ -24,15 +24,15 @@ type dataSource struct { parentSource planNode childSource planNode - childName 
string + childIndex int lastParentDocIndex int lastChildDocIndex int } -func newDataSource(childName string) *dataSource { +func newDataSource(childIndex int) *dataSource { return &dataSource{ - childName: childName, + childIndex: childIndex, lastParentDocIndex: -1, lastChildDocIndex: -1, } @@ -111,10 +111,10 @@ func (n *dataSource) Source() planNode { } func (source *dataSource) mergeParent( - keyFields []string, + keyIndexes []int, destination *orderedMap, - childNames []string, -) (map[string]interface{}, bool, error) { + childIndexes []int, +) (bool, error) { // This needs to be set manually for each item, in case other nodes // aggregate items from the pipe progressing the docIndex beyond the first item // for example, if the child is sorted. @@ -125,24 +125,25 @@ func (source *dataSource) mergeParent( hasNext, err := source.parentSource.Next() if err != nil { - return nil, false, err + return false, err } if !hasNext { - return nil, false, nil + return false, nil } value := source.parentSource.Value() - key := generateKey(value, keyFields) + key := generateKey(value, keyIndexes) - destination.mergeParent(key, childNames, value) + destination.mergeParent(key, childIndexes, value) - return value, true, nil + return true, nil } func (source *dataSource) appendChild( - keyFields []string, + keyIndexes []int, valuesByKey *orderedMap, -) (map[string]interface{}, bool, error) { + mapping *core.DocumentMapping, +) (bool, error) { // Most of the time this will be the same document as the parent (with different rendering), // however if the child group is sorted it will be different, the child may also be missing // if it is filtered out by a child filter. The parent will always exist, but may be @@ -154,32 +155,32 @@ func (source *dataSource) appendChild( hasNext, err := source.childSource.Next() if err != nil { - return nil, false, err + return false, err } if !hasNext { - return nil, false, nil + return false, nil } // Note that even if the source yields both parent and child items, they may not be yielded in // the same order - we need to treat it as a new item, regenerating the key and potentially caching // it without yet receiving the parent-level details value := source.childSource.Value() - key := generateKey(value, keyFields) + key := generateKey(value, keyIndexes) - valuesByKey.appendChild(key, source.childName, value) + valuesByKey.appendChild(key, source.childIndex, value, mapping) - return value, true, nil + return true, nil } -func join(sources []*dataSource, keyFields []string) (*orderedMap, error) { +func join(sources []*dataSource, keyIndexes []int, mapping *core.DocumentMapping) (*orderedMap, error) { result := orderedMap{ - values: []map[string]interface{}{}, + values: []core.Doc{}, indexesByKey: map[string]int{}, } - childNames := make([]string, len(sources)) + childIndexes := make([]int, len(sources)) for i, source := range sources { - childNames[i] = source.childName + childIndexes[i] = source.childIndex } for _, source := range sources { @@ -189,14 +190,14 @@ func join(sources []*dataSource, keyFields []string) (*orderedMap, error) { for hasNextParent || hasNextChild { if hasNextParent { - _, hasNextParent, err = source.mergeParent(keyFields, &result, childNames) + hasNextParent, err = source.mergeParent(keyIndexes, &result, childIndexes) if err != nil { return nil, err } } if hasNextChild { - _, hasNextChild, err = source.appendChild(keyFields, &result) + hasNextChild, err = source.appendChild(keyIndexes, &result, mapping) if err != nil { return nil, err } @@ -207,11 
+208,11 @@ func join(sources []*dataSource, keyFields []string) (*orderedMap, error) { return &result, nil } -func generateKey(doc map[string]interface{}, keyFields []string) string { +func generateKey(doc core.Doc, keyIndexes []int) string { keyBuilder := strings.Builder{} - for _, keyField := range keyFields { - keyBuilder.WriteString(keyField) - keyBuilder.WriteString(fmt.Sprintf("%v", doc[keyField])) + for _, keyField := range keyIndexes { + keyBuilder.WriteString(fmt.Sprint(keyField)) + keyBuilder.WriteString(fmt.Sprintf("_%v_", doc.Fields[keyField])) } return keyBuilder.String() } @@ -219,32 +220,40 @@ func generateKey(doc map[string]interface{}, keyFields []string) string { // A specialized collection that allows retrieval of items by key whilst preserving the order // in which they were added. type orderedMap struct { - values []map[string]interface{} + values []core.Doc indexesByKey map[string]int } -func (m *orderedMap) mergeParent(key string, childAddresses []string, value map[string]interface{}) { +func (m *orderedMap) mergeParent(key string, childIndexes []int, value core.Doc) { index, exists := m.indexesByKey[key] if exists { existingValue := m.values[index] + // copy every value from the child, apart from the child-indexes propertyLoop: - for property, cellValue := range value { - for _, childAddress := range childAddresses { - if property == childAddress { + for cellIndex, cellValue := range value.Fields { + for _, childIndex := range childIndexes { + if cellIndex == childIndex { continue propertyLoop } } - - existingValue[property] = cellValue + existingValue.Fields[cellIndex] = cellValue } + return } // If the value is new, we can safely set the child group to an empty // collection (required if children are filtered out) - for _, childAddress := range childAddresses { - value[childAddress] = []map[string]interface{}{} + for _, childAddress := range childIndexes { + // the parent may have come from a pipe using a smaller doc mapping, + // if so we need to extend the field slice. 
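// A dependency-free sketch of the key scheme used by the new generateKey
// above: each group-by field index is concatenated with its value, so
// documents that agree on those fields collide onto the same key
// (values illustrative; assumes fmt is imported):
func sketchGenerateKey(fields []interface{}, keyIndexes []int) string {
	key := ""
	for _, i := range keyIndexes {
		key += fmt.Sprintf("%d_%v_", i, fields[i])
	}
	// e.g. fields ["bob", 42] with indexes [0, 1] yields "0_bob_1_42_"
	return key
}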
+ if childAddress >= len(value.Fields) { + newFields := make(core.DocFields, childAddress+1) + copy(newFields, value.Fields) + value.Fields = newFields + } + value.Fields[childAddress] = []core.Doc{} } index = len(m.values) @@ -252,13 +261,13 @@ func (m *orderedMap) mergeParent(key string, childAddresses []string, value map[ m.indexesByKey[key] = index } -func (m *orderedMap) appendChild(key string, childAddress string, value map[string]interface{}) { +func (m *orderedMap) appendChild(key string, childIndex int, value core.Doc, mapping *core.DocumentMapping) { index, exists := m.indexesByKey[key] - var parent map[string]interface{} + var parent core.Doc if !exists { index = len(m.values) - parent = map[string]interface{}{} + parent = mapping.NewDoc() m.values = append(m.values, parent) m.indexesByKey[key] = index @@ -266,15 +275,15 @@ func (m *orderedMap) appendChild(key string, childAddress string, value map[stri parent = m.values[index] } - childProperty, hasChildCollection := parent[childAddress] - if !hasChildCollection { - childProperty = []map[string]interface{}{ + childProperty := parent.Fields[childIndex] + if childProperty == nil { + childProperty = []core.Doc{ value, } - parent[childAddress] = childProperty + parent.Fields[childIndex] = childProperty return } - childCollection := childProperty.([]map[string]interface{}) - parent[childAddress] = append(childCollection, value) + childCollection := childProperty.([]core.Doc) + parent.Fields[childIndex] = append(childCollection, value) } diff --git a/query/graphql/planner/average.go b/query/graphql/planner/average.go index 1d690130c7..4baf150deb 100644 --- a/query/graphql/planner/average.go +++ b/query/graphql/planner/average.go @@ -14,28 +14,43 @@ import ( "fmt" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" + parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" ) type averageNode struct { documentIterator + docMapper plan planNode - sumFieldName string - countFieldName string - virtualFieldId string + sumFieldIndex int + countFieldIndex int + virtualFieldIndex int } func (p *Planner) Average( - sumField *parser.Select, - countField *parser.Select, - field *parser.Select, + field *mapper.Aggregate, ) (*averageNode, error) { + var sumField *mapper.Aggregate + var countField *mapper.Aggregate + + for _, dependency := range field.Dependencies { + switch dependency.Name { + case parserTypes.CountFieldName: + countField = dependency + case parserTypes.SumFieldName: + sumField = dependency + default: + return nil, fmt.Errorf("Unknown dependency, name: %s", dependency.Name) + } + } + return &averageNode{ - sumFieldName: sumField.Name, - countFieldName: countField.Name, - virtualFieldId: field.Name, + sumFieldIndex: sumField.Index, + countFieldIndex: countField.Index, + virtualFieldIndex: field.Index, + docMapper: docMapper{&field.DocumentMapping}, }, nil } @@ -57,34 +72,26 @@ func (n *averageNode) Next() (bool, error) { n.currentValue = n.plan.Value() - countProp, hasCount := n.currentValue[n.countFieldName] - sumProp, hasSum := n.currentValue[n.sumFieldName] - - count := 0 - if hasCount { - typedCount, isInt := countProp.(int) - if !isInt { - return false, fmt.Errorf("Expected count to be int but was: %T", countProp) - } - count = typedCount + countProp := n.currentValue.Fields[n.countFieldIndex] + typedCount, isInt := countProp.(int) + if !isInt { + return false, 
fmt.Errorf("Expected count to be int but was: %T", countProp) } + count := typedCount if count == 0 { - n.currentValue[n.virtualFieldId] = float64(0) + n.currentValue.Fields[n.virtualFieldIndex] = float64(0) return true, nil } - if hasSum { - switch sum := sumProp.(type) { - case float64: - n.currentValue[n.virtualFieldId] = sum / float64(count) - case int64: - n.currentValue[n.virtualFieldId] = float64(sum) / float64(count) - default: - return false, fmt.Errorf("Expected sum to be either float64 or int64 or int but was: %T", sumProp) - } - } else { - n.currentValue[n.virtualFieldId] = float64(0) + sumProp := n.currentValue.Fields[n.sumFieldIndex] + switch sum := sumProp.(type) { + case float64: + n.currentValue.Fields[n.virtualFieldIndex] = sum / float64(count) + case int64: + n.currentValue.Fields[n.virtualFieldIndex] = float64(sum) / float64(count) + default: + return false, fmt.Errorf("Expected sum to be either float64 or int64 or int but was: %T", sumProp) } return true, nil diff --git a/query/graphql/planner/commit.go b/query/graphql/planner/commit.go index 47234c4a40..0171b97ceb 100644 --- a/query/graphql/planner/commit.go +++ b/query/graphql/planner/commit.go @@ -16,12 +16,14 @@ import ( cid "github.com/ipfs/go-cid" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) // commitSelectTopNode is a wrapper for the selectTopNode // in the case where the select is actually a CommitSelect type commitSelectTopNode struct { + docMapper + p *Planner plan planNode } @@ -36,7 +38,7 @@ func (n *commitSelectTopNode) Next() (bool, error) { return n.plan.Next() } func (n *commitSelectTopNode) Spans(spans core.Spans) { n.plan.Spans(spans) } -func (n *commitSelectTopNode) Value() map[string]interface{} { return n.plan.Value() } +func (n *commitSelectTopNode) Value() core.Doc { return n.plan.Value() } func (n *commitSelectTopNode) Source() planNode { return n.plan } @@ -51,12 +53,11 @@ func (n *commitSelectTopNode) Append() bool { return true } type commitSelectNode struct { documentIterator + docMapper p *Planner source *dagScanNode - - subRenderInfo map[string]renderInfo } func (n *commitSelectNode) Kind() string { @@ -98,16 +99,16 @@ func (n *commitSelectNode) Explain() (map[string]interface{}, error) { return map[string]interface{}{}, nil } -func (p *Planner) CommitSelect(parsed *parser.CommitSelect) (planNode, error) { +func (p *Planner) CommitSelect(parsed *mapper.CommitSelect) (planNode, error) { // check type of commit select (all, latest, one) var commit *commitSelectNode var err error switch parsed.Type { - case parser.LatestCommits: + case mapper.LatestCommits: commit, err = p.commitSelectLatest(parsed) - case parser.OneCommit: + case mapper.OneCommit: commit, err = p.commitSelectBlock(parsed) - case parser.AllCommits: + case mapper.AllCommits: commit, err = p.commitSelectAll(parsed) default: return nil, fmt.Errorf("Invalid CommitSelect type") @@ -115,21 +116,21 @@ func (p *Planner) CommitSelect(parsed *parser.CommitSelect) (planNode, error) { if err != nil { return nil, err } - slct := parsed.ToSelect() - plan, err := p.SelectFromSource(slct, commit, false, nil) + plan, err := p.SelectFromSource(&parsed.Select, commit, false, nil) if err != nil { return nil, err } return &commitSelectTopNode{ - p: p, - plan: plan, + p: p, + plan: plan, + docMapper: docMapper{&parsed.DocumentMapping}, }, nil } // commitSelectLatest is a CommitSelect node initalized with 
a headsetScanNode and a DocKey -func (p *Planner) commitSelectLatest(parsed *parser.CommitSelect) (*commitSelectNode, error) { - dag := p.DAGScan() - headset := p.HeadScan() +func (p *Planner) commitSelectLatest(parsed *mapper.CommitSelect) (*commitSelectNode, error) { + dag := p.DAGScan(parsed) + headset := p.HeadScan(parsed) // @todo: Get Collection field ID if parsed.FieldName == "" { parsed.FieldName = core.COMPOSITE_NAMESPACE @@ -141,19 +142,18 @@ func (p *Planner) commitSelectLatest(parsed *parser.CommitSelect) (*commitSelect } dag.headset = headset commit := &commitSelectNode{ - p: p, - source: dag, - subRenderInfo: make(map[string]renderInfo), + p: p, + source: dag, } return commit, nil } // commitSelectBlock is a CommitSelect node intialized witout a headsetScanNode, and is -// expected to be given a target CID in the parser.CommitSelect object. It returns +// expected to be given a target CID in the mapper.CommitSelect object. It returns // a single commit if found -func (p *Planner) commitSelectBlock(parsed *parser.CommitSelect) (*commitSelectNode, error) { - dag := p.DAGScan() +func (p *Planner) commitSelectBlock(parsed *mapper.CommitSelect) (*commitSelectNode, error) { + dag := p.DAGScan(parsed) if parsed.Cid != "" { c, err := cid.Decode(parsed.Cid) if err != nil { @@ -163,17 +163,16 @@ func (p *Planner) commitSelectBlock(parsed *parser.CommitSelect) (*commitSelectN } // @todo: handle error if no CID is given return &commitSelectNode{ - p: p, - source: dag, - subRenderInfo: make(map[string]renderInfo), + p: p, + source: dag, }, nil } // commitSelectAll is a CommitSelect initialized with a headsetScanNode, and will // recursively return all graph commits in order. -func (p *Planner) commitSelectAll(parsed *parser.CommitSelect) (*commitSelectNode, error) { - dag := p.DAGScan() - headset := p.HeadScan() +func (p *Planner) commitSelectAll(parsed *mapper.CommitSelect) (*commitSelectNode, error) { + dag := p.DAGScan(parsed) + headset := p.HeadScan(parsed) // @todo: Get Collection field ID if parsed.FieldName == "" { parsed.FieldName = core.COMPOSITE_NAMESPACE @@ -187,9 +186,8 @@ func (p *Planner) commitSelectAll(parsed *parser.CommitSelect) (*commitSelectNod dag.depthLimit = math.MaxUint32 // infinite depth // dag.key = &key commit := &commitSelectNode{ - p: p, - source: dag, - subRenderInfo: make(map[string]renderInfo), + p: p, + source: dag, } return commit, nil diff --git a/query/graphql/planner/count.go b/query/graphql/planner/count.go index 63d07b0138..0aa2003426 100644 --- a/query/graphql/planner/count.go +++ b/query/graphql/planner/count.go @@ -18,32 +18,26 @@ import ( "reflect" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) type countNode struct { documentIterator + docMapper p *Planner plan planNode - sourceProperty string - virtualFieldId string - - filter *parser.Filter + virtualFieldIndex int + aggregateMapping []mapper.AggregateTarget } -func (p *Planner) Count(field *parser.Select, host *parser.Select) (*countNode, error) { - source, err := field.GetAggregateSource(host) - if err != nil { - return nil, err - } - +func (p *Planner) Count(field *mapper.Aggregate, host *mapper.Select) (*countNode, error) { return &countNode{ - p: p, - sourceProperty: source.HostProperty, - virtualFieldId: field.Name, - filter: field.Filter, + p: p, + virtualFieldIndex: field.Index, + aggregateMapping: field.AggregateTargets, + docMapper: 
docMapper{&field.DocumentMapping}, }, nil } @@ -66,19 +60,26 @@ func (n *countNode) Source() planNode { return n.plan } // Explain method returns a map containing all attributes of this node that // are to be explained, subscribes / opts-in this node to be an explainablePlanNode. func (n *countNode) Explain() (map[string]interface{}, error) { - explainerMap := map[string]interface{}{} + sourceExplanations := make([]map[string]interface{}, len(n.aggregateMapping)) - // Add the filter attribute if it exists. - if n.filter == nil || n.filter.Conditions == nil { - explainerMap[filterLabel] = nil - } else { - explainerMap[filterLabel] = n.filter.Conditions - } + for i, source := range n.aggregateMapping { + explainerMap := map[string]interface{}{} - // Add the source property. - explainerMap["sourceProperty"] = n.sourceProperty + // Add the filter attribute if it exists. + if source.Filter == nil || source.Filter.ExternalConditions == nil { + explainerMap[filterLabel] = nil + } else { + explainerMap[filterLabel] = source.Filter.ExternalConditions + } - return explainerMap, nil + // Add the source property. + explainerMap["sourceProperty"] = source.Field.Name + sourceExplanations[i] = explainerMap + } + + return map[string]interface{}{ + "sources": sourceExplanations, + }, nil } func (n *countNode) Next() (bool, error) { @@ -88,24 +89,23 @@ func (n *countNode) Next() (bool, error) { } n.currentValue = n.plan.Value() - // Can just scan for now, can be replaced later by something fancier if needed var count int - if property, hasProperty := n.currentValue[n.sourceProperty]; hasProperty { + for _, source := range n.aggregateMapping { + property := n.currentValue.Fields[source.Index] v := reflect.ValueOf(property) switch v.Kind() { // v.Len will panic if v is not one of these types, we don't want it to panic case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String: - count = v.Len() + length := v.Len() // For now, we only support count filters internally to support averages // so this is fine here now, but may need to be moved later once external // count filter support is added. 
- if count > 0 && n.filter != nil { - docArray, isDocArray := property.([]map[string]interface{}) + if count > 0 && source.Filter != nil { + docArray, isDocArray := property.([]core.Doc) if isDocArray { - count = 0 for _, doc := range docArray { - passed, err := parser.RunFilter(doc, n.filter, n.p.evalCtx) + passed, err := mapper.RunFilter(doc, source.Filter) if err != nil { return false, err } @@ -114,11 +114,13 @@ func (n *countNode) Next() (bool, error) { } } } + } else { + count = count + length } } } - n.currentValue[n.virtualFieldId] = count + n.currentValue.Fields[n.virtualFieldIndex] = count return true, nil } diff --git a/query/graphql/planner/create.go b/query/graphql/planner/create.go index 092769b9f2..192ba3f279 100644 --- a/query/graphql/planner/create.go +++ b/query/graphql/planner/create.go @@ -16,7 +16,8 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/db/base" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) // createNode is used to construct and execute @@ -28,6 +29,8 @@ import ( // or Select plans type createNode struct { documentIterator + docMapper + p *Planner // cache information about the original data source @@ -41,6 +44,7 @@ type createNode struct { err error returned bool + results planNode } func (n *createNode) Kind() string { return "createNode" } @@ -76,21 +80,52 @@ func (n *createNode) Next() (bool, error) { return false, err } - currentValue, err := n.doc.ToMap() - if err != nil { - return false, err + currentValue := n.documentMapping.NewDoc() + + currentValue.SetKey(n.doc.Key().String()) + for i, value := range n.doc.Values() { + // On create the document will have no aliased fields/aggregates/etc so we can safely take + // the first index. + n.documentMapping.SetFirstOfName(¤tValue, i.Name(), value.Value()) } n.returned = true n.currentValue = currentValue + + desc := n.collection.Description() + docKey := base.MakeDocKey(desc, currentValue.GetKey()) + n.results.Spans(core.Spans{core.NewSpan(docKey, docKey.PrefixEnd())}) + + err := n.results.Init() + if err != nil { + return false, err + } + + err = n.results.Start() + if err != nil { + return false, err + } + + // get the next result based on our point lookup + next, err := n.results.Next() + if err != nil { + return false, err + } + if !next { + return false, nil + } + + n.currentValue = n.results.Value() return true, nil } func (n *createNode) Spans(spans core.Spans) { /* no-op */ } -func (n *createNode) Close() error { return nil } +func (n *createNode) Close() error { + return n.results.Close() +} -func (n *createNode) Source() planNode { return nil } +func (n *createNode) Source() planNode { return n.results } // Explain method returns a map containing all attributes of this node that // are to be explained, subscribes / opts-in this node to be an explainablePlanNode. @@ -106,25 +141,25 @@ func (n *createNode) Explain() (map[string]interface{}, error) { }, nil } -func (p *Planner) CreateDoc(parsed *parser.Mutation) (planNode, error) { +func (p *Planner) CreateDoc(parsed *mapper.Mutation) (planNode, error) { + results, err := p.Select(&parsed.Select) + if err != nil { + return nil, err + } + // create a mutation createNode. 
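// The read-back flow of createNode.Next above, sketched against a stand-in
// interface: after saving, the results plan is spanned to the new document's
// key and then driven like any other plan. The method names mirror the
// planNode usage in this patch, but the types here are hypothetical:
type sketchPlan interface {
	Init() error
	Start() error
	Next() (bool, error)
	Value() interface{}
}

func sketchReadBack(results sketchPlan) (interface{}, bool, error) {
	if err := results.Init(); err != nil {
		return nil, false, err
	}
	if err := results.Start(); err != nil {
		return nil, false, err
	}
	// A single Next suffices: the spans constrain the plan to one document.
	hasDoc, err := results.Next()
	if err != nil || !hasDoc {
		return nil, false, err
	}
	return results.Value(), true, nil
}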
create := &createNode{ p: p, newDocStr: parsed.Data, + results: results, + docMapper: docMapper{&parsed.DocumentMapping}, } // get collection - col, err := p.db.GetCollectionByName(p.ctx, parsed.Schema) + col, err := p.db.GetCollectionByName(p.ctx, parsed.Name) if err != nil { return nil, err } create.collection = col - - // last step, create a basic Select statement - // from the parsed Mutation object - // and construct a new Select planNode - // which uses the new create node as its - // source, instead of a scan node. - slct := parsed.ToSelect() - return p.SelectFromSource(slct, create, true, nil) + return create, nil } diff --git a/query/graphql/planner/dagscan.go b/query/graphql/planner/dagscan.go index ffc306e64d..3278205dc0 100644 --- a/query/graphql/planner/dagscan.go +++ b/query/graphql/planner/dagscan.go @@ -38,16 +38,19 @@ import ( "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/fetcher" + "github.com/sourcenetwork/defradb/query/graphql/mapper" "github.com/fxamacker/cbor/v2" blocks "github.com/ipfs/go-block-format" cid "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" dag "github.com/ipfs/go-merkledag" + parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" ) type headsetScanNode struct { documentIterator + docMapper p *Planner key core.DataStoreKey @@ -58,6 +61,7 @@ type headsetScanNode struct { cid *cid.Cid fetcher fetcher.HeadFetcher + parsed *mapper.CommitSelect } func (n *headsetScanNode) Kind() string { @@ -106,9 +110,9 @@ func (h *headsetScanNode) Next() (bool, error) { return false, nil } - h.currentValue = map[string]interface{}{ - "cid": *h.cid, - } + h.currentValue = h.parsed.DocumentMapping.NewDoc() + h.parsed.DocumentMapping.SetFirstOfName(&h.currentValue, "cid", *h.cid) + return true, nil } @@ -118,12 +122,17 @@ func (h *headsetScanNode) Close() error { func (h *headsetScanNode) Source() planNode { return nil } -func (p *Planner) HeadScan() *headsetScanNode { - return &headsetScanNode{p: p} +func (p *Planner) HeadScan(parsed *mapper.CommitSelect) *headsetScanNode { + return &headsetScanNode{ + p: p, + parsed: parsed, + docMapper: docMapper{&parsed.DocumentMapping}, + } } type dagScanNode struct { documentIterator + docMapper p *Planner cid *cid.Cid @@ -141,6 +150,7 @@ type dagScanNode struct { queuedCids *list.List headset *headsetScanNode + parsed *mapper.CommitSelect // previousScanNode planNode // linksScanNode planNode @@ -148,11 +158,13 @@ type dagScanNode struct { // block blocks.Block } -func (p *Planner) DAGScan() *dagScanNode { +func (p *Planner) DAGScan(parsed *mapper.CommitSelect) *dagScanNode { return &dagScanNode{ p: p, visitedNodes: make(map[string]bool), queuedCids: list.New(), + parsed: parsed, + docMapper: docMapper{&parsed.DocumentMapping}, } } @@ -187,11 +199,13 @@ func (n *dagScanNode) Spans(spans core.Spans) { // otherwise, try to parse as a CID if n.headset != nil { // make sure we have the correct field suffix - span := spans[0].Start() + headSetSpans := make(core.Spans, len(spans)) + copy(headSetSpans, spans) + span := headSetSpans[0].Start() if !strings.HasSuffix(span.ToString(), n.field) { - spans[0] = core.NewSpan(span.WithFieldId(n.field), core.DataStoreKey{}) + headSetSpans[0] = core.NewSpan(span.WithFieldId(n.field), core.DataStoreKey{}) } - n.headset.Spans(spans) + n.headset.Spans(headSetSpans) } else { data := spans[0].Start().ToString() c, err := cid.Decode(data) @@ -232,7 +246,7 @@ func (n 
*dagScanNode) Next() (bool, error) { } val := n.headset.Value() - cid, ok := val["cid"].(cid.Cid) + cid, ok := n.parsed.DocumentMapping.FirstOfName(val, "cid").(cid.Cid) if !ok { return false, fmt.Errorf("Headset scan node returned an invalid cid") } @@ -262,7 +276,7 @@ func (n *dagScanNode) Next() (bool, error) { return false, err } var heads []*ipld.Link - n.currentValue, heads, err = dagBlockToNodeMap(block) + n.currentValue, heads, err = n.dagBlockToNodeDoc(block) if err != nil { return false, err } @@ -326,46 +340,55 @@ which returns the current dag commit for the stored CRDT value. All the dagScanNode endpoints use similar structures */ -func dagBlockToNodeMap(block blocks.Block) (map[string]interface{}, []*ipld.Link, error) { - commit := map[string]interface{}{ - "cid": block.Cid().String(), - } +func (n *dagScanNode) dagBlockToNodeDoc(block blocks.Block) (core.Doc, []*ipld.Link, error) { + commit := n.parsed.DocumentMapping.NewDoc() + n.parsed.DocumentMapping.SetFirstOfName(&commit, "cid", block.Cid().String()) // decode the delta, get the priority and payload nd, err := dag.DecodeProtobuf(block.RawData()) if err != nil { - return nil, nil, err + return core.Doc{}, nil, err } // @todo: Wrap delta unmarshaling into a proper typed interface. var delta map[string]interface{} if err := cbor.Unmarshal(nd.Data(), &delta); err != nil { - return nil, nil, err + return core.Doc{}, nil, err } prio, ok := delta["Priority"].(uint64) if !ok { - return nil, nil, fmt.Errorf("Commit Delta missing priority key") + return core.Doc{}, nil, fmt.Errorf("Commit Delta missing priority key") } - commit["height"] = int64(prio) - commit["delta"] = delta["Data"] // check + n.parsed.DocumentMapping.SetFirstOfName(&commit, "height", int64(prio)) + n.parsed.DocumentMapping.SetFirstOfName(&commit, "delta", delta["Data"]) heads := make([]*ipld.Link, 0) // links - links := make([]map[string]interface{}, len(nd.Links())) - for i, l := range nd.Links() { - link := map[string]interface{}{ - "name": l.Name, - "cid": l.Cid.String(), + linksIndexes := n.parsed.DocumentMapping.IndexesByName[parserTypes.LinksFieldName] + + for _, linksIndex := range linksIndexes { + links := make([]core.Doc, len(nd.Links())) + linksMapping := n.parsed.DocumentMapping.ChildMappings[linksIndex] + + for i, l := range nd.Links() { + link := linksMapping.NewDoc() + linksMapping.SetFirstOfName(&link, "name", l.Name) + linksMapping.SetFirstOfName(&link, "cid", l.Cid.String()) + + links[i] = link } - links[i] = link + commit.Fields[linksIndex] = links + } + + for _, l := range nd.Links() { if l.Name == "_head" { heads = append(heads, l) } } - commit["links"] = links + return commit, heads, nil } diff --git a/query/graphql/planner/datasource.go b/query/graphql/planner/datasource.go index d1ad9ca1cf..389528b9af 100644 --- a/query/graphql/planner/datasource.go +++ b/query/graphql/planner/datasource.go @@ -16,6 +16,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) // sourceInfo stores info about the data source @@ -29,24 +30,22 @@ type planSource struct { plan planNode } -// datasource is a set of utilities for constructing scan/index/join nodes -// from a given query statement -func (p *Planner) getSource(collection string, versioned bool) (planSource, error) { +func (p *Planner) getSource(parsed *mapper.Select) (planSource, error) { // for now, we only handle simple collection scannodes - return 
p.getCollectionScanPlan(collection, versioned) + return p.getCollectionScanPlan(parsed) } // @todo: Add field selection -func (p *Planner) getCollectionScanPlan(collection string, versioned bool) (planSource, error) { - if collection == "" { +func (p *Planner) getCollectionScanPlan(parsed *mapper.Select) (planSource, error) { + if parsed.CollectionName == "" { return planSource{}, fmt.Errorf("collection name cannot be empty") } - colDesc, err := p.getCollectionDesc(collection) + colDesc, err := p.getCollectionDesc(parsed.CollectionName) if err != nil { return planSource{}, err } - scan := p.Scan(versioned) + scan := p.Scan(parsed) err = scan.initCollection(colDesc) if err != nil { return planSource{}, err diff --git a/query/graphql/planner/delete.go b/query/graphql/planner/delete.go index b060ab8a3c..e2d3d2e92c 100644 --- a/query/graphql/planner/delete.go +++ b/query/graphql/planner/delete.go @@ -15,15 +15,17 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) type deleteNode struct { + docMapper + p *Planner collection client.Collection - filter *parser.Filter + filter *mapper.Filter ids []string isDeleting bool @@ -74,7 +76,9 @@ func (n *deleteNode) Next() (bool, error) { // Consume the deletes into our valuesNode for _, resKey := range results.DocKeys { - err := n.deleteIter.docs.AddDoc(map[string]interface{}{"_key": resKey}) + doc := n.docMapper.documentMapping.NewDoc() + doc.SetKey(resKey) + err := n.deleteIter.docs.AddDoc(doc) if err != nil { return false, err } @@ -89,7 +93,7 @@ func (n *deleteNode) Next() (bool, error) { return n.deleteIter.Next() } -func (n *deleteNode) Value() map[string]interface{} { +func (n *deleteNode) Value() core.Doc { return n.deleteIter.Value() } @@ -126,31 +130,37 @@ func (n *deleteNode) Explain() (map[string]interface{}, error) { explainerMap[idsLabel] = n.ids // Add the filter attribute if it exists, otherwise have it nil. 
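// The explain-attribute convention applied just below, as a tiny sketch: the
// filter key is always present in the output, and an absent filter is
// reported as an explicit nil rather than omitted (names illustrative):
func sketchExplainFilter(conditions map[string]interface{}) map[string]interface{} {
	explain := map[string]interface{}{}
	if conditions == nil {
		explain["filter"] = nil
	} else {
		explain["filter"] = conditions
	}
	return explain
}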
- if n.filter == nil || n.filter.Conditions == nil { + if n.filter == nil || n.filter.ExternalConditions == nil { explainerMap[filterLabel] = nil } else { - explainerMap[filterLabel] = n.filter.Conditions + explainerMap[filterLabel] = n.filter.ExternalConditions } return explainerMap, nil } -func (p *Planner) DeleteDocs(parsed *parser.Mutation) (planNode, error) { +func (p *Planner) DeleteDocs(parsed *mapper.Mutation) (planNode, error) { delete := &deleteNode{ p: p, filter: parsed.Filter, - ids: parsed.IDs, + ids: parsed.DocKeys, isDeleting: true, + docMapper: docMapper{&parsed.DocumentMapping}, } // get collection - col, err := p.db.GetCollectionByName(p.ctx, parsed.Schema) + col, err := p.db.GetCollectionByName(p.ctx, parsed.Name) if err != nil { return nil, err } delete.collection = col.WithTxn(p.txn) - slct := parsed.ToSelect() - return p.SelectFromSource(slct, delete, true, nil) + // We have to clone the mutation and clear the filter before + // using it to create the select node, else the mutation filter + // will filter out the results + clone := parsed.CloneTo(parsed.Index).(*mapper.Mutation) + clone.Select.Filter = nil + + return p.SelectFromSource(&clone.Select, delete, true, nil) } diff --git a/query/graphql/planner/executor.go b/query/graphql/planner/executor.go index 2cf3c31eeb..f9dfb70436 100644 --- a/query/graphql/planner/executor.go +++ b/query/graphql/planner/executor.go @@ -16,6 +16,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/query/graphql/mapper" "github.com/sourcenetwork/defradb/query/graphql/parser" "github.com/sourcenetwork/defradb/query/graphql/schema" @@ -51,7 +52,7 @@ func (e *QueryExecutor) MakeSelectQuery( ctx context.Context, db client.DB, txn datastore.Txn, - selectStmt *parser.Select, + selectStmt *mapper.Select, ) (Query, error) { if selectStmt == nil { return nil, fmt.Errorf("Cannot create query without a selection") diff --git a/query/graphql/planner/explain.go b/query/graphql/planner/explain.go index e6f8b38236..540293e00b 100644 --- a/query/graphql/planner/explain.go +++ b/query/graphql/planner/explain.go @@ -31,7 +31,6 @@ var ( _ explainablePlanNode = (*commitSelectNode)(nil) _ explainablePlanNode = (*countNode)(nil) _ explainablePlanNode = (*dagScanNode)(nil) - _ explainablePlanNode = (*renderNode)(nil) _ explainablePlanNode = (*sortNode)(nil) _ explainablePlanNode = (*sumNode)(nil) _ explainablePlanNode = (*updateNode)(nil) diff --git a/query/graphql/planner/group.go b/query/graphql/planner/group.go index 5b0b4cb305..0adb70b4ed 100644 --- a/query/graphql/planner/group.go +++ b/query/graphql/planner/group.go @@ -12,7 +12,7 @@ package planner import ( "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" ) @@ -20,27 +20,28 @@ import ( // A node responsible for the grouping of documents by a given selection of fields. type groupNode struct { documentIterator + docMapper p *Planner // The child select information. Will be empty if there are no child `_group` items requested. 
- childSelects []*parser.Select + childSelects []*mapper.Select // The fields to group by - this must be an ordered collection and // will include any parent group-by fields (if any) - groupByFields []string + groupByFieldIndexes []int // The data sources that this node will draw data from. dataSources []*dataSource - values []map[string]interface{} + values []core.Doc currentIndex int } // Creates a new group node. The function is recursive and will construct the node-chain for any // child (`_group`) collections. `groupSelect` is optional and will typically be nil if the // child `_group` is not requested. -func (p *Planner) GroupBy(n *parserTypes.GroupBy, childSelects []*parser.Select) (*groupNode, error) { +func (p *Planner) GroupBy(n *mapper.GroupBy, parsed *mapper.Select, childSelects []*mapper.Select) (*groupNode, error) { if n == nil { return nil, nil } @@ -49,23 +50,28 @@ func (p *Planner) GroupBy(n *parserTypes.GroupBy, childSelects []*parser.Select) // GroupBy must always have at least one data source, for example // childSelects may be empty if no group members are requested if len(childSelects) == 0 { - dataSources = append(dataSources, newDataSource(parserTypes.GroupFieldName)) + dataSources = append( + dataSources, + // If there are no child selects, then we just take the first field index of name _group + newDataSource(parsed.DocumentMapping.FirstIndexOfName(parserTypes.GroupFieldName)), + ) } for _, childSelect := range childSelects { if childSelect.GroupBy != nil { // group by fields have to be propagated downwards to ensure correct sub-grouping, otherwise child // groups will only group on the fields they explicitly reference - childSelect.GroupBy.Fields = append(childSelect.GroupBy.Fields, n.Fields...) + childSelect.GroupBy.FieldIndexes = append(childSelect.GroupBy.FieldIndexes, n.FieldIndexes...) 
 	}
-		dataSources = append(dataSources, newDataSource(childSelect.Name))
+		dataSources = append(dataSources, newDataSource(childSelect.Index))
 	}
 
 	groupNodeObj := groupNode{
-		p:            p,
-		childSelects: childSelects,
-		groupByFields: n.Fields,
-		dataSources:  dataSources,
+		p:                   p,
+		childSelects:        childSelects,
+		groupByFieldIndexes: n.FieldIndexes,
+		dataSources:         dataSources,
+		docMapper:           docMapper{&parsed.DocumentMapping},
 	}
 	return &groupNodeObj, nil
 }
@@ -78,7 +84,7 @@ func (n *groupNode) Init() error {
 	// We need to make sure state is cleared down on Init,
 	// this function may be called multiple times per instance (for example during a join)
 	n.values = nil
-	n.currentValue = nil
+	n.currentValue = core.Doc{}
 	n.currentIndex = 0
 
 	for _, dataSource := range n.dataSources {
@@ -121,7 +127,7 @@ func (n *groupNode) Source() planNode { return n.dataSources[0].Source() }
 
 func (n *groupNode) Next() (bool, error) {
 	if n.values == nil {
-		values, err := join(n.dataSources, n.groupByFields)
+		values, err := join(n.dataSources, n.groupByFieldIndexes, n.documentMapping)
 		if err != nil {
 			return false, err
 		}
@@ -130,23 +136,25 @@ func (n *groupNode) Next() (bool, error) {
 		for _, group := range n.values {
 			for _, childSelect := range n.childSelects {
-				subSelect, hasSubSelect := group[childSelect.Name]
-				if !hasSubSelect {
+				subSelect := group.Fields[childSelect.Index]
+				if subSelect == nil {
+					// If the sub-select is nil we need to set it to an empty array and continue
+					group.Fields[childSelect.Index] = []core.Doc{}
 					continue
 				}
 
-				childDocs := subSelect.([]map[string]interface{})
+				childDocs := subSelect.([]core.Doc)
 				if childSelect.Limit != nil {
 					l := int64(len(childDocs))
 
 					// We must hide all child documents before the offset
 					for i := int64(0); i < childSelect.Limit.Offset && i < l; i++ {
-						childDocs[i][parserTypes.HiddenFieldName] = struct{}{}
+						childDocs[i].Hidden = true
 					}
 
 					// We must hide all child documents after the offset plus limit
 					for i := childSelect.Limit.Limit + childSelect.Limit.Offset; i < l; i++ {
-						childDocs[i][parserTypes.HiddenFieldName] = struct{}{}
+						childDocs[i].Hidden = true
 					}
 				}
 			}
diff --git a/query/graphql/planner/limit.go b/query/graphql/planner/limit.go
index 7cbc8e4094..07600199a7 100644
--- a/query/graphql/planner/limit.go
+++ b/query/graphql/planner/limit.go
@@ -12,12 +12,15 @@ package planner
 
 import (
 	"github.com/sourcenetwork/defradb/core"
+	"github.com/sourcenetwork/defradb/query/graphql/mapper"
 
 	parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types"
 )
 
 // Limit the results, yielding only what the limit/offset permits
 // @todo: Handle cursor
 type hardLimitNode struct {
+	docMapper
+
 	p    *Planner
 	plan planNode
 
@@ -27,15 +30,16 @@ type hardLimitNode struct {
 }
 
 // HardLimit creates a new hardLimitNode initialized from the mapper.Limit object.
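Note how the groupNode hunk above enforces child limits: documents outside the window are flagged Hidden rather than removed, so sibling aggregates still see the full child set. The same windowing in isolation (Doc is a stand-in type):

package main

import "fmt"

type Doc struct {
	Name   string
	Hidden bool
}

// hideOutsideWindow marks documents outside [offset, offset+limit) as
// hidden instead of slicing them away, so downstream aggregates still
// see the full child set while rendering skips the hidden ones.
func hideOutsideWindow(docs []Doc, offset, limit int64) {
	l := int64(len(docs))
	for i := int64(0); i < offset && i < l; i++ {
		docs[i].Hidden = true
	}
	for i := offset + limit; i < l; i++ {
		docs[i].Hidden = true
	}
}

func main() {
	docs := []Doc{{Name: "a"}, {Name: "b"}, {Name: "c"}, {Name: "d"}}
	hideOutsideWindow(docs, 1, 2)
	for _, d := range docs {
		fmt.Println(d.Name, d.Hidden) // a true, b false, c false, d true
	}
}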
-func (p *Planner) HardLimit(n *parserTypes.Limit) (*hardLimitNode, error) {
+func (p *Planner) HardLimit(parsed *mapper.Select, n *mapper.Limit) (*hardLimitNode, error) {
 	if n == nil {
 		return nil, nil // nothing to do
 	}
 	return &hardLimitNode{
-		p:        p,
-		limit:    n.Limit,
-		offset:   n.Offset,
-		rowIndex: 0,
+		p:         p,
+		limit:     n.Limit,
+		offset:    n.Offset,
+		rowIndex:  0,
+		docMapper: docMapper{&parsed.DocumentMapping},
 	}, nil
 }
 
@@ -48,10 +52,10 @@ func (n *hardLimitNode) Init() error {
 	return n.plan.Init()
 }
 
-func (n *hardLimitNode) Start() error                  { return n.plan.Start() }
-func (n *hardLimitNode) Spans(spans core.Spans)        { n.plan.Spans(spans) }
-func (n *hardLimitNode) Close() error                  { return n.plan.Close() }
-func (n *hardLimitNode) Value() map[string]interface{} { return n.plan.Value() }
+func (n *hardLimitNode) Start() error           { return n.plan.Start() }
+func (n *hardLimitNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
+func (n *hardLimitNode) Close() error           { return n.plan.Close() }
+func (n *hardLimitNode) Value() core.Doc        { return n.plan.Value() }
 
 func (n *hardLimitNode) Next() (bool, error) {
 	// check if we're past the limit
@@ -82,6 +86,7 @@ func (n *hardLimitNode) Source() planNode { return n.plan }
 // the full dataset.
 type renderLimitNode struct {
 	documentIterator
+	docMapper
 
 	p    *Planner
 	plan planNode
@@ -92,16 +97,17 @@ type renderLimitNode struct {
 }
 
 // RenderLimit creates a new renderLimitNode initialized from
 // the parserTypes.Limit object.
-func (p *Planner) RenderLimit(n *parserTypes.Limit) (*renderLimitNode, error) {
+func (p *Planner) RenderLimit(docMap *core.DocumentMapping, n *parserTypes.Limit) (*renderLimitNode, error) {
 	if n == nil {
 		return nil, nil // nothing to do
 	}
 	return &renderLimitNode{
-		p:        p,
-		limit:    n.Limit,
-		offset:   n.Offset,
-		rowIndex: 0,
+		p:         p,
+		limit:     n.Limit,
+		offset:    n.Offset,
+		rowIndex:  0,
+		docMapper: docMapper{docMap},
 	}, nil
 }
 
@@ -127,7 +133,7 @@ func (n *renderLimitNode) Next() (bool, error) {
 		n.rowIndex++
 		if n.rowIndex-n.offset > n.limit || n.rowIndex <= n.offset {
-			n.currentValue[parserTypes.HiddenFieldName] = struct{}{}
+			n.currentValue.Hidden = true
 		}
 		return true, nil
 	}
diff --git a/query/graphql/planner/multi.go b/query/graphql/planner/multi.go
index 94768ff17e..da9fc5771b 100644
--- a/query/graphql/planner/multi.go
+++ b/query/graphql/planner/multi.go
@@ -32,8 +32,6 @@ results in all the attached multinodes.
 type MultiNode interface {
 	planNode
 	Children() []planNode
-	AddChild(string, planNode) error
-	ReplaceChildAt(int, string, planNode) error
 }
 
 // mergeNode is a special interface for the MultiNode
@@ -76,11 +74,12 @@ type appendNode interface {
 //
 type parallelNode struct { // serialNode?
 	documentIterator
+	docMapper
 
 	p *Planner
 
-	children    []planNode
-	childFields []string
+	children     []planNode
+	childIndexes []int
 
 	multiscan *multiScanNode
 }
@@ -127,7 +126,8 @@ func (p *parallelNode) Close() error {
 // It only needs a single child plan to return true for it
 // to return true. Same with errors.
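The comment above pins down parallelNode.Next's contract: the node keeps producing rows while any child still produces one, and any child error aborts. A toy sketch of that OR-combination (the iterator types here are illustrative):

package main

import "fmt"

type iterator interface {
	Next() (bool, error)
}

// sliceIter is a toy child plan yielding a fixed number of rows.
type sliceIter struct {
	remaining int
}

func (s *sliceIter) Next() (bool, error) {
	if s.remaining == 0 {
		return false, nil
	}
	s.remaining--
	return true, nil
}

// nextAny advances every child and reports true if at least one child
// produced a row: the OR behaviour described for parallelNode.Next.
func nextAny(children []iterator) (bool, error) {
	orNext := false
	for _, c := range children {
		next, err := c.Next()
		if err != nil {
			return false, err
		}
		orNext = orNext || next
	}
	return orNext, nil
}

func main() {
	a := &sliceIter{remaining: 1}
	b := &sliceIter{remaining: 2}
	for {
		ok, _ := nextAny([]iterator{a, b})
		if !ok {
			break
		}
		fmt.Println("row")
	}
	// prints "row" twice: iteration continues while any child has rows
}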
 func (p *parallelNode) Next() (bool, error) {
-	p.currentValue = make(map[string]interface{})
+	p.currentValue = p.documentMapping.NewDoc()
+
 	var orNext bool
 	for i, plan := range p.children {
 		var next bool
@@ -156,9 +156,8 @@ func (p *parallelNode) nextMerge(index int, plan mergeNode) (bool, error) {
 	}
 
 	doc := plan.Value()
-	for k, v := range doc {
-		p.currentValue[k] = v
-	}
+	copy(p.currentValue.Fields, doc.Fields)
+
 	return true, nil
 }
 
@@ -208,19 +207,20 @@ output
 */
 func (p *parallelNode) nextAppend(index int, plan appendNode) (bool, error) {
-	if key, ok := p.currentValue["_key"].(string); ok {
-		// pass the doc key as a reference through the spans interface
-		spans := core.Spans{core.NewSpan(core.DataStoreKey{DocKey: key}, core.DataStoreKey{})}
-		plan.Spans(spans)
-		err := plan.Init()
-		if err != nil {
-			return false, err
-		}
-	} else {
+	key := p.currentValue.GetKey()
+	if key == "" {
 		return false, nil
 	}
 
-	results := make([]map[string]interface{}, 0)
+	// pass the doc key as a reference through the spans interface
+	spans := core.Spans{core.NewSpan(core.DataStoreKey{DocKey: key}, core.DataStoreKey{})}
+	plan.Spans(spans)
+	err := plan.Init()
+	if err != nil {
+		return false, err
+	}
+
+	results := make([]core.Doc, 0)
 	for {
 		next, err := plan.Next()
 		if err != nil {
@@ -233,7 +233,7 @@ func (p *parallelNode) nextAppend(index int, plan appendNode) (bool, error) {
 		results = append(results, plan.Value())
 	}
 
-	p.currentValue[p.childFields[index]] = results
+	p.currentValue.Fields[p.childIndexes[index]] = results
 	return true, nil
 }
 
@@ -280,20 +280,9 @@ func (p *parallelNode) Children() []planNode {
 	return p.children
 }
 
-func (p *parallelNode) AddChild(field string, node planNode) error {
+func (p *parallelNode) addChild(fieldIndex int, node planNode) {
 	p.children = append(p.children, node)
-	p.childFields = append(p.childFields, field)
-	return nil
-}
-
-func (p *parallelNode) ReplaceChildAt(i int, field string, node planNode) error {
-	if i >= len(p.children) {
-		return errors.New("Index to replace child node at doesn't exist (out of bounds)")
-	}
-
-	p.children[i] = node
-	p.childFields[i] = field
-	return nil
+	p.childIndexes = append(p.childIndexes, fieldIndex)
 }
 
 /*
@@ -388,7 +377,7 @@ Select {
 */
 
 // @todo: Document AddSubPlan method
-func (s *selectNode) addSubPlan(field string, plan planNode) error {
+func (s *selectNode) addSubPlan(fieldIndex int, plan planNode) error {
 	src := s.source
 	switch node := src.(type) {
 	// if it's a scan node, we either replace or create a multinode
 	case *scanNode:
@@ -397,13 +386,12 @@
 		case mergeNode:
 			s.source = plan
 		case appendNode:
-			m := &parallelNode{p: s.p}
-			if err := m.AddChild("", src); err != nil {
-				return err
-			}
-			if err := m.AddChild(field, plan); err != nil {
-				return err
+			m := &parallelNode{
+				p:         s.p,
+				docMapper: docMapper{src.DocumentMap()},
 			}
+			m.addChild(-1, src)
+			m.addChild(fieldIndex, plan)
 			s.source = m
 		default:
 			return fmt.Errorf("Sub plan needs to be either a MergeNode or an AppendNode")
@@ -417,39 +405,33 @@
 		}
 		// create our new multiscanner
 		multiscan := &multiScanNode{scanNode: origScan}
-		// create multinode
-		multinode := &parallelNode{
-			p:         s.p,
-			multiscan: multiscan,
-		}
 		// replace our current source internal scanNode with our new multiscanner
 		if err := s.p.walkAndReplacePlan(src, origScan, multiscan); err != nil {
 			return err
 		}
-		// add our newly updated source to the multinode
-		if err := multinode.AddChild("", src); err != nil {
-			return err
+		// create multinode
+		multinode := &parallelNode{
+			p:         s.p,
+			multiscan: multiscan,
+			docMapper: docMapper{src.DocumentMap()},
 		}
+		multinode.addChild(-1, src)
 		multiscan.addReader()
 		// replace our new node internal scanNode with our new multiscanner
 		if err := s.p.walkAndReplacePlan(plan, origScan, multiscan); err != nil {
 			return err
 		}
 		// add our newly updated plan to the multinode
-		if err := multinode.AddChild(field, plan); err != nil {
-			return err
-		}
+		multinode.addChild(fieldIndex, plan)
 		multiscan.addReader()
 		s.source = multinode
 
-	// we already have an existing MultiNode as our source
-	case MultiNode:
+	// we already have an existing parallelNode as our source
+	case *parallelNode:
 		switch plan.(type) {
 		// easy, just append, since append doesn't need any internal replaced scannode
 		case appendNode:
-			if err := node.AddChild(field, plan); err != nil {
-				return err
-			}
+			node.addChild(fieldIndex, plan)
 
 		// We have an internal multiscanNode on our MultiNode
 		case mergeNode:
@@ -464,9 +446,7 @@ func (s *selectNode) addSubPlan(field string, plan planNode) error {
 			}
 			multiscan.addReader()
 			// add our newly updated plan to the multinode
-			if err := node.AddChild(field, plan); err != nil {
-				return err
-			}
+			node.addChild(fieldIndex, plan)
 		default:
 			return fmt.Errorf("Sub plan needs to be either a MergeNode or an AppendNode")
 		}
diff --git a/query/graphql/planner/operations.go b/query/graphql/planner/operations.go
index 8ae71f0b32..371eb6c1b9 100644
--- a/query/graphql/planner/operations.go
+++ b/query/graphql/planner/operations.go
@@ -25,7 +25,6 @@ var (
 	_ planNode = (*parallelNode)(nil)
 	_ planNode = (*pipeNode)(nil)
 	_ planNode = (*renderLimitNode)(nil)
-	_ planNode = (*renderNode)(nil)
 	_ planNode = (*scanNode)(nil)
 	_ planNode = (*selectNode)(nil)
 	_ planNode = (*selectTopNode)(nil)
diff --git a/query/graphql/planner/pipe.go b/query/graphql/planner/pipe.go
index 68cb2db47e..8298e2df3d 100644
--- a/query/graphql/planner/pipe.go
+++ b/query/graphql/planner/pipe.go
@@ -20,6 +20,7 @@ import (
 // requested more than once will not be re-loaded from source.
 type pipeNode struct {
 	documentIterator
+	docMapper
 
 	source planNode
 
@@ -29,11 +30,12 @@ type pipeNode struct {
 	docIndex int
 }
 
-func newPipeNode() pipeNode {
+func newPipeNode(docMap *core.DocumentMapping) pipeNode {
 	return pipeNode{
 		docs: container.NewDocumentContainer(0),
 		// A docIndex of -1 indicates that nothing has been read yet
-		docIndex: -1,
+		docIndex:  -1,
+		docMapper: docMapper{docMap},
 	}
 }
 
@@ -77,6 +79,7 @@ func (n *pipeNode) Next() (bool, error) {
 
 	// Values must be copied out of the node, in case consumers mutate the item
 	// for example: when rendering
-	n.currentValue = copyMap(n.docs.At(n.docIndex))
+	doc := n.docs.At(n.docIndex)
+	n.currentValue = doc.Clone()
 	return true, nil
 }
diff --git a/query/graphql/planner/planner.go b/query/graphql/planner/planner.go
index 8f422bcb5a..544be80961 100644
--- a/query/graphql/planner/planner.go
+++ b/query/graphql/planner/planner.go
@@ -19,6 +19,7 @@ import (
 	"github.com/sourcenetwork/defradb/core"
 	"github.com/sourcenetwork/defradb/datastore"
 	"github.com/sourcenetwork/defradb/logging"
+	"github.com/sourcenetwork/defradb/query/graphql/mapper"
 	"github.com/sourcenetwork/defradb/query/graphql/parser"
 
 	parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types"
@@ -45,7 +46,7 @@ type planNode interface {
 	Next() (bool, error)
 
 	// Value returns the value of the current doc, should only be called *after* Next().
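The pipeNode above buffers everything it reads so later consumers can replay documents without re-fetching them from source. A runnable sketch of that buffer-and-replay behaviour over a toy int stream (the Rewind helper is an illustrative addition, not part of the PR):

package main

import "fmt"

// pipe buffers every value it pulls from source so that a later
// consumer (e.g. a group join) can re-read them without re-fetching.
type pipe struct {
	source   func() (int, bool) // toy source iterator
	buffered []int
	docIndex int // -1 indicates that nothing has been read yet
}

func newPipe(src func() (int, bool)) *pipe {
	return &pipe{source: src, docIndex: -1}
}

func (p *pipe) Next() (int, bool) {
	p.docIndex++
	if p.docIndex < len(p.buffered) {
		return p.buffered[p.docIndex], true // replay from the buffer
	}
	v, ok := p.source()
	if !ok {
		p.docIndex--
		return 0, false
	}
	p.buffered = append(p.buffered, v)
	return v, true
}

// Rewind lets a second consumer replay the stream from the start.
func (p *pipe) Rewind() { p.docIndex = -1 }

func main() {
	i := 0
	src := func() (int, bool) {
		i++
		if i > 3 {
			return 0, false
		}
		return i, true
	}
	p := newPipe(src)
	for v, ok := p.Next(); ok; v, ok = p.Next() {
		fmt.Println("first pass", v)
	}
	p.Rewind()
	v, _ := p.Next()
	fmt.Println("replayed", v) // 1, served from the buffer
}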
-	Value() map[string]interface{}
+	Value() core.Doc
 
 	// Source returns the child planNode that generates the source values for this plan.
 	// If a plan has no source, nil is returned.
@@ -54,19 +55,29 @@
 	// Kind tells the name of the concrete planNode type.
 	Kind() string
 
+	DocumentMap() *core.DocumentMapping
+
 	// Close terminates the planNode execution and releases its resources. After this
 	// method is called you can only safely call Kind() and Source() methods.
 	Close() error
 }
 
 type documentIterator struct {
-	currentValue map[string]interface{}
+	currentValue core.Doc
 }
 
-func (n *documentIterator) Value() map[string]interface{} {
+func (n *documentIterator) Value() core.Doc {
 	return n.currentValue
 }
 
+type docMapper struct {
+	documentMapping *core.DocumentMapping
+}
+
+func (d *docMapper) DocumentMap() *core.DocumentMapping {
+	return d.documentMapping
+}
+
 type ExecutionContext struct {
 	context.Context
 }
@@ -81,8 +92,7 @@ type Planner struct {
 	txn datastore.Txn
 	db  client.DB
 
-	ctx     context.Context
-	evalCtx parser.EvalContext
+	ctx context.Context
 }
 
 func makePlanner(ctx context.Context, db client.DB, txn datastore.Txn) *Planner {
@@ -93,7 +103,7 @@
 	}
 }
 
-func (p *Planner) newPlan(stmt parser.Statement) (planNode, error) {
+func (p *Planner) newPlan(stmt interface{}) (planNode, error) {
 	switch n := stmt.(type) {
 	case *parser.Query:
 		if len(n.Queries) > 0 {
@@ -111,26 +121,40 @@
 		return p.newPlan(n.Selections[0])
 	case *parser.Select:
+		m, err := mapper.ToSelect(p.ctx, p.txn, n)
+		if err != nil {
+			return nil, err
+		}
+		return p.Select(m)
+	case *mapper.Select:
 		return p.Select(n)
 	case *parser.CommitSelect:
-		return p.CommitSelect(n)
+		m, err := mapper.ToCommitSelect(p.ctx, p.txn, n)
+		if err != nil {
+			return nil, err
+		}
+		return p.CommitSelect(m)
 	case *parser.Mutation:
-		return p.newObjectMutationPlan(n)
+		m, err := mapper.ToMutation(p.ctx, p.txn, n)
+		if err != nil {
+			return nil, err
+		}
+		return p.newObjectMutationPlan(m)
 	}
 	return nil, fmt.Errorf("Unknown statement type %T", stmt)
 }
 
-func (p *Planner) newObjectMutationPlan(stmt *parser.Mutation) (planNode, error) {
+func (p *Planner) newObjectMutationPlan(stmt *mapper.Mutation) (planNode, error) {
 	switch stmt.Type {
-	case parser.CreateObjects:
+	case mapper.CreateObjects:
 		return p.CreateDoc(stmt)
 
-	case parser.UpdateObjects:
+	case mapper.UpdateObjects:
 		return p.UpdateDocs(stmt)
 
-	case parser.DeleteObjects:
+	case mapper.DeleteObjects:
 		return p.DeleteDocs(stmt)
 
 	default:
@@ -141,7 +165,7 @@ func (p *Planner) newObjectMutationPlan(stmt *parser.Mutation) (planNode, error)
 // makePlan creates a new plan from the parsed data, optimizes the plan and returns
 // an initiated plan. The caller of makePlan is also responsible for calling Close()
 // on the plan to free its resources.
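The docMapper type introduced above is a small embedding trick: every node struct that embeds it automatically satisfies the new DocumentMap() requirement on planNode without repeating the accessor. The pattern in miniature, with stand-in types:

package main

import "fmt"

type DocumentMapping struct{ Name string }

// docMapper mirrors the embedding trick in the PR: any node struct that
// embeds it gains the DocumentMap() method needed by the node interface.
type docMapper struct {
	documentMapping *DocumentMapping
}

func (d *docMapper) DocumentMap() *DocumentMapping { return d.documentMapping }

type hasDocumentMap interface{ DocumentMap() *DocumentMapping }

// Two unrelated node types pick up the method purely via embedding.
type scanLike struct{ docMapper }
type sortLike struct{ docMapper }

func main() {
	m := &DocumentMapping{Name: "User"}
	nodes := []hasDocumentMap{&scanLike{docMapper{m}}, &sortLike{docMapper{m}}}
	for _, n := range nodes {
		fmt.Println(n.DocumentMap().Name) // both print "User"
	}
}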
-func (p *Planner) makePlan(stmt parser.Statement) (planNode, error) { +func (p *Planner) makePlan(stmt interface{}) (planNode, error) { plan, err := p.newPlan(stmt) if err != nil { return nil, err @@ -193,6 +217,9 @@ func (p *Planner) expandPlan(plan planNode, parentPlan *selectTopNode) error { case *updateNode: return p.expandPlan(n.results, parentPlan) + case *createNode: + return p.expandPlan(n.results, parentPlan) + default: return nil } @@ -230,12 +257,6 @@ func (p *Planner) expandSelectTopNodePlan(plan *selectTopNode, parentPlan *selec } } - // wire up the render plan - if plan.render != nil { - plan.render.plan = plan.plan - plan.plan = plan.render - } - return nil } @@ -245,7 +266,10 @@ type aggregateNode interface { } func (p *Planner) expandAggregatePlans(plan *selectTopNode) { - for _, aggregate := range plan.aggregates { + // Iterate through the aggregates backwards to ensure dependencies + // execute *before* any aggregate dependent on them. + for i := len(plan.aggregates) - 1; i >= 0; i-- { + aggregate := plan.aggregates[i] aggregate.SetPlan(plan.plan) plan.plan = aggregate } @@ -277,7 +301,7 @@ func (p *Planner) expandGroupNodePlan(plan *selectTopNode) error { pipe, hasPipe := p.walkAndFindPlanType(plan.plan, &pipeNode{}).(*pipeNode) if !hasPipe { - newPipeNode := newPipeNode() + newPipeNode := newPipeNode(scanNode.DocumentMap()) pipe = &newPipeNode pipe.source = scanNode } @@ -298,8 +322,6 @@ func (p *Planner) expandGroupNodePlan(plan *selectTopNode) error { if err != nil { return err } - // We need to remove the render so that any child records are preserved on arrival at the parent - childSelectNode.(*selectTopNode).render = nil dataSource := plan.group.dataSources[i] dataSource.childSource = childSelectNode @@ -339,10 +361,13 @@ func (p *Planner) expandLimitPlan(plan *selectTopNode, parentPlan *selectTopNode // replace the hard limit with a render limit to allow the full set of child records // to be aggregated if parentPlan != nil && len(parentPlan.aggregates) > 0 { - renderLimit, err := p.RenderLimit(&parserTypes.Limit{ - Offset: l.offset, - Limit: l.limit, - }) + renderLimit, err := p.RenderLimit( + parentPlan.documentMapping, + &parserTypes.Limit{ + Offset: l.offset, + Limit: l.limit, + }, + ) if err != nil { return err } @@ -458,11 +483,11 @@ func (p *Planner) executeRequest( } docs := []map[string]interface{}{} + docMap := plan.DocumentMap() for next { - if values := plan.Value(); values != nil { - docs = append(docs, copyMap(values)) - } + copy := docMap.ToMap(plan.Value()) + docs = append(docs, copy) next, err = plan.Next() if err != nil { @@ -520,17 +545,3 @@ func multiErr(errorsToWrap ...error) error { } return errs } - -func copyMap(m map[string]interface{}) map[string]interface{} { - cp := make(map[string]interface{}) - for k, v := range m { - vm, ok := v.(map[string]interface{}) - if ok { - cp[k] = copyMap(vm) - } else { - cp[k] = v - } - } - - return cp -} diff --git a/query/graphql/planner/render.go b/query/graphql/planner/render.go deleted file mode 100644 index dd3761a3cf..0000000000 --- a/query/graphql/planner/render.go +++ /dev/null @@ -1,173 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package planner - -import ( - "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/query/graphql/parser" - - parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" -) - -// the final field select and render -type renderNode struct { - documentIterator - - p *Planner - plan planNode - - renderInfo topLevelRenderInfo -} - -type topLevelRenderInfo struct { - children []renderInfo -} - -type renderInfo struct { - sourceFieldName string - destinationFieldName string - children []renderInfo -} - -func (p *Planner) render(parsed *parser.Select) *renderNode { - return &renderNode{ - p: p, - renderInfo: buildTopLevelRenderInfo(parsed), - } -} - -func buildTopLevelRenderInfo(parsed parser.Selection) topLevelRenderInfo { - childSelections := parsed.GetSelections() - - info := topLevelRenderInfo{ - children: []renderInfo{}, - } - - for _, selection := range childSelections { - if slct, isSelect := selection.(*parser.Select); isSelect && slct.Hidden { - continue - } - info.children = append(info.children, buildRenderInfo(selection)) - } - - return info -} - -func buildRenderInfo(parsed parser.Selection) renderInfo { - childSelections := parsed.GetSelections() - sourceFieldName := parsed.GetName() - alias := parsed.GetAlias() - - var destinationFieldName string - if alias == "" { - destinationFieldName = sourceFieldName - } else { - destinationFieldName = alias - } - - info := renderInfo{ - sourceFieldName: sourceFieldName, - destinationFieldName: destinationFieldName, - children: []renderInfo{}, - } - - for _, selection := range childSelections { - if slct, isSelect := selection.(*parser.Select); isSelect && slct.Hidden { - continue - } - info.children = append(info.children, buildRenderInfo(selection)) - } - - return info -} - -func (n *renderNode) Kind() string { return "renderNode" } - -func (n *renderNode) Init() error { return n.plan.Init() } - -func (n *renderNode) Start() error { return n.plan.Start() } - -func (n *renderNode) Next() (bool, error) { - hasNext, err := n.plan.Next() - if err != nil || !hasNext { - return hasNext, err - } - - doc := n.plan.Value() - if doc == nil { - return n.Next() - } - - if _, isHidden := doc[parserTypes.HiddenFieldName]; isHidden { - return n.Next() - } - - n.currentValue = map[string]interface{}{} - for _, renderInfo := range n.renderInfo.children { - renderInfo.render(doc, n.currentValue) - } - - return true, nil -} -func (n *renderNode) Spans(spans core.Spans) { n.plan.Spans(spans) } - -func (n *renderNode) Close() error { return n.plan.Close() } - -func (n *renderNode) Source() planNode { return n.plan } - -// Explain method returns a map containing all attributes of this node that -// are to be explained, subscribes / opts-in this node to be an explainablePlanNode. -func (n *renderNode) Explain() (map[string]interface{}, error) { - return map[string]interface{}{}, nil -} - -// Renders the source document into the destination document using the given renderInfo. -// Function recursively handles any nested children defined in the render info. 
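For context on why this whole file can go: the executeRequest change earlier converts each core.Doc back to a client-facing map through the DocumentMapping (docMap.ToMap), which subsumes the renderNode's job. A flattened sketch of that conversion; the real mapping presumably also recurses into child mappings and honours aliases and hidden fields:

package main

import "fmt"

// Doc is a stand-in for core.Doc with index-addressed fields.
type Doc struct {
	Fields []interface{}
}

// RenderKey pairs an output name with the field index it reads,
// standing in for the information a core.DocumentMapping carries.
type RenderKey struct {
	Index int
	Key   string
}

// ToMap converts an index-addressed Doc back into the name-keyed map
// shape the client expects: the job the deleted renderNode used to do
// as a separate plan stage.
func ToMap(doc Doc, keys []RenderKey) map[string]interface{} {
	out := make(map[string]interface{}, len(keys))
	for _, rk := range keys {
		out[rk.Key] = doc.Fields[rk.Index]
	}
	return out
}

func main() {
	doc := Doc{Fields: []interface{}{"bob", int64(33)}}
	keys := []RenderKey{{0, "name"}, {1, "age"}}
	fmt.Println(ToMap(doc, keys)) // map[age:33 name:bob]
}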
-func (r *renderInfo) render(src map[string]interface{}, destination map[string]interface{}) { - var resultValue interface{} - if val, ok := src[r.sourceFieldName]; ok { - switch v := val.(type) { - // If the current property is itself a map, we should render any properties of the child - case map[string]interface{}: - inner := map[string]interface{}{} - - if _, isHidden := v[parserTypes.HiddenFieldName]; isHidden { - return - } - - for _, child := range r.children { - child.render(v, inner) - } - resultValue = inner - // If the current property is an array of maps, we should render each child map - case []map[string]interface{}: - subdocs := make([]map[string]interface{}, 0) - for _, subv := range v { - if _, isHidden := subv[parserTypes.HiddenFieldName]; isHidden { - continue - } - - inner := map[string]interface{}{} - for _, child := range r.children { - child.render(subv, inner) - } - subdocs = append(subdocs, inner) - } - resultValue = subdocs - default: - resultValue = v - } - } else { - resultValue = nil - } - - destination[r.destinationFieldName] = resultValue -} diff --git a/query/graphql/planner/scan.go b/query/graphql/planner/scan.go index abed785764..0bbb776664 100644 --- a/query/graphql/planner/scan.go +++ b/query/graphql/planner/scan.go @@ -15,12 +15,13 @@ import ( "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/db/fetcher" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) // scans an index for records type scanNode struct { documentIterator + docMapper p *Planner desc client.CollectionDescription @@ -31,7 +32,7 @@ type scanNode struct { spans core.Spans reverse bool - filter *parser.Filter + filter *mapper.Filter scanInitialized bool @@ -83,15 +84,16 @@ func (n *scanNode) Next() (bool, error) { // keep scanning until we find a doc that passes the filter for { var err error - n.docKey, n.currentValue, err = n.fetcher.FetchNextMap(n.p.ctx) + n.docKey, n.currentValue, err = n.fetcher.FetchNextDoc(n.p.ctx, n.documentMapping) if err != nil { return false, err } - if n.currentValue == nil { + + if len(n.currentValue.Fields) == 0 { return false, nil } - passed, err := parser.RunFilter(n.currentValue, n.filter, n.p.evalCtx) + passed, err := mapper.RunFilter(n.currentValue, n.filter) if err != nil { return false, err } @@ -132,10 +134,10 @@ func (n *scanNode) Explain() (map[string]interface{}, error) { explainerMap := map[string]interface{}{} // Add the filter attribute if it exists. - if n.filter == nil || n.filter.Conditions == nil { + if n.filter == nil || n.filter.ExternalConditions == nil { explainerMap[filterLabel] = nil } else { - explainerMap[filterLabel] = n.filter.Conditions + explainerMap[filterLabel] = n.filter.ExternalConditions } // Add the collection attributes. 
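The scanNode.Next hunk above keeps fetching until a document passes the filter or the source is exhausted. The same loop shape in isolation, with stand-in fetch and filter functions:

package main

import "fmt"

type Doc struct{ Age int }

// next keeps pulling from fetch until a document passes the filter or
// the source runs dry: the shape of scanNode.Next after the move to
// FetchNextDoc plus mapper.RunFilter.
func next(fetch func() (Doc, bool), filter func(Doc) (bool, error)) (Doc, bool, error) {
	for {
		doc, ok := fetch()
		if !ok {
			return Doc{}, false, nil
		}
		passed, err := filter(doc)
		if err != nil {
			return Doc{}, false, err
		}
		if passed {
			return doc, true, nil
		}
	}
}

func main() {
	docs := []Doc{{17}, {42}, {15}, {60}}
	i := 0
	fetch := func() (Doc, bool) {
		if i >= len(docs) {
			return Doc{}, false
		}
		d := docs[i]
		i++
		return d, true
	}
	adult := func(d Doc) (bool, error) { return d.Age >= 18, nil }
	for d, ok, _ := next(fetch, adult); ok; d, ok, _ = next(fetch, adult) {
		fmt.Println(d.Age) // 42, 60
	}
}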
@@ -151,14 +153,18 @@
 // Merge implements mergeNode
 func (n *scanNode) Merge() bool { return true }
 
-func (p *Planner) Scan(versioned bool) *scanNode {
+func (p *Planner) Scan(parsed *mapper.Select) *scanNode {
 	var f fetcher.Fetcher
-	if versioned {
+	if parsed.Cid != "" {
 		f = new(fetcher.VersionedFetcher)
 	} else {
 		f = new(fetcher.DocumentFetcher)
 	}
-	return &scanNode{p: p, fetcher: f}
+	return &scanNode{
+		p:         p,
+		fetcher:   f,
+		docMapper: docMapper{&parsed.DocumentMapping},
+	}
 }
 
 // multiScanNode is a buffered scanNode that has
@@ -171,6 +177,8 @@ func (p *Planner) Scan(versioned bool) *scanNode {
 // we call Next() on the underlying scanNode only
 // once every 2 Next() calls on the multiScan
 type multiScanNode struct {
+	docMapper
+
 	scanNode   *scanNode
 	numReaders int
 	numCalls   int
@@ -204,7 +212,7 @@ func (n *multiScanNode) Next() (bool, error) {
 	return n.lastBool, n.lastErr
 }
 
-func (n *multiScanNode) Value() map[string]interface{} {
+func (n *multiScanNode) Value() core.Doc {
 	return n.scanNode.documentIterator.Value()
 }
 
diff --git a/query/graphql/planner/select.go b/query/graphql/planner/select.go
index 72580bd1e3..8cb0e6455f 100644
--- a/query/graphql/planner/select.go
+++ b/query/graphql/planner/select.go
@@ -12,14 +12,11 @@ package planner
 
 import (
 	"fmt"
-	"sort"
-	"strings"
 
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/core"
 	"github.com/sourcenetwork/defradb/db/base"
 	"github.com/sourcenetwork/defradb/db/fetcher"
-	"github.com/sourcenetwork/defradb/query/graphql/parser"
+	"github.com/sourcenetwork/defradb/query/graphql/mapper"
 
 	cid "github.com/ipfs/go-cid"
 	parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types"
@@ -50,10 +47,11 @@ SELECT * From TableA as A JOIN TableB as B ON a.id = b.friend_id
 // Wraps a selectNode and all the logic of a plan graph into a single struct for proper plan expansion.
 // Executes the top level plan node.
 type selectTopNode struct {
+	docMapper
+
 	group      *groupNode
 	sort       *sortNode
 	limit      planNode
-	render     *renderNode
 	aggregates []aggregateNode
 
 	// selectnode is used for pre-wiring the plan (before expansion and all).
@@ -73,7 +71,7 @@ func (n *selectTopNode) Next() (bool, error) { return n.plan.Next() }
 
 func (n *selectTopNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
 
-func (n *selectTopNode) Value() map[string]interface{} { return n.plan.Value() }
+func (n *selectTopNode) Value() core.Doc { return n.plan.Value() }
 
 func (n *selectTopNode) Source() planNode { return n.plan }
 
@@ -93,6 +91,7 @@ func (n *selectTopNode) Close() error {
 
 type selectNode struct {
 	documentIterator
+	docMapper
 
 	p *Planner
 
@@ -107,9 +106,6 @@
 	// collection name, meta-data, etc.
 	sourceInfo sourceInfo
 
-	// data related to rendering
-	renderInfo *renderInfo
-
 	// top level filter expression
 	// filter is split between select, scan, and typeIndexJoin.
 	// The filters which only apply to the main collection
@@ -118,9 +114,10 @@
 	// to the sub type are defined here in the select.
 	// The filters that are defined on the subtype query
 	// are defined in the subtype scan node.
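The multiScanNode comment above describes one scan shared by several readers: the source only advances once per round of reader calls, and the cached result is served to the rest. A toy version of that bookkeeping (two readers, as in the comment):

package main

import "fmt"

// multiScan lets several readers share one underlying iterator: the
// source advances only when a new round of reader calls begins, and the
// cached result is handed to the remaining readers in the round.
type multiScan struct {
	source     func() (int, bool)
	numReaders int
	numCalls   int
	lastVal    int
	lastOk     bool
}

func (m *multiScan) Next() (int, bool) {
	if m.numCalls == 0 {
		m.lastVal, m.lastOk = m.source()
	}
	m.numCalls = (m.numCalls + 1) % m.numReaders
	return m.lastVal, m.lastOk
}

func main() {
	i := 0
	src := func() (int, bool) { i++; return i, i <= 2 }
	m := &multiScan{source: src, numReaders: 2}
	a, _ := m.Next() // advances the source: 1
	b, _ := m.Next() // served from the cache: 1
	fmt.Println(a, b)
	c, _ := m.Next() // new round, advances again: 2
	fmt.Println(c)
}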
-	filter *parser.Filter
+	filter *mapper.Filter
 
-	groupSelects []*parser.Select
+	parsed       *mapper.Select
+	groupSelects []*mapper.Select
 }
 
 func (n *selectNode) Kind() string {
@@ -146,7 +143,7 @@ func (n *selectNode) Next() (bool, error) {
 		}
 
 		n.currentValue = n.source.Value()
-		passes, err := parser.RunFilter(n.currentValue, n.filter, n.p.evalCtx)
+		passes, err := mapper.RunFilter(n.currentValue, n.filter)
 		if err != nil {
 			return false, err
 		}
@@ -172,10 +169,10 @@ func (n *selectNode) Explain() (map[string]interface{}, error) {
 	explainerMap := map[string]interface{}{}
 
 	// Add the filter attribute if it exists.
-	if n.filter == nil || n.filter.Conditions == nil {
+	if n.filter == nil || n.filter.ExternalConditions == nil {
 		explainerMap[filterLabel] = nil
 	} else {
-		explainerMap[filterLabel] = n.filter.Conditions
+		explainerMap[filterLabel] = n.filter.ExternalConditions
 	}
 
 	return explainerMap, nil
@@ -186,14 +183,12 @@ func (n *selectNode) Explain() (map[string]interface{}, error) {
 // creating scanNodes, typeIndexJoinNodes, and splitting
 // the necessary filters. It's designed to work with the
 // planner.Select construction call.
-func (n *selectNode) initSource(parsed *parser.Select) ([]aggregateNode, error) {
-	if parsed.CollectionName == "" {
-		parsed.CollectionName = parsed.Name
+func (n *selectNode) initSource() ([]aggregateNode, error) {
+	if n.parsed.CollectionName == "" {
+		n.parsed.CollectionName = n.parsed.Name
 	}
-	sourcePlan, err := n.p.getSource(
-		parsed.CollectionName,
-		parsed.QueryType == parserTypes.VersionedScanQuery,
-	)
+
+	sourcePlan, err := n.p.getSource(n.parsed)
 	if err != nil {
 		return nil, err
 	}
@@ -213,8 +208,8 @@
 	// If we have both a DocKey and a CID, then we need to run
 	// a TimeTravel (History-Traversing Versioned) query, which means
 	// we need to propagate the values to the underlying VersionedFetcher
-	if parsed.QueryType == parserTypes.VersionedScanQuery {
-		c, err := cid.Decode(parsed.CID)
+	if n.parsed.Cid != "" {
+		c, err := cid.Decode(n.parsed.Cid)
 		if err != nil {
 			return nil, fmt.Errorf(
 				"Failed to propagate VersionFetcher span, invalid CID: %w",
@@ -222,19 +217,19 @@
 			)
 		}
 		spans := fetcher.NewVersionedSpan(
-			core.DataStoreKey{DocKey: parsed.DocKeys[0]},
+			core.DataStoreKey{DocKey: n.parsed.DocKeys[0]},
 			c,
 		) // @todo check len
 		origScan.Spans(spans)
-	} else if parsed.DocKeys != nil {
+	} else if n.parsed.DocKeys != nil {
 		// If we *just* have a DocKey(s), run a FindByDocKey(s) optimization
 		// if we have a FindByDockey filter, create a span for it
 		// and propagate it to the scanNode
 		// @todo: When running the optimizer, check if the filter object
 		// contains a _key equality condition, and upgrade it to a point lookup
 		// instead of a prefix scan + filter via the Primary Index (0), like here:
-		spans := make(core.Spans, len(parsed.DocKeys))
-		for i, docKey := range parsed.DocKeys {
+		spans := make(core.Spans, len(n.parsed.DocKeys))
+		for i, docKey := range n.parsed.DocKeys {
 			dockeyIndexKey := base.MakeDocKey(sourcePlan.info.collectionDescription, docKey)
 			spans[i] = core.NewSpan(dockeyIndexKey, dockeyIndexKey.PrefixEnd())
 		}
@@ -242,131 +237,68 @@
 		}
 	}
 
-	return n.initFields(parsed)
+	return n.initFields(n.parsed)
 }
 
-func (n *selectNode) initFields(parsed *parser.Select) ([]aggregateNode, error) {
-	// re-organize the fields slice into
reverse-alphabetical - // this makes sure the reserved database fields that start with - // a "_" end up at the end. So if/when we build our MultiNode - // all the AppendPlans end up at the end. - sort.Slice(parsed.Fields, func(i, j int) bool { - return !(strings.Compare(parsed.Fields[i].GetName(), parsed.Fields[j].GetName()) < 0) - }) - +func (n *selectNode) initFields(parsed *mapper.Select) ([]aggregateNode, error) { aggregates := []aggregateNode{} // loop over the sub type // at the moment, we're only testing a single sub selection for _, field := range parsed.Fields { switch f := field.(type) { - case *parser.Select: + case *mapper.Aggregate: var plan aggregateNode var aggregateError error - // @todo: check select type: - // - TypeJoin - // - commitScan - if f.Statement.Name.Value == parserTypes.CountFieldName { - aggregateError = n.joinAggregatedChild(parsed, f) - if aggregateError != nil { - return nil, aggregateError - } + + switch f.Name { + case parserTypes.CountFieldName: plan, aggregateError = n.p.Count(f, parsed) - } else if f.Statement.Name.Value == parserTypes.SumFieldName { - aggregateError = n.joinAggregatedChild(parsed, f) - if aggregateError != nil { - return nil, aggregateError - } + case parserTypes.SumFieldName: plan, aggregateError = n.p.Sum(&n.sourceInfo, f, parsed) - } else if f.Statement.Name.Value == parserTypes.AverageFieldName { - averageSource, err := f.GetAggregateSource(parsed) - if err != nil { - return nil, err - } - childField := n.p.getSourceProperty(averageSource, parsed) - // We must not count nil values else they will corrupt the average (they would be counted otherwise) - // so here we append the nil filter to the average (and child nodes) before joining any children. - // The nil clause is appended to average and sum as well as count in order to make it much easier - // to find them and safely identify existing nodes. 
- appendNotNilFilter(f, childField) - - // then we join the potentially missing child using the dummy field (will be used by sum+count) - aggregateError = n.joinAggregatedChild(parsed, f) - if aggregateError != nil { - return nil, aggregateError - } - - // value of the suffix is unimportant here, just needs to be unique - dummyCountField := f.Clone(fmt.Sprintf("%s_internalCount", f.Name), parserTypes.CountFieldName) - countField, countExists := tryGetField(parsed.Fields, dummyCountField) - // Note: sumExists will always be false until we support filtering by nil in the query - if !countExists { - countField = dummyCountField - countPlan, err := n.p.Count(countField, parsed) - if err != nil { - return nil, err - } - aggregates = append(aggregates, countPlan) - } + case parserTypes.AverageFieldName: + plan, aggregateError = n.p.Average(f) + } - // value of the suffix is unimportant here, just needs to be unique - dummySumField := f.Clone(fmt.Sprintf("%s_internalSum", f.Name), parserTypes.SumFieldName) - sumField, sumExists := tryGetField(parsed.Fields, dummySumField) - // Note: sumExists will always be false until we support filtering by nil in the query - if !sumExists { - sumField = dummySumField - sumPlan, err := n.p.Sum(&n.sourceInfo, sumField, parsed) - if err != nil { - return nil, err - } - aggregates = append(aggregates, sumPlan) - } + if aggregateError != nil { + return nil, aggregateError + } - plan, aggregateError = n.p.Average(sumField, countField, f) - } else if f.Name == parserTypes.VersionFieldName { // reserved sub type for object queries - commitSlct := &parser.CommitSelect{ - Name: f.Name, - Alias: f.Alias, - // Type: parser.LatestCommits, - Fields: f.Fields, + if plan != nil { + aggregates = append(aggregates, plan) + } + case *mapper.Select: + if f.Name == parserTypes.VersionFieldName { // reserved sub type for object queries + commitSlct := &mapper.CommitSelect{ + Select: *f, } // handle _version sub selection query differently // if we are executing a regular Scan query // or a TimeTravel query. - if parsed.QueryType == parserTypes.VersionedScanQuery { + if parsed.Cid != "" { // for a TimeTravel query, we don't need the Latest // commit. Instead, _version references the CID // of that Target version we are querying. // So instead of a LatestCommit subquery, we need // a OneCommit subquery, with the supplied parameters. 
commitSlct.DocKey = parsed.DocKeys[0] // @todo check length - commitSlct.Cid = parsed.CID - commitSlct.Type = parser.OneCommit + commitSlct.Cid = parsed.Cid + commitSlct.Type = mapper.OneCommit } else { - commitSlct.Type = parser.LatestCommits + commitSlct.Type = mapper.LatestCommits } commitPlan, err := n.p.CommitSelect(commitSlct) if err != nil { return nil, err } - if err := n.addSubPlan(field.GetName(), commitPlan); err != nil { + if err := n.addSubPlan(f.Index, commitPlan); err != nil { return nil, err } - } else if f.Root == parserTypes.ObjectSelection { - if f.Statement.Name.Value == parserTypes.GroupFieldName { - n.groupSelects = append(n.groupSelects, f) - } else { - //nolint:errcheck - n.addTypeIndexJoin(f) // @TODO: ISSUE#158 - } - } - - if aggregateError != nil { - return nil, aggregateError - } - - if plan != nil { - aggregates = append(aggregates, plan) + } else if f.Name == parserTypes.GroupFieldName { + n.groupSelects = append(n.groupSelects, f) + } else { + //nolint:errcheck + n.addTypeIndexJoin(f) // @TODO: ISSUE#158 } } } @@ -374,118 +306,13 @@ func (n *selectNode) initFields(parsed *parser.Select) ([]aggregateNode, error) return aggregates, nil } -// appendNotNilFilter appends a not nil filter for the given child field -// to the given Select. -func appendNotNilFilter(field *parser.Select, childField string) { - if field.Filter == nil { - field.Filter = &parser.Filter{} - } - - if field.Filter.Conditions == nil { - field.Filter.Conditions = map[string]interface{}{} - } - - childBlock, hasChildBlock := field.Filter.Conditions[childField] - if !hasChildBlock { - childBlock = map[string]interface{}{} - field.Filter.Conditions[childField] = childBlock - } - - typedChildBlock := childBlock.(map[string]interface{}) - typedChildBlock["$ne"] = nil -} - -// tryGetField scans the given list of fields for an item matching the given searchTerm. -// Will return the matched value and true if one is found, else will return nil and false. 
-func tryGetField(fields []parser.Selection, searchTerm *parser.Select) (*parser.Select, bool) { - for _, field := range fields { - f, isSelect := field.(*parser.Select) - if !isSelect { - continue - } - - if f.Equal(*searchTerm) { - return f, true - } - } - - return nil, false -} - -// Join any child collections required by the given transformation if the child -// collections have not been requested for render by the consumer -func (n *selectNode) joinAggregatedChild( - parsed *parser.Select, - field *parser.Select, -) error { - source, err := field.GetAggregateSource(parsed) - if err != nil { - return err - } - - targetField := field.Clone(source.HostProperty, source.ExternalHostName) - - hasChildProperty := false - for _, siblingField := range parsed.Fields { - siblingSelect, isSelect := siblingField.(*parser.Select) - if isSelect && siblingSelect.Equal(*targetField) { - hasChildProperty = true - break - } - } - - // If the child item is not requested, then we have add in the necessary components - // to force the child records to be scanned through (they wont be rendered) - if !hasChildProperty { - if source.ExternalHostName == parserTypes.GroupFieldName { - hasGroupSelect := false - for _, childSelect := range n.groupSelects { - if childSelect.Equal(*targetField) { - hasGroupSelect = true - break - } - - // if the child filter is nil then we can use it as source with no meaningful overhead - // - // todo - this might be incorrect when the groupby contains a filter - test - // consider adding fancy inclusive logic - if childSelect.ExternalName == parserTypes.GroupFieldName && childSelect.Filter == nil { - hasGroupSelect = true - break - } - } - if !hasGroupSelect { - newGroup := &parser.Select{ - Alias: source.HostProperty, - Name: fmt.Sprintf("_agg%v", len(parsed.Fields)), - ExternalName: parserTypes.GroupFieldName, - Hidden: true, - } - parsed.Fields = append(parsed.Fields, newGroup) - n.groupSelects = append(n.groupSelects, newGroup) - } - } else if parsed.Root != parserTypes.CommitSelection { - fieldDescription, _ := n.sourceInfo.collectionDescription.GetField(source.HostProperty) - if fieldDescription.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { - subtype := &parser.Select{ - Name: source.HostProperty, - ExternalName: parserTypes.GroupFieldName, - } - return n.addTypeIndexJoin(subtype) - } - } - } - - return nil -} - -func (n *selectNode) addTypeIndexJoin(subSelect *parser.Select) error { +func (n *selectNode) addTypeIndexJoin(subSelect *mapper.Select) error { typeIndexJoin, err := n.p.makeTypeIndexJoin(n, n.origSource, subSelect) if err != nil { return err } - if err := n.addSubPlan(subSelect.Name, typeIndexJoin); err != nil { + if err := n.addSubPlan(subSelect.Index, typeIndexJoin); err != nil { return err } @@ -507,7 +334,7 @@ func (n *selectNode) Source() planNode { return n.source } // not to be used on the top level selection node. // This allows us to disable rendering on all sub Select nodes // and only run it at the end on the top level select node. 
-func (p *Planner) SubSelect(parsed *parser.Select) (planNode, error) {
+func (p *Planner) SubSelect(parsed *mapper.Select) (planNode, error) {
 	plan, err := p.Select(parsed)
 	if err != nil {
 		return nil, err
@@ -516,12 +343,11 @@
 
-	// if this is a sub select plan, we need to remove the render node
-	// as the final top level selectTopNode will handle all sub renders
 	top := plan.(*selectTopNode)
-	top.render = nil
 	return top, nil
 }
 
 func (p *Planner) SelectFromSource(
-	parsed *parser.Select,
+	parsed *mapper.Select,
 	source planNode,
 	fromCollection bool,
 	providedSourceInfo *sourceInfo,
@@ -530,12 +356,13 @@ func (p *Planner) SelectFromSource(
 		p:          p,
 		source:     source,
 		origSource: source,
+		parsed:     parsed,
+		docMapper:  docMapper{&parsed.DocumentMapping},
 	}
 	s.filter = parsed.Filter
 	limit := parsed.Limit
 	sort := parsed.OrderBy
 	groupBy := parsed.GroupBy
-	s.renderInfo = &renderInfo{}
 
 	if providedSourceInfo != nil {
 		s.sourceInfo = *providedSourceInfo
@@ -555,69 +382,71 @@
 		return nil, err
 	}
 
-	groupPlan, err := p.GroupBy(groupBy, s.groupSelects)
+	groupPlan, err := p.GroupBy(groupBy, parsed, s.groupSelects)
 	if err != nil {
 		return nil, err
 	}
 
-	limitPlan, err := p.HardLimit(limit)
+	limitPlan, err := p.HardLimit(parsed, limit)
 	if err != nil {
 		return nil, err
 	}
 
-	sortPlan, err := p.OrderBy(sort)
+	sortPlan, err := p.OrderBy(parsed, sort)
 	if err != nil {
 		return nil, err
 	}
 
 	top := &selectTopNode{
 		selectnode: s,
-		render:     p.render(parsed),
 		limit:      limitPlan,
 		sort:       sortPlan,
 		group:      groupPlan,
 		aggregates: aggregates,
+		docMapper:  docMapper{&parsed.DocumentMapping},
 	}
 	return top, nil
}
 
 // Select constructs a SelectPlan
-func (p *Planner) Select(parsed *parser.Select) (planNode, error) {
-	s := &selectNode{p: p}
-	s.filter = parsed.Filter
+func (p *Planner) Select(parsed *mapper.Select) (planNode, error) {
+	s := &selectNode{
+		p:         p,
+		filter:    parsed.Filter,
+		parsed:    parsed,
+		docMapper: docMapper{&parsed.DocumentMapping},
+	}
 	limit := parsed.Limit
 	sort := parsed.OrderBy
 	groupBy := parsed.GroupBy
-	s.renderInfo = &renderInfo{}
-	s.groupSelects = []*parser.Select{}
 
-	aggregates, err := s.initSource(parsed)
+	aggregates, err := s.initSource()
 	if err != nil {
 		return nil, err
 	}
 
-	groupPlan, err := p.GroupBy(groupBy, s.groupSelects)
+	groupPlan, err := p.GroupBy(groupBy, parsed, s.groupSelects)
 	if err != nil {
 		return nil, err
 	}
 
-	limitPlan, err := p.HardLimit(limit)
+	limitPlan, err := p.HardLimit(parsed, limit)
 	if err != nil {
 		return nil, err
 	}
 
-	sortPlan, err := p.OrderBy(sort)
+	sortPlan, err := p.OrderBy(parsed, sort)
 	if err != nil {
 		return nil, err
 	}
 
 	top := &selectTopNode{
 		selectnode: s,
-		render:     p.render(parsed),
 		limit:      limitPlan,
 		sort:       sortPlan,
 		group:      groupPlan,
 		aggregates: aggregates,
+		docMapper:  docMapper{&parsed.DocumentMapping},
 	}
 	return top, nil
 }
diff --git a/query/graphql/planner/sort.go b/query/graphql/planner/sort.go
index 10607f521a..d938645731 100644
--- a/query/graphql/planner/sort.go
+++ b/query/graphql/planner/sort.go
@@ -12,7 +12,7 @@ package planner
 
 import (
 	"github.com/sourcenetwork/defradb/core"
-	parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types"
+	"github.com/sourcenetwork/defradb/query/graphql/mapper"
 )
 
 // simplified planNode interface.
@@ -20,7 +20,7 @@
 // in value generation and retrieval.
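The sortingStrategy interface below (Add, then Finish, then iterate) is a buffer-then-sort design: nothing can be emitted until the source is drained. A self-contained sketch of that shape, assuming a single ascending key:

package main

import (
	"fmt"
	"sort"
)

type Doc struct{ Age int }

// allSort mirrors the shape of allSortStrategy: Add buffers every
// incoming document, Finish applies the ordering once, and Next then
// walks the sorted buffer.
type allSort struct {
	docs []Doc
	pos  int
}

func (s *allSort) Add(d Doc) { s.docs = append(s.docs, d) }

func (s *allSort) Finish() {
	sort.Slice(s.docs, func(i, j int) bool { return s.docs[i].Age < s.docs[j].Age })
}

func (s *allSort) Next() (Doc, bool) {
	if s.pos >= len(s.docs) {
		return Doc{}, false
	}
	d := s.docs[s.pos]
	s.pos++
	return d, true
}

func main() {
	s := &allSort{}
	for _, a := range []int{42, 17, 33} {
		s.Add(Doc{a})
	}
	s.Finish()
	for d, ok := s.Next(); ok; d, ok = s.Next() {
		fmt.Println(d.Age) // 17, 33, 42
	}
}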
 type valueIterator interface {
 	Next() (bool, error)
-	Value() map[string]interface{}
+	Value() core.Doc
 	Close() error
 }
 
@@ -30,7 +30,7 @@ type sortingStrategy interface {
 	// copies data if it's needed.
 	// Ideally stores inside a valuesNode
 	// rowContainer buffer.
-	Add(map[string]interface{}) error
+	Add(core.Doc) error
 	// Finish finalizes and applies the actual
 	// sorting mechanism to all the stored data.
 	Finish()
@@ -38,10 +38,12 @@
 
 // order the results
 type sortNode struct {
+	docMapper
+
 	p *Planner
 
 	plan planNode
 
-	ordering []parserTypes.SortCondition
+	ordering []mapper.OrderCondition
 
 	// simplified planNode interface
 	// used for iterating through
@@ -59,17 +61,18 @@ type sortNode struct {
 
 // OrderBy creates a new sortNode which returns the underlying
 // plan's values in a sorted manner. The field to sort by, and the
-// direction of sorting is determined by the given parserTypes.OrderBy
+// direction of sorting is determined by the given mapper.OrderBy
 // object.
-func (p *Planner) OrderBy(n *parserTypes.OrderBy) (*sortNode, error) {
+func (p *Planner) OrderBy(parsed *mapper.Select, n *mapper.OrderBy) (*sortNode, error) {
 	if n == nil { // no orderby info
 		return nil, nil
 	}
 
 	return &sortNode{
-		p:        p,
-		ordering: n.Conditions,
-		needSort: true,
+		p:         p,
+		ordering:  n.Conditions,
+		needSort:  true,
+		docMapper: docMapper{&parsed.DocumentMapping},
 	}, nil
 }
 
@@ -87,7 +90,7 @@ func (n *sortNode) Start() error { return n.plan.Start() }
 
 func (n *sortNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
 
-func (n *sortNode) Value() map[string]interface{} {
+func (n *sortNode) Value() core.Doc {
 	return n.valueIter.Value()
 }
 
@@ -166,7 +169,7 @@ func newAllSortStrategy(v *valuesNode) *allSortStrategy {
 }
 
 // Add adds a new document to the underlying valueNode
-func (s *allSortStrategy) Add(doc map[string]interface{}) error {
+func (s *allSortStrategy) Add(doc core.Doc) error {
 	err := s.valueNode.docs.AddDoc(doc)
 	return err
 }
@@ -182,7 +185,7 @@ func (s *allSortStrategy) Next() (bool, error) {
 }
 
 // Value returns the value of the next doc from the underlying valueNode
-func (s *allSortStrategy) Value() map[string]interface{} {
+func (s *allSortStrategy) Value() core.Doc {
 	return s.valueNode.Value()
 }
 
diff --git a/query/graphql/planner/sum.go b/query/graphql/planner/sum.go
index 4fb8f04572..d764f4af30 100644
--- a/query/graphql/planner/sum.go
+++ b/query/graphql/planner/sum.go
@@ -15,189 +15,123 @@ import (
 
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/core"
-	"github.com/sourcenetwork/defradb/query/graphql/parser"
+	"github.com/sourcenetwork/defradb/query/graphql/mapper"
 
 	parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types"
 )
 
 type sumNode struct {
 	documentIterator
+	docMapper
 
 	p    *Planner
 	plan planNode
 
-	isFloat          bool
-	sourceCollection string
-	sourceProperty   string
-	virtualFieldId   string
-
-	filter *parser.Filter
+	isFloat           bool
+	virtualFieldIndex int
+	aggregateMapping  []mapper.AggregateTarget
 }
 
 func (p *Planner) Sum(
 	sourceInfo *sourceInfo,
-	field *parser.Select,
-	parent *parser.Select,
+	field *mapper.Aggregate,
+	parent *mapper.Select,
 ) (*sumNode, error) {
-	source, err := field.GetAggregateSource(parent)
-	if err != nil {
-		return nil, err
-	}
-
-	sourceProperty := p.getSourceProperty(source, parent)
-	isFloat, err := p.isValueFloat(sourceInfo, parent, source, sourceProperty)
-	if err != nil {
-		return nil, err
+	isFloat := false
+	for _, target := range field.AggregateTargets {
+
isTargetFloat, err := p.isValueFloat(&sourceInfo.collectionDescription, parent, &target) + if err != nil { + return nil, err + } + // If one source property is a float, the result will be a float - no need to check the rest + if isTargetFloat { + isFloat = true + break + } } return &sumNode{ - p: p, - isFloat: isFloat, - sourceCollection: source.HostProperty, - sourceProperty: sourceProperty, - virtualFieldId: field.Name, - filter: field.Filter, + p: p, + isFloat: isFloat, + aggregateMapping: field.AggregateTargets, + virtualFieldIndex: field.Index, + docMapper: docMapper{&field.DocumentMapping}, }, nil } // Returns true if the value to be summed is a float, otherwise false. func (p *Planner) isValueFloat( - sourceInfo *sourceInfo, - parent *parser.Select, - source parser.AggregateTarget, - sourceProperty string, + parentDescription *client.CollectionDescription, + parent *mapper.Select, + source *mapper.AggregateTarget, ) (bool, error) { // It is important that averages are floats even if their underlying values are ints // else sum will round them down to the nearest whole number - if source.ChildProperty == parserTypes.AverageFieldName { + if source.ChildTarget.Name == parserTypes.AverageFieldName { return true, nil } - sourceFieldDescription, err := p.getSourceField( - sourceInfo, - parent, - source, - sourceProperty, - ) - if err != nil { - return false, err - } - - return sourceFieldDescription.Kind == client.FieldKind_FLOAT_ARRAY || - sourceFieldDescription.Kind == client.FieldKind_FLOAT, nil -} - -// Gets the root underlying field of the aggregate. -// This could be several layers deap if aggregating an aggregate. -func (p *Planner) getSourceField( - sourceInfo *sourceInfo, - parent parser.Selection, - source parser.AggregateTarget, - sourceProperty string, -) (client.FieldDescription, error) { - if source.ChildProperty == "" { + if !source.ChildTarget.HasValue { // If path length is one - we are summing an inline array - fieldDescription, fieldDescriptionFound := sourceInfo.collectionDescription.GetField(source.HostProperty) + fieldDescription, fieldDescriptionFound := parentDescription.GetField(source.Name) if !fieldDescriptionFound { - return client.FieldDescription{}, fmt.Errorf( + return false, fmt.Errorf( "Unable to find field description for field: %s", - source.HostProperty, + source.Name, ) } - return fieldDescription, nil + return fieldDescription.Kind == client.FieldKind_FLOAT_ARRAY || + fieldDescription.Kind == client.FieldKind_FLOAT, nil } // If path length is two, we are summing a group or a child relationship - if source.ChildProperty == parserTypes.CountFieldName { - // If we are summing a count, we know it is an int and can return early - return client.FieldDescription{ - Kind: client.FieldKind_INT, - }, nil + if source.ChildTarget.Name == parserTypes.CountFieldName { + // If we are summing a count, we know it is an int and can return false early + return false, nil + } + + child, isChildSelect := parent.FieldAt(source.Index).AsSelect() + if !isChildSelect { + return false, fmt.Errorf("Expected child select but none was found") } - if _, isAggregate := parserTypes.Aggregates[source.ChildProperty]; isAggregate { + childCollectionDescription, err := p.getCollectionDesc(child.CollectionName) + if err != nil { + return false, err + } + + if _, isAggregate := parserTypes.Aggregates[source.ChildTarget.Name]; isAggregate { // If we are aggregating an aggregate, we need to traverse the aggregation chain down to // the root field in order to determine the value type. 
This is recursive to allow handling // of N-depth aggregations (e.g. sum of sum of sum of...) - var sourceField *parser.Select - var sourceParent parser.Selection - for _, field := range parent.GetSelections() { - if field.GetName() == source.HostProperty { - sourceParent = field - } - } + sourceField := child.FieldAt(source.ChildTarget.Index).(*mapper.Aggregate) - for _, field := range sourceParent.GetSelections() { - if field.GetAlias() == source.ChildProperty { - sourceField = field.(*parser.Select) - break + for _, aggregateTarget := range sourceField.AggregateTargets { + isFloat, err := p.isValueFloat( + &childCollectionDescription, + child, + &aggregateTarget, + ) + if err != nil { + return false, err } - } - sourceSource, err := sourceField.GetAggregateSource(parent) - if err != nil { - return client.FieldDescription{}, err - } - sourceSourceProperty := p.getSourceProperty(sourceSource, sourceParent) - return p.getSourceField( - sourceInfo, - sourceParent, - sourceSource, - sourceSourceProperty, - ) - } - - if source.ExternalHostName == parserTypes.GroupFieldName { - // If the source collection is a group, then the description of the collection - // to sum is this object. - fieldDescription, fieldDescriptionFound := sourceInfo.collectionDescription.GetField(sourceProperty) - if !fieldDescriptionFound { - return client.FieldDescription{}, - fmt.Errorf("Unable to find field description for field: %s", sourceProperty) + // If one source property is a float, the result will be a float - no need to check the rest + if isFloat { + return true, nil + } } - return fieldDescription, nil - } - - parentFieldDescription, parentFieldDescriptionFound := sourceInfo.collectionDescription.GetField(source.HostProperty) - if !parentFieldDescriptionFound { - return client.FieldDescription{}, fmt.Errorf( - "Unable to find parent field description for field: %s", - source.HostProperty, - ) + return false, nil } - collectionDescription, err := p.getCollectionDesc(parentFieldDescription.Schema) - if err != nil { - return client.FieldDescription{}, err - } - - fieldDescription, fieldDescriptionFound := collectionDescription.GetField(sourceProperty) + fieldDescription, fieldDescriptionFound := childCollectionDescription.GetField(source.ChildTarget.Name) if !fieldDescriptionFound { - return client.FieldDescription{}, - fmt.Errorf("Unable to find child field description for field: %s", sourceProperty) - } - return fieldDescription, nil -} - -// Gets the name of the immediate value-property to be aggregated. 
-func (p *Planner) getSourceProperty(source parser.AggregateTarget, parent parser.Selection) string { - if source.ChildProperty == "" { - return "" - } - - if _, isAggregate := parserTypes.Aggregates[source.ChildProperty]; isAggregate { - for _, field := range parent.GetSelections() { - if field.GetName() == source.HostProperty { - for _, childField := range field.(*parser.Select).Fields { - if childField.GetAlias() == source.ChildProperty { - return childField.(*parser.Select).GetName() - } - } - } - } + return false, + fmt.Errorf("Unable to find child field description for field: %s", source.ChildTarget.Name) } - return source.ChildProperty + return fieldDescription.Kind == client.FieldKind_FLOAT_ARRAY || + fieldDescription.Kind == client.FieldKind_FLOAT, nil } func (n *sumNode) Kind() string { @@ -232,11 +166,12 @@ func (n *sumNode) Next() (bool, error) { sum := float64(0) - if child, hasProperty := n.currentValue[n.sourceCollection]; hasProperty { + for _, source := range n.aggregateMapping { + child := n.currentValue.Fields[source.Index] switch childCollection := child.(type) { - case []map[string]interface{}: + case []core.Doc: for _, childItem := range childCollection { - passed, err := parser.RunFilter(childItem, n.filter, n.p.evalCtx) + passed, err := mapper.RunFilter(childItem, source.Filter) if err != nil { return false, err } @@ -244,19 +179,18 @@ func (n *sumNode) Next() (bool, error) { continue } - if childProperty, hasChildProperty := childItem[n.sourceProperty]; hasChildProperty { - switch v := childProperty.(type) { - case int: - sum += float64(v) - case int64: - sum += float64(v) - case uint64: - sum += float64(v) - case float64: - sum += v - default: - // do nothing, cannot be summed - } + childProperty := childItem.Fields[source.ChildTarget.Index] + switch v := childProperty.(type) { + case int: + sum += float64(v) + case int64: + sum += float64(v) + case uint64: + sum += float64(v) + case float64: + sum += v + default: + // do nothing, cannot be summed } } case []int64: @@ -276,7 +210,7 @@ func (n *sumNode) Next() (bool, error) { } else { typedSum = int64(sum) } - n.currentValue[n.virtualFieldId] = typedSum + n.currentValue.Fields[n.virtualFieldIndex] = typedSum return true, nil } diff --git a/query/graphql/planner/type_join.go b/query/graphql/planner/type_join.go index 5ac6dd93f6..72379e783a 100644 --- a/query/graphql/planner/type_join.go +++ b/query/graphql/planner/type_join.go @@ -14,9 +14,10 @@ import ( "fmt" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/connor" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/base" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" "github.com/sourcenetwork/defradb/query/graphql/schema" ) @@ -48,6 +49,8 @@ import ( // root, and recursively creates a new selectNode for the // subType. 
type typeIndexJoin struct { + docMapper + p *Planner // root planNode @@ -66,10 +69,11 @@ type typeIndexJoin struct { func (p *Planner) makeTypeIndexJoin( parent *selectNode, source planNode, - subType *parser.Select, + subType *mapper.Select, ) (*typeIndexJoin, error) { typeJoin := &typeIndexJoin{ - p: p, + p: p, + docMapper: docMapper{parent.documentMapping}, } // handle join relation strategies @@ -117,7 +121,7 @@ func (n *typeIndexJoin) Next() (bool, error) { return n.joinPlan.Next() } -func (n *typeIndexJoin) Value() map[string]interface{} { +func (n *typeIndexJoin) Value() core.Doc { return n.joinPlan.Value() } @@ -198,20 +202,22 @@ func (n *typeIndexJoin) Merge() bool { return true } // // The subType filter is the conditions that apply to the // queried sub type, i.e. {birthday: "June 26, 1990", ...}. -func splitFilterByType(filter *parser.Filter, subType string) (*parser.Filter, *parser.Filter) { +func splitFilterByType(filter *mapper.Filter, subType int) (*mapper.Filter, *mapper.Filter) { if filter == nil { return nil, nil } - sub, ok := filter.Conditions[subType] - if !ok { - return filter, &parser.Filter{} + conditionKey := &mapper.PropertyIndex{ + Index: subType, + } + + keyFound, sub := removeConditionIndex(conditionKey, filter.Conditions) + if !keyFound { + return filter, &mapper.Filter{} } - // delete old filter value - delete(filter.Conditions, subType) // create the new split-up filter // our schema ensures that if sub exists, it's of type map[string]interface{} - splitF := &parser.Filter{Conditions: map[string]interface{}{subType: sub}} + splitF := &mapper.Filter{Conditions: map[connor.FilterKey]interface{}{conditionKey: sub}} return filter, splitF } @@ -220,6 +226,7 @@ func splitFilterByType(filter *parser.Filter, * // query. type typeJoinOne struct { documentIterator + docMapper p *Planner @@ -231,60 +238,54 @@ type typeJoinOne struct { primary bool - spans core.Spans + spans core.Spans + subSelect *mapper.Select } func (p *Planner) makeTypeJoinOne( parent *selectNode, source planNode, - subType *parser.Select, + subType *mapper.Select, ) (*typeJoinOne, error) { - //ignore recurse for now.
- typeJoin := &typeJoinOne{ - p: p, - root: source, - } - - desc := parent.sourceInfo.collectionDescription - // get the correct sub field schema type (collection) - subTypeFieldDesc, ok := desc.GetField(subType.Name) - if !ok { - return nil, fmt.Errorf("couldn't find subtype field description for typeJoin node") + // split filter + if scan, ok := source.(*scanNode); ok { + scan.filter, parent.filter = splitFilterByType(scan.filter, subType.Index) } - subType.CollectionName = subTypeFieldDesc.Schema - selectPlan, err := p.SubSelect(subType) if err != nil { return nil, err } - typeJoin.subType = selectPlan - typeJoin.subTypeName = subTypeFieldDesc.Name - typeJoin.subTypeFieldName, err = p.db.GetRelationshipIdField( + subTypeFieldName, err := p.db.GetRelationshipIdField( subType.Name, - subTypeFieldDesc.Schema, - desc.Name, + subType.CollectionName, + parent.parsed.CollectionName, ) if err != nil { return nil, err } - // split filter - if scan, ok := source.(*scanNode); ok { - scan.filter, parent.filter = splitFilterByType(scan.filter, typeJoin.subTypeName) + // get the correct sub field schema type (collection) + subTypeFieldDesc, ok := parent.sourceInfo.collectionDescription.GetField(subType.Name) + if !ok { + return nil, fmt.Errorf("couldn't find subtype field description for typeJoin node") } - // source.filter, parent.filter = splitFilterByType(source.filter, typeJoin.subTypeName) // determine relation direction (primary or secondary?) // check if the field we're querying is the primary side of the relation - if subTypeFieldDesc.RelationType&client.Relation_Type_Primary > 0 { - typeJoin.primary = true - } else { - typeJoin.primary = false - } - - return typeJoin, nil + isPrimary := subTypeFieldDesc.RelationType&client.Relation_Type_Primary > 0 + + return &typeJoinOne{ + p: p, + root: source, + subSelect: subType, + subTypeName: subType.Name, + subTypeFieldName: subTypeFieldName, + subType: selectPlan, + primary: isPrimary, + docMapper: docMapper{parent.documentMapping}, + }, nil } func (n *typeJoinOne) Kind() string { @@ -322,42 +323,40 @@ func (n *typeJoinOne) Next() (bool, error) { return true, nil } -func (n *typeJoinOne) valuesSecondary(doc map[string]interface{}) map[string]interface{} { - docKey := doc["_key"].(string) - filter := map[string]interface{}{ - n.subTypeFieldName + "_id": docKey, +func (n *typeJoinOne) valuesSecondary(doc core.Doc) core.Doc { + fkIndex := &mapper.PropertyIndex{ + Index: n.subType.DocumentMap().FirstIndexOfName(n.subTypeFieldName + "_id"), + } + filter := map[connor.FilterKey]interface{}{ + fkIndex: doc.GetKey(), } // using the doc._key as a filter err := appendFilterToScanNode(n.subType, filter) if err != nil { - return nil + return core.Doc{} } - doc[n.subTypeName] = make(map[string]interface{}) + doc.Fields[n.subSelect.Index] = n.subSelect.DocumentMapping.NewDoc() next, err := n.subType.Next() if !next || err != nil { return doc } subdoc := n.subType.Value() - doc[n.subTypeName] = subdoc + doc.Fields[n.subSelect.Index] = subdoc return doc } -func (n *typeJoinOne) valuesPrimary(doc map[string]interface{}) map[string]interface{} { +func (n *typeJoinOne) valuesPrimary(doc core.Doc) core.Doc { // get the subtype doc key - subDocKey, ok := doc[n.subTypeName+"_id"] - if !ok { - return doc - } + subDocKey := n.docMapper.documentMapping.FirstOfName(doc, n.subTypeName+"_id") subDocKeyStr, ok := subDocKey.(string) if !ok { return doc } - subDocField := n.subTypeName - doc[subDocField] = map[string]interface{}{} + doc.Fields[n.subSelect.Index] = 
n.subSelect.DocumentMapping.NewDoc() // create the collection key for the sub doc slct := n.subType.(*selectTopNode).selectnode @@ -391,7 +390,7 @@ func (n *typeJoinOne) valuesPrimary(doc map[string]interface{}) map[string]inter } subDoc := n.subType.Value() - doc[subDocField] = subDoc + doc.Fields[n.subSelect.Index] = subDoc return doc } @@ -408,6 +407,7 @@ func (n *typeJoinOne) Source() planNode { return n.root } type typeJoinMany struct { documentIterator + docMapper p *Planner @@ -419,48 +419,43 @@ type typeJoinMany struct { // the subtype plan to get the subtype docs subType planNode subTypeName string + + subSelect *mapper.Select } func (p *Planner) makeTypeJoinMany( parent *selectNode, source planNode, - subType *parser.Select, + subType *mapper.Select, ) (*typeJoinMany, error) { - //ignore recurse for now. - typeJoin := &typeJoinMany{ - p: p, - root: source, - } - - desc := parent.sourceInfo.collectionDescription - // get the correct sub field schema type (collection) - subTypeFieldDesc, ok := desc.GetField(subType.Name) - if !ok { - return nil, fmt.Errorf("couldn't find subtype field description for typeJoin node") + // split filter + if scan, ok := source.(*scanNode); ok { + scan.filter, parent.filter = splitFilterByType(scan.filter, subType.Index) } - subType.CollectionName = subTypeFieldDesc.Schema selectPlan, err := p.SubSelect(subType) if err != nil { return nil, err } - typeJoin.subType = selectPlan - typeJoin.subTypeName = subTypeFieldDesc.Name - typeJoin.rootName, err = p.db.GetRelationshipIdField( + + rootName, err := p.db.GetRelationshipIdField( subType.Name, - subTypeFieldDesc.Schema, - desc.Name, + subType.CollectionName, + parent.parsed.CollectionName, ) if err != nil { return nil, err } - // split filter - if scan, ok := source.(*scanNode); ok { - scan.filter, parent.filter = splitFilterByType(scan.filter, typeJoin.subTypeName) - } - // source.filter, parent.filter = splitFilterByType(source.filter, typeJoin.subTypeName) - return typeJoin, nil + return &typeJoinMany{ + p: p, + root: source, + subSelect: subType, + subTypeName: subType.Name, + rootName: rootName, + subType: selectPlan, + docMapper: docMapper{parent.documentMapping}, + }, nil } func (n *typeJoinMany) Kind() string { @@ -494,13 +489,15 @@ func (n *typeJoinMany) Next() (bool, error) { // check if there's an index // if there is, scan and aggregate results // if not, then manually scan the subtype table - subdocs := make([]map[string]interface{}, 0) + subdocs := make([]core.Doc, 0) if n.index != nil { // @todo: handle index for one-to-many setup } else { - docKey := n.currentValue["_key"].(string) - filter := map[string]interface{}{ - n.rootName + "_id": docKey, // user_id: "bae-ALICE" | user_id: "bae-CHARLIE" + fkIndex := &mapper.PropertyIndex{ + Index: n.subSelect.FirstIndexOfName(n.rootName + "_id"), + } + filter := map[connor.FilterKey]interface{}{ + fkIndex: n.currentValue.GetKey(), // user_id: "bae-ALICE" | user_id: "bae-CHARLIE" } // using the doc._key as a filter err := appendFilterToScanNode(n.subType, filter) @@ -527,7 +524,7 @@ func (n *typeJoinMany) Next() (bool, error) { } } - n.currentValue[n.subTypeName] = subdocs + n.currentValue.Fields[n.subSelect.Index] = subdocs return true, nil } @@ -541,20 +538,21 @@ func (n *typeJoinMany) Close() error { func (n *typeJoinMany) Source() planNode { return n.root } -func appendFilterToScanNode(plan planNode, filterCondition map[string]interface{}) error { +func appendFilterToScanNode(plan planNode, filterCondition map[connor.FilterKey]interface{}) error {
switch node := plan.(type) { case *scanNode: - var err error filter := node.filter if filter == nil { - filter, err = parser.NewFilter(nil) - if err != nil { - return err - } + filter = mapper.NewFilter() } // merge filter conditions for k, v := range filterCondition { + indexKey, isIndexKey := k.(*mapper.PropertyIndex) + if !isIndexKey { + continue + } + removeConditionIndex(indexKey, filter.Conditions) filter.Conditions[k] = v } @@ -566,3 +564,18 @@ func appendFilterToScanNode(plan planNode, filterCondition map[string]interface{ } return nil } + +func removeConditionIndex( + key *mapper.PropertyIndex, + filterConditions map[connor.FilterKey]interface{}, +) (bool, interface{}) { + for targetKey, clause := range filterConditions { + if indexKey, isIndexKey := targetKey.(*mapper.PropertyIndex); isIndexKey { + if key.Index == indexKey.Index { + delete(filterConditions, targetKey) + return true, clause + } + } + } + return false, nil +} diff --git a/query/graphql/planner/update.go b/query/graphql/planner/update.go index c20086e16b..c5912d06ca 100644 --- a/query/graphql/planner/update.go +++ b/query/graphql/planner/update.go @@ -16,17 +16,18 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/base" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) type updateNode struct { documentIterator + docMapper p *Planner collection client.Collection - filter *parser.Filter + filter *mapper.Filter ids []string patch string @@ -81,7 +82,9 @@ func (n *updateNode) Next() (bool, error) { // consume the updates into our valuesNode for _, resKey := range results.DocKeys { - err := n.updateIter.docs.AddDoc(map[string]interface{}{"_key": resKey}) + doc := n.docMapper.documentMapping.NewDoc() + doc.SetKey(resKey) + err := n.updateIter.docs.AddDoc(doc) if err != nil { return false, err } @@ -99,7 +102,7 @@ func (n *updateNode) Next() (bool, error) { updatedDoc := n.updateIter.Value() // create a new span with the updateDoc._key - docKeyStr := updatedDoc["_key"].(string) + docKeyStr := updatedDoc.GetKey() desc := n.collection.Description() updatedDocKeyIndex := base.MakeDocKey(desc, docKeyStr) spans := core.Spans{core.NewSpan(updatedDocKeyIndex, updatedDocKeyIndex.PrefixEnd())} @@ -149,10 +152,10 @@ func (n *updateNode) Explain() (map[string]interface{}, error) { explainerMap[idsLabel] = n.ids // Add the filter attribute if it exists, otherwise have it nil. - if n.filter == nil || n.filter.Conditions == nil { + if n.filter == nil || n.filter.ExternalConditions == nil { explainerMap[filterLabel] = nil } else { - explainerMap[filterLabel] = n.filter.Conditions + explainerMap[filterLabel] = n.filter.ExternalConditions } // Add the attribute that represents the patch to update with. 
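The appendFilterToScanNode and removeConditionIndex hunks above key filter conditions by connor.FilterKey values such as *mapper.PropertyIndex instead of field-name strings. Because those keys are pointers, two keys that address the same field index are still distinct map entries, so a merge must compare Index values rather than rely on map-key equality. Below is a minimal, self-contained sketch of that merge behaviour; the FilterKey and PropertyIndex types in it are simplified stand-ins inferred from this diff, not the actual defradb definitions.

package main

import "fmt"

// FilterKey is a stand-in for connor.FilterKey: a value that keys a
// condition map and can be compared against another key.
type FilterKey interface {
	Equal(other FilterKey) bool
}

// PropertyIndex is a stand-in for mapper.PropertyIndex: it addresses a
// document field by its mapped index rather than by its name.
type PropertyIndex struct {
	Index int
}

func (p *PropertyIndex) Equal(other FilterKey) bool {
	o, ok := other.(*PropertyIndex)
	return ok && o.Index == p.Index
}

// mergeCondition mirrors the merge loop in appendFilterToScanNode above:
// any clause already held under an equivalent property index is removed
// first, so the filter never accumulates duplicate clauses for one field.
func mergeCondition(conditions map[FilterKey]interface{}, key *PropertyIndex, clause interface{}) {
	for existing := range conditions {
		if key.Equal(existing) {
			// Deleting entries while ranging over a map is safe in Go.
			delete(conditions, existing)
		}
	}
	conditions[key] = clause
}

func main() {
	conditions := map[FilterKey]interface{}{}
	// Constrain the (hypothetical) foreign-key field at mapped index 2.
	mergeCondition(conditions, &PropertyIndex{Index: 2}, "bae-ALICE")
	// A second clause for the same index replaces the first instead of
	// piling up under a distinct pointer key.
	mergeCondition(conditions, &PropertyIndex{Index: 2}, "bae-CHARLIE")
	fmt.Println(len(conditions)) // prints 1
}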
@@ -166,25 +169,25 @@ return explainerMap, nil } -func (p *Planner) UpdateDocs(parsed *parser.Mutation) (planNode, error) { +func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) { update := &updateNode{ p: p, filter: parsed.Filter, - ids: parsed.IDs, + ids: parsed.DocKeys, isUpdating: true, patch: parsed.Data, + docMapper: docMapper{&parsed.DocumentMapping}, } // get collection - col, err := p.db.GetCollectionByName(p.ctx, parsed.Schema) + col, err := p.db.GetCollectionByName(p.ctx, parsed.Name) if err != nil { return nil, err } update.collection = col.WithTxn(p.txn) // create the results Select node - slct := parsed.ToSelect() - slctNode, err := p.Select(slct) + slctNode, err := p.Select(&parsed.Select) if err != nil { return nil, err } diff --git a/query/graphql/planner/values.go b/query/graphql/planner/values.go index 836278db21..d6184b9530 100644 --- a/query/graphql/planner/values.go +++ b/query/graphql/planner/values.go @@ -12,13 +12,11 @@ package planner import ( "sort" - "strings" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/db/container" - - parserTypes "github.com/sourcenetwork/defradb/query/graphql/parser/types" + "github.com/sourcenetwork/defradb/query/graphql/mapper" ) // valuesNode contains a collection @@ -28,16 +26,18 @@ import ( // then the rest of the nodes in the graph. It // has no children planNodes. type valuesNode struct { + docMapper + p *Planner // plan planNode - ordering []parserTypes.SortCondition + ordering []mapper.OrderCondition docs *container.DocumentContainer docIndex int } -func (p *Planner) newContainerValuesNode(ordering []parserTypes.SortCondition) *valuesNode { +func (p *Planner) newContainerValuesNode(ordering []mapper.OrderCondition) *valuesNode { return &valuesNode{ p: p, ordering: ordering, @@ -69,7 +69,7 @@ func (n *valuesNode) Next() (bool, error) { return true, nil } -func (n *valuesNode) Value() map[string]interface{} { +func (n *valuesNode) Value() core.Doc { return n.docs.At(n.docIndex) } @@ -91,15 +91,15 @@ func (n *valuesNode) Less(i, j int) bool { } // docValueLess extracts and compares field values of a document -func (n *valuesNode) docValueLess(da, db map[string]interface{}) bool { +func (n *valuesNode) docValueLess(da, db core.Doc) bool { var ra, rb interface{} for _, order := range n.ordering { - if order.Direction == parserTypes.ASC { - ra = getMapProp(da, order.Field) - rb = getMapProp(db, order.Field) - } else if order.Direction == parserTypes.DESC { // redundant, just else - ra = getMapProp(db, order.Field) - rb = getMapProp(da, order.Field) + if order.Direction == mapper.ASC { + ra = getDocProp(da, order.FieldIndexes) + rb = getDocProp(db, order.FieldIndexes) + } else if order.Direction == mapper.DESC { // redundant, just else + ra = getDocProp(db, order.FieldIndexes) + rb = getDocProp(da, order.FieldIndexes) } if c := base.Compare(ra, rb); c < 0 { @@ -133,31 +133,25 @@ func (n *valuesNode) Len() int { // case of nested objects. The key delimiter is a ".". // Eg.
// prop = "author.name" -> {author: {name: ...}} -func getMapProp(obj map[string]interface{}, prop string) interface{} { - if prop == "" { +func getDocProp(obj core.Doc, prop []int) interface{} { + if len(prop) == 0 { return nil } - props := strings.Split(prop, ".") - numProps := len(props) - return getMapPropList(obj, props, numProps) + return getMapPropList(obj, prop) } -func getMapPropList(obj map[string]interface{}, props []string, numProps int) interface{} { - if numProps == 1 { - val, ok := obj[props[0]] - if !ok { - return nil - } - return val +func getMapPropList(obj core.Doc, props []int) interface{} { + if len(props) == 1 { + return obj.Fields[props[0]] } - val, ok := obj[props[0]] - if !ok { + val := obj.Fields[props[0]] + if val == nil { return nil } - subObj, ok := val.(map[string]interface{}) + subObj, ok := val.(core.Doc) if !ok { return nil } - return getMapPropList(subObj, props[1:], numProps-1) + return getMapPropList(subObj, props[1:]) } diff --git a/query/graphql/planner/versionedscan.go b/query/graphql/planner/versionedscan.go index cb9a63d56b..1ef987817d 100644 --- a/query/graphql/planner/versionedscan.go +++ b/query/graphql/planner/versionedscan.go @@ -16,7 +16,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/fetcher" - "github.com/sourcenetwork/defradb/query/graphql/parser" + "github.com/sourcenetwork/defradb/query/graphql/mapper" "github.com/ipfs/go-cid" ) @@ -28,6 +28,7 @@ var ( // scans an index for records type versionedScanNode struct { documentIterator + docMapper p *Planner @@ -43,7 +44,7 @@ type versionedScanNode struct { reverse bool // filter data - filter *parser.Filter + filter *mapper.Filter scanInitialized bool @@ -94,15 +95,15 @@ func (n *versionedScanNode) Next() (bool, error) { // keep scanning until we find a doc that passes the filter for { var err error - n.docKey, n.currentValue, err = n.fetcher.FetchNextMap(n.p.ctx) + n.docKey, n.currentValue, err = n.fetcher.FetchNextDoc(n.p.ctx, n.documentMapping) if err != nil { return false, err } - if n.currentValue == nil { + if len(n.currentValue.Fields) == 0 { return false, nil } - passed, err := parser.RunFilter(n.currentValue, n.filter, n.p.evalCtx) + passed, err := mapper.RunFilter(n.currentValue, n.filter) if err != nil { return false, err } diff --git a/tests/integration/mutation/relation/delete/explain_delete_test.go b/tests/integration/mutation/relation/delete/explain_delete_test.go index bc18dd42af..b9f1092f0a 100644 --- a/tests/integration/mutation/relation/delete/explain_delete_test.go +++ b/tests/integration/mutation/relation/delete/explain_delete_test.go @@ -73,14 +73,12 @@ func TestExplainRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ + "selectNode": dataMap{ + "filter": nil, + "deleteNode": dataMap{ "filter": nil, - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{ - "bae-2f80f359-535d-508e-ba58-088a309ce3c3", - }, + "ids": []string{ + "bae-2f80f359-535d-508e-ba58-088a309ce3c3", }, }, }, diff --git a/tests/integration/mutation/simple/create/explain_simple_create_test.go b/tests/integration/mutation/simple/create/explain_simple_create_test.go index ccf45ca831..fe583d8224 100644 --- a/tests/integration/mutation/simple/create/explain_simple_create_test.go +++ b/tests/integration/mutation/simple/create/explain_simple_create_test.go @@ -34,18 
+34,22 @@ func TestExplainMutationCreateSimple(t *testing.T) { Results: []dataMap{ { "explain": dataMap{ - "selectTopNode": dataMap{ - "renderNode": dataMap{ + "createNode": dataMap{ + "data": dataMap{ + "age": float64(27), + "name": "John", + "points": 42.1, + "verified": true, + }, + "selectTopNode": dataMap{ "selectNode": dataMap{ - "createNode": dataMap{ - "data": dataMap{ - "age": float64(27), - "name": "John", - "points": float64(42.1), - "verified": true, - }, - }, "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "user", + "filter": nil, + "spans": []dataMap{}, + }, }, }, }, @@ -82,16 +86,20 @@ func TestExplainMutationCreateSimpleDoesNotCreateDocGivenDuplicate(t *testing.T) Results: []dataMap{ { "explain": dataMap{ - "selectTopNode": dataMap{ - "renderNode": dataMap{ + "createNode": dataMap{ + "data": dataMap{ + "age": float64(27), + "name": "John", + }, + "selectTopNode": dataMap{ "selectNode": dataMap{ - "createNode": dataMap{ - "data": dataMap{ - "age": float64(27), - "name": "John", - }, - }, "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "user", + "filter": nil, + "spans": []dataMap{}, + }, }, }, }, diff --git a/tests/integration/mutation/simple/create/simple_test.go b/tests/integration/mutation/simple/create/simple_test.go index c15cad0b68..9b30b08493 100644 --- a/tests/integration/mutation/simple/create/simple_test.go +++ b/tests/integration/mutation/simple/create/simple_test.go @@ -30,7 +30,7 @@ func TestMutationCreateSimple(t *testing.T) { Results: []map[string]interface{}{ { "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": int64(27), + "age": uint64(27), "name": "John", }, }, diff --git a/tests/integration/mutation/simple/create/with_version_test.go b/tests/integration/mutation/simple/create/with_version_test.go new file mode 100644 index 0000000000..59585310bb --- /dev/null +++ b/tests/integration/mutation/simple/create/with_version_test.go @@ -0,0 +1,42 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package create + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" +) + +func TestMutationCreateSimpleReturnVersionCID(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "Simple create mutation", + Query: `mutation { + create_user(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + _version { + cid + } + } + }`, + Results: []map[string]interface{}{ + { + "_version": []map[string]interface{}{ + { + "cid": "bafybeihsaeu7o2kep75fadotbqurrvqnamkjqr6cnpyvxxb3iolzxvzxve", + }, + }, + }, + }, + } + + simpleTests.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/simple/delete/explain_simple_delete_test.go b/tests/integration/mutation/simple/delete/explain_simple_delete_test.go index 5761529a23..585ab0eb9f 100644 --- a/tests/integration/mutation/simple/delete/explain_simple_delete_test.go +++ b/tests/integration/mutation/simple/delete/explain_simple_delete_test.go @@ -46,16 +46,14 @@ func TestExplainDeletionUsingMultiAndSingleIDs_Success(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{ - "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ "filter": nil, + "ids": []string{ + "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + }, }, + "filter": nil, }, }, }, @@ -95,17 +93,15 @@ func TestExplainDeletionUsingMultiAndSingleIDs_Success(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{ - "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", - }, - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ "filter": nil, + "ids": []string{ + "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + }, }, + "filter": nil, }, }, }, @@ -155,17 +151,15 @@ func TestExplainDeletionUsingMultiAndSingleIDs_Success(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{ - "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", - }, - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ "filter": nil, + "ids": []string{ + "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + }, }, + "filter": nil, }, }, }, @@ -199,16 +193,14 @@ func TestExplainDeletionUsingMultiAndSingleIDs_Success(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{ - "bae-8ca944fd-260e-5a44-b88f-326d9faca810", - }, - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ "filter": nil, + "ids": []string{ + "bae-8ca944fd-260e-5a44-b88f-326d9faca810", + }, }, + "filter": nil, }, }, }, @@ -251,18 +243,16 @@ func TestExplainDeletionOfDocumentsWithFilter_Success(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": dataMap{ - "name": dataMap{ - "$eq": "Shahzad", - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ + "filter": dataMap{ + "name": dataMap{ + "$eq": "Shahzad", }, - "ids": 
[]string(nil), }, - "filter": nil, + "ids": []string(nil), }, + "filter": nil, }, }, }, @@ -325,27 +315,25 @@ func TestExplainDeletionOfDocumentsWithFilter_Success(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": dataMap{ - "$and": []interface{}{ - dataMap{ - "age": dataMap{ - "$lt": int64(26), - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ + "filter": dataMap{ + "$and": []interface{}{ + dataMap{ + "age": dataMap{ + "$lt": int64(26), }, - dataMap{ - "verified": dataMap{ - "$eq": true, - }, + }, + dataMap{ + "verified": dataMap{ + "$eq": true, }, }, }, - "ids": []string(nil), }, - "filter": nil, + "ids": []string(nil), }, + "filter": nil, }, }, }, @@ -403,14 +391,12 @@ func TestExplainDeletionOfDocumentsWithFilter_Success(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": dataMap{}, - "ids": []string(nil), - }, - "filter": nil, + "selectNode": dataMap{ + "deleteNode": dataMap{ + "filter": dataMap{}, + "ids": []string(nil), }, + "filter": nil, }, }, }, @@ -452,16 +438,14 @@ func TestExplainDeletionUsingMultiIdsAndSingleIdAndFilter_Failure(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{ - "bae-6a6482a8-24e1-5c73-a237-ca569e41507e", - }, - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ "filter": nil, + "ids": []string{ + "bae-6a6482a8-24e1-5c73-a237-ca569e41507e", + }, }, + "filter": nil, }, }, }, @@ -485,17 +469,15 @@ func TestExplainDeletionUsingMultiIdsAndSingleIdAndFilter_Failure(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{ - "bae-028383cc-d6ba-5df7-959f-2bdce3536a05", - "bae-028383cc-d6ba-5df7-959f-2bdce3536a03", - }, - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ "filter": nil, + "ids": []string{ + "bae-028383cc-d6ba-5df7-959f-2bdce3536a05", + "bae-028383cc-d6ba-5df7-959f-2bdce3536a03", + }, }, + "filter": nil, }, }, }, @@ -537,30 +519,28 @@ func TestExplainDeletionUsingMultiIdsAndSingleIdAndFilter_Failure(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": dataMap{ - "$and": []interface{}{ - dataMap{ - "age": dataMap{ - "$lt": int64(26), - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ + "filter": dataMap{ + "$and": []interface{}{ + dataMap{ + "age": dataMap{ + "$lt": int64(26), }, - dataMap{ - "verified": dataMap{ - "$eq": true, - }, + }, + dataMap{ + "verified": dataMap{ + "$eq": true, }, }, }, - "ids": []string{ - "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - "test", - }, }, - "filter": nil, + "ids": []string{ + "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "test", + }, }, + "filter": nil, }, }, }, @@ -585,18 +565,16 @@ func TestExplainDeletionUsingMultiIdsAndSingleIdAndFilter_Failure(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": dataMap{ - "name": dataMap{ - "$eq": "Shahzad", - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ + "filter": dataMap{ + "name": dataMap{ + "$eq": "Shahzad", }, - "ids": []string(nil), }, - "filter": nil, + "ids": []string(nil), }, + "filter": nil, }, }, }, @@ -630,14 +608,12 @@ func 
TestExplainDeletionUsingMultiIdsAndSingleIdAndFilter_Failure(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "deleteNode": dataMap{ - "filter": nil, - "ids": []string{}, - }, + "selectNode": dataMap{ + "deleteNode": dataMap{ "filter": nil, + "ids": []string{}, }, + "filter": nil, }, }, }, diff --git a/tests/integration/mutation/simple/update/explain_simple_update_test.go b/tests/integration/mutation/simple/update/explain_simple_update_test.go index 4a16c1aada..2f1c35dc5e 100644 --- a/tests/integration/mutation/simple/update/explain_simple_update_test.go +++ b/tests/integration/mutation/simple/update/explain_simple_update_test.go @@ -70,15 +70,17 @@ func TestExplainSimpleMutationUpdateWithBooleanFilter(t *testing.T) { }, "ids": []string(nil), "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "user", - "filter": nil, - "spans": []dataMap{}, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "user", + "filter": dataMap{ + "verified": dataMap{ + "$eq": true, + }, }, + "spans": []dataMap{}, }, }, }, @@ -142,14 +144,21 @@ func TestExplainSimpleMutationUpdateWithIdInFilter(t *testing.T) { "bae-958c9334-73cf-5695-bf06-cf06826babfa", }, "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "user", - "filter": nil, - "spans": []dataMap{}, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "user", + "filter": nil, + "spans": []dataMap{ + { + "end": "/1/bae-0a24cf29-b2c2-5861-9d00-abd6250c475e", + "start": "/1/bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + }, + { + "end": "/1/bae-958c9334-73cf-5695-bf06-cf06826babfb", + "start": "/1/bae-958c9334-73cf-5695-bf06-cf06826babfa", + }, }, }, }, @@ -207,14 +216,17 @@ func TestExplainSimpleMutationUpdateWithIdEqualsFilter(t *testing.T) { "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", }, "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "user", - "filter": nil, - "spans": []dataMap{}, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "user", + "filter": nil, + "spans": []dataMap{ + { + "end": "/1/bae-0a24cf29-b2c2-5861-9d00-abd6250c475e", + "start": "/1/bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + }, }, }, }, @@ -285,14 +297,25 @@ func TestExplainSimpleMutationUpdateWithIdAndFilter(t *testing.T) { "bae-958c9334-73cf-5695-bf06-cf06826babfa", }, "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "user", - "filter": nil, - "spans": []dataMap{}, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "user", + "filter": dataMap{ + "verified": dataMap{ + "$eq": true, + }, + }, + "spans": []dataMap{ + { + "end": "/1/bae-0a24cf29-b2c2-5861-9d00-abd6250c475e", + "start": "/1/bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + }, + { + "end": "/1/bae-958c9334-73cf-5695-bf06-cf06826babfb", + "start": "/1/bae-958c9334-73cf-5695-bf06-cf06826babfa", + }, }, }, }, diff --git a/tests/integration/query/explain/mix_test.go b/tests/integration/query/explain/mix_test.go deleted file 
mode 100644 index 297d49c499..0000000000 --- a/tests/integration/query/explain/mix_test.go +++ /dev/null @@ -1,302 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package test_explain - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -func TestExplainQueryOneToOneJoinWithParallelNodeMultipleCounts(t *testing.T) { - test := testUtils.QueryTestCase{ - - Description: "Explain two counts for parallelNode and a 2 level deep join request.", - - Query: `query @explain { - author { - name - numberOfBooks: _count( - books: { - filter: { - name: { - _eq: "Theif Lord" - } - } - } - ) - numberOfArticles: _count(articles: {}) - contact { - email - address { - city - } - } - } - }`, - - Docs: map[int][]string{ - // articles - 0: { - (`{ - "name": "After Guantánamo, Another Injustice", - "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - }`), - (`{ - "name": "To my dear readers", - "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - }`), - (`{ - "name": "Twinklestar's Favourite Xmas Cookie", - "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - }`), - }, - // books - 1: { - (`{ - "name": "Painted House", - "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - }`), - (`{ - "name": "A Time for Mercy", - "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - }`), - (`{ - "name": "Theif Lord", - "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - }`), - }, - // authors - 2: { - // _key: bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - (`{ - "name": "John Grisham", - "age": 65, - "verified": true, - "contact_id": "bae-1fe427b8-ab8d-56c3-9df2-826a6ce86fed" - }`), - // _key: bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - (`{ - "name": "Cornelia Funke", - "age": 62, - "verified": false, - "contact_id": "bae-c0960a29-b704-5c37-9c2e-59e1249e4559" - }`), - }, - // contact - 3: { - // _key: bae-1fe427b8-ab8d-56c3-9df2-826a6ce86fed - // "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - (`{ - "cell": "5197212301", - "email": "john_grisham@example.com", - "address_id": "bae-c8448e47-6cd1-571f-90bd-364acb80da7b" - }`), - - // _key: bae-c0960a29-b704-5c37-9c2e-59e1249e4559 - // "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - (`{ - "cell": "5197212302", - "email": "cornelia_funke@example.com", - "address_id": "bae-c0960a29-b704-5c37-9c2e-59e1249e4559" - }`), - }, - - // address - 4: { - // _key: bae-c8448e47-6cd1-571f-90bd-364acb80da7b - // "contact_id": "bae-1fe427b8-ab8d-56c3-9df2-826a6ce86fed" - (`{ - "city": "Waterloo", - "country": "Canada" - }`), - - // _key: bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692 - // "contact_id": "bae-c0960a29-b704-5c37-9c2e-59e1249e4559" - (`{ - "city": "Brampton", - "country": "Canada" - }`), - }, - }, - - Results: []dataMap{ - { - "explain": dataMap{ - "selectTopNode": dataMap{ - "renderNode": dataMap{ - "countNode": dataMap{ - "filter": dataMap{ - "name": dataMap{ - "$eq": "Theif Lord", - }, - }, - "sourceProperty": "books", - "countNode": dataMap{ - "filter": nil, - "sourceProperty": "articles", - "selectNode": dataMap{ - "filter": nil, - "parallelNode": []dataMap{ - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinOne", - "direction": "primary", 
- "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, - }, - }, - }, - "subTypeName": "contact", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "typeIndexJoin": dataMap{ - "joinType": "typeJoinOne", - "direction": "primary", - "rootName": "contact", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "4", - "collectionName": "authorContact", - "spans": []dataMap{ - { - "start": "/4", - "end": "/5", - }, - }, - }, - }, - "subTypeName": "address", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "5", - "collectionName": "contactAddress", - "spans": []dataMap{ - { - "start": "/5", - "end": "/6", - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, - }, - }, - }, - "subTypeName": "articles", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "1", - "collectionName": "article", - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, - }, - }, - }, - }, - }, - }, - }, - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, - }, - }, - }, - "subTypeName": "books", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "2", - "collectionName": "book", - "spans": []dataMap{ - { - "start": "/2", - "end": "/3", - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - } - - executeTestCase(t, test) -} diff --git a/tests/integration/query/explain/type_join_test.go b/tests/integration/query/explain/type_join_test.go index d4dce89e23..8da099066b 100644 --- a/tests/integration/query/explain/type_join_test.go +++ b/tests/integration/query/explain/type_join_test.go @@ -118,40 +118,38 @@ func TestExplainQueryWithAOneToOneJoin(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "typeIndexJoin": dataMap{ - "direction": "primary", - "joinType": "typeJoinOne", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, + "selectNode": dataMap{ + "filter": nil, + "typeIndexJoin": dataMap{ + "direction": "primary", + "joinType": "typeJoinOne", + "rootName": "author", + "root": dataMap{ + "scanNode": dataMap{ + "filter": nil, + "collectionID": "3", + "collectionName": "author", + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", }, }, }, - "subTypeName": "contact", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "4", - "collectionName": "authorContact", - "spans": []dataMap{ - { - "start": 
"/4", - "end": "/5", - }, + }, + "subTypeName": "contact", + "subType": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "4", + "collectionName": "authorContact", + "spans": []dataMap{ + { + "start": "/4", + "end": "/5", }, }, }, @@ -275,42 +273,40 @@ func TestExplainQueryWithMultipleOneToOneJoins(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "parallelNode": []dataMap{ - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinOne", - "direction": "primary", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, + "selectNode": dataMap{ + "filter": nil, + "parallelNode": []dataMap{ + { + "typeIndexJoin": dataMap{ + "joinType": "typeJoinOne", + "direction": "primary", + "rootName": "author", + "root": dataMap{ + "scanNode": dataMap{ + "filter": nil, + "collectionID": "3", + "collectionName": "author", + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", }, }, }, - "subTypeName": "contact", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "4", - "collectionName": "authorContact", - "spans": []dataMap{ - { - "start": "/4", - "end": "/5", - }, + }, + "subTypeName": "contact", + "subType": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "4", + "collectionName": "authorContact", + "spans": []dataMap{ + { + "start": "/4", + "end": "/5", }, }, }, @@ -318,38 +314,38 @@ func TestExplainQueryWithMultipleOneToOneJoins(t *testing.T) { }, }, }, - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinOne", - "direction": "primary", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, + }, + { + "typeIndexJoin": dataMap{ + "joinType": "typeJoinOne", + "direction": "primary", + "rootName": "author", + "root": dataMap{ + "scanNode": dataMap{ + "filter": nil, + "collectionID": "3", + "collectionName": "author", + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", }, }, }, - "subTypeName": "contact", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "4", - "collectionName": "authorContact", - "spans": []dataMap{ - { - "start": "/4", - "end": "/5", - }, + }, + "subTypeName": "contact", + "subType": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "4", + "collectionName": "authorContact", + "spans": []dataMap{ + { + "start": "/4", + "end": "/5", }, }, }, @@ -476,62 +472,60 @@ func TestExplainQueryWithTwoLeveLDeepNestedJoins(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "typeIndexJoin": dataMap{ - "joinType": "typeJoinOne", - "direction": "primary", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, + "selectNode": dataMap{ + "filter": nil, + 
"typeIndexJoin": dataMap{ + "joinType": "typeJoinOne", + "direction": "primary", + "rootName": "author", + "root": dataMap{ + "scanNode": dataMap{ + "filter": nil, + "collectionID": "3", + "collectionName": "author", + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", }, }, }, - "subTypeName": "contact", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "typeIndexJoin": dataMap{ - "joinType": "typeJoinOne", - "direction": "primary", - "rootName": "contact", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "4", - "collectionName": "authorContact", - "spans": []dataMap{ - { - "start": "/4", - "end": "/5", - }, + }, + "subTypeName": "contact", + "subType": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "filter": nil, + "typeIndexJoin": dataMap{ + "joinType": "typeJoinOne", + "direction": "primary", + "rootName": "contact", + "root": dataMap{ + "scanNode": dataMap{ + "filter": nil, + "collectionID": "4", + "collectionName": "authorContact", + "spans": []dataMap{ + { + "start": "/4", + "end": "/5", }, }, }, - "subTypeName": "address", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "5", - "collectionName": "contactAddress", - "spans": []dataMap{ - { - "start": "/5", - "end": "/6", - }, + }, + "subTypeName": "address", + "subType": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "5", + "collectionName": "contactAddress", + "spans": []dataMap{ + { + "start": "/5", + "end": "/6", }, }, }, diff --git a/tests/integration/query/explain/with_count_test.go b/tests/integration/query/explain/with_count_test.go index eb7801069b..e81298f77c 100644 --- a/tests/integration/query/explain/with_count_test.go +++ b/tests/integration/query/explain/with_count_test.go @@ -92,280 +92,53 @@ func TestExplainQueryOneToManyWithACount(t *testing.T) { }, // ----> selectTopNode (explainable but no-attributes) - // ----> renderNode (explainable) - // ----> countNode (explainable) - // ----> selectNode (explainable) - // ----> typeIndexJoin (explainable) - // ----> typeJoinMany (non-explainable) - // ----> scanNode (explainable) + // ----> countNode (explainable) + // ----> selectNode (explainable) + // ----> typeIndexJoin (explainable) + // ----> typeJoinMany (non-explainable) + // ----> scanNode (explainable) Results: []dataMap{ { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "countNode": dataMap{ - "filter": nil, - "sourceProperty": "books", - "selectNode": dataMap{ - "filter": nil, - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, - }, - }, - }, - "subTypeName": "books", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "2", - "collectionName": "book", - "spans": []dataMap{ - { - "start": "/2", - "end": "/3", - }, - }, - }, - }, - }, - }, - }, + "countNode": dataMap{ + "sources": []dataMap{ + { + "filter": nil, + "sourceProperty": "books", }, }, - }, - }, - }, - }, - }, - } - - executeTestCase(t, test) -} - -func TestExplainQueryOneToManyMultipleWithCounts(t *testing.T) { - test := testUtils.QueryTestCase{ - - 
Description: "Explain two typeJoinMany query with both count.", - - Query: `query @explain { - author { - name - numberOfBooks: _count(books: {}) - numberOfArticles: _count( - articles: { - filter: { - name: { - _eq: "After Guantánamo, Another Injustice" - } - } - } - ) - } - }`, - - Docs: map[int][]string{ - // articles - 0: { - (`{ - "name": "After Guantánamo, Another Injustice", - "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - }`), - (`{ - "name": "To my dear readers", - "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - }`), - (`{ - "name": "Twinklestar's Favourite Xmas Cookie", - "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - }`), - }, - // books - 1: { - (`{ - "name": "Painted House", - "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - }`), - (`{ - "name": "A Time for Mercy", - "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - }`), - (`{ - "name": "Theif Lord", - "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - }`), - }, - // authors - 2: { - // _key: bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - (`{ - "name": "John Grisham", - "age": 65, - "verified": true, - "contact_id": "bae-1fe427b8-ab8d-56c3-9df2-826a6ce86fed" - }`), - // _key: bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - (`{ - "name": "Cornelia Funke", - "age": 62, - "verified": false, - "contact_id": "bae-c0960a29-b704-5c37-9c2e-59e1249e4559" - }`), - }, - // contact - 3: { - // _key: bae-1fe427b8-ab8d-56c3-9df2-826a6ce86fed - // "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" - (`{ - "cell": "5197212301", - "email": "john_grisham@example.com", - "address_id": "bae-c8448e47-6cd1-571f-90bd-364acb80da7b" - }`), - - // _key: bae-c0960a29-b704-5c37-9c2e-59e1249e4559 - // "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" - (`{ - "cell": "5197212302", - "email": "cornelia_funke@example.com", - "address_id": "bae-c0960a29-b704-5c37-9c2e-59e1249e4559" - }`), - }, - - // address - 4: { - // _key: bae-c8448e47-6cd1-571f-90bd-364acb80da7b - // "contact_id": "bae-1fe427b8-ab8d-56c3-9df2-826a6ce86fed" - (`{ - "city": "Waterloo", - "country": "Canada" - }`), - - // _key: bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692 - // "contact_id": "bae-c0960a29-b704-5c37-9c2e-59e1249e4559" - (`{ - "city": "Brampton", - "country": "Canada" - }`), - }, - }, - - // ----> selectTopNode (explainable but no attributes) - // ----> renderNode (explainable) - // ----> countNode (explainable) - // ----> countNode (explainable) - // ----> selectNode (explainable) - // ----> parallelNode (non-explainable but wraps children) - // ----> typeIndexJoin (explainable) - // ----> typeJoinMany (non-explainable) - // ----> multiscanNode (non-explainable) - // ----> scanNode (explainable) - // ----> typeIndexJoin (explainable) - // ----> typeJoinMany (non-explainable) - // ----> multiscanNode (non-explainable) - // ----> scanNode (explainable) - Results: []dataMap{ - { - "explain": dataMap{ - "selectTopNode": dataMap{ - "renderNode": dataMap{ - "countNode": dataMap{ - "filter": nil, - "sourceProperty": "books", - "countNode": dataMap{ - "filter": dataMap{ - "name": dataMap{ - "$eq": "After Guantánamo, Another Injustice", - }, - }, - "sourceProperty": "articles", - "selectNode": dataMap{ - "filter": nil, - "parallelNode": []dataMap{ - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, - }, - }, - }, - 
"subTypeName": "articles", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "1", - "collectionName": "article", - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, - }, - }, - }, - }, - }, + "selectNode": dataMap{ + "filter": nil, + "typeIndexJoin": dataMap{ + "joinType": "typeJoinMany", + "rootName": "author", + "root": dataMap{ + "scanNode": dataMap{ + "filter": nil, + "collectionID": "3", + "collectionName": "author", + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", }, }, - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "3", - "collectionName": "author", - "spans": []dataMap{ - { - "start": "/3", - "end": "/4", - }, - }, - }, - }, - "subTypeName": "books", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "2", - "collectionName": "book", - "spans": []dataMap{ - { - "start": "/2", - "end": "/3", - }, - }, - }, - }, + }, + }, + "subTypeName": "books", + "subType": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "2", + "collectionName": "book", + "spans": []dataMap{ + { + "start": "/2", + "end": "/3", }, }, }, diff --git a/tests/integration/query/one_to_many/with_filter_test.go b/tests/integration/query/one_to_many/with_filter_test.go index 7266568672..c519974fef 100644 --- a/tests/integration/query/one_to_many/with_filter_test.go +++ b/tests/integration/query/one_to_many/with_filter_test.go @@ -85,6 +85,59 @@ func TestQueryOneToManyWithNumericGreaterThanFilterOnParent(t *testing.T) { executeTestCase(t, test) } +func TestQueryOneToManyWithNumericGreaterThanChildFilterOnParentWithUnrenderedChild(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from the many side, simple filter", + Query: `query { + author(filter: {published: {rating: {_gt: 4.8}}}) { + name + } + }`, + Docs: map[int][]string{ + //books + 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + (`{ + "name": "Painted House", + "rating": 4.9, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "A Time for Mercy", + "rating": 4.5, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "Theif Lord", + "rating": 4.8, + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" + }`), + }, + //authors + 1: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + }, + }, + } + + executeTestCase(t, test) +} + func TestQueryOneToManyWithNumericGreaterThanFilterOnParentAndChild(t *testing.T) { test := testUtils.QueryTestCase{ Description: "One-to-many relation query from the many side, simple filter on root and sub type", @@ -149,3 +202,89 @@ func TestQueryOneToManyWithNumericGreaterThanFilterOnParentAndChild(t *testing.T executeTestCase(t, test) } + +func TestQueryOneToManyWithMultipleAliasedFilteredChildren(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from the many side, simple filter on root and sub type", + Query: `query 
{ + author { + name + age + p1: published(filter: {rating: {_gt: 4.6}}) { + name + rating + } + p2: published(filter: {rating: {_lt: 4.6}}) { + name + rating + } + } + }`, + Docs: map[int][]string{ + //books + 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + (`{ + "name": "Painted House", + "rating": 4.9, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "A Time for Mercy", + "rating": 4.5, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "Theif Lord", + "rating": 4.8, + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" + }`), + }, + //authors + 1: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "age": uint64(65), + "p1": []map[string]interface{}{ + { + "name": "Painted House", + "rating": 4.9, + }, + }, + "p2": []map[string]interface{}{ + { + "name": "A Time for Mercy", + "rating": 4.5, + }, + }, + }, + { + "name": "Cornelia Funke", + "age": uint64(62), + "p1": []map[string]interface{}{ + { + "name": "Theif Lord", + "rating": 4.8, + }, + }, + "p2": []map[string]interface{}{}, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/one_to_many_multiple/utils.go b/tests/integration/query/one_to_many_multiple/utils.go index c2b142de50..12ec618ee3 100644 --- a/tests/integration/query/one_to_many_multiple/utils.go +++ b/tests/integration/query/one_to_many_multiple/utils.go @@ -20,11 +20,13 @@ var bookAuthorGQLSchema = (` type article { name: String author: author + rating: Int } type book { name: String author: author + score: Int } type author { diff --git a/tests/integration/query/one_to_many_multiple/with_average_filter_test.go b/tests/integration/query/one_to_many_multiple/with_average_filter_test.go new file mode 100644 index 0000000000..0268768103 --- /dev/null +++ b/tests/integration/query/one_to_many_multiple/with_average_filter_test.go @@ -0,0 +1,181 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
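+
+// These tests cover _avg taken across two one-to-many joins at once. The
+// expected values match an average over the combined (optionally filtered)
+// child sets rather than a per-relation average: for "Cornelia Funke",
+// books with score > 3 yield {4} and the unfiltered articles yield {2, 1},
+// giving (4 + 2 + 1) / 3 = 2.3333333333333335. When both filters exclude
+// every child, as for "John Grisham" in the second test, _avg is 0.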
+ +package one_to_many_multiple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToManyMultipleWithAverageOnMultipleJoinsWithAndWithoutFilter(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with averages with and without filters", + Query: `query { + author { + name + _avg(books: {field: score, filter: {score: {_gt: 3}}}, articles: {field: rating}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_avg": float64(3), + }, + { + "name": "Cornelia Funke", + "_avg": float64(2.3333333333333335), + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryOneToManyMultipleWithAverageOnMultipleJoinsWithFilters(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with averages with filters", + Query: `query { + author { + name + _avg(books: {field: score, filter: {score: {_gt: 3}}}, articles: {field: rating, filter: {rating: {_lt: 3}}}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_avg": float64(0), + }, + { + "name": "Cornelia Funke", + "_avg": 
float64(2.3333333333333335), + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/one_to_many_multiple/with_average_test.go b/tests/integration/query/one_to_many_multiple/with_average_test.go new file mode 100644 index 0000000000..a366e721ca --- /dev/null +++ b/tests/integration/query/one_to_many_multiple/with_average_test.go @@ -0,0 +1,99 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_many_multiple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToManyMultipleWithAverageOnMultipleJoins(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with average", + Query: `query { + author { + name + _avg(books: {field: score}, articles: {field: rating}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_avg": float64(2.25), + }, + { + "name": "Cornelia Funke", + "_avg": float64(2.3333333333333335), + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/one_to_many_multiple/with_count_filter_test.go b/tests/integration/query/one_to_many_multiple/with_count_filter_test.go new file mode 100644 index 0000000000..3cb51d83de --- /dev/null +++ b/tests/integration/query/one_to_many_multiple/with_count_filter_test.go @@ -0,0 +1,181 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
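+
+// These tests cover _count taken across two one-to-many joins at once: the
+// expected value is the total number of children matching each join's
+// filter. For "Cornelia Funke" in the second test, 1 book with score > 3
+// plus 2 articles with rating < 3 gives _count = 3, while "John Grisham"
+// matches no children under either filter and gets _count = 0.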
+ +package one_to_many_multiple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToManyMultipleWithCountOnMultipleJoinsWithAndWithoutFilter(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with counts with and without filters", + Query: `query { + author { + name + _count(books: {filter: {score: {_gt: 3}}}, articles: {}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_count": 1, + }, + { + "name": "Cornelia Funke", + "_count": 3, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryOneToManyMultipleWithCountOnMultipleJoinsWithFilters(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with counts with filters", + Query: `query { + author { + name + _count(books: {filter: {score: {_gt: 3}}}, articles: {filter: {rating: {_lt: 3}}}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_count": 0, + }, + { + "name": "Cornelia Funke", + "_count": 3, + }, + }, + } + + executeTestCase(t, test) +} diff --git 
a/tests/integration/query/one_to_many_multiple/with_count_test.go b/tests/integration/query/one_to_many_multiple/with_count_test.go index be3bbd7df3..4411e4fbe9 100644 --- a/tests/integration/query/one_to_many_multiple/with_count_test.go +++ b/tests/integration/query/one_to_many_multiple/with_count_test.go @@ -89,3 +89,78 @@ func TestQueryOneToManyMultipleWithCount(t *testing.T) { executeTestCase(t, test) } + +func TestQueryOneToManyMultipleWithCountOnMultipleJoins(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with count", + Query: `query { + author { + name + _count(books: {}, articles: {}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_count": 4, + }, + { + "name": "Cornelia Funke", + "_count": 3, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/one_to_many_multiple/with_sum_filter_test.go b/tests/integration/query/one_to_many_multiple/with_sum_filter_test.go new file mode 100644 index 0000000000..03b84bde01 --- /dev/null +++ b/tests/integration/query/one_to_many_multiple/with_sum_filter_test.go @@ -0,0 +1,181 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
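+
+// These tests cover _sum taken across two one-to-many joins at once: the
+// requested fields are totalled over both (optionally filtered) child
+// sets. For "John Grisham" in the first test, no book has score > 3 and
+// the single unfiltered article rating is 3, so _sum = 3; "Cornelia Funke"
+// gets 4 (books) + 2 + 1 (articles) = 7.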
+ +package one_to_many_multiple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToManyMultipleWithSumOnMultipleJoinsWithAndWithoutFilter(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with sums with and without filters", + Query: `query { + author { + name + _sum(books: {field: score, filter: {score: {_gt: 3}}}, articles: {field: rating}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_sum": int64(3), + }, + { + "name": "Cornelia Funke", + "_sum": int64(7), + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryOneToManyMultipleWithSumOnMultipleJoinsWithFilters(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with sums with filters", + Query: `query { + author { + name + _sum(books: {field: score, filter: {score: {_gt: 3}}}, articles: {field: rating, filter: {rating: {_lt: 3}}}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_sum": int64(0), + }, + { + "name": "Cornelia Funke", + "_sum": int64(7), + }, + }, + } + + executeTestCase(t, 
test) +} diff --git a/tests/integration/query/one_to_many_multiple/with_sum_test.go b/tests/integration/query/one_to_many_multiple/with_sum_test.go new file mode 100644 index 0000000000..004aae0390 --- /dev/null +++ b/tests/integration/query/one_to_many_multiple/with_sum_test.go @@ -0,0 +1,99 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_many_multiple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToManyMultipleWithSumOnMultipleJoins(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "One-to-many relation query from many side with sum", + Query: `query { + author { + name + _sum(books: {field: score}, articles: {field: rating}) + } + }`, + Docs: map[int][]string{ + //articles + 0: { + (`{ + "name": "After Guantánamo, Another Injustice", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "rating": 3 + }`), + (`{ + "name": "To my dear readers", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 2 + }`), + (`{ + "name": "Twinklestar's Favourite Xmas Cookie", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "rating": 1 + }`), + }, + //books + 1: { + (`{ + "name": "Painted House", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 1 + }`), + (`{ + "name": "A Time for Mercy", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 2 + }`), + (`{ + "name": "Sooley", + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "score": 3 + }`), + (`{ + "name": "Theif Lord", + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "score": 4 + }`), + }, + //authors + 2: { + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + (`{ + "name": "John Grisham", + "age": 65, + "verified": true + }`), + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + (`{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`), + }, + }, + Results: []map[string]interface{}{ + { + "name": "John Grisham", + "_sum": int64(9), + }, + { + "name": "Cornelia Funke", + "_sum": int64(7), + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/explain_with_filter_test.go b/tests/integration/query/simple/explain_with_filter_test.go index c6925dd1c6..1ac72fa76a 100644 --- a/tests/integration/query/simple/explain_with_filter_test.go +++ b/tests/integration/query/simple/explain_with_filter_test.go @@ -38,18 +38,16 @@ func TestExplainQuerySimpleWithDocKeyFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": nil, - "spans": []dataMap{ - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": nil, + "spans": []dataMap{ + { + "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", }, }, }, @@ -79,18 +77,16 @@ func TestExplainQuerySimpleWithDocKeyFilter(t *testing.T) { { 
"explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": nil, - "spans": []dataMap{ - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009h", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": nil, + "spans": []dataMap{ + { + "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009h", }, }, }, @@ -124,18 +120,16 @@ func TestExplainQuerySimpleWithDocKeyFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": nil, - "spans": []dataMap{ - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": nil, + "spans": []dataMap{ + { + "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", }, }, }, @@ -173,18 +167,16 @@ func TestExplainQuerySimpleWithDocKeysFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": nil, - "spans": []dataMap{ - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": nil, + "spans": []dataMap{ + { + "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", }, }, }, @@ -214,18 +206,16 @@ func TestExplainQuerySimpleWithDocKeysFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": nil, - "spans": []dataMap{ - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009h", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": nil, + "spans": []dataMap{ + { + "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009h", }, }, }, @@ -260,22 +250,20 @@ func TestExplainQuerySimpleWithDocKeysFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": nil, - "spans": []dataMap{ - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - }, - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": nil, + "spans": []dataMap{ + { + "start": 
"/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", + }, + { + "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", }, }, }, @@ -314,22 +302,20 @@ func TestExplainQuerySimpleWithDocKeysFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": nil, - "spans": []dataMap{ - { - "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", - }, - { - "start": "/1/bae-1378ab62-e064-5af4-9ea6-49941c8d8f94", - "end": "/1/bae-1378ab62-e064-5af4-9ea6-49941c8d8f95", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": nil, + "spans": []dataMap{ + { + "start": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "end": "/1/bae-52b9170d-b77a-5887-b877-cbdbb99b009g", + }, + { + "start": "/1/bae-1378ab62-e064-5af4-9ea6-49941c8d8f94", + "end": "/1/bae-1378ab62-e064-5af4-9ea6-49941c8d8f95", }, }, }, @@ -370,22 +356,20 @@ func TestExplainQuerySimpleWithKeyFilterBlock(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "_key": dataMap{ - "$eq": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "_key": dataMap{ + "$eq": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -424,22 +408,20 @@ func TestExplainQuerySimpleWithStringFilterBlock(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Name": dataMap{ - "$eq": "John", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Name": dataMap{ + "$eq": "John", }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -473,22 +455,20 @@ func TestExplainQuerySimpleWithStringFilterBlockAndSelect(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Name": dataMap{ - "$eq": "John", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Name": dataMap{ + "$eq": "John", }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -517,22 +497,20 @@ func TestExplainQuerySimpleWithStringFilterBlockAndSelect(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Name": 
dataMap{ - "$eq": "John", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Name": dataMap{ + "$eq": "John", }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -562,22 +540,20 @@ func TestExplainQuerySimpleWithStringFilterBlockAndSelect(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Name": dataMap{ - "$eq": "Bob", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Name": dataMap{ + "$eq": "Bob", }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -618,22 +594,20 @@ func TestExplainQuerySimpleWithNumberEqualsFilterBlock(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Age": dataMap{ - "$eq": int64(21), - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Age": dataMap{ + "$eq": int64(21), }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -672,22 +646,20 @@ func TestExplainQuerySimpleWithNumberGreaterThanFilterBlock(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Age": dataMap{ - "$gt": int64(20), - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Age": dataMap{ + "$gt": int64(20), }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -721,22 +693,20 @@ func TestExplainQuerySimpleWithNumberGreaterThanFilterBlock(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Age": dataMap{ - "$gt": int64(40), - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Age": dataMap{ + "$gt": int64(40), }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -771,22 +741,20 @@ func TestExplainQuerySimpleWithNumberGreaterThanFilterBlock(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Age": dataMap{ - "$gt": int64(20), - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Age": dataMap{ + "$gt": int64(20), }, - "spans": []dataMap{ - { - 
"start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -835,31 +803,29 @@ func TestExplainQuerySimpleWithNumberGreaterThanAndNumberLessThanFilter(t *testi { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "$and": []interface{}{ - dataMap{ - "Age": dataMap{ - "$gt": int64(20), - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "$and": []interface{}{ + dataMap{ + "Age": dataMap{ + "$gt": int64(20), }, - dataMap{ - "Age": dataMap{ - "$lt": int64(50), - }, + }, + dataMap{ + "Age": dataMap{ + "$lt": int64(50), }, }, }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -905,31 +871,29 @@ func TestExplainQuerySimpleWithNumberEqualToXOrYFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "$or": []interface{}{ - dataMap{ - "Age": dataMap{ - "$eq": int64(55), - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "$or": []interface{}{ + dataMap{ + "Age": dataMap{ + "$eq": int64(55), }, - dataMap{ - "Age": dataMap{ - "$eq": int64(19), - }, + }, + dataMap{ + "Age": dataMap{ + "$eq": int64(19), }, }, }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -975,26 +939,24 @@ func TestExplainQuerySimpleWithNumberInFilter(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "users", - "filter": dataMap{ - "Age": dataMap{ - "$in": []interface{}{ - int64(19), - int64(40), - int64(55), - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "collectionID": "1", + "collectionName": "users", + "filter": dataMap{ + "Age": dataMap{ + "$in": []interface{}{ + int64(19), + int64(40), + int64(55), }, }, - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, diff --git a/tests/integration/query/simple/simple_explain_test.go b/tests/integration/query/simple/simple_explain_test.go index 747a3cb351..b0d05252e3 100644 --- a/tests/integration/query/simple/simple_explain_test.go +++ b/tests/integration/query/simple/simple_explain_test.go @@ -65,18 +65,16 @@ func TestExplainQuerySimple(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "1", - "collectionName": "users", - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "1", + "collectionName": "users", + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -110,18 +108,16 @@ func TestExplainQuerySimpleWithAlias(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": 
nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "1", - "collectionName": "users", - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "1", + "collectionName": "users", + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, @@ -159,18 +155,16 @@ func TestExplainQuerySimpleWithMultipleRows(t *testing.T) { { "explain": dataMap{ "selectTopNode": dataMap{ - "renderNode": dataMap{ - "selectNode": dataMap{ - "filter": nil, - "scanNode": dataMap{ - "filter": nil, - "collectionID": "1", - "collectionName": "users", - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, + "selectNode": dataMap{ + "filter": nil, + "scanNode": dataMap{ + "filter": nil, + "collectionID": "1", + "collectionName": "users", + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", }, }, }, diff --git a/tests/integration/query/simple/with_cid_test.go b/tests/integration/query/simple/with_cid_test.go index d9cb92c3c8..bd409c3a08 100644 --- a/tests/integration/query/simple/with_cid_test.go +++ b/tests/integration/query/simple/with_cid_test.go @@ -16,8 +16,6 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -// This test is for documentation reasons only. This is not -// desired behaviour. func TestQuerySimpleWithInvalidCid(t *testing.T) { test := testUtils.QueryTestCase{ Description: "Simple query with cid", @@ -34,11 +32,7 @@ func TestQuerySimpleWithInvalidCid(t *testing.T) { }`, }, }, - Results: []map[string]interface{}{ - { - "Name": "John", - }, - }, + ExpectedError: "Failed to propagate VersionFetcher span, invalid CID: selected encoding not supported", } executeTestCase(t, test) diff --git a/tests/integration/query/simple/with_group_average_filter_test.go b/tests/integration/query/simple/with_group_average_filter_test.go index 62b3d2de31..bf17b36fb3 100644 --- a/tests/integration/query/simple/with_group_average_filter_test.go +++ b/tests/integration/query/simple/with_group_average_filter_test.go @@ -251,3 +251,50 @@ func TestQuerySimpleWithGroupByStringWithoutRenderedGroupAndChildAveragesWithDif executeTestCase(t, test) } + +func TestQuerySimpleWithGroupByStringWithoutRenderedGroupAndChildAverageWithFilterAndNilItem(t *testing.T) { + // This test checks that the appended/internal nil filter does not clash with the consumer-defined filter + test := testUtils.QueryTestCase{ + Description: "Simple query with group by string, no children, average with filter on non-rendered, unfiltered group", + Query: `query { + users(groupBy: [Name]) { + Name + _avg(_group: {field: Age, filter: {Age: {_lt: 33}}}) + } + }`, + Docs: map[int][]string{ + 0: { + (`{ + "Name": "John", + "Age": 34 + }`), + (`{ + "Name": "John", + "Age": 32 + }`), + (`{ + "Name": "John", + "Age": 30 + }`), + (`{ + "Name": "John" + }`), + (`{ + "Name": "Alice", + "Age": 19 + }`)}, + }, + Results: []map[string]interface{}{ + { + "Name": "Alice", + "_avg": float64(19), + }, + { + "Name": "John", + "_avg": float64(31), + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_filter_test.go b/tests/integration/query/simple/with_group_filter_test.go index 6f53215731..559f37346c 100644 --- a/tests/integration/query/simple/with_group_filter_test.go +++ b/tests/integration/query/simple/with_group_filter_test.go @@ -126,6 +126,46 @@ func TestQuerySimpleWithGroupByStringWithGroupNumberWithParentFilter(t *testing. 
executeTestCase(t, test) } +func TestQuerySimpleWithGroupByStringWithUnrenderedGroupNumberWithParentFilter(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "Simple query with group by with number filter", + Query: `query { + users(groupBy: [Name], filter: {Age: {_gt: 26}}) { + Name + } + }`, + Docs: map[int][]string{ + 0: { + (`{ + "Name": "John", + "Age": 25 + }`), + (`{ + "Name": "John", + "Age": 32 + }`), + (`{ + "Name": "Carlo", + "Age": 55 + }`), + (`{ + "Name": "Alice", + "Age": 19 + }`)}, + }, + Results: []map[string]interface{}{ + { + "Name": "John", + }, + { + "Name": "Carlo", + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimpleWithGroupByStringWithInnerGroupBooleanThenInnerNumberFilterThatExcludesAll( t *testing.T, ) { diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index 0ed4810a46..376d5fe614 100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -64,3 +64,63 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { executeTestCase(t, test) } + +func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { + test := testUtils.QueryTestCase{ + Description: "Embedded, aliased, latest commits query within object query", + Query: `query { + users { + Name + Age + _version { + cid + L1: links { + cid + name + } + L2: links { + name + } + } + } + }`, + Docs: map[int][]string{ + 0: { + (`{ + "Name": "John", + "Age": 21 + }`)}, + }, + Results: []map[string]interface{}{ + { + "Name": "John", + "Age": uint64(21), + "_version": []map[string]interface{}{ + { + "cid": "bafybeihtn2xjbjjqxeqp2uhwhvk3tmjfkaf2qtfqh5w5q3ews7ax2dc75a", + "L1": []map[string]interface{}{ + { + "cid": "bafybeidst2mzxhdoh4ayjdjoh4vibo7vwnuoxk3xgyk5mzmep55jklni2a", + "name": "Age", + }, + { + "cid": "bafybeidkse2jiqekdebh6zdq4zvyx4gzyrupujbtb6gd7qqdb4hj3pyaeq", + "name": "Name", + }, + }, + "L2": []map[string]interface{}{ + { + "name": "Age", + }, + { + "name": "Name", + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +}
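+
+// Note: L1 and L2 above alias the same _version links field with different
+// sub-selections; the expected results confirm that each alias is rendered
+// independently rather than being merged into a single links set.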