Remove _predicate_ from Dgraph. #3262

Merged: 11 commits, May 9, 2019
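
For context before the file-by-file diff: this PR removes the internal _predicate_ edge that Dgraph maintained for every node, together with the expand_edge (Alpha) and expand_edges (bulk loader) options that controlled it. The deleted tests below show the kind of query that depended on it; a minimal illustration, with the query text taken from the removed q4 fixture and wrapped in a Go string the way the test files do:

var oldPredicateQuery = `
{
	user(func: uid(0x3)) {
		_predicate_
	}
}
`
// After this change the query above is no longer supported; predicate
// expansion (expand(_all_), @recurse with expand) is instead expected to be
// driven by type information such as dgraph.type, as the updated
// TestDeleteAllSP2 below illustrates.
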
7 changes: 3 additions & 4 deletions dgraph/cmd/alpha/http_test.go
@@ -306,15 +306,14 @@ func TestTransactionBasic(t *testing.T) {
require.NoError(t, err)
require.Equal(t, mts, ts)
require.Equal(t, 3, len(keys))
require.Equal(t, 3, len(preds))
require.Equal(t, 2, len(preds))
var parsedPreds []string
for _, pred := range preds {
parsedPreds = append(parsedPreds, strings.Join(strings.Split(pred, "-")[1:], "-"))
}
sort.Strings(parsedPreds)
require.Equal(t, "_predicate_", parsedPreds[0])
require.Equal(t, "balance", parsedPreds[1])
require.Equal(t, "name", parsedPreds[2])
require.Equal(t, "balance", parsedPreds[0])
require.Equal(t, "name", parsedPreds[1])

data, _, err := queryWithTs(q1, 0)
require.NoError(t, err)
4 changes: 0 additions & 4 deletions dgraph/cmd/alpha/run.go
@@ -127,9 +127,6 @@ they form a Raft group and provide synchronous replication.
"IP_ADDRESS:PORT of a Dgraph Zero.")
flag.Uint64("idx", 0,
"Optional Raft ID that this Dgraph Alpha will use to join RAFT groups.")
flag.Bool("expand_edge", true,
"Enables the expand() feature. This is very expensive for large data loads because it"+
" doubles the number of mutations going on in the system.")
flag.Int("max_retries", -1,
"Commits to disk will give up after these number of retries to prevent locking the worker"+
" in a failed state. Use -1 to retry infinitely.")
@@ -482,7 +479,6 @@ func run() {
MyAddr: Alpha.Conf.GetString("my"),
ZeroAddr: Alpha.Conf.GetString("zero"),
RaftId: cast.ToUint64(Alpha.Conf.GetString("idx")),
ExpandEdge: Alpha.Conf.GetBool("expand_edge"),
WhiteListedIPRanges: ips,
MaxRetries: Alpha.Conf.GetInt("max_retries"),
StrictMutations: opts.MutationsMode == edgraph.StrictMutations,
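
The run.go change drops the expand_edge flag along with the ExpandEdge worker option. Conceptually, what the flag used to enable (per its removed help text) was an extra bookkeeping edge per mutation: every triple written also recorded its predicate name under the subject's _predicate_ edge, which is why the help text warned that it "doubles the number of mutations going on in the system". A small standalone Go sketch of that idea, illustrative only and not Alpha's actual mutation path:

package main

import "fmt"

// withExpandEdge shows the extra work the removed option implied: one real
// triple plus one internal _predicate_ triple per mutation.
func withExpandEdge(subject, predicate, object string) []string {
	return []string{
		fmt.Sprintf("<%s> <%s> %q .", subject, predicate, object),
		fmt.Sprintf("<%s> <_predicate_> %q .", subject, predicate), // bookkeeping edge, now gone
	}
}

func main() {
	for _, nq := range withExpandEdge("0x1", "name", "Alice") {
		fmt.Println(nq)
	}
}
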
196 changes: 23 additions & 173 deletions dgraph/cmd/alpha/run_test.go
@@ -209,14 +209,6 @@ func TestDeletePredicate(t *testing.T) {
`

var q4 = `
{
user(func: uid(0x3)) {
_predicate_
}
}
`

var q5 = `
{
user(func: uid(0x3)) {
age
@@ -260,10 +252,6 @@ func TestDeletePredicate(t *testing.T) {
require.NoError(t, err)
require.JSONEq(t, `{"data": {"user":[{"age": "13", "~friend" : [{"name":"Alice"}]}]}}`, output)

output, err = runQuery(q4)
require.NoError(t, err)
require.JSONEq(t, `{"data": {"user":[{"_predicate_":["name","age"]}]}}`, output)

err = deletePredicate("friend")
require.NoError(t, err)
err = deletePredicate("salary")
@@ -272,7 +260,6 @@ func TestDeletePredicate(t *testing.T) {
output, err = runQuery(`schema{}`)
require.NoError(t, err)
z.CompareJSON(t, `{"data":{"schema":[`+
`{"predicate":"_predicate_","type":"string","list":true},`+
`{"predicate":"age","type":"default"},`+
`{"predicate":"name","type":"string","index":true, "tokenizer":["term"]},`+
x.AclPredicates+","+
@@ -287,13 +274,9 @@ func TestDeletePredicate(t *testing.T) {
require.NoError(t, err)
require.JSONEq(t, `{"data": {"user": [{"name":"Alice"},{"name":"Alice1"},{"name":"Alice2"}]}}`, output)

output, err = runQuery(q5)
require.NoError(t, err)
require.JSONEq(t, `{"data": {"user":[{"age": "13"}]}}`, output)

output, err = runQuery(q4)
require.NoError(t, err)
require.JSONEq(t, `{"data": {"user":[{"_predicate_":["name","age"]}]}}`, output)
require.JSONEq(t, `{"data": {"user":[{"age": "13"}]}}`, output)

// Let's try to change the type of predicates now.
err = alterSchemaWithRetry(s2)
@@ -1053,122 +1036,6 @@ func BenchmarkQuery(b *testing.B) {
}
}

func TestListPred(t *testing.T) {
require.NoError(t, alterSchema(`{"drop_all": true}`))
var q1 = `
{
listpred(func:anyofterms(name, "Alice")) {
_predicate_
}
}
`
var m = `
{
set {
<0x1> <name> "Alice" .
<0x1> <age> "13" .
<0x1> <friend> <0x4> .
}
}
`
var s = `
name:string @index(term) .
`

// reset Schema
schema.ParseBytes([]byte(""), 1)
err := runMutation(m)
require.NoError(t, err)

// add index to name
err = alterSchemaWithRetry(s)
require.NoError(t, err)

output, err := runQuery(q1)
require.NoError(t, err)
require.JSONEq(t, `{"data": {"listpred":[{"_predicate_":["name","age","friend"]}]}}`,
output)
}

func TestExpandPredError(t *testing.T) {
var q1 = `
{
me(func:anyofterms(name, "Alice")) {
expand(_all_)
name
friend
}
}
`
var m = `
{
set {
<0x1> <name> "Alice" .
<0x1> <age> "13" .
<0x1> <friend> <0x4> .
<0x4> <name> "bob" .
<0x4> <age> "12" .
}
}
`
var s = `
name:string @index(term) .
`

// reset Schema
schema.ParseBytes([]byte(""), 1)
err := runMutation(m)
require.NoError(t, err)

// add index to name
err = alterSchemaWithRetry(s)
require.NoError(t, err)

_, err = runQuery(q1)
require.Error(t, err)
require.Contains(t, err.Error(), "Repeated subgraph")
}

func TestExpandPred(t *testing.T) {
var q1 = `
{
me(func: uid(0x11)) {
expand(_all_) {
expand(_all_)
}
}
}
`
var m = `
{
set {
<0x11> <name> "Alice" .
<0x11> <age> "13" .
<0x11> <friend> <0x4> .
<0x4> <name> "bob" .
<0x4> <age> "12" .
}
}
`
var s = `
name:string @index(term) .
`

// reset Schema
schema.ParseBytes([]byte(""), 1)
err := runMutation(m)
require.NoError(t, err)

// add index to name
err = alterSchemaWithRetry(s)
require.NoError(t, err)

output, err := runQuery(q1)
require.NoError(t, err)
require.JSONEq(t, `{"data": {"me":[{"age":"13","friend":[{"age":"12","name":"bob"}],"name":"Alice"}]}}`,
output)
}

var threeNiceFriends = `{
"data": {
"me": [
@@ -1348,13 +1215,30 @@ func TestListTypeSchemaChange(t *testing.T) {
require.NoError(t, err)
z.CompareJSON(t, `{"data":{"schema":[`+
x.AclPredicates+","+
`{"predicate":"_predicate_","type":"string","list":true},`+
`{"predicate":"occupations","type":"string"},`+
`{"predicate":"dgraph.type", "type":"string", "index":true, "tokenizer": ["exact"],
"list":true}]}}`, res)
}

func TestDeleteAllSP2(t *testing.T) {
s := `
type Node12345 {
nodeType: string
name: string
date: dateTime
weight: float
weightUnit: string
lifeLoad: int
stressLevel: int
plan: string
postMortem: string
}
`
require.NoError(t, dropAll())
schema.ParseBytes([]byte(""), 1)
err := alterSchemaWithRetry(s)
require.NoError(t, err)

var m = `
{
set {
@@ -1367,16 +1251,16 @@ func TestDeleteAllSP2(t *testing.T) {
<0x12345> <stressLevel> "3" .
<0x12345> <plan> "modest day" .
<0x12345> <postMortem> "win!" .
<0x12345> <dgraph.type> "Node12345" .
}
}
`
err := runMutation(m)
err = runMutation(m)
require.NoError(t, err)

q := fmt.Sprintf(`
{
me(func: uid(%s)) {
_predicate_
name
date
weight
@@ -1387,7 +1271,7 @@ func TestDeleteAllSP2(t *testing.T) {

output, err := runQuery(q)
require.NoError(t, err)
require.JSONEq(t, `{"data": {"me":[{"_predicate_":["name","date","weightUnit","postMortem","lifeLoad","weight","stressLevel","nodeType","plan"],"name":"July 3 2017","date":"2017-07-03T03:49:03+00:00","weight":"262.3","lifeLoad":"5","stressLevel":"3"}]}}`, output)
require.JSONEq(t, `{"data": {"me":[{"name":"July 3 2017","date":"2017-07-03T03:49:03+00:00","weight":"262.3","lifeLoad":"5","stressLevel":"3"}]}}`, output)

m = fmt.Sprintf(`
{
@@ -1550,7 +1434,7 @@ func TestDropAll(t *testing.T) {
output, err = runQuery(q3)
require.NoError(t, err)
z.CompareJSON(t,
`{"data":{"schema":[{"predicate":"_predicate_","type":"string","list":true},`+
`{"data":{"schema":[`+
x.AclPredicates+","+
`{"predicate":"dgraph.type", "type":"string", "index":true, "tokenizer":["exact"],
"list":true}]}}`, output)
@@ -1571,40 +1455,6 @@ func TestDropAll(t *testing.T) {
require.JSONEq(t, `{"data": {"q":[]}}`, output)
}

func TestRecurseExpandAll(t *testing.T) {
var q1 = `
{
me(func:anyofterms(name, "Alica")) @recurse {
expand(_all_)
}
}
`
var m = `
{
set {
<0x1> <name> "Alica" .
<0x1> <age> "13" .
<0x1> <friend> <0x4> .
<0x4> <name> "bob" .
<0x4> <age> "12" .
}
}
`

var s = `name:string @index(term) .`
// reset Schema
schema.ParseBytes([]byte(""), 1)
err := runMutation(m)
require.NoError(t, err)

err = alterSchemaWithRetry(s)
require.NoError(t, err)

output, err := runQuery(q1)
require.NoError(t, err)
require.JSONEq(t, `{"data": {"me":[{"name":"Alica","age":"13","friend":[{"name":"bob","age":"12"}]}]}}`, output)
}

func TestIllegalCountInQueryFn(t *testing.T) {
s := `friend: [uid] @count .`
require.NoError(t, alterSchemaWithRetry(s))
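
Across run_test.go, the tests built on _predicate_ (TestListPred, TestExpandPred, TestExpandPredError, TestRecurseExpandAll, and the q4 checks in TestDeletePredicate) are deleted, while TestDeleteAllSP2 gains a type definition plus a dgraph.type triple so the node's predicates stay discoverable. A condensed sketch of that replacement pattern, using illustrative names rather than the exact test fixtures:

var typeSchema = `
type Person {
	name: string
	age: int
}
`

var typedMutation = `
{
	set {
		<0x1> <name> "Alice" .
		<0x1> <age> "13" .
		<0x1> <dgraph.type> "Person" .
	}
}
`
// With the node typed, predicate expansion is expected to come from the type
// definition rather than the removed _predicate_ index.
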
1 change: 0 additions & 1 deletion dgraph/cmd/bulk/loader.go
@@ -50,7 +50,6 @@ type options struct {
TmpDir string
NumGoroutines int
MapBufSize int64
ExpandEdges bool
SkipMapPhase bool
CleanupTmp bool
NumShufflers int
17 changes: 0 additions & 17 deletions dgraph/cmd/bulk/mapper.go
@@ -208,13 +208,6 @@ func (m *mapper) processNQuad(nq gql.NQuad) {
m.addMapEntry(key, rev, shard)
}
m.addIndexMapEntries(nq, de)

if m.opt.ExpandEdges {
shard := m.state.shards.shardFor("_predicate_")
key = x.DataKey("_predicate_", sid)
pp := m.createPredicatePosting(nq.Predicate)
m.addMapEntry(key, pp, shard)
}
}

func (m *mapper) uid(xid string) uint64 {
@@ -248,16 +241,6 @@ func (m *mapper) lookupUid(xid string) uint64 {
return uid
}

func (m *mapper) createPredicatePosting(predicate string) *pb.Posting {
fp := farm.Fingerprint64([]byte(predicate))
return &pb.Posting{
Uid: fp,
Value: []byte(predicate),
ValType: pb.Posting_DEFAULT,
PostingType: pb.Posting_VALUE,
}
}

func (m *mapper) createPostings(nq gql.NQuad,
de *pb.DirectedEdge) (*pb.Posting, *pb.Posting) {

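
The mapper no longer emits a _predicate_ map entry for every NQuad, and the createPredicatePosting helper that backed it is gone. For reference, that helper derived a stable posting UID by fingerprinting the predicate name; a minimal standalone sketch of just that fingerprinting step, assuming the github.com/dgryski/go-farm package the removed code refers to as farm:

package main

import (
	"fmt"

	farm "github.com/dgryski/go-farm"
)

func main() {
	// Same idea as the removed createPredicatePosting: a stable 64-bit
	// fingerprint of the predicate name served as the posting's Uid.
	predicate := "name"
	fp := farm.Fingerprint64([]byte(predicate))
	fmt.Printf("predicate %q -> fingerprint %d\n", predicate, fp)
}
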
4 changes: 0 additions & 4 deletions dgraph/cmd/bulk/run.go
@@ -66,9 +66,6 @@ func init() {
"Number of worker threads to use (defaults to the number of logical CPUs).")
flag.Int64("mapoutput_mb", 64,
"The estimated size of each map file output. Increasing this increases memory usage.")
flag.Bool("expand_edges", true,
"Generate edges that allow nodes to be expanded using _predicate_ or expand(...). "+
"Disable to increase loading speed.")
flag.Bool("skip_map_phase", false,
"Skip the map phase (assumes that map output files already exist).")
flag.Bool("cleanup_tmp", true,
@@ -108,7 +105,6 @@ func run() {
TmpDir: Bulk.Conf.GetString("tmp"),
NumGoroutines: Bulk.Conf.GetInt("num_go_routines"),
MapBufSize: int64(Bulk.Conf.GetInt("mapoutput_mb")),
ExpandEdges: Bulk.Conf.GetBool("expand_edges"),
SkipMapPhase: Bulk.Conf.GetBool("skip_map_phase"),
CleanupTmp: Bulk.Conf.GetBool("cleanup_tmp"),
NumShufflers: Bulk.Conf.GetInt("shufflers"),
7 changes: 1 addition & 6 deletions dgraph/cmd/bulk/schema.go
@@ -38,12 +38,7 @@ type schemaStore struct {

func newSchemaStore(initial []*pb.SchemaUpdate, opt options, state *state) *schemaStore {
s := &schemaStore{
m: map[string]*pb.SchemaUpdate{
"_predicate_": {
ValueType: pb.Posting_STRING,
List: true,
},
},
m: map[string]*pb.SchemaUpdate{},
state: state,
}

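
newSchemaStore now starts from an empty map instead of pre-seeding a _predicate_ entry (a string list, matching the {"predicate":"_predicate_","type":"string","list":true} line dropped from the schema-query expectations above). A minimal illustrative sketch of the resulting shape; this is a stand-in, not the real schemaStore:

package main

import "fmt"

// demoSchemaStore is an illustrative stand-in: the map begins empty and a
// predicate gets a default entry the first time it is seen, instead of the
// store being seeded with a built-in _predicate_ schema.
type demoSchemaStore struct {
	m map[string]string // predicate -> type name
}

func newDemoSchemaStore() *demoSchemaStore {
	return &demoSchemaStore{m: map[string]string{}} // previously pre-seeded with "_predicate_"
}

func (s *demoSchemaStore) ensure(pred string) {
	if _, ok := s.m[pred]; !ok {
		s.m[pred] = "default"
	}
}

func main() {
	s := newDemoSchemaStore()
	s.ensure("name")
	fmt.Println(s.m) // map[name:default]
}
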
1 change: 0 additions & 1 deletion dgraph/cmd/bulk/systest/test-bulk-schema.sh
@@ -190,7 +190,6 @@ EOF
dgraph debug -p out/0/p 2>|/dev/null | grep '{s}' | cut -d' ' -f4 > all_dbs.out
dgraph debug -p out/1/p 2>|/dev/null | grep '{s}' | cut -d' ' -f4 >> all_dbs.out
diff <(LC_ALL=C sort all_dbs.out | uniq -c) - <<EOF
1 _predicate_
1 dgraph.group.acl
1 dgraph.password
1 dgraph.type