Add support for foreign tables with group by clauses

Tobie Morgan Hitchcock 2018-05-02 02:43:04 +01:00
parent 7f6d1565ff
commit b7d89ee65a
12 changed files with 1087 additions and 175 deletions

View file

@ -26,6 +26,7 @@ import (
func (e *executor) executeDefineNamespace(ctx context.Context, ast *sql.DefineNamespaceStatement) (out []interface{}, err error) {
// Save the namespace definition
nkey := &keys.NS{KV: ast.KV, NS: ast.Name.ID}
_, err = e.dbo.Put(0, nkey.Encode(), ast.Encode())
@ -36,6 +37,8 @@ func (e *executor) executeDefineNamespace(ctx context.Context, ast *sql.DefineNa
func (e *executor) executeDefineDatabase(ctx context.Context, ast *sql.DefineDatabaseStatement) (out []interface{}, err error) {
e.dbo.AddNS(ast.NS)
// Save the database definition
dkey := &keys.DB{KV: ast.KV, NS: ast.NS, DB: ast.Name.ID}
_, err = e.dbo.Put(0, dkey.Encode(), ast.Encode())
@ -51,13 +54,21 @@ func (e *executor) executeDefineLogin(ctx context.Context, ast *sql.DefineLoginS
switch ast.Kind {
case sql.NAMESPACE:
e.dbo.AddNS(ast.NS)
// Save the login definition
ukey := &keys.NU{KV: ast.KV, NS: ast.NS, US: ast.User.ID}
_, err = e.dbo.Put(0, ukey.Encode(), ast.Encode())
case sql.DATABASE:
e.dbo.AddDB(ast.NS, ast.DB)
// Save the login definition
ukey := &keys.DU{KV: ast.KV, NS: ast.NS, DB: ast.DB, US: ast.User.ID}
_, err = e.dbo.Put(0, ukey.Encode(), ast.Encode())
}
return
@ -68,13 +79,21 @@ func (e *executor) executeDefineToken(ctx context.Context, ast *sql.DefineTokenS
switch ast.Kind {
case sql.NAMESPACE:
e.dbo.AddNS(ast.NS)
// Save the token definition
tkey := &keys.NT{KV: ast.KV, NS: ast.NS, TK: ast.Name.ID}
_, err = e.dbo.Put(0, tkey.Encode(), ast.Encode())
case sql.DATABASE:
e.dbo.AddDB(ast.NS, ast.DB)
// Save the token definition
tkey := &keys.DT{KV: ast.KV, NS: ast.NS, DB: ast.DB, TK: ast.Name.ID}
_, err = e.dbo.Put(0, tkey.Encode(), ast.Encode())
}
return
@ -87,6 +106,7 @@ func (e *executor) executeDefineScope(ctx context.Context, ast *sql.DefineScopeS
e.dbo.AddDB(ast.NS, ast.DB)
// Save the scope definition
skey := &keys.SC{KV: ast.KV, NS: ast.NS, DB: ast.DB, SC: ast.Name.ID}
_, err = e.dbo.Put(0, skey.Encode(), ast.Encode())
@ -94,42 +114,13 @@ func (e *executor) executeDefineScope(ctx context.Context, ast *sql.DefineScopeS
}
func (e *executor) executeDefineTable(ctx context.Context, ast *sql.DefineTableStatement) (out []interface{}, err error) {
e.dbo.AddDB(ast.NS, ast.DB)
for _, TB := range ast.What {
ast.Name = sql.NewIdent(TB.TB)
tkey := &keys.TB{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB}
if _, err = e.dbo.Put(0, tkey.Encode(), ast.Encode()); err != nil {
return nil, err
}
if ast.Lock {
for _, FT := range ast.From {
tkey := &keys.FT{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: FT.TB, FT: TB.TB}
if _, err = e.dbo.Put(0, tkey.Encode(), ast.Encode()); err != nil {
return nil, err
}
}
}
}
return
}
func (e *executor) executeDefineEvent(ctx context.Context, ast *sql.DefineEventStatement) (out []interface{}, err error) {
for _, TB := range ast.What {
e.dbo.AddTB(ast.NS, ast.DB, TB.TB)
// Save the event definition
ekey := &keys.EV{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, EV: ast.Name.ID}
if _, err = e.dbo.Put(0, ekey.Encode(), ast.Encode()); err != nil {
return nil, err
@ -147,6 +138,7 @@ func (e *executor) executeDefineField(ctx context.Context, ast *sql.DefineFieldS
e.dbo.AddTB(ast.NS, ast.DB, TB.TB)
// Save the field definition
fkey := &keys.FD{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, FD: ast.Name.ID}
if _, err = e.dbo.Put(0, fkey.Encode(), ast.Encode()); err != nil {
return nil, err
@ -164,18 +156,22 @@ func (e *executor) executeDefineIndex(ctx context.Context, ast *sql.DefineIndexS
e.dbo.AddTB(ast.NS, ast.DB, TB.TB)
// Save the index definition
ikey := &keys.IX{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, IX: ast.Name.ID}
if _, err = e.dbo.Put(0, ikey.Encode(), ast.Encode()); err != nil {
return nil, err
}
// Remove the index resource data
dkey := &keys.Index{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, IX: ast.Name.ID, FD: keys.Ignore}
if _, err = e.dbo.ClrP(dkey.Encode(), 0); err != nil {
return nil, err
}
// Process the index resource data
uctx := context.WithValue(ctx, ctxKeyForce, true)
ustm := &sql.UpdateStatement{KV: ast.KV, NS: ast.NS, DB: ast.DB, What: []sql.Expr{TB}}
if _, err = e.executeUpdate(ctx, ustm); err != nil {
if _, err = e.executeUpdate(uctx, ustm); err != nil {
return nil, err
}
@ -184,3 +180,50 @@ func (e *executor) executeDefineIndex(ctx context.Context, ast *sql.DefineIndexS
return
}
func (e *executor) executeDefineTable(ctx context.Context, ast *sql.DefineTableStatement) (out []interface{}, err error) {
e.dbo.AddDB(ast.NS, ast.DB)
for _, TB := range ast.What {
ast.Name = sql.NewIdent(TB.TB)
// Save the table definition
tkey := &keys.TB{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB}
if _, err = e.dbo.Put(0, tkey.Encode(), ast.Encode()); err != nil {
return nil, err
}
if ast.Lock {
// Remove the table resource data
dkey := &keys.Table{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB}
if _, err = e.dbo.ClrP(dkey.Encode(), 0); err != nil {
return nil, err
}
for _, FT := range ast.From {
// Save the foreign table definition
tkey := &keys.FT{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: FT.TB, FT: TB.TB}
if _, err = e.dbo.Put(0, tkey.Encode(), ast.Encode()); err != nil {
return nil, err
}
// Process the table resource data
uctx := context.WithValue(ctx, ctxKeyForce, true)
ustm := &sql.UpdateStatement{KV: ast.KV, NS: ast.NS, DB: ast.DB, What: []sql.Expr{FT}}
if _, err = e.executeUpdate(uctx, ustm); err != nil {
return nil, err
}
}
}
}
return
}
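Both executeDefineIndex and the new executeDefineTable above rebuild derived data by running an UPDATE over the source table with a force flag carried in the request context (uctx), so the document pipeline processes every record even though no record has actually changed. A minimal standalone sketch of that context pattern, mirroring the ctxKeyForce constant and the forced() helper added elsewhere in this commit; this is an illustration, not the project's full API:

package main

import (
	"context"
	"fmt"
)

// The commit stores the flag under a plain string key in its constants hunk.
const ctxKeyForce = "force"

// forced reports whether the current query was started in forced mode.
func forced(ctx context.Context) bool {
	if val := ctx.Value(ctxKeyForce); val != nil {
		return val.(bool)
	}
	return false
}

func main() {
	ctx := context.Background()
	fmt.Println(forced(ctx)) // false: a normal query skips unchanged records
	uctx := context.WithValue(ctx, ctxKeyForce, true)
	fmt.Println(forced(uctx)) // true: a rebuild processes every record
}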

View file

@ -275,10 +275,6 @@ func TestDefine(t *testing.T) {
So(res, ShouldHaveLength, 5)
So(res[2].Result, ShouldHaveLength, 1)
So(res[3].Result, ShouldHaveLength, 1)
So(data.Consume(res[3].Result[0]).Get("meta.id").Data(), ShouldEqual, "test")
So(data.Consume(res[3].Result[0]).Get("meta.tb").Data(), ShouldEqual, "person")
So(data.Consume(res[3].Result[0]).Get("name").Data(), ShouldEqual, "Test")
So(data.Consume(res[3].Result[0]).Get("test").Data(), ShouldEqual, true)
So(res[4].Result, ShouldHaveLength, 1)
So(data.Consume(res[4].Result[0]).Get("meta.id").Data(), ShouldEqual, "test")
So(data.Consume(res[4].Result[0]).Get("meta.tb").Data(), ShouldEqual, "temp")
@ -287,6 +283,316 @@ func TestDefine(t *testing.T) {
})
Convey("Define a foreign table with a where clause", t, func() {
setupDB(20)
txt := `
USE NS test DB test;
DEFINE TABLE temp AS SELECT name FROM person WHERE test=true;
UPDATE person:one SET name="Test", test=true;
UPDATE person:two SET name="Test", test=false;
SELECT * FROM person;
SELECT * FROM temp;
`
res, err := Execute(setupKV(), txt, nil)
So(err, ShouldBeNil)
So(res, ShouldHaveLength, 6)
So(res[2].Result, ShouldHaveLength, 1)
So(res[3].Result, ShouldHaveLength, 1)
So(res[4].Result, ShouldHaveLength, 2)
So(res[5].Result, ShouldHaveLength, 1)
So(data.Consume(res[5].Result[0]).Get("meta.id").Data(), ShouldEqual, "one")
So(data.Consume(res[5].Result[0]).Get("meta.tb").Data(), ShouldEqual, "temp")
So(data.Consume(res[5].Result[0]).Get("name").Data(), ShouldEqual, "Test")
So(data.Consume(res[5].Result[0]).Get("test").Data(), ShouldEqual, nil)
})
Convey("Define a foreign table with a group by clause", t, func() {
setupDB(20)
txt := `
USE NS test DB test;
DEFINE TABLE person_age AS SELECT count(*) AS count, age FROM person WHERE test=true GROUP BY age;
UPDATE person:1 SET name="Test", test=true, age=30;
UPDATE person:2 SET name="Test", test=true, age=32;
UPDATE person:3 SET name="Test", test=true, age=30;
SELECT * FROM person ORDER BY meta.id;
SELECT * FROM person_age ORDER BY meta.id;
UPDATE person:3 SET name="Test", test=true, age=32;
SELECT * FROM person_age ORDER BY meta.id;
UPDATE person:3 SET name="Test", test=false, age=32;
SELECT * FROM person_age ORDER BY meta.id;
`
res, err := Execute(setupKV(), txt, nil)
So(err, ShouldBeNil)
So(res, ShouldHaveLength, 11)
So(res[2].Result, ShouldHaveLength, 1)
So(res[3].Result, ShouldHaveLength, 1)
So(res[4].Result, ShouldHaveLength, 1)
So(res[5].Result, ShouldHaveLength, 3)
So(res[6].Result, ShouldHaveLength, 2)
So(data.Consume(res[6].Result[0]).Get("meta.id").Data(), ShouldEqual, "[30]")
So(data.Consume(res[6].Result[0]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[6].Result[0]).Get("count").Data(), ShouldEqual, 2)
So(data.Consume(res[6].Result[0]).Get("name").Data(), ShouldBeNil)
So(data.Consume(res[6].Result[0]).Get("test").Data(), ShouldBeNil)
So(data.Consume(res[6].Result[1]).Get("meta.id").Data(), ShouldEqual, "[32]")
So(data.Consume(res[6].Result[1]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[6].Result[1]).Get("count").Data(), ShouldEqual, 1)
So(data.Consume(res[6].Result[1]).Get("name").Data(), ShouldBeNil)
So(data.Consume(res[6].Result[1]).Get("test").Data(), ShouldBeNil)
So(res[7].Result, ShouldHaveLength, 1)
So(res[8].Result, ShouldHaveLength, 2)
So(data.Consume(res[8].Result[0]).Get("meta.id").Data(), ShouldEqual, "[30]")
So(data.Consume(res[8].Result[0]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[8].Result[0]).Get("count").Data(), ShouldEqual, 1)
So(data.Consume(res[8].Result[0]).Get("name").Data(), ShouldBeNil)
So(data.Consume(res[8].Result[0]).Get("test").Data(), ShouldBeNil)
So(data.Consume(res[8].Result[1]).Get("meta.id").Data(), ShouldEqual, "[32]")
So(data.Consume(res[8].Result[1]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[8].Result[1]).Get("count").Data(), ShouldEqual, 2)
So(data.Consume(res[8].Result[1]).Get("name").Data(), ShouldBeNil)
So(data.Consume(res[8].Result[1]).Get("test").Data(), ShouldBeNil)
So(res[9].Result, ShouldHaveLength, 1)
So(res[10].Result, ShouldHaveLength, 2)
So(data.Consume(res[10].Result[0]).Get("meta.id").Data(), ShouldEqual, "[30]")
So(data.Consume(res[10].Result[0]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[10].Result[0]).Get("count").Data(), ShouldEqual, 1)
So(data.Consume(res[10].Result[0]).Get("name").Data(), ShouldBeNil)
So(data.Consume(res[10].Result[0]).Get("test").Data(), ShouldBeNil)
So(data.Consume(res[10].Result[1]).Get("meta.id").Data(), ShouldEqual, "[32]")
So(data.Consume(res[10].Result[1]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[10].Result[1]).Get("count").Data(), ShouldEqual, 1)
So(data.Consume(res[10].Result[1]).Get("name").Data(), ShouldBeNil)
So(data.Consume(res[10].Result[1]).Get("test").Data(), ShouldBeNil)
})
Convey("Define multiple foreign tables with group by clauses", t, func() {
setupDB(20)
txt := `
USE NS test DB test;
DEFINE TABLE person_f AS SELECT * FROM person WHERE gender='f';
DEFINE TABLE person_m AS SELECT * FROM person WHERE gender='m';
DEFINE TABLE person_age AS
SELECT count(*) AS count,
distinct(id),
distinct(age),
math.min(age),
math.max(age),
math.sum(age),
math.mean(age),
math.stddev(age),
math.variance(age),
age
FROM person GROUP BY age
;
DEFINE TABLE person_gender AS
SELECT count(*) AS count,
distinct(id),
distinct(age),
math.min(age),
math.max(age),
math.sum(age),
math.mean(age),
math.stddev(age),
math.variance(age),
gender
FROM person GROUP BY gender
;
DEFINE TABLE person_age_gender AS
SELECT count(*) AS count,
distinct(id),
distinct(age),
math.min(age),
math.max(age),
math.sum(age),
math.mean(age),
math.stddev(age),
math.variance(age),
age, gender
FROM person GROUP BY age, gender
;
UPDATE |person:10| SET name="Test", test=true, age=30, gender='f';
UPDATE |person:10| SET name="Test", test=true, age=32, gender='m';
UPDATE |person:10| SET name="Test", test=true, age=30, gender='m';
UPDATE |person:10| SET name="Test", test=true, age=31, gender='f';
UPDATE |person:10| SET name="Test", test=true, age=29, gender='m';
SELECT * FROM person ORDER BY meta.id;
SELECT * FROM person_f ORDER BY meta.id;
SELECT * FROM person_m ORDER BY meta.id;
SELECT * FROM person_age ORDER BY meta.id;
SELECT * FROM person_gender ORDER BY meta.id;
SELECT * FROM person_age_gender ORDER BY meta.id;
`
res, err := Execute(setupKV(), txt, nil)
So(err, ShouldBeNil)
So(res, ShouldHaveLength, 17)
So(res[6].Result, ShouldHaveLength, 10)
So(res[7].Result, ShouldHaveLength, 10)
So(res[8].Result, ShouldHaveLength, 10)
So(res[9].Result, ShouldHaveLength, 10)
So(res[10].Result, ShouldHaveLength, 10)
So(res[11].Result, ShouldHaveLength, 50)
So(res[12].Result, ShouldHaveLength, 20)
So(res[13].Result, ShouldHaveLength, 30)
So(res[14].Result, ShouldHaveLength, 4)
So(data.Consume(res[14].Result[0]).Get("meta.id").Data(), ShouldEqual, "[29]")
So(data.Consume(res[14].Result[0]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[14].Result[0]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[14].Result[0]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[0]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[14].Result[0]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[0]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[14].Result[0]).Get("math.min(age)").Data(), ShouldEqual, 29)
So(data.Consume(res[14].Result[0]).Get("math.max(age)").Data(), ShouldEqual, 29)
So(data.Consume(res[14].Result[0]).Get("math.sum(age)").Data(), ShouldEqual, 290)
So(data.Consume(res[14].Result[0]).Get("math.mean(age)").Data(), ShouldEqual, 29)
So(data.Consume(res[14].Result[0]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[14].Result[0]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[14].Result[1]).Get("meta.id").Data(), ShouldEqual, "[30]")
So(data.Consume(res[14].Result[1]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[14].Result[1]).Get("count").Data(), ShouldEqual, 20)
So(data.Consume(res[14].Result[1]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[1]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 20)
So(data.Consume(res[14].Result[1]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[1]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[14].Result[1]).Get("math.min(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[14].Result[1]).Get("math.max(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[14].Result[1]).Get("math.sum(age)").Data(), ShouldEqual, 600)
So(data.Consume(res[14].Result[1]).Get("math.mean(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[14].Result[1]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[14].Result[1]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[14].Result[2]).Get("meta.id").Data(), ShouldEqual, "[31]")
So(data.Consume(res[14].Result[2]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[14].Result[2]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[14].Result[2]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[2]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[14].Result[2]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[2]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[14].Result[2]).Get("math.min(age)").Data(), ShouldEqual, 31)
So(data.Consume(res[14].Result[2]).Get("math.max(age)").Data(), ShouldEqual, 31)
So(data.Consume(res[14].Result[2]).Get("math.sum(age)").Data(), ShouldEqual, 310)
So(data.Consume(res[14].Result[2]).Get("math.mean(age)").Data(), ShouldEqual, 31)
So(data.Consume(res[14].Result[2]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[14].Result[2]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[14].Result[3]).Get("meta.id").Data(), ShouldEqual, "[32]")
So(data.Consume(res[14].Result[3]).Get("meta.tb").Data(), ShouldEqual, "person_age")
So(data.Consume(res[14].Result[3]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[14].Result[3]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[3]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[14].Result[3]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[14].Result[3]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[14].Result[3]).Get("math.min(age)").Data(), ShouldEqual, 32)
So(data.Consume(res[14].Result[3]).Get("math.max(age)").Data(), ShouldEqual, 32)
So(data.Consume(res[14].Result[3]).Get("math.sum(age)").Data(), ShouldEqual, 320)
So(data.Consume(res[14].Result[3]).Get("math.mean(age)").Data(), ShouldEqual, 32)
So(data.Consume(res[14].Result[3]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[14].Result[3]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(res[15].Result, ShouldHaveLength, 2)
So(data.Consume(res[15].Result[0]).Get("meta.id").Data(), ShouldEqual, "[f]")
So(data.Consume(res[15].Result[0]).Get("meta.tb").Data(), ShouldEqual, "person_gender")
So(data.Consume(res[15].Result[0]).Get("count").Data(), ShouldEqual, 20)
So(data.Consume(res[15].Result[0]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[15].Result[0]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 20)
So(data.Consume(res[15].Result[0]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[15].Result[0]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 2)
So(data.Consume(res[15].Result[0]).Get("math.min(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[15].Result[0]).Get("math.max(age)").Data(), ShouldEqual, 31)
So(data.Consume(res[15].Result[0]).Get("math.sum(age)").Data(), ShouldEqual, 610)
So(data.Consume(res[15].Result[0]).Get("math.mean(age)").Data(), ShouldEqual, 30.5)
So(data.Consume(res[15].Result[0]).Get("math.stddev(age)").Data(), ShouldEqual, 0.512989176042577)
So(data.Consume(res[15].Result[0]).Get("math.variance(age)").Data(), ShouldEqual, 0.26315789473684215)
So(data.Consume(res[15].Result[1]).Get("meta.id").Data(), ShouldEqual, "[m]")
So(data.Consume(res[15].Result[1]).Get("meta.tb").Data(), ShouldEqual, "person_gender")
So(data.Consume(res[15].Result[1]).Get("count").Data(), ShouldEqual, 30)
So(data.Consume(res[15].Result[1]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[15].Result[1]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 30)
So(data.Consume(res[15].Result[1]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[15].Result[1]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 3)
So(data.Consume(res[15].Result[1]).Get("math.min(age)").Data(), ShouldEqual, 29)
So(data.Consume(res[15].Result[1]).Get("math.max(age)").Data(), ShouldEqual, 32)
So(data.Consume(res[15].Result[1]).Get("math.sum(age)").Data(), ShouldEqual, 910)
So(data.Consume(res[15].Result[1]).Get("math.mean(age)").Data(), ShouldEqual, 30.333333333333332)
So(data.Consume(res[15].Result[1]).Get("math.stddev(age)").Data(), ShouldEqual, 1.2685406585123122)
So(data.Consume(res[15].Result[1]).Get("math.variance(age)").Data(), ShouldEqual, 1.6091954022988506)
So(res[16].Result, ShouldHaveLength, 5)
So(data.Consume(res[16].Result[0]).Get("meta.id").Data(), ShouldEqual, "[29 m]")
So(data.Consume(res[16].Result[0]).Get("meta.tb").Data(), ShouldEqual, "person_age_gender")
So(data.Consume(res[16].Result[0]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[16].Result[0]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[0]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[16].Result[0]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[0]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[16].Result[0]).Get("math.min(age)").Data(), ShouldEqual, 29)
So(data.Consume(res[16].Result[0]).Get("math.max(age)").Data(), ShouldEqual, 29)
So(data.Consume(res[16].Result[0]).Get("math.sum(age)").Data(), ShouldEqual, 290)
So(data.Consume(res[16].Result[0]).Get("math.mean(age)").Data(), ShouldEqual, 29)
So(data.Consume(res[16].Result[0]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[0]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[1]).Get("meta.id").Data(), ShouldEqual, "[30 f]")
So(data.Consume(res[16].Result[1]).Get("meta.tb").Data(), ShouldEqual, "person_age_gender")
So(data.Consume(res[16].Result[1]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[16].Result[1]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[1]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[16].Result[1]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[1]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[16].Result[1]).Get("math.min(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[16].Result[1]).Get("math.max(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[16].Result[1]).Get("math.sum(age)").Data(), ShouldEqual, 300)
So(data.Consume(res[16].Result[1]).Get("math.mean(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[16].Result[1]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[1]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[2]).Get("meta.id").Data(), ShouldEqual, "[30 m]")
So(data.Consume(res[16].Result[2]).Get("meta.tb").Data(), ShouldEqual, "person_age_gender")
So(data.Consume(res[16].Result[2]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[16].Result[2]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[2]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[16].Result[2]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[2]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[16].Result[2]).Get("math.min(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[16].Result[2]).Get("math.max(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[16].Result[2]).Get("math.sum(age)").Data(), ShouldEqual, 300)
So(data.Consume(res[16].Result[2]).Get("math.mean(age)").Data(), ShouldEqual, 30)
So(data.Consume(res[16].Result[2]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[2]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[3]).Get("meta.id").Data(), ShouldEqual, "[31 f]")
So(data.Consume(res[16].Result[3]).Get("meta.tb").Data(), ShouldEqual, "person_age_gender")
So(data.Consume(res[16].Result[3]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[16].Result[3]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[3]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[16].Result[3]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[3]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[16].Result[3]).Get("math.min(age)").Data(), ShouldEqual, 31)
So(data.Consume(res[16].Result[3]).Get("math.max(age)").Data(), ShouldEqual, 31)
So(data.Consume(res[16].Result[3]).Get("math.sum(age)").Data(), ShouldEqual, 310)
So(data.Consume(res[16].Result[3]).Get("math.mean(age)").Data(), ShouldEqual, 31)
So(data.Consume(res[16].Result[3]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[3]).Get("math.variance(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[4]).Get("meta.id").Data(), ShouldEqual, "[32 m]")
So(data.Consume(res[16].Result[4]).Get("meta.tb").Data(), ShouldEqual, "person_age_gender")
So(data.Consume(res[16].Result[4]).Get("count").Data(), ShouldEqual, 10)
So(data.Consume(res[16].Result[4]).Get("distinct(id)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[4]).Get("distinct(id)").Data().([]interface{}), ShouldHaveLength, 10)
So(data.Consume(res[16].Result[4]).Get("distinct(age)").Data(), ShouldHaveSameTypeAs, []interface{}{})
So(data.Consume(res[16].Result[4]).Get("distinct(age)").Data().([]interface{}), ShouldHaveLength, 1)
So(data.Consume(res[16].Result[4]).Get("math.min(age)").Data(), ShouldEqual, 32)
So(data.Consume(res[16].Result[4]).Get("math.max(age)").Data(), ShouldEqual, 32)
So(data.Consume(res[16].Result[4]).Get("math.sum(age)").Data(), ShouldEqual, 320)
So(data.Consume(res[16].Result[4]).Get("math.mean(age)").Data(), ShouldEqual, 32)
So(data.Consume(res[16].Result[4]).Get("math.stddev(age)").Data(), ShouldEqual, 0)
So(data.Consume(res[16].Result[4]).Get("math.variance(age)").Data(), ShouldEqual, 0)
})
Convey("Define a table with permission specified so only specified records are visible", t, func() {
setupDB(20)

View file

@ -306,25 +306,27 @@ func (d *document) setup(ctx context.Context) (err error) {
d.id = sql.NewThing(d.key.TB, d.key.ID)
d.md = map[string]interface{}{
"tb": d.key.TB,
"id": d.key.ID,
}
}
return
}
func (d *document) changed() bool {
func (d *document) forced(ctx context.Context) bool {
if val := ctx.Value(ctxKeyForce); val != nil {
return val.(bool)
}
return false
}
func (d *document) changed(ctx context.Context) bool {
a, _ := d.initial.Data().(map[string]interface{})
b, _ := d.current.Data().(map[string]interface{})
c := diff.Diff(a, b)
return len(c) > 0
}
func (d *document) shouldDrop() (bool, error) {
func (d *document) shouldDrop(ctx context.Context) (bool, error) {
// Check whether it is specified
// that the table should drop
@ -346,14 +348,14 @@ func (d *document) storeThing(ctx context.Context) (err error) {
// Check that the record has been
// changed, and if not, return.
if ok := d.changed(); !ok {
if ok := d.changed(ctx); !ok {
return
}
// Check that the table should
// drop data being written.
if ok, err := d.shouldDrop(); ok {
if ok, err := d.shouldDrop(ctx); ok {
return err
}
@ -373,7 +375,7 @@ func (d *document) purgeThing(ctx context.Context) (err error) {
// Check that the table should
// drop data being written.
if ok, err := d.shouldDrop(); ok {
if ok, err := d.shouldDrop(ctx); ok {
return err
}
@ -393,7 +395,7 @@ func (d *document) eraseThing(ctx context.Context) (err error) {
// Check that the table should
// drop data being written.
if ok, err := d.shouldDrop(); ok {
if ok, err := d.shouldDrop(ctx); ok {
return err
}
@ -408,10 +410,22 @@ func (d *document) eraseThing(ctx context.Context) (err error) {
func (d *document) storeIndex(ctx context.Context) (err error) {
// Check if this query has been run
// in forced mode, or return.
forced := d.forced(ctx)
// Check that the record has been
// changed, and if not, return.
if !forced && !d.changed(ctx) {
return
}
// Check that the table should
// drop data being written.
if ok, err := d.shouldDrop(); ok {
if ok, err := d.shouldDrop(ctx); ok {
return err
}
@ -429,21 +443,9 @@ func (d *document) storeIndex(ctx context.Context) (err error) {
del := indx.Build(ix.Cols, d.initial)
add := indx.Build(ix.Cols, d.current)
// TODO use diffing to speed up indexes
// We need to use diffing so that only
// changed values are written to the
// storage layer. However if an index
// is redefined, then the diff does not
// return any changes, and the index is
// then corrupt. Maybe we could check
// when the index was created, and check
// if the d.initial change time is after
// the index creation time, then perform
// a diff on the old/new index values.
// if d.initial.Get("meta.time") > ix.Time {
// del, add = indx.Diff(old, now)
// }
if !forced {
del, add = indx.Diff(del, add)
}
if ix.Uniq == true {
for _, v := range del {
@ -479,10 +481,22 @@ func (d *document) storeIndex(ctx context.Context) (err error) {
func (d *document) purgeIndex(ctx context.Context) (err error) {
// Check if this query has been run
// in forced mode, or return.
forced := d.forced(ctx)
// Check that the record has been
// changed, and if not, return.
if !forced && !d.changed(ctx) {
return
}
// Check that the table should
// drop data being written.
if ok, err := d.shouldDrop(); ok {
if ok, err := d.shouldDrop(ctx); ok {
return err
}

View file

@ -25,12 +25,18 @@ import (
// table, and executes them in name order.
func (d *document) event(ctx context.Context, met method) (err error) {
// Check if this query has been run
// in forced mode, because of an
// index or foreign table update.
forced := d.forced(ctx)
// If this document has not changed
// then there is no need to perform
// any registered events.
if ok := d.changed(); !ok {
return
if !forced && !d.changed(ctx) {
return nil
}
// Get the event values specified

View file

@ -24,11 +24,17 @@ import (
// this table, and executes them in name order.
func (d *document) lives(ctx context.Context, when method) (err error) {
// Check if this query has been run
// in forced mode, because of an
// index or foreign table update.
forced := d.forced(ctx)
// If this document has not changed
// then there is no need to update
// any registered live queries.
if !d.changed() {
if !forced && !d.changed(ctx) {
return nil
}

View file

@ -80,8 +80,17 @@ func (d *document) merge(ctx context.Context, met method, data sql.Expr) (err er
func (d *document) defFld(ctx context.Context, met method) (err error) {
switch d.i.vir {
case true:
d.current.Set(d.id, "id")
d.current.Set(d.md, "meta")
d.current.Set(d.id.TB, "meta.tb")
d.current.Set(d.id.ID, "meta.id")
case false:
d.current.Del("meta")
d.current.Set(d.id, "id")
d.current.Set(d.id.TB, "meta.tb")
d.current.Set(d.id.ID, "meta.id")
}
return

View file

@ -25,9 +25,11 @@ func (e *executor) executeRemoveNamespace(ctx context.Context, ast *sql.RemoveNa
e.dbo.DelNS(ast.Name.ID)
// Remove the namespace definition
nkey := &keys.NS{KV: ast.KV, NS: ast.Name.ID}
_, err = e.dbo.Clr(nkey.Encode())
// Remove the namespace resource data
akey := &keys.Namespace{KV: ast.KV, NS: ast.Name.ID}
_, err = e.dbo.ClrP(akey.Encode(), 0)
@ -39,9 +41,11 @@ func (e *executor) executeRemoveDatabase(ctx context.Context, ast *sql.RemoveDat
e.dbo.DelDB(ast.NS, ast.Name.ID)
// Remove the database definition
dkey := &keys.DB{KV: ast.KV, NS: ast.NS, DB: ast.Name.ID}
_, err = e.dbo.Clr(dkey.Encode())
// Remove the database resource data
akey := &keys.Database{KV: ast.KV, NS: ast.NS, DB: ast.Name.ID}
_, err = e.dbo.ClrP(akey.Encode(), 0)
@ -53,11 +57,17 @@ func (e *executor) executeRemoveLogin(ctx context.Context, ast *sql.RemoveLoginS
switch ast.Kind {
case sql.NAMESPACE:
// Remove the login definition
ukey := &keys.NU{KV: ast.KV, NS: ast.NS, US: ast.User.ID}
_, err = e.dbo.ClrP(ukey.Encode(), 0)
case sql.DATABASE:
// Remove the login definition
ukey := &keys.DU{KV: ast.KV, NS: ast.NS, DB: ast.DB, US: ast.User.ID}
_, err = e.dbo.ClrP(ukey.Encode(), 0)
}
return
@ -68,11 +78,17 @@ func (e *executor) executeRemoveToken(ctx context.Context, ast *sql.RemoveTokenS
switch ast.Kind {
case sql.NAMESPACE:
// Remove the token definition
tkey := &keys.NT{KV: ast.KV, NS: ast.NS, TK: ast.Name.ID}
_, err = e.dbo.ClrP(tkey.Encode(), 0)
case sql.DATABASE:
// Remove the token definition
tkey := &keys.DT{KV: ast.KV, NS: ast.NS, DB: ast.DB, TK: ast.Name.ID}
_, err = e.dbo.ClrP(tkey.Encode(), 0)
}
return
@ -81,6 +97,7 @@ func (e *executor) executeRemoveToken(ctx context.Context, ast *sql.RemoveTokenS
func (e *executor) executeRemoveScope(ctx context.Context, ast *sql.RemoveScopeStatement) (out []interface{}, err error) {
// Remove the scope definition
skey := &keys.SC{KV: ast.KV, NS: ast.NS, DB: ast.DB, SC: ast.Name.ID}
_, err = e.dbo.ClrP(skey.Encode(), 0)
@ -88,50 +105,13 @@ func (e *executor) executeRemoveScope(ctx context.Context, ast *sql.RemoveScopeS
}
func (e *executor) executeRemoveTable(ctx context.Context, ast *sql.RemoveTableStatement) (out []interface{}, err error) {
for _, TB := range ast.What {
e.dbo.DelTB(ast.NS, ast.DB, TB.TB)
tb, err := e.dbo.GetTB(ast.NS, ast.DB, TB.TB)
if err != nil {
return nil, err
}
tkey := &keys.TB{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB}
_, err = e.dbo.Clr(tkey.Encode())
if err != nil {
return nil, err
}
akey := &keys.Table{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB}
_, err = e.dbo.ClrP(akey.Encode(), 0)
if err != nil {
return nil, err
}
if tb.Lock {
for _, FT := range tb.From {
tkey := &keys.FT{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: FT.TB, FT: TB.TB}
if _, err = e.dbo.ClrP(tkey.Encode(), 0); err != nil {
return nil, err
}
}
}
}
return
}
func (e *executor) executeRemoveEvent(ctx context.Context, ast *sql.RemoveEventStatement) (out []interface{}, err error) {
for _, TB := range ast.What {
e.dbo.DelEV(ast.NS, ast.DB, TB.TB, ast.Name.ID)
// Remove the event definition
ekey := &keys.EV{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, EV: ast.Name.ID}
if _, err = e.dbo.ClrP(ekey.Encode(), 0); err != nil {
return nil, err
@ -149,6 +129,7 @@ func (e *executor) executeRemoveField(ctx context.Context, ast *sql.RemoveFieldS
e.dbo.DelFD(ast.NS, ast.DB, TB.TB, ast.Name.ID)
// Remove the field definition
fkey := &keys.FD{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, FD: ast.Name.ID}
if _, err = e.dbo.ClrP(fkey.Encode(), 0); err != nil {
return nil, err
@ -166,11 +147,13 @@ func (e *executor) executeRemoveIndex(ctx context.Context, ast *sql.RemoveIndexS
e.dbo.DelIX(ast.NS, ast.DB, TB.TB, ast.Name.ID)
// Remove the index definition
ikey := &keys.IX{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, IX: ast.Name.ID}
if _, err = e.dbo.ClrP(ikey.Encode(), 0); err != nil {
return nil, err
}
// Remove the index resource data
dkey := &keys.Index{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB, IX: ast.Name.ID, FD: keys.Ignore}
if _, err = e.dbo.ClrP(dkey.Encode(), 0); err != nil {
return nil, err
@ -181,3 +164,48 @@ func (e *executor) executeRemoveIndex(ctx context.Context, ast *sql.RemoveIndexS
return
}
func (e *executor) executeRemoveTable(ctx context.Context, ast *sql.RemoveTableStatement) (out []interface{}, err error) {
for _, TB := range ast.What {
e.dbo.DelTB(ast.NS, ast.DB, TB.TB)
tb, err := e.dbo.GetTB(ast.NS, ast.DB, TB.TB)
if err != nil {
return nil, err
}
// Remove the table definition
tkey := &keys.TB{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB}
_, err = e.dbo.Clr(tkey.Encode())
if err != nil {
return nil, err
}
// Remove the table resource data
dkey := &keys.Table{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: TB.TB}
_, err = e.dbo.ClrP(dkey.Encode(), 0)
if err != nil {
return nil, err
}
if tb.Lock {
for _, FT := range tb.From {
// Remove the foreign table definition
tkey := &keys.FT{KV: ast.KV, NS: ast.NS, DB: ast.DB, TB: FT.TB, FT: TB.TB}
if _, err = e.dbo.ClrP(tkey.Encode(), 0); err != nil {
return nil, err
}
}
}
}
return
}

View file

@ -20,6 +20,8 @@ import (
"context"
"github.com/abcum/surreal/sql"
"github.com/abcum/surreal/util/data"
"github.com/abcum/surreal/util/fncs"
"github.com/abcum/surreal/util/keys"
)
@ -27,11 +29,17 @@ import (
// this table, and executes them in name order.
func (d *document) table(ctx context.Context, when method) (err error) {
// Check if this query has been run
// in forced mode, because of an
// index or foreign table update.
forced := d.forced(ctx)
// If this document has not changed
// then there is no need to update
// any registered foreign tables.
if !d.changed() {
if !forced && !d.changed(ctx) {
return nil
}
@ -44,11 +52,10 @@ func (d *document) table(ctx context.Context, when method) (err error) {
return err
}
if len(fts) > 0 {
for _, ft := range fts {
var ok bool
var prv *sql.Thing
var doc *sql.Thing
ok, err = d.check(ctx, ft.Cond)
@ -61,17 +68,16 @@ func (d *document) table(ctx context.Context, when method) (err error) {
// If there are GROUP BY clauses then
// let's calculate the old and new group record ids
return errFeatureNotImplemented
ats := make([]interface{}, len(ft.Group))
old := make([]interface{}, len(ft.Group))
now := make([]interface{}, len(ft.Group))
for k, e := range ft.Group {
ats[k], _ = d.i.e.fetch(ctx, e.Expr, d.current)
old[k], _ = d.i.e.fetch(ctx, e.Expr, d.initial)
now[k], _ = d.i.e.fetch(ctx, e.Expr, d.current)
}
rec := fmt.Sprintf("%v", ats)
doc = sql.NewThing(ft.Name.ID, rec)
prv = sql.NewThing(ft.Name.ID, fmt.Sprintf("%v", old))
doc = sql.NewThing(ft.Name.ID, fmt.Sprintf("%v", now))
} else {
@ -92,15 +98,21 @@ func (d *document) table(ctx context.Context, when method) (err error) {
case false:
if len(ft.Group) > 0 {
err = d.tableModify(ctx, doc, nil)
if !forced && when != _CREATE {
err = d.tableModify(ctx, prv, ft.Expr, _REMOVE)
if err != nil {
return err
}
}
} else {
err = d.tableDelete(ctx, doc, nil)
err = d.tableDelete(ctx, doc, ft.Expr)
if err != nil {
return err
}
}
// If the document does match the table
@ -110,16 +122,27 @@ func (d *document) table(ctx context.Context, when method) (err error) {
case true:
if len(ft.Group) > 0 {
err = d.tableModify(ctx, doc, nil)
if !forced && when != _CREATE {
err = d.tableModify(ctx, prv, ft.Expr, _REMOVE)
if err != nil {
return err
}
}
if when != _DELETE {
err = d.tableModify(ctx, doc, ft.Expr, _CHANGE)
if err != nil {
return err
}
}
} else {
err = d.tableUpdate(ctx, doc, ft.Expr)
if err != nil {
return err
}
}
}
@ -131,18 +154,18 @@ func (d *document) table(ctx context.Context, when method) (err error) {
}
func (d *document) tableDelete(ctx context.Context, id *sql.Thing, exp sql.Fields) (err error) {
func (d *document) tableDelete(ctx context.Context, tng *sql.Thing, exp sql.Fields) (err error) {
stm := &sql.DeleteStatement{
KV: d.key.KV,
NS: d.key.NS,
DB: d.key.DB,
What: sql.Exprs{id},
Hard: true,
What: sql.Exprs{tng},
Hard: false,
Parallel: 1,
}
key := &keys.Thing{KV: stm.KV, NS: stm.NS, DB: stm.DB, TB: id.TB, ID: id.ID}
key := &keys.Thing{KV: stm.KV, NS: stm.NS, DB: stm.DB, TB: tng.TB, ID: tng.ID}
i := newIterator(d.i.e, ctx, stm, true)
@ -154,7 +177,7 @@ func (d *document) tableDelete(ctx context.Context, id *sql.Thing, exp sql.Field
}
func (d *document) tableUpdate(ctx context.Context, id *sql.Thing, exp sql.Fields) (err error) {
func (d *document) tableUpdate(ctx context.Context, tng *sql.Thing, exp sql.Fields) (err error) {
res, err := d.yield(ctx, &sql.SelectStatement{Expr: exp}, sql.ILLEGAL)
if err != nil {
@ -165,12 +188,12 @@ func (d *document) tableUpdate(ctx context.Context, id *sql.Thing, exp sql.Field
KV: d.key.KV,
NS: d.key.NS,
DB: d.key.DB,
What: sql.Exprs{id},
What: sql.Exprs{tng},
Data: &sql.ContentExpression{Data: res},
Parallel: 1,
}
key := &keys.Thing{KV: stm.KV, NS: stm.NS, DB: stm.DB, TB: id.TB, ID: id.ID}
key := &keys.Thing{KV: stm.KV, NS: stm.NS, DB: stm.DB, TB: tng.TB, ID: tng.ID}
i := newIterator(d.i.e, ctx, stm, true)
@ -182,8 +205,463 @@ func (d *document) tableUpdate(ctx context.Context, id *sql.Thing, exp sql.Field
}
func (d *document) tableModify(ctx context.Context, id *sql.Thing, exp sql.Fields) error {
func (d *document) tableModify(ctx context.Context, tng *sql.Thing, exp sql.Fields, when modify) (err error) {
return nil
var doc *data.Doc
switch when {
case _REMOVE:
doc = d.initial
case _CHANGE:
doc = d.current
}
set := &sql.DataExpression{}
for _, e := range exp {
if f, ok := e.Expr.(*sql.FuncExpression); ok && f.Aggr {
var v interface{}
args := make([]interface{}, len(f.Args))
for x := 0; x < len(f.Args); x++ {
args[x], _ = d.i.e.fetch(ctx, f.Args[x], doc)
}
// If the function is math.stddev() or
// math.variance(), then we need to work
// out the value as a whole, and not the
// result of each record separately.
switch f.Name {
default:
v, err = fncs.Run(ctx, f.Name, args...)
case "math.stddev":
v = args[0]
case "math.variance":
v = args[0]
}
if err != nil {
return err
}
switch f.Name {
case "distinct":
tableChg(set, e.Field, v, when)
case "count":
tableChg(set, e.Field, v, when)
case "count.if":
tableChg(set, e.Field, v, when)
case "count.not":
tableChg(set, e.Field, v, when)
case "math.sum":
tableChg(set, e.Field, v, when)
case "math.min":
tableMin(set, e.Field, v, when)
case "math.max":
tableMax(set, e.Field, v, when)
case "math.mean":
tableMean(set, e.Field, v, when)
case "math.stddev":
switch a := v.(type) {
case []interface{}:
for _, v := range a {
tableStddev(set, e.Field, v, when)
}
default:
tableStddev(set, e.Field, v, when)
}
case "math.variance":
switch a := v.(type) {
case []interface{}:
for _, v := range a {
tableVariance(set, e.Field, v, when)
}
default:
tableVariance(set, e.Field, v, when)
}
}
continue
}
o, err := d.i.e.fetch(ctx, e.Expr, doc)
if err != nil {
return err
}
tableSet(set, e.Field, o, when)
}
stm := &sql.UpdateStatement{
KV: d.key.KV,
NS: d.key.NS,
DB: d.key.DB,
What: sql.Exprs{tng},
Data: set,
Parallel: 1,
}
key := &keys.Thing{KV: stm.KV, NS: stm.NS, DB: stm.DB, TB: tng.TB, ID: tng.ID}
i := newIterator(d.i.e, ctx, stm, true)
i.processThing(ctx, key)
_, err = i.Yield(ctx)
return err
}
func tableSet(set *sql.DataExpression, key string, val interface{}, when modify) {
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: val,
})
}
func tableChg(set *sql.DataExpression, key string, val interface{}, when modify) {
var op sql.Token
switch when {
case _REMOVE:
op = sql.DEC
case _CHANGE:
op = sql.INC
}
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: op,
RHS: val,
})
}
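tableSet assigns plain group-by fields directly, while tableChg drives the additive aggregates: the per-record value is added (INC) to the foreign record when the source record enters the group on _CHANGE and subtracted (DEC) when it leaves on _REMOVE. A minimal standalone sketch of that bookkeeping, assuming count(*) contributes 1 per record and math.sum(age) contributes the record's age:

package main

import "fmt"

// group mirrors one record in the aggregated foreign table.
type group struct {
	count int
	sum   float64
}

// change applies the _CHANGE path: the record now matches the group.
func (g *group) change(age float64) {
	g.count += 1
	g.sum += age
}

// remove applies the _REMOVE path: the record no longer matches the group.
func (g *group) remove(age float64) {
	g.count -= 1
	g.sum -= age
}

func main() {
	g := &group{}
	g.change(30)
	g.change(32)
	g.remove(32) // e.g. the record was updated and fell out of the WHERE clause
	fmt.Println(g.count, g.sum) // 1 30
}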
func tableMin(set *sql.DataExpression, key string, val interface{}, when modify) {
if when == _CHANGE {
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.IfelExpression{
Cond: sql.Exprs{
&sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.Empty{},
},
Op: sql.OR,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.GT,
RHS: val,
},
},
},
Then: sql.Exprs{
val,
},
Else: sql.NewIdent(key),
},
})
}
}
func tableMax(set *sql.DataExpression, key string, val interface{}, when modify) {
if when == _CHANGE {
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.IfelExpression{
Cond: sql.Exprs{
&sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.Empty{},
},
Op: sql.OR,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.LT,
RHS: val,
},
},
},
Then: sql.Exprs{
val,
},
Else: sql.NewIdent(key),
},
})
}
}
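tableMin and tableMax emit the SurrealQL equivalent of key = IF key = EMPTY OR key > val THEN val ELSE key END (with < for the maximum), so the stored extreme only changes when it is unset or beaten by the incoming value; neither emits anything on _REMOVE, since undoing a removed extreme would require rescanning the group. A trivial standalone illustration of the same rule, using a nil pointer for an unset field:

package main

import "fmt"

// runningMin returns the new minimum once val has been observed.
// A nil cur stands in for a field that has never been set.
func runningMin(cur *float64, val float64) float64 {
	if cur == nil || *cur > val {
		return val
	}
	return *cur
}

func main() {
	var cur *float64
	for _, age := range []float64{30, 32, 29} {
		next := runningMin(cur, age)
		cur = &next
	}
	fmt.Println(*cur) // 29
}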
func tableMean(set *sql.DataExpression, key string, val interface{}, when modify) {
var op sql.Token
switch when {
case _REMOVE:
op = sql.DEC
case _CHANGE:
op = sql.INC
}
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: op,
RHS: 1,
})
switch when {
case _REMOVE:
op = sql.SUB
case _CHANGE:
op = sql.ADD
}
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.BinaryExpression{
LHS: &sql.SubExpression{
Expr: &sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.MUL,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: sql.SUB,
RHS: 1,
},
},
Op: op,
RHS: val,
},
},
Op: sql.DIV,
RHS: sql.NewIdent("meta.__." + key + ".c"),
},
})
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.IfelExpression{
Cond: sql.Exprs{
&sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.Empty{},
},
},
Then: sql.Exprs{0},
Else: sql.NewIdent(key),
},
})
}
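tableMean keeps a per-group counter under meta.__.<key>.c and rebuilds the mean from its previous value rather than rescanning the group. On the _CHANGE path the generated expression amounts to mean = (mean * (c - 1) + val) / c, with c already incremented; the sketch below shows only that addition path, and the _REMOVE branch in the code mirrors it with subtraction:

package main

import "fmt"

// addToMean folds val into a running mean whose group count, including
// val, is now c, the same arithmetic as the generated expression above.
func addToMean(mean float64, c int, val float64) float64 {
	return (mean*float64(c-1) + val) / float64(c)
}

func main() {
	mean := 0.0
	for i, age := range []float64{30, 32, 30} {
		mean = addToMean(mean, i+1, age)
	}
	fmt.Println(mean) // running mean of 30, 32 and 30
}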
func tableStddev(set *sql.DataExpression, key string, val interface{}, when modify) {
var op sql.Token
switch when {
case _REMOVE:
op = sql.DEC
case _CHANGE:
op = sql.INC
}
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: op,
RHS: 1,
})
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent("meta.__." + key + ".t"),
Op: op,
RHS: val,
})
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent("meta.__." + key + ".m"),
Op: op,
RHS: &sql.BinaryExpression{
LHS: val,
Op: sql.MUL,
RHS: val,
},
})
// FIXME Need to ensure removed values update correctly
switch when {
case _REMOVE:
op = sql.SUB // FIXME This is incorrect
case _CHANGE:
op = sql.ADD
}
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.FuncExpression{
Name: "math.sqrt",
Args: sql.Exprs{
&sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: sql.MUL,
RHS: sql.NewIdent("meta.__." + key + ".m"),
},
Op: sql.SUB,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".t"),
Op: sql.MUL,
RHS: sql.NewIdent("meta.__." + key + ".t"),
},
},
Op: sql.DIV,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: sql.MUL,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: sql.SUB,
RHS: 1,
},
},
},
},
},
})
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.IfelExpression{
Cond: sql.Exprs{
&sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.Empty{},
},
},
Then: sql.Exprs{0},
Else: sql.NewIdent(key),
},
})
}
func tableVariance(set *sql.DataExpression, key string, val interface{}, when modify) {
var op sql.Token
switch when {
case _REMOVE:
op = sql.DEC
case _CHANGE:
op = sql.INC
}
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: op,
RHS: 1,
})
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent("meta.__." + key + ".t"),
Op: op,
RHS: val,
})
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent("meta.__." + key + ".m"),
Op: op,
RHS: &sql.BinaryExpression{
LHS: val,
Op: sql.MUL,
RHS: val,
},
})
// FIXME Need to ensure removed values update correctly
switch when {
case _REMOVE:
op = sql.SUB // FIXME This is incorrect
case _CHANGE:
op = sql.ADD
}
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: sql.MUL,
RHS: sql.NewIdent("meta.__." + key + ".m"),
},
Op: sql.SUB,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".t"),
Op: sql.MUL,
RHS: sql.NewIdent("meta.__." + key + ".t"),
},
},
Op: sql.DIV,
RHS: &sql.BinaryExpression{
LHS: sql.NewIdent("meta.__." + key + ".c"),
Op: sql.SUB,
RHS: 1,
},
},
Op: sql.DIV,
RHS: sql.NewIdent("meta.__." + key + ".c"),
},
})
set.Data = append(set.Data, &sql.ItemExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.IfelExpression{
Cond: sql.Exprs{
&sql.BinaryExpression{
LHS: sql.NewIdent(key),
Op: sql.EQ,
RHS: &sql.Empty{},
},
},
Then: sql.Exprs{0},
Else: sql.NewIdent(key),
},
})
}
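tableStddev and tableVariance both keep three running values per group under meta.__.<key>: the count in .c, the sum of the values in .t and the sum of their squares in .m, and recompute the sample statistics from them in closed form as variance = (c*m - t*t) / (c*(c-1)) and stddev = sqrt(variance). The FIXMEs above note that the _REMOVE path is not yet correct, so this standalone sketch only illustrates the addition path:

package main

import (
	"fmt"
	"math"
)

// sampleStats accumulates the same three counters the generated
// expressions store under meta.__.<field> and applies the closed-form
// sample variance and standard deviation formulas.
func sampleStats(vals []float64) (stddev, variance float64) {
	var c, t, m float64
	for _, v := range vals {
		c++        // meta.__.<field>.c
		t += v     // meta.__.<field>.t
		m += v * v // meta.__.<field>.m
	}
	variance = (c*m - t*t) / (c * (c - 1))
	return math.Sqrt(variance), variance
}

func main() {
	s, v := sampleStats([]float64{30, 31, 30, 31})
	fmt.Println(s, v)
}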

View file

@ -19,7 +19,7 @@ import (
"runtime"
)
type method int
type method int8
const (
_SELECT method = iota
@ -28,6 +28,13 @@ const (
_DELETE
)
type modify int8
const (
_REMOVE modify = iota
_CHANGE
)
const (
docKeyId = "id"
docKeyOne = "0"
@ -46,6 +53,7 @@ const (
ctxKeyAuth = "auth"
ctxKeyKind = "kind"
ctxKeyScope = "scope"
ctxKeyForce = "force"
ctxKeyVersion = "version"
)

View file

@ -261,6 +261,16 @@ func (d *document) yield(ctx context.Context, stm sql.Statement, output sql.Toke
}
// Remove all temporary metadata from
// the record. This is not visible when
// outputting, but is stored in the DB.
doc.Del("meta.__")
// Output the document with the correct
// specified fields, linked records and
// any aggregated group by clauses.
return out.Data(), nil
}

View file

@ -26,7 +26,6 @@ var rolls = map[string]bool{
// Math implementation
"math.geometricmean": true,
"math.max": true,
"math.mean": true,
"math.min": true,

View file

@ -839,9 +839,14 @@ func (d *Doc) Inc(value interface{}, path ...string) (*Doc, error) {
return d.Set(0+inc, path...)
case float64:
return d.Set(0+inc, path...)
default:
switch value.(type) {
case []interface{}:
return d.Set(value, path...)
default:
return d.Set([]interface{}{value}, path...)
}
}
case int64:
switch inc := value.(type) {
case int64: