surrealpatch/sql/token.go

352 lines
5.3 KiB
Go
Raw Normal View History

2016-02-26 17:27:07 +00:00
// Copyright © 2016 Abcum Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sql

// Token defines a lexical token produced by the SQL scanner.
type Token int
const (
// special
ILLEGAL Token = iota
EOF
WS
// literals
literalsBeg
DATE // 1970-01-01
TIME // 1970-01-01T00:00:00+00:00
PATH // :friend
JSON // {"test":true}
IDENT // something
STRING // "something"
REGION // "a multiline \n string"
NUMBER // 123456
DOUBLE // 123.456
REGEX // /.*/
2016-05-23 12:32:02 +00:00
ARRAY // [0,1,2]
2016-02-26 17:27:07 +00:00
DURATION // 13h
EAT // @
DOT // .
COMMA // ,
LPAREN // (
RPAREN // )
LBRACK // [
RBRACK // ]
COLON // :
SEMICOLON // ;
literalsEnd
// operators
operatorBeg
ADD // +
SUB // -
MUL // *
DIV // /
INC // +=
DEC // -=
EQ // =
NEQ // !=
LT // <
LTE // <=
GT // >
GTE // >=
EQR // =~
NER // !~
2016-02-26 17:45:40 +00:00
SEQ // ∋
SNE // ∌
2016-02-26 17:27:07 +00:00
2016-07-04 10:37:37 +00:00
OEDGE // ->
IEDGE // <-
BEDGE // <->
2016-02-26 17:27:07 +00:00
operatorEnd
// literals
keywordsBeg
2016-07-04 10:37:37 +00:00
ACCEPT
2016-05-23 12:32:02 +00:00
AFTER
2016-02-26 17:27:07 +00:00
ALL
AND
AS
ASC
AT
2016-05-23 12:32:02 +00:00
BEFORE
BOTH
2016-02-26 17:27:07 +00:00
BY
2016-05-23 12:32:02 +00:00
CODE
2016-02-26 17:27:07 +00:00
COLUMNS
2016-05-23 12:32:02 +00:00
CONTENT
2016-02-26 17:27:07 +00:00
CREATE
2016-05-23 12:32:02 +00:00
DATABASE
DEFAULT
2016-02-26 17:27:07 +00:00
DEFINE
DELETE
DESC
2016-05-23 12:32:02 +00:00
DIFF
2016-02-26 17:27:07 +00:00
DISTINCT
EMPTY
2016-07-04 10:37:37 +00:00
ENUM
2016-05-23 12:32:02 +00:00
EXPLAIN
2016-07-04 10:37:37 +00:00
EXPUNGE
2016-02-26 17:27:07 +00:00
FALSE
2016-05-23 12:32:02 +00:00
FIELD
2016-02-26 17:27:07 +00:00
FROM
2016-05-23 12:32:02 +00:00
FULL
2016-02-26 17:27:07 +00:00
GROUP
2016-07-04 10:37:37 +00:00
HISTORY
2016-05-23 12:32:02 +00:00
ID
2016-02-26 17:27:07 +00:00
IN
INDEX
INSERT
INTO
LIMIT
2016-05-23 12:32:02 +00:00
MANDATORY
MAX
MERGE
MIN
2016-02-26 17:27:07 +00:00
MODIFY
2016-05-23 12:32:02 +00:00
NAMESPACE
NONE
NOTNULL
NOW
2016-02-26 17:27:07 +00:00
NULL
OFFSET
ON
OR
ORDER
2016-05-23 12:32:02 +00:00
READONLY
2016-02-26 17:27:07 +00:00
RECORD
2016-07-04 10:37:37 +00:00
REJECT
2016-02-26 17:27:07 +00:00
RELATE
REMOVE
RESYNC
2016-05-23 12:32:02 +00:00
RETURN
2016-02-26 17:27:07 +00:00
SELECT
SET
START
2016-05-23 12:32:02 +00:00
TABLE
2016-02-26 17:27:07 +00:00
TO
TRUE
2016-05-23 12:32:02 +00:00
TYPE
2016-02-26 17:27:07 +00:00
UNIQUE
UPDATE
UPSERT
2016-05-23 12:32:02 +00:00
USE
2016-02-26 17:27:07 +00:00
VERSION
2016-05-23 12:32:02 +00:00
VOID
2016-02-26 17:27:07 +00:00
WHERE
keywordsEnd
)
var tokens = [...]string{
ILLEGAL: "ILLEGAL",
EOF: "EOF",
WS: "WS",
// literals
DATE: "DATE",
TIME: "TIME",
PATH: "PATH",
JSON: "JSON",
IDENT: "IDENT",
STRING: "STRING",
REGION: "REGION",
NUMBER: "NUMBER",
DOUBLE: "DOUBLE",
REGEX: "REGEX",
2016-05-23 12:32:02 +00:00
ARRAY: "ARRAY",
2016-02-26 17:27:07 +00:00
DURATION: "DURATION",
EAT: "@",
DOT: ".",
COMMA: ",",
LPAREN: "(",
RPAREN: ")",
LBRACK: "[",
RBRACK: "]",
COLON: ":",
SEMICOLON: ";",
// operators
ADD: "+",
SUB: "-",
MUL: "*",
DIV: "/",
INC: "+=",
DEC: "-=",
EQ: "=",
NEQ: "!=",
LT: "<",
LTE: "<=",
GT: ">",
GTE: ">=",
EQR: "=~",
NER: "!~",
2016-02-26 17:45:40 +00:00
SEQ: "∋",
SNE: "∌",
2016-02-26 17:27:07 +00:00
// keywords
2016-07-04 10:37:37 +00:00
ACCEPT: "ACCEPT",
2016-05-23 12:32:02 +00:00
AFTER: "AFTER",
ALL: "ALL",
AND: "AND",
AS: "AS",
ASC: "ASC",
AT: "AT",
BEFORE: "BEFORE",
BOTH: "BOTH",
BY: "BY",
CODE: "CODE",
COLUMNS: "COLUMNS",
CONTENT: "CONTENT",
CREATE: "CREATE",
DATABASE: "DATABASE",
DEFAULT: "DEFAULT",
DEFINE: "DEFINE",
DELETE: "DELETE",
DESC: "DESC",
DIFF: "DIFF",
DISTINCT: "DISTINCT",
EMPTY: "EMPTY",
2016-07-04 10:37:37 +00:00
ENUM: "ENUM",
2016-05-23 12:32:02 +00:00
EXPLAIN: "EXPLAIN",
2016-07-04 10:37:37 +00:00
EXPUNGE: "EXPUNGE",
2016-05-23 12:32:02 +00:00
FALSE: "FALSE",
FIELD: "FIELD",
FROM: "FROM",
FULL: "FULL",
GROUP: "GROUP",
2016-07-04 10:37:37 +00:00
HISTORY: "HISTORY",
2016-05-23 12:32:02 +00:00
ID: "ID",
IN: "IN",
INDEX: "INDEX",
INSERT: "INSERT",
INTO: "INTO",
LIMIT: "LIMIT",
MANDATORY: "MANDATORY",
MAX: "MAX",
MERGE: "MERGE",
MIN: "MIN",
MODIFY: "MODIFY",
NAMESPACE: "NAMESPACE",
NONE: "NONE",
NOTNULL: "NOTNULL",
NOW: "NOW",
NULL: "NULL",
ON: "ON",
OR: "OR",
ORDER: "ORDER",
READONLY: "READONLY",
RECORD: "RECORD",
2016-07-04 10:37:37 +00:00
REJECT: "REJECT",
2016-05-23 12:32:02 +00:00
RELATE: "RELATE",
REMOVE: "REMOVE",
RESYNC: "RESYNC",
RETURN: "RETURN",
SELECT: "SELECT",
SET: "SET",
START: "START",
TABLE: "TABLE",
TO: "TO",
TRUE: "TRUE",
TYPE: "TYPE",
UNIQUE: "UNIQUE",
UPDATE: "UPDATE",
UPSERT: "UPSERT",
USE: "USE",
VERSION: "VERSION",
VOID: "VOID",
WHERE: "WHERE",
2016-02-26 17:27:07 +00:00
}
// Reverse-lookup maps from token string form back to Token value, one
// per token category. Populated once at package load by init().
var (
	literals map[string]Token
	operator map[string]Token
	keywords map[string]Token
)

// buildIndex returns a map from string form to token for every token
// strictly between the beg and end range markers.
func buildIndex(beg, end Token) map[string]Token {
	idx := make(map[string]Token, int(end-beg))
	for t := beg + 1; t < end; t++ {
		idx[tokens[t]] = t
	}
	return idx
}

func init() {
	literals = buildIndex(literalsBeg, literalsEnd)
	operator = buildIndex(operatorBeg, operatorEnd)
	keywords = buildIndex(keywordsBeg, keywordsEnd)
}
// lookup converts a slice of tokens to their string representations.
//
// The original used a named result called `literals`, shadowing the
// package-level literals map inside the function body; it also grew the
// result with repeated appends. The result is now named distinctly and
// sized up front.
func lookup(lookups []Token) []string {
	strs := make([]string, len(lookups))
	for i, tok := range lookups {
		strs[i] = tok.String()
	}
	return strs
}
// precedence reports the binding power of tok when used as a binary
// operator: higher binds tighter. Non-operator tokens yield 0.
func (tok Token) precedence() int {
	switch tok {
	case MUL, DIV:
		return 5
	case ADD, SUB:
		return 4
	case EQ, NEQ, EQR, NER, LT, LTE, GT, GTE:
		return 3
	case AND:
		return 2
	case OR:
		return 1
	default:
		return 0
	}
}
// String returns the canonical string form of tok, or "" when tok is
// outside the range covered by the tokens table.
func (tok Token) String() string {
	if tok < 0 || int(tok) >= len(tokens) {
		return ""
	}
	return tokens[tok]
}
// isLiteral reports whether tok lies within the literal token range.
func (tok Token) isLiteral() bool { return literalsBeg < tok && tok < literalsEnd }

// isKeyword reports whether tok lies within the keyword token range.
func (tok Token) isKeyword() bool { return keywordsBeg < tok && tok < keywordsEnd }

// isOperator reports whether tok lies within the operator token range.
func (tok Token) isOperator() bool { return operatorBeg < tok && tok < operatorEnd }