cql-lexer.js
65 lines (53 loc) · 2.13 KB
const chevrotain = require("chevrotain")
const Lexer = chevrotain.Lexer
const createToken = chevrotain.createToken

const tokenVocabulary = {}

// Identifier is defined first so that keyword tokens can reference it via
// longer_alt; without this, e.g. "android" would lex as And + "roid".
const Identifier = createToken({
    name: "Identifier",
    pattern: /[a-zA-Z]\w*/,
})

const Activate = createToken({name: "Activate", pattern: /activate:|deactivate:/})
const For = createToken({name: "For", pattern: /for:/})

// Relational operators used to filter out contexts
const GreaterThan = createToken({name: "GreaterThan", pattern: />/})
const LessThan = createToken({name: "LessThan", pattern: /</})
const Between = createToken({name: "Between", pattern: /between/, longer_alt: Identifier})
const AtLeast = createToken({name: "AtLeastOne", pattern: /atLeastOne/, longer_alt: Identifier})
const AtMost = createToken({name: "AtMostOne", pattern: /atMostOne/, longer_alt: Identifier})
const Unique = createToken({name: "Unique", pattern: /unique/, longer_alt: Identifier})
const AllOf = createToken({name: "AllOf", pattern: /allOf/, longer_alt: Identifier})
const Equals = createToken({name: "Equals", pattern: /=/})
const And = createToken({name: "And", pattern: /and/, longer_alt: Identifier})
const Or = createToken({name: "Or", pattern: /or/, longer_alt: Identifier})
const In = createToken({name: "In", pattern: /in/, longer_alt: Identifier})

// Special characters
const Comma = createToken({name: "Comma", pattern: /,/})
const LParenthesis = createToken({name: "LParenthesis", pattern: /\(/})
const RParenthesis = createToken({name: "RParenthesis", pattern: /\)/})

// Whitespace is skipped entirely and never reaches the parser
const WhiteSpace = createToken({
    name: "WhiteSpace",
    pattern: /\s+/,
    group: Lexer.SKIPPED,
    line_breaks: true
})

const Integer = createToken({name: "Integer", pattern: /0|[1-9]\d*/})

// Order matters: keywords must come before Identifier so they win the match.
// In was previously defined but missing from this list, so it could never
// be produced by the lexer.
const allTokens = [
    WhiteSpace,
    Activate, For, GreaterThan, LessThan, Between,
    AtLeast, AtMost, AllOf, Unique, Equals, And, Or, In,
    Identifier, Integer, Comma, LParenthesis, RParenthesis
]

const CQLLexer = new Lexer(allTokens)

allTokens.forEach(tokenType => {
    tokenVocabulary[tokenType.name] = tokenType
})

module.exports = {
    tokenVocabulary: tokenVocabulary,
    allTokens: allTokens,
    lexer: function(inputQuery) {
        const lexingResult = CQLLexer.tokenize(inputQuery)
        if (lexingResult.errors.length > 0) {
            throw Error("The given text cannot be tokenized:\n" + lexingResult.errors[0].message)
        }
        return lexingResult
    }
}
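
A minimal usage sketch follows. The sample query string is hypothetical, chosen only to exercise several token types; the actual CQL grammar is defined by the parser, not shown here.

const { lexer } = require("./cql-lexer")

// Hypothetical query mixing keywords, identifiers, and an integer
const result = lexer("activate: darkMode for: user = admin and count > 5")

// Print each matched token type together with the text it consumed
result.tokens.forEach(token => {
    console.log(token.tokenType.name + ": \"" + token.image + "\"")
})
// e.g. Activate: "activate:", Identifier: "darkMode", For: "for:", ...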