Commit 8e2008f8 by Torkel Ödegaard

Fixes #69: the lexer now correctly tokenizes number-dash-number segments (e.g. 192-168-1-1), and numeric literals are only treated as literal tokens when followed by a punctuator.
parent 0a8b9bad
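
As a quick illustration of the behavior this commit targets (a sketch only, using the Lexer exactly as the specs further down exercise it; the module loading is omitted):

// Sketch: uses the Lexer the same way the specs below do.
var lexer = new Lexer('net.192-168-1-1.192-168-1-9.ping_value.*');
var tokens = lexer.tokenize();
// number-dash-number segments now stay intact as a single identifier:
// tokens[2].value === '192-168-1-1'

lexer = new Lexer('offset(test.metric, -100)');
tokens = lexer.tokenize();
// '-100' is followed by the ')' punctuator, so it is scanned as a number:
// tokens[6].type === 'number'
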
@@ -131,6 +131,11 @@ function (_) {
});
+ addFuncDef({
+ name: 'cactiStyle',
+ category: categories.Special,
+ });
addFuncDef({
name: 'scale',
category: categories.Transform,
params: [ { name: "factor", type: "int", } ],
@@ -138,6 +143,13 @@ function (_) {
});
+ addFuncDef({
+ name: 'offset',
+ category: categories.Transform,
+ params: [ { name: "amount", type: "int", } ],
+ defaultParams: [10]
+ });
addFuncDef({
name: 'integral',
category: categories.Transform,
});
@@ -402,13 +402,20 @@ define([
(ch >= "a" && ch <= "z") || (ch >= "A" && ch <= "Z");
}
+ // handle negative num literals
+ if (char === '-') {
+ value += char;
+ index += 1;
+ char = this.peek(index);
+ }
// Numbers must start either with a decimal digit or a point.
if (char !== "." && !isDecimalDigit(char)) {
return null;
}
if (char !== ".") {
- value = this.peek(index);
+ value += this.peek(index);
index += 1;
char = this.peek(index);
@@ -555,7 +562,7 @@ define([
if (index < length) {
char = this.peek(index);
- if (isIdentifierStart(char)) {
+ if (!this.isPunctuator(char)) {
return null;
}
}
@@ -569,9 +576,7 @@ define([
};
},
- scanPunctuator: function () {
- var ch1 = this.peek();
+ isPunctuator: function (ch1) {
switch (ch1) {
case ".":
case "(":
@@ -579,6 +584,16 @@ define([
case ",":
case "{":
case "}":
+ return true;
+ }
+ return false;
+ },
+ scanPunctuator: function () {
+ var ch1 = this.peek();
+ if (this.isPunctuator(ch1)) {
return {
type: ch1,
value: ch1,
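
Condensed, the rule the lexer hunks above implement is: a run of digits (optionally with a leading minus) only becomes a numeric literal when the character that follows it is a punctuator (or the end of input); otherwise the scanner falls back to identifier handling, which is what keeps a segment like 192-168-1-1 together. A standalone sketch of that rule, with illustrative names that do not mirror lexer.js:

// Illustrative sketch only; simplified, not the actual lexer.js code.
var PUNCTUATORS = ['.', '(', ')', ',', '{', '}'];

function classifySegment(segment, nextChar) {
  // A numeric-looking run, optionally with a leading '-'.
  var looksNumeric = /^-?[0-9.]+$/.test(segment);
  // End of input also counts as a valid boundary for a number.
  var followedByPunctuator = nextChar === undefined || PUNCTUATORS.indexOf(nextChar) !== -1;

  if (looksNumeric && followedByPunctuator) {
    return 'number';      // e.g. '-100' right before ')'
  }
  return 'identifier';    // e.g. '192' right before '-', so '192-168-1-1' stays one identifier
}
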
@@ -21,6 +21,21 @@ define([
expect(tokens[4].value).to.be('se1-server-*');
});
+ it('should tokenize metric expression with dash2', function() {
+ var lexer = new Lexer('net.192-168-1-1.192-168-1-9.ping_value.*');
+ var tokens = lexer.tokenize();
+ expect(tokens[0].value).to.be('net');
+ expect(tokens[2].value).to.be('192-168-1-1');
+ });
+ it('simple function2', function() {
+ var lexer = new Lexer('offset(test.metric, -100)');
+ var tokens = lexer.tokenize();
+ expect(tokens[2].type).to.be('identifier');
+ expect(tokens[4].type).to.be('identifier');
+ expect(tokens[6].type).to.be('number');
+ });
it('should tokenize metric expression with curly braces', function() {
var lexer = new Lexer('metric.se1-{first, second}.count');
var tokens = lexer.tokenize();
@@ -49,6 +49,14 @@ define([
expect(rootNode.params.length).to.be(1);
});
+ it('simple function2', function() {
+ var parser = new Parser('offset(test.metric, -100)');
+ var rootNode = parser.getAst();
+ expect(rootNode.type).to.be('function');
+ expect(rootNode.params[0].type).to.be('metric');
+ expect(rootNode.params[1].type).to.be('number');
+ });
it('simple function with string arg', function() {
var parser = new Parser("randomWalk('test')");
var rootNode = parser.getAst();
@@ -125,6 +133,13 @@ define([
expect(rootNode.pos).to.be(11);
});
+ it('handle issue #69', function() {
+ var parser = new Parser('cactiStyle(offset(scale(net.192-168-1-1.192-168-1-9.ping_value.*,0.001),-100))');
+ var rootNode = parser.getAst();
+ expect(rootNode.type).to.be('function');
+ });
});
});