Skip to content

Commit 0dc90e4

Browse files
committed
Update the 'Compiler module should provide correct ranges' test with 3 different cases
1 parent 5245f8d commit 0dc90e4

File tree

1 file changed

+57
-14
lines changed

1 file changed

+57
-14
lines changed

src/test/compiler.test.ts

Lines changed: 57 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType, isCompilerError } from '../types.js';
22
import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, Range } from 'lsp-types';
3-
import { describe, it, onError } from '@efekos/es-test/bin/testRunner.js';
3+
import { describe, inst, it, onError } from '@efekos/es-test/bin/testRunner.js';
44
import { tokenizeSys, tokenizeSyx } from '../lexer.js';
55
import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js';
66
import { expect } from 'chai';
@@ -19,29 +19,72 @@ describe('Compiler module', () => {
1919

2020
it('should provide correct ranges', () => {
2121

22-
const tokens = tokenizeSyx('keyword hello;');
22+
inst(() => {
2323

24-
rangeExpectations(tokens[0].range);
25-
expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } });
26-
expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } });
27-
expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } });
24+
const tokens = tokenizeSyx('keyword hello;');
2825

29-
});
26+
tokens.map(r=>r.range).forEach(r=>rangeExpectations(r));
27+
expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } });
28+
expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } });
29+
expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } });
30+
31+
});
32+
33+
inst(() => {
34+
35+
const tokens = tokenizeSyx('rule "imports-keyword": cray;');
36+
37+
expect(tokens).to.be.a('array').to.have.lengthOf(10);
38+
tokens.map(r=>r.range).forEach(r=>rangeExpectations(r));
39+
expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } });
40+
expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } });
41+
expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 7 } });
42+
expect(tokens[3].range).to.be.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } });
43+
expect(tokens[4].range).to.be.deep.equal({ end: { line: 1, character: 22 }, start: { line: 1, character: 15 } });
44+
expect(tokens[5].range).to.be.deep.equal({ end: { line: 1, character: 23 }, start: { line: 1, character: 22 } });
45+
expect(tokens[6].range).to.be.deep.equal({ end: { line: 1, character: 24 }, start: { line: 1, character: 23 } });
46+
expect(tokens[7].range).to.be.deep.equal({ end: { line: 1, character: 29 }, start: { line: 1, character: 25 } });
47+
expect(tokens[8].range).to.be.deep.equal({ end: { line: 1, character: 30 }, start: { line: 1, character: 29 } });
48+
49+
});
50+
51+
inst(()=>{
52+
const tokens = tokenizeSyx('rule "return-function-value-enabled":true;');
53+
54+
expect(tokens).to.be.a('array').to.have.lengthOf(14);
55+
tokens.map(r=>r.range).forEach(r=>rangeExpectations(r));
56+
expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } });
57+
expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } });
58+
expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 13 }, start: { line: 1, character: 7 } });
59+
expect(tokens[3].range).to.be.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 13 } });
60+
expect(tokens[4].range).to.be.deep.equal({ end: { line: 1, character: 22 }, start: { line: 1, character: 14 } });
61+
expect(tokens[5].range).to.be.deep.equal({ end: { line: 1, character: 23 }, start: { line: 1, character: 22 } });
62+
expect(tokens[6].range).to.be.deep.equal({ end: { line: 1, character: 28 }, start: { line: 1, character: 23 } });
63+
expect(tokens[7].range).to.be.deep.equal({ end: { line: 1, character: 29 }, start: { line: 1, character: 28 } });
64+
expect(tokens[8].range).to.be.deep.equal({ end: { line: 1, character: 36 }, start: { line: 1, character: 29 } });
65+
expect(tokens[9].range).to.be.deep.equal({ end: { line: 1, character: 37 }, start: { line: 1, character: 36 } });
66+
expect(tokens[10].range).to.be.deep.equal({ end: { line: 1, character: 38 }, start: { line: 1, character: 37 } });
67+
expect(tokens[11].range).to.be.deep.equal({ end: { line: 1, character: 42 }, start: { line: 1, character: 38 } });
68+
expect(tokens[12].range).to.be.deep.equal({ end: { line: 1, character: 43 }, start: { line: 1, character: 42 } });
69+
70+
});
71+
72+
}, true);
3073

3174
it('should provide correct tokenization', () => {
3275
const t = tokenizeSyx('class } > ) ] , compile "" export function global random import imports 1 keyword { < ( [ operator * rule ; \'\' | +s');
3376
const tList = [
34-
TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote,TokenType.DoubleQuote,
77+
TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote,
3578
TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword,
36-
TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote,TokenType.SingleQuote,
79+
TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote,
3780
TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile
3881
];
3982

4083
expect(t).to.be.a('array');
4184
expect(t.map(tt => tt.type)).to.be.deep.equal(tList);
4285

4386
const sys = tokenizeSys('import "" \'\' ; :::');
44-
const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote,TokenType.DoubleQuote, TokenType.SingleQuote,TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile];
87+
const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile];
4588

4689
expect(sys).to.be.a('array');
4790
expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList);
@@ -139,7 +182,7 @@ describe('Compiler module', () => {
139182

140183
const tokens = tokenizeSyx('export keyword ruleish;');
141184
const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
142-
const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [{range:{end:{line:1,character:7},start:{line:1,character:1}},type:TokenType.ExportKeyword,value:'export'}], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: 'ruleish' };
185+
const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [{ range: { end: { line: 1, character: 7 }, start: { line: 1, character: 1 } }, type: TokenType.ExportKeyword, value: 'export' }], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: 'ruleish' };
143186

144187
astTypeExpectations(ast);
145188
expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);
@@ -148,16 +191,16 @@ describe('Compiler module', () => {
148191

149192
});
150193

151-
it('should provide correct diagnostic reports',()=>{
194+
it('should provide correct diagnostic reports', () => {
152195

153-
const report = createSyntaxScriptDiagnosticReport('TEST_FILE.syx','keyword ruleis');
196+
const report = createSyntaxScriptDiagnosticReport('TEST_FILE.syx', 'keyword ruleis');
154197

155198
expect(report).to.be.a('object');
156199
expect(report).to.have.property('items').to.be.a('array').to.have.lengthOf(1);
157200
expect(report).to.have.property('kind').to.be.a('string').to.be.equal(DocumentDiagnosticReportKind.Full);
158201

159202
const diag = report.items[0];
160-
const item: Diagnostic = {message:'Expected \';\' after statement, found \'EOF\'.',range:{start:{line:0,character:0},end:{line:0,character:0}},severity:DiagnosticSeverity.Error,source:'syntax-script',data:[]};
203+
const item: Diagnostic = { message: 'Expected \';\' after statement, found \'EOF\'.', range: { start: { line: 0, character: 0 }, end: { line: 0, character: 0 } }, severity: DiagnosticSeverity.Error, source: 'syntax-script', data: [] };
161204

162205
expect(diag).to.have.property('message').to.be.a('string');
163206
expect(diag).to.have.property('range');

0 commit comments

Comments (0)