
Commit afa5ca1

create a test for compiler statements (3 failed)

1 parent 35159a4

File tree

1 file changed: +139 −0 lines


src/test/compiler.test.ts

Lines changed: 139 additions & 0 deletions
@@ -0,0 +1,139 @@
import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType } from '../types.js';
import { describe, it } from '@efekos/es-test/bin/testRunner.js';
import { tokenizeSys, tokenizeSyx } from '../lexer.js';
import { expect } from 'chai';
import { syxparser } from '../ast.js';
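
// Covers the lexer and parser stages used by the compiler: token ranges, token types, and statement parsing.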
describe('Compiler module', () => {
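
    // Every token produced by tokenizeSyx should carry a range with numeric line/character bounds.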
    it('should provide correct ranges', () => {

        const tokens = tokenizeSyx('keyword hello;');
        const r = tokens[0].range;

        expect(r).to.have.property('start').to.be.a('object');
        expect(r).to.have.property('end').to.be.a('object');
        expect(r.start).to.have.property('line').to.be.a('number');
        expect(r.start).to.have.property('character').to.be.a('number');
        expect(r.end).to.have.property('character').to.be.a('number');
        expect(r.end).to.have.property('line').to.be.a('number');
        expect(r).to.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 0 } });

    });
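
    // One sample of every syx token type, expected back in the same order, plus a smaller pass over the sys tokenizer.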
    it('should provide correct tokenization', () => {
        const t = tokenizeSyx('class } > ) ] , compile " export function global random import imports 1 keyword { < ( [ operator * rule ; \' | +s');
        const tList = [
            TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote,
            TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword,
            TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote,
            TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile
        ];

        expect(t).to.be.a('array');
        expect(t).to.have.lengthOf(tList.length);
        expect(t.map(tt => tt.type)).to.be.deep.equal(tList);

        const sys = tokenizeSys('import \' " ; :::');
        const sysList = [TokenType.ImportKeyword, TokenType.SingleQuote, TokenType.DoubleQuote, TokenType.Semicolon, TokenType.EndOfFile];

        expect(sys).to.be.a('array');
        expect(sys).to.have.lengthOf(sysList.length);
        expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList);
    });
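
    // Each case below parses one statement and deep-compares the resulting node against a hand-built AST literal.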
    describe('should provide correct parsing', () => {
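
        // Shared shape assertions: every parse result should be a Program node with no modifiers, exactly one body statement, and a numeric range.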
        function astTypeExpectations(ast: ProgramStatement) {
            expect(ast).to.be.a('object');
            expect(ast).to.have.property('type').to.be.a('number').to.be.equal(NodeType.Program);
            expect(ast).to.have.property('modifiers').to.be.a('array').to.have.lengthOf(0);
            expect(ast).to.have.property('body').to.be.a('array').to.have.lengthOf(1);
            expect(ast).to.have.property('range').to.be.a('object');
            expect(ast.range).to.have.property('start').to.be.a('object');
            expect(ast.range).to.have.property('end').to.be.a('object');
            expect(ast.range.start).to.have.property('line').to.be.a('number');
            expect(ast.range.start).to.have.property('character').to.be.a('number');
            expect(ast.range.end).to.have.property('line').to.be.a('number');
            expect(ast.range.end).to.have.property('character').to.be.a('number');
        }
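
        // keyword ruleish; -> a Keyword node carrying the word 'ruleish'.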
        it('for keyword statements', () => {

            const tokens = tokenizeSyx('keyword ruleish;');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
            const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 14 }, start: { line: 1, character: 0 } }, word: 'ruleish' };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });
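
        // rule 'function-value-return-enabled': true; -> a Rule node with the rule name and its raw value.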
        it('for rule statements', () => {

            const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
            const stmt: RuleStatement = { range: { start: { line: 1, character: 0 }, end: { line: 1, character: 41 } }, modifiers: [], rule: 'function-value-return-enabled', value: 'true', type: NodeType.Rule };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });
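
        // compile(ts,js) 'test'; -> a Compile node with two target formats and a single string expression in its body.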
        it('for compile statements', () => {

            const tokens = tokenizeSyx('compile(ts,js) \'test\';');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
            const stmt: CompileStatement = { range: { start: { line: 1, character: 0 }, end: { line: 1, character: 20 } }, formats: ['ts', 'js'], type: NodeType.Compile, modifiers: [], body: [{ type: NodeType.String, modifiers: [], range: { start: { line: 1, character: 15 }, end: { line: 1, character: 20 } }, value: 'test' }] };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });
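
        // imports(ts,js) 'math'; -> an Imports node naming the module pulled in for the given formats.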
        it('for imports statements', () => {

            const tokens = tokenizeSyx('imports(ts,js) \'math\';');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
            const stmt: ImportsStatement = { range: { start: { line: 1, character: 0 }, end: { line: 1, character: 20 } }, formats: ['ts', 'js'], type: NodeType.Imports, modifiers: [], module: 'math' };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });
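
        // global randomizer {} -> a Global node with a name and an empty body.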
        it('for global statements', () => {

            const tokens = tokenizeSyx('global randomizer {}');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
            const stmt: GlobalStatement = { range: { start: { line: 1, character: 0 }, end: { line: 1, character: 19 } }, name: 'randomizer', type: NodeType.Global, modifiers: [], body: [] };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });
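
        // function randomizer <int> {} -> a Function node with one argument type and an empty body.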
        it('for function statements', () => {

            const tokens = tokenizeSyx('function randomizer <int> {}');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
            const stmt: FunctionStatement = { range: { start: { line: 1, character: 0 }, end: { line: 1, character: 27 } }, name: 'randomizer', type: NodeType.Function, modifiers: [], body: [], arguments: ['int'] };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });
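
        // import './math'; -> an Import node carrying the imported file path.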
        it('for import statements', () => {

            const tokens = tokenizeSyx('import \'./math\';');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
            const stmt: ImportStatement = { range: { start: { line: 1, character: 0 }, end: { line: 1, character: 14 } }, type: NodeType.Import, modifiers: [], path: './math' };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });
    });
});

0 commit comments
