Skip to content

Commit 92639f3

Browse files
committed
add 12 cases to 'Compiler module should provide correct tokenization' (all passed)
1 parent e2b06d0 commit 92639f3

File tree

1 file changed

+113

-18

lines changed

src/test/compiler.test.ts

Lines changed: 113 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1-
import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType, isCompilerError } from '../types.js';
1+
import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, Token, TokenType, isCompilerError } from '../types.js';
22
import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, Range } from 'lsp-types';
33
import { describe, inst, it } from '@efekos/es-test/bin/testRunner.js';
44
import { tokenizeSys, tokenizeSyx } from '../lexer.js';
5+
import { HandlerFn } from '@efekos/es-test/bin/types.js';
56
import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js';
67
import { expect } from 'chai';
78
import { syxparser } from '../ast.js';
@@ -17,6 +18,14 @@ describe('Compiler module', () => {
1718
expect(r.end).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0);
1819
}
1920

21+
function tokenExpectations(t: Token) {
22+
expect(t).to.have.property('range').to.be.a('object');
23+
rangeExpectations(t.range);
24+
25+
expect(t).to.have.property('type').to.be.a('number').to.be.greaterThanOrEqual(0);
26+
expect(t).to.have.property('value').to.be.a('string').to.be.not.equal(undefined);
27+
}
28+
2029
it('should provide correct ranges', () => {
2130

2231
inst(() => {
@@ -72,23 +81,109 @@ describe('Compiler module', () => {
7281
}, true);
7382

7483
it('should provide correct tokenization', () => {
75-
const t = tokenizeSyx('class } > ) ] , compile "" export function global random import imports 1 keyword { < ( [ operator * rule ; \'\' | +s');
76-
const tList = [
77-
TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote,
78-
TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword,
79-
TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote,
80-
TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile
81-
];
82-
83-
expect(t).to.be.a('array');
84-
expect(t.map(tt => tt.type)).to.be.deep.equal(tList);
85-
86-
const sys = tokenizeSys('import "" \'\' ; :::');
87-
const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile];
88-
89-
expect(sys).to.be.a('array');
90-
expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList);
91-
});
84+
85+
function _case(src: string, types: TokenType[]): HandlerFn {
86+
return () => {
87+
const ts = tokenizeSyx(src);
88+
89+
expect(ts).to.be.a('array');
90+
ts.forEach(t => tokenExpectations(t));
91+
expect(ts.map(t => t.type)).to.be.deep.equal(types);
92+
};
93+
}
94+
95+
inst(
96+
_case('class } > ) ] , compile "" export function global random import imports 1 keyword { < ( [ operator * rule ; \'\' | +s', [
97+
TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote,
98+
TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword,
99+
TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote,
100+
TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile
101+
])
102+
);
103+
104+
inst(
105+
_case('class}>)],compile""exportfunctionglobalrandomimportimports1keyword{<([operator*rule;\'\'|+s', [
106+
TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote,
107+
TokenType.Identifier, TokenType.IntNumber, TokenType.KeywordKeyword, TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw,
108+
TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile
109+
])
110+
);
111+
112+
inst(
113+
_case(
114+
'+s+s+s+s+s+s+s',
115+
[TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.EndOfFile]
116+
)
117+
);
118+
119+
inst(
120+
_case(
121+
'operator <int>"+"<int> {',
122+
[TokenType.OperatorKeyword,TokenType.OpenDiamond,TokenType.Identifier,TokenType.CloseDiamond,TokenType.DoubleQuote,TokenType.Raw,TokenType.DoubleQuote,TokenType.OpenDiamond,TokenType.Identifier,TokenType.CloseDiamond,TokenType.OpenBrace,TokenType.EndOfFile]
123+
)
124+
);
125+
126+
inst(
127+
_case(
128+
'o-+?',
129+
[TokenType.Identifier,TokenType.Raw,TokenType.Raw,TokenType.Raw,TokenType.EndOfFile]
130+
)
131+
);
132+
133+
inst(
134+
_case(
135+
'rmh09345kg9',
136+
[TokenType.Identifier,TokenType.IntNumber,TokenType.Identifier,TokenType.IntNumber, TokenType.EndOfFile]
137+
)
138+
);
139+
140+
inst(
141+
_case(
142+
'rule \'custom-random-rule?\';',
143+
[TokenType.RuleKeyword,TokenType.SingleQuote,20,20,20,20,20,20,TokenType.SingleQuote,TokenType.Semicolon, TokenType.EndOfFile]
144+
)
145+
);
146+
147+
inst(
148+
_case(
149+
'keyword pray;rule\'imports-keyword\': pray;',
150+
[TokenType.KeywordKeyword,TokenType.Identifier,TokenType.Semicolon,TokenType.RuleKeyword,TokenType.SingleQuote,20,20,20,TokenType.SingleQuote,TokenType.Raw,TokenType.Identifier,TokenType.Semicolon, TokenType.EndOfFile]
151+
)
152+
);
153+
154+
inst(
155+
_case(
156+
'çş',
157+
[TokenType.Raw,TokenType.Raw, TokenType.EndOfFile]
158+
)
159+
);
160+
161+
inst(
162+
_case(
163+
'keyword altınasıçĞ;',
164+
[TokenType.KeywordKeyword,TokenType.Identifier,20,TokenType.Identifier,20,20,20,TokenType.Semicolon, TokenType.EndOfFile]
165+
)
166+
);
167+
168+
inst(
169+
_case(
170+
'keyword imsodonewiththistest12casesisenough',
171+
[TokenType.KeywordKeyword,TokenType.Identifier,TokenType.IntNumber,TokenType.Identifier, TokenType.EndOfFile]
172+
)
173+
);
174+
175+
inst(() => {
176+
177+
const sys = tokenizeSys('import "" \'\' ; :::');
178+
const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile];
179+
180+
sys.forEach(t => tokenExpectations(t));
181+
expect(sys).to.be.a('array');
182+
expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList);
183+
184+
});
185+
186+
}, true);
92187

93188
describe('should provide correct parsing', () => {
94189

0 commit comments

Comments (0)