import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType } from '../types.js';
import { describe, it } from '@efekos/es-test/bin/testRunner.js';
import { tokenizeSys, tokenizeSyx } from '../lexer.js';
+import { Range } from 'lsp-types';
import { expect } from 'chai';
import { syxparser } from '../ast.js';

describe('Compiler module', () => {

+    function rangeExpectations(r: Range) {
+        expect(r).to.have.property('start').to.be.a('object');
+        expect(r).to.have.property('end').to.be.a('object');
+        expect(r.start).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0);
+        expect(r.start).to.have.property('character').to.be.a('number').to.be.greaterThanOrEqual(0);
+        expect(r.end).to.have.property('character').to.be.a('number').to.be.greaterThanOrEqual(0);
+        expect(r.end).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0);
+    }
+
    it('should provide correct ranges', () => {

        const tokens = tokenizeSyx('keyword hello;');
-        const r = tokens[0].range;

-        expect(r).to.have.property('start').to.be.a('object');
-        expect(r).to.have.property('end').to.be.a('object');
-        expect(r.start).to.have.property('line').to.be.a('number');
-        expect(r.start).to.have.property('character').to.be.a('number');
-        expect(r.end).to.have.property('character').to.be.a('number');
-        expect(r.end).to.have.property('line').to.be.a('number');
-        expect(r).to.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 0 } });
+        rangeExpectations(tokens[0].range);
+        expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } });
+        expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } });
+        expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } });

    });

@@ -44,25 +50,20 @@ describe('Compiler module', () => {

    describe('should provide correct parsing', () => {

-        function astTypeExpectations(ast:ProgramStatement) {
+        function astTypeExpectations(ast: ProgramStatement) {
            expect(ast).to.be.a('object');
            expect(ast).to.have.property('type').to.be.a('number').to.be.equal(NodeType.Program);
            expect(ast).to.have.property('modifiers').to.be.a('array').to.have.lengthOf(0);
            expect(ast).to.have.property('body').to.be.a('array').to.have.lengthOf(1);
            expect(ast).to.have.property('range').to.be.a('object');
-            expect(ast.range).to.have.property('start').to.be.a('object');
-            expect(ast.range).to.have.property('end').to.be.a('object');
-            expect(ast.range.start).to.have.property('line').to.be.a('number');
-            expect(ast.range.start).to.have.property('character').to.be.a('number');
-            expect(ast.range.end).to.have.property('line').to.be.a('number');
-            expect(ast.range.end).to.have.property('character').to.be.a('number');
+            rangeExpectations(ast.range);
        }

        it('for keyword statements', () => {

            const tokens = tokenizeSyx('keyword ruleish;');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
-            const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 15 }, start: { line: 1, character: 0 } }, word: 'ruleish' };
+            const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 16 }, start: { line: 1, character: 1 } }, word: 'ruleish' };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);
@@ -73,62 +74,62 @@ describe('Compiler module', () => {

            const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;');
            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
-            const stmt:RuleStatement = { range:{ start:{ line:1, character:0 }, end:{ line:1, character:42 } }, modifiers:[], rule:'function-value-return-enabled', value:'true', type:NodeType.Rule };
+            const stmt: RuleStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 43 } }, modifiers: [], rule: 'function-value-return-enabled', value: 'true', type: NodeType.Rule };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });

-        it('for compile statements', () => {
+        it('for compile statements', () => {

            const tokens = tokenizeSyx('compile(ts,js) \'test\';');
-            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
-            const stmt:CompileStatement = { range:{ start:{ line:1, character:0 }, end:{ line:1, character:21 } }, formats:['ts', 'js'], type:NodeType.Compile, modifiers:[], body:[{ type:NodeType.String, modifiers:[], range:{ start:{ line:1, character:15 }, end:{ line:1, character:20 } }, value:'test' }] };
+            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
+            const stmt: CompileStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: ['ts', 'js'], type: NodeType.Compile, modifiers: [], body: [{ type: NodeType.String, modifiers: [], range: { start: { line: 1, character: 16 }, end: { line: 1, character: 22 } }, value: 'test' }] };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });

-        it('for imports statements', () => {
+        it('for imports statements', () => {

            const tokens = tokenizeSyx('imports(ts,js) \'math\';');
-            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
-            const stmt:ImportsStatement = { range:{ start:{ line:1, character:0 }, end:{ line:1, character:21 } }, formats:['ts', 'js'], type:NodeType.Imports, modifiers:[], module:'math' };
+            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
+            const stmt: ImportsStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: ['ts', 'js'], type: NodeType.Imports, modifiers: [], module: 'math' };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });

-        it('for global statements', () => {
+        it('for global statements', () => {

            const tokens = tokenizeSyx('global randomizer {}');
-            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
-            const stmt:GlobalStatement = { range:{ start:{ line:1, character:0 }, end:{ line:1, character:20 } }, name:'randomizer', type:NodeType.Global, modifiers:[], body:[] };
-
+            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
+            const stmt: GlobalStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 21 } }, name: 'randomizer', type: NodeType.Global, modifiers: [], body: [] };
+
            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });

-        it('for function statements', () => {
+        it('for function statements', () => {

            const tokens = tokenizeSyx('function randomizer <int> {}');
-            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
-            const stmt:FunctionStatement = { range:{ start:{ line:1, character:0 }, end:{ line:1, character:28 } }, name:'randomizer', type:NodeType.Function, modifiers:[], body:[], arguments:['int'] };
-
+            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
+            const stmt: FunctionStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 29 } }, name: 'randomizer', type: NodeType.Function, modifiers: [], body: [], arguments: ['int'] };
+
            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);

        });

-        it('for import statements', () => {
+        it('for import statements', () => {

            const tokens = tokenizeSyx('import \'./math\';');
-            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
-            const stmt:ImportStatement = { range:{ start:{ line:1, character:0 }, end:{ line:1, character:15 } }, type:NodeType.Import, modifiers:[], path:'./math' };
+            const ast = syxparser.parseTokens(tokens, 'TEST_FILE');
+            const stmt: ImportStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 16 } }, type: NodeType.Import, modifiers: [], path: './math' };

            astTypeExpectations(ast);
            expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt);
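
The updated literals above appear to assume the lexer now reports 1-based, end-exclusive ranges: `start` points at a token's first character and `end` one column past its last. A minimal sketch of that indexing, reusing the `'keyword hello;'` input from the first test (illustration only, not part of the commit; token order and destructuring are assumptions):

import { tokenizeSyx } from '../lexer.js';

// 'keyword hello;'  with 1-based columns:
//  k e y w o r d _ h e  l  l  o  ;
//  1 2 3 4 5 6 7 8 9 10 11 12 13 14
const [kw, word, semi] = tokenizeSyx('keyword hello;');
console.log(kw.range);   // { start: { line: 1, character: 1 },  end: { line: 1, character: 8 } }  -> 'keyword'
console.log(word.range); // { start: { line: 1, character: 9 },  end: { line: 1, character: 14 } } -> 'hello'
console.log(semi.range); // { start: { line: 1, character: 14 }, end: { line: 1, character: 15 } } -> ';'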