Skip to content

Commit 133d931

Browse files
committed
replace logos with nom
nom is more lightweight and more suitable for this task
1 parent dbcfa62 commit 133d931

File tree

3 files changed

+75
-27
lines changed

3 files changed

+75
-27
lines changed

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ thiserror = "2.0.17"
1616
ropey = "1.6.1"
1717
miniscript = "12"
1818
simplicityhl = { git = "https://github.com/BlockstreamResearch/SimplicityHL.git", rev = "e68e1c6" }
19-
logos = "0.15.1"
19+
nom = "8.0.0"
2020

2121
[lints.rust]
2222
unsafe_code = "deny"

src/completion/mod.rs

Lines changed: 19 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
use logos::Logos;
21
use simplicityhl::parse::Function;
32

43
pub mod builtin;
@@ -12,6 +11,7 @@ use tower_lsp_server::lsp_types::{
1211
};
1312

1413
use tokens::Token;
14+
use tokens::lex_tokens;
1515

1616
/// Build and provide [`CompletionItem`] for jets and builtin functions.
1717
#[derive(Debug)]
@@ -88,18 +88,33 @@ impl CompletionProvider {
8888
prefix: &str,
8989
functions: &[(&Function, &str)],
9090
) -> Option<Vec<CompletionItem>> {
91-
let mut tokens: Vec<Token> = Token::lexer(prefix).filter_map(Result::ok).collect();
92-
tokens.reverse();
91+
let tokens = match lex_tokens(prefix) {
92+
Ok((_, mut t)) => {
93+
t.reverse();
94+
t
95+
}
96+
Err(_) => return None,
97+
};
9398

9499
match tokens.as_slice() {
95100
[Token::Jet, ..] => Some(self.jets.clone()),
96101

102+
// Case for ": type = <", so we can return completion for specific type, or generic one
103+
// if it is not on default type casts.
97104
[
98105
Token::OpenAngle,
99106
Token::EqualSign,
100107
Token::Identifier(type_name),
101108
Token::Colon,
102109
..,
110+
]
111+
| [
112+
Token::Identifier(_) | Token::OpenBracket,
113+
Token::OpenAngle,
114+
Token::EqualSign,
115+
Token::Identifier(type_name),
116+
Token::Colon,
117+
..,
103118
] => {
104119
let to = type_name.as_str();
105120

@@ -117,6 +132,7 @@ impl CompletionProvider {
117132
Some(self.type_casts.clone())
118133
}
119134

135+
// Case for ">::" -- this structure is only present for into keyword.
120136
[Token::DoubleColon, Token::CloseAngle, ..] => Some(vec![CompletionItem {
121137
label: "into".to_string(),
122138
kind: Some(CompletionItemKind::FUNCTION),
@@ -131,11 +147,6 @@ impl CompletionProvider {
131147

132148
_ => {
133149
let mut completions = CompletionProvider::get_function_completions(functions);
134-
// return only function completions in case of '<' symbol in `for_while`, `array_fold` and
135-
// `fold`
136-
if prefix.ends_with('<') {
137-
return Some(completions);
138-
}
139150

140151
completions.extend_from_slice(&self.builtin);
141152
completions.extend_from_slice(&self.modules);

src/completion/tokens.rs

Lines changed: 55 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,65 @@
1-
use logos::Logos;
1+
use nom::{
2+
IResult, Parser,
3+
branch::alt,
4+
bytes::complete::{tag, take_while},
5+
character::complete::{multispace0, satisfy},
6+
combinator::{map, opt, recognize, value},
7+
multi::many0,
8+
sequence::{pair, preceded},
9+
};
210

3-
#[derive(Logos, Debug, PartialEq, Clone)]
4-
#[logos(skip r"[ \t\r\n\f]+")]
11+
/// Tokens produced by lexing the text that precedes the cursor,
/// used to decide which completion items to offer.
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
    /// `:`
    Colon,
    /// `::`
    DoubleColon,
    /// `<`
    OpenAngle,
    /// `>`
    CloseAngle,
    /// `=`
    EqualSign,
    /// `(`
    OpenBracket,
    /// `)`
    ClosedBracket,
    /// A word: a letter or `_` followed by alphanumerics or `_`.
    Identifier(String),
    /// The `jet::` prefix, optionally followed by a (partial) jet name.
    Jet,
}
23+
24+
fn parse_symbol(input: &str) -> IResult<&str, Token> {
25+
let mut parser = alt((
26+
value(Token::DoubleColon, tag("::")),
27+
value(Token::Colon, tag(":")),
28+
value(Token::OpenBracket, tag("(")),
29+
value(Token::ClosedBracket, tag(")")),
30+
value(Token::OpenAngle, tag("<")),
31+
value(Token::CloseAngle, tag(">")),
32+
value(Token::EqualSign, tag("=")),
33+
));
34+
parser.parse(input)
35+
}
36+
37+
fn parse_jet(input: &str) -> IResult<&str, Token> {
38+
let mut parser = value(
39+
Token::Jet,
40+
recognize(pair(
41+
tag("jet::"),
42+
opt(take_while(|c: char| c.is_alphanumeric() || c == '_')),
43+
)),
44+
);
45+
parser.parse(input)
46+
}
47+
48+
fn parse_identifier(input: &str) -> IResult<&str, Token> {
49+
let mut parser = map(
50+
recognize(pair(
51+
satisfy(|c| c.is_alphabetic() || c == '_'),
52+
take_while(|c: char| c.is_alphanumeric() || c == '_'),
53+
)),
54+
|s: &str| Token::Identifier(s.to_string()),
55+
);
56+
parser.parse(input)
57+
}
58+
59+
pub fn lex_tokens(input: &str) -> IResult<&str, Vec<Token>> {
60+
let mut parser = many0(preceded(
61+
multispace0,
62+
alt((parse_jet, parse_symbol, parse_identifier)),
63+
));
64+
parser.parse(input)
65+
}

0 commit comments

Comments
 (0)