Commit 70416a0

Optimize vec[] creation (#545)
1 parent f4be889 commit 70416a0

File tree: 2 files changed (+25 −17 lines)

  src/filters/network.rs
  src/utils.rs


src/filters/network.rs

Lines changed: 3 additions & 8 deletions

@@ -917,10 +917,7 @@ impl NetworkFilter {
                         (self.is_plain() || self.is_regex()) && !self.is_right_anchor();
                     let skip_first_token = self.is_right_anchor();
 
-                    let filter_tokens =
-                        utils::tokenize_filter(f, skip_first_token, skip_last_token);
-
-                    tokens.extend(filter_tokens);
+                    utils::tokenize_filter_to(f, skip_first_token, skip_last_token, &mut tokens);
                 }
             }
             FilterPart::AnyOf(_) => (), // across AnyOf set of filters no single token is guaranteed to match to a request
@@ -930,16 +927,14 @@ impl NetworkFilter {
         // Append tokens from hostname, if any
         if !self.mask.contains(NetworkFilterMask::IS_HOSTNAME_REGEX) {
            if let Some(hostname) = self.hostname.as_ref() {
-                let hostname_tokens = utils::tokenize(hostname);
-                tokens.extend(hostname_tokens);
+                utils::tokenize_to(hostname, &mut tokens);
            }
        }
 
        if tokens.is_empty() && self.mask.contains(NetworkFilterMask::IS_REMOVEPARAM) {
            if let Some(removeparam) = &self.modifier_option {
                if VALID_PARAM.is_match(removeparam) {
-                    let param_tokens = utils::tokenize(&removeparam.to_ascii_lowercase());
-                    tokens.extend(param_tokens);
+                    utils::tokenize_to(&removeparam.to_ascii_lowercase(), &mut tokens);
                }
            }
        }
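Both hunks apply the same pattern: instead of calling a tokenizer that allocates and returns a fresh Vec<Hash>, which the caller then copies into `tokens` via `extend`, the call sites now pass their existing buffer to a `*_to` variant that appends in place, saving one Vec allocation and copy per call. A minimal sketch of the before/after shape; the `Hash` alias and toy tokenizer body below are illustrative stand-ins, not the crate's real `fast_tokenizer_no_regex`:

    type Hash = u64;

    // Before: each call allocates a temporary Vec that the caller copies and drops.
    fn tokenize(pattern: &str) -> Vec<Hash> {
        let mut out = Vec::new();
        tokenize_to(pattern, &mut out);
        out
    }

    // After: append straight into the caller's buffer; no intermediate Vec.
    fn tokenize_to(pattern: &str, out: &mut Vec<Hash>) {
        // Toy stand-in for the real tokenizer: "hash" each whitespace-separated word.
        out.extend(pattern.split_whitespace().map(|w| w.len() as Hash));
    }

    fn main() {
        let mut tokens: Vec<Hash> = Vec::with_capacity(16);
        // Old call shape: tokens.extend(tokenize("a bb ccc"));
        // New call shape:
        tokenize_to("a bb ccc", &mut tokens);
        assert_eq!(tokens, vec![1, 2, 3]);
    }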

src/utils.rs

Lines changed: 22 additions & 9 deletions

@@ -81,32 +81,45 @@ pub(crate) fn tokenize_pooled(pattern: &str, tokens_buffer: &mut Vec<Hash>) {
 
 pub fn tokenize(pattern: &str) -> Vec<Hash> {
     let mut tokens_buffer: Vec<Hash> = Vec::with_capacity(TOKENS_BUFFER_SIZE);
-    fast_tokenizer_no_regex(
-        pattern,
-        &is_allowed_filter,
-        false,
-        false,
-        &mut tokens_buffer,
-    );
+    tokenize_to(pattern, &mut tokens_buffer);
     tokens_buffer
 }
 
+pub(crate) fn tokenize_to(pattern: &str, tokens_buffer: &mut Vec<Hash>) {
+    fast_tokenizer_no_regex(pattern, &is_allowed_filter, false, false, tokens_buffer);
+}
+
+#[cfg(test)]
 pub(crate) fn tokenize_filter(
     pattern: &str,
     skip_first_token: bool,
     skip_last_token: bool,
 ) -> Vec<Hash> {
     let mut tokens_buffer: Vec<Hash> = Vec::with_capacity(TOKENS_BUFFER_SIZE);
-    fast_tokenizer_no_regex(
+    tokenize_filter_to(
         pattern,
-        &is_allowed_filter,
         skip_first_token,
         skip_last_token,
         &mut tokens_buffer,
     );
     tokens_buffer
 }
 
+pub(crate) fn tokenize_filter_to(
+    pattern: &str,
+    skip_first_token: bool,
+    skip_last_token: bool,
+    tokens_buffer: &mut Vec<Hash>,
+) {
+    fast_tokenizer_no_regex(
+        pattern,
+        &is_allowed_filter,
+        skip_first_token,
+        skip_last_token,
+        tokens_buffer,
+    );
+}
+
 pub(crate) fn bin_lookup<T: Ord>(arr: &[T], elt: T) -> bool {
     arr.binary_search(&elt).is_ok()
 }
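Note that the public `tokenize` keeps its signature and now simply delegates to the new `tokenize_to`, while the old `tokenize_filter` is demoted to `#[cfg(test)]`-only, since non-test code calls `tokenize_filter_to` directly. The payoff of the `_to` shape shows up when many patterns feed a single buffer. A hedged sketch of such a caller, assuming in-crate code (the `_to` functions are `pub(crate)`) and the crate's `Hash` alias from `utils`; this helper is hypothetical, not part of this commit:

    use crate::utils::{tokenize_to, Hash};

    // Hypothetical in-crate helper: collect tokens for a batch of patterns
    // into one Vec, reusing a single allocation instead of one Vec per pattern.
    fn tokens_for_patterns(patterns: &[&str]) -> Vec<Hash> {
        let mut tokens: Vec<Hash> = Vec::with_capacity(patterns.len() * 4);
        for pattern in patterns {
            tokenize_to(pattern, &mut tokens); // appends in place
        }
        tokens
    }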
