diff --git a/core/src/idx/ft/analyzer/mod.rs b/core/src/idx/ft/analyzer/mod.rs
index 26a3be3d..d579866f 100644
--- a/core/src/idx/ft/analyzer/mod.rs
+++ b/core/src/idx/ft/analyzer/mod.rs
@@ -278,13 +278,16 @@ impl Analyzer {
 				});
 			}
 		}
-		if let Some(t) = &self.az.tokenizers {
-			if !input.is_empty() {
-				let t = Tokenizer::tokenize(t, input);
-				return Filter::apply_filters(t, &self.filters, stage);
-			}
+		if input.is_empty() {
+			return Ok(Tokens::new(input));
 		}
-		Ok(Tokens::new(input))
+
+		let tokens = if let Some(t) = &self.az.tokenizers {
+			Tokenizer::tokenize(t, input)
+		} else {
+			Tokenizer::tokenize(&[], input)
+		};
+		Filter::apply_filters(tokens, &self.filters, stage)
 	}
 
 	/// Used for exposing the analyzer as the native function `search::analyze`
@@ -351,4 +354,9 @@ mod tests {
 		let tokens = get_analyzer_tokens(def, input).await;
 		assert_eq!(tokens.list(), expected);
 	}
+
+	#[tokio::test]
+	async fn test_no_tokenizer() {
+		test_analyzer("ANALYZER test FILTERS lowercase", "ab", &["ab"]).await;
+	}
 }
diff --git a/sdk/tests/matches.rs b/sdk/tests/matches.rs
index aeec7e3f..e54c81f6 100644
--- a/sdk/tests/matches.rs
+++ b/sdk/tests/matches.rs
@@ -1,7 +1,7 @@
 mod parse;
 use parse::Parse;
 mod helpers;
-use crate::helpers::skip_ok;
+use crate::helpers::{skip_ok, Test};
 use helpers::new_ds;
 use surrealdb::dbs::Session;
 use surrealdb::err::Error;
@@ -704,3 +704,17 @@ async fn select_where_matches_mixing_indexes() -> Result<(), Error> {
 	assert_eq!(format!("{:#}", tmp), format!("{:#}", val));
 	Ok(())
 }
+
+#[tokio::test]
+async fn select_where_matches_analyser_without_tokenizer() -> Result<(), Error> {
+	let sql = r"
+		DEFINE ANALYZER az FILTERS lowercase,ngram(1,5);
+		CREATE t:1 SET text = 'ab';
+		DEFINE INDEX search_idx ON TABLE t COLUMNS text SEARCH ANALYZER az BM25 HIGHLIGHTS;
+		SELECT * FROM t WHERE text @@ 'a';";
+	let mut t = Test::new(sql).await?;
+	t.expect_size(4)?;
+	t.skip_ok(3)?;
+	t.expect_val("[{ id: t:1, text: 'ab' }]")?;
+	Ok(())
+}