// surrealpatch/lib/src/sql/tokenizer.rs
use crate::sql::common::commas;
use crate::sql::error::IResult;
use nom::branch::alt;
use nom::bytes::complete::tag_no_case;
use nom::combinator::map;
use nom::multi::separated_list1;
use revision::revisioned;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::fmt::Display;
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[revisioned(revision = 1)]
2023-05-10 02:08:09 +00:00
pub enum Tokenizer {
Blank,
Camel,
Class,
Punct,
2023-05-10 02:08:09 +00:00
}
impl Display for Tokenizer {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match self {
Self::Blank => "BLANK",
Self::Camel => "CAMEL",
Self::Class => "CLASS",
Self::Punct => "PUNCT",
2023-05-10 02:08:09 +00:00
})
}
}
fn tokenizer(i: &str) -> IResult<&str, Tokenizer> {
let (i, t) = alt((
map(tag_no_case("BLANK"), |_| Tokenizer::Blank),
map(tag_no_case("CAMEL"), |_| Tokenizer::Camel),
map(tag_no_case("CLASS"), |_| Tokenizer::Class),
map(tag_no_case("PUNCT"), |_| Tokenizer::Punct),
2023-05-10 02:08:09 +00:00
))(i)?;
Ok((i, t))
}
pub(super) fn tokenizers(i: &str) -> IResult<&str, Vec<Tokenizer>> {
separated_list1(commas, tokenizer)(i)
2023-05-10 02:08:09 +00:00
}