From af6c4003233713dc7372d41aacef286f6491a9b9 Mon Sep 17 00:00:00 2001
From: Ankur Goyal <ankrgyl@gmail.com>
Date: Thu, 1 Dec 2022 14:08:44 -0800
Subject: [PATCH] Add some tests

---
 src/parser.rs    | 27 +++++++++++++++++++++++++++
 src/tokenizer.rs | 20 +++++++++++++++++++-
 2 files changed, 46 insertions(+), 1 deletion(-)

diff --git a/src/parser.rs b/src/parser.rs
index b1c44cb75..b0933f660 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -6592,4 +6592,31 @@ mod tests {
             r#"UPDATE test SET name = $1, value = $2, where = $3, create = $4, is_default = $5, classification = $6, sort = $7 WHERE id = $8"#
         );
     }
+
+    #[test]
+    fn test_tokenizer_error_loc() {
+        let sql = "foo '";
+        let ast = Parser::parse_sql(&GenericDialect, sql);
+        assert_eq!(
+            ast,
+            Err(ParserError::TokenizerError(
+                "Unterminated string literal at Line: 1, Column 5".to_string()
+            ))
+        );
+    }
+
+    #[test]
+    fn test_parser_error_loc() {
+        // TODO: Once we thread token locations through the parser, we should update this
+        // test to assert the locations of the referenced token
+        let sql = "SELECT this is a syntax error";
+        let ast = Parser::parse_sql(&GenericDialect, sql);
+        assert_eq!(
+            ast,
+            Err(ParserError::ParserError(
+                "Expected [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: a"
+                    .to_string()
+            ))
+        );
+    }
 }
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 43ac14bc7..4da66c3ed 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1615,7 +1615,25 @@ mod tests {
         compare(expected, tokens);
     }
 
-    fn compare(expected: Vec<Token>, actual: Vec<Token>) {
+    #[test]
+    fn tokenize_with_location() {
+        let sql = "SELECT a,\n b";
+        let dialect = GenericDialect {};
+        let mut tokenizer = Tokenizer::new(&dialect, sql);
+        let tokens = tokenizer.tokenize_with_location().unwrap();
+        let expected = vec![
+            TokenWithLocation::new(Token::make_keyword("SELECT"), 1, 1),
+            TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 1, 7),
+            TokenWithLocation::new(Token::make_word("a", None), 1, 8),
+            TokenWithLocation::new(Token::Comma, 1, 9),
+            TokenWithLocation::new(Token::Whitespace(Whitespace::Newline), 1, 10),
+            TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 2, 1),
+            TokenWithLocation::new(Token::make_word("b", None), 2, 2),
+        ];
+        compare(expected, tokens);
+    }
+
+    fn compare<T: PartialEq + std::fmt::Debug>(expected: Vec<T>, actual: Vec<T>) {
        //println!("------------------------------");
        //println!("tokens = {:?}", actual);
        //println!("expected = {:?}", expected);