(* Test suite for the lexer (Lex module). Each test tokenizes a small
   source snippet and asserts on the resulting token stream. *)

open Lex
open Error
(* Lexing a minimal binding must yield exactly the expected token kinds. *)
let test_simple_tokens () =
  let ctx = create_context () in
  let kinds =
    create ~filename:"<test>" "let x = 42"
    |> tokenize ctx
    |> List.map fst
  in
  assert (kinds = [LET; IDENT "x"; EQ; INT 42L; EOF]);
  print_endline "test_simple_tokens"
(* Every operator spelling must map to its dedicated token constructor. *)
let test_operators () =
  let ctx = create_context () in
  let kinds =
    create ~filename:"<test>" "+ - * / % == != < <= > >= && || :: ++ |> >>"
    |> tokenize ctx
    |> List.map fst
  in
  let expected =
    [ PLUS; MINUS; STAR; SLASH; PERCENT
    ; EQ; NE; LT; LE; GT; GE
    ; AND; OR; CONS; CONCAT; PIPE; COMPOSE; EOF ]
  in
  assert (kinds = expected);
  print_endline "test_operators"
(* Three string literals (plain, escaped newline, non-ASCII) plus EOF
   should produce exactly four tokens. *)
let test_string_literals () =
  let ctx = create_context () in
  let kinds =
    create ~filename:"<test>" "\"hello\" \"world\\n\" \"emoji: 🌟\""
    |> tokenize ctx
    |> List.map fst
  in
  assert (List.length kinds = 4);
  print_endline "test_string_literals"
(* Decimal, float, hex, octal, and binary literals: one FLOAT among four
   INTs, in source order, terminated by EOF. *)
let test_numeric_literals () =
  let ctx = create_context () in
  let kinds =
    create ~filename:"<test>" "42 3.14 0xFF 0o77 0b1010"
    |> tokenize ctx
    |> List.map fst
  in
  let shape_ok =
    match kinds with
    | [INT _; FLOAT _; INT _; INT _; INT _; EOF] -> true
    | _ -> false
  in
  assert shape_ok;
  print_endline "test_numeric_literals"
(* Line (//) and block (/* */) comments must be skipped entirely,
   leaving only the code tokens. *)
let test_comments () =
  let ctx = create_context () in
  let kinds =
    create ~filename:"<test>" "let // comment\nx = /* block */ 42"
    |> tokenize ctx
    |> List.map fst
  in
  assert (kinds = [LET; IDENT "x"; EQ; INT 42L; EOF]);
  print_endline "test_comments"
(* Reserved words must lex as keyword tokens, never as identifiers. *)
let test_keywords () =
  let ctx = create_context () in
  let kinds =
    create ~filename:"<test>" "let rec in fn if then else match with"
    |> tokenize ctx
    |> List.map fst
  in
  assert (kinds = [LET; REC; IN; FN; IF; THEN; ELSE; MATCH; WITH; EOF]);
  print_endline "test_keywords"
(* Lowercase names (with underscores/digits) lex as IDENT; a leading
   capital produces UIDENT. *)
let test_identifiers () =
  let ctx = create_context () in
  let kinds =
    create ~filename:"<test>" "foo bar_baz qux123 CamelCase"
    |> tokenize ctx
    |> List.map fst
  in
  let shape_ok =
    match kinds with
    | [IDENT "foo"; IDENT "bar_baz"; IDENT "qux123"; UIDENT "CamelCase"; EOF] ->
      true
    | _ -> false
  in
  assert shape_ok;
  print_endline "test_identifiers"
(* Run every lexer test in order; each test asserts internally and
   prints its own name on success. *)
let run_tests () =
  print_endline "\n=== Lexer Tests ===";
  List.iter
    (fun test -> test ())
    [ test_simple_tokens
    ; test_operators
    ; test_string_literals
    ; test_numeric_literals
    ; test_comments
    ; test_keywords
    ; test_identifiers ];
  print_endline "All lexer tests passed!\n"
(* Entry point: execute the suite when this file is built as a test binary. *)
let () =
  run_tests ()