SeaLang/src/bin/tokenizer.rs
2024-12-22 19:01:22 +01:00

56 lines
1.6 KiB
Rust

use std::{io::Read, path::PathBuf};
use compiler::*;
use lexer::SourceLocation;
/// `sea-tokens`: tokenize a Sea source file (or stdin when no `-i` path is
/// given) and print one `token@[location]` line per token to stdout.
/// On lexer errors, print a diagnostic header plus a squiggle-underlined
/// source excerpt for each error to stderr.
fn main() {
    let cmd = clap::Command::new("sea-tokens").bin_name("sea-tokens").arg(
        clap::Arg::new("input")
            .short('i')
            .help("sea source file.")
            .value_parser(clap::builder::PathBufValueParser::new()),
    );
    let matches = cmd.get_matches();
    let path = matches.get_one::<PathBuf>("input");
    // Prefer the file named by `-i`; fall back to stdin when the flag is
    // absent (or the file cannot be read).
    let source = path
        .and_then(|p| std::fs::read(p).ok())
        .or_else(|| {
            let mut buf = Vec::new();
            // BUG FIX: `Read::read` fills at most `buf.len()` bytes, which is
            // zero for a fresh Vec — the fallback previously always produced
            // an empty source. `read_to_end` drains all of stdin.
            std::io::stdin().read_to_end(&mut buf).ok()?;
            Some(buf)
        })
        .expect("no source bytes.");
    let tokens = tokenize(&source);
    match tokens {
        Ok(tokens) => {
            for tok in tokens.iter() {
                println!("{}@[{}]", tok.token(), tok.source_location());
            }
        }
        Err((tokens, errors)) => {
            // Header reads: "N errors while tokenizing <path-or-stdin>:"
            // BUG FIX: trailing space added so the count doesn't run into
            // the file name ("...tokenizingfoo.sea").
            eprint!("{} errors while tokenizing ", errors.len());
            match path {
                Some(path) => {
                    eprint!("{}", path.display());
                }
                None => {
                    eprint!("stdin");
                }
            }
            eprintln!(":");
            for error in &errors {
                // Pull the offending line(s) and the span within them so the
                // squiggle printer can underline the error range.
                let lines =
                    tokens.src().get_lines(error.range.start, error.range.end);
                let location = tokens
                    .src()
                    .get_source_span(error.range.start, error.range.end);
                eprintln!("Error: {}", error.err);
                SourceLocation::squiggle_line(location, lines);
            }
        }
    }
}