lexical.rs
use crate::{ Source_File };
use serde::{ Serialize, Deserialize };

// The coarse categories a token can belong to.
#[derive(Serialize, Deserialize)]
pub enum Token_Kind {
    Keyword,
    Identifier,
    Punctuation,
    String_Literal,
}

// A token records its kind plus its start/end offsets in the source.
#[derive(Serialize, Deserialize)]
pub struct Token {
    pub kind: Token_Kind,
    pub start: usize,
    pub end: usize,
}

// Borrows the file metadata and its raw bytes, and accumulates tokens as it scans.
pub struct Tokenizer<'a> {
    source_file: &'a Source_File,
    source_buffer: &'a [u8],
    output_buffer: Vec<Token>,
}

impl<'a> Tokenizer<'a> {
    pub fn new(source_file: &'a Source_File, source_buffer: &'a [u8]) -> Tokenizer<'a> {
        Tokenizer { source_file, source_buffer, output_buffer: Vec::new() }
    }

    // Placeholder for now: no tokens are produced yet.
    pub fn run(&mut self) -> Option<Vec<Token>> {
        Some(Vec::new())
    }
}
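For orientation, this is roughly how a caller would drive the tokenizer once it does real work: build it over a Source_File and the file's raw bytes, then collect the tokens from run. It is a minimal sketch under two assumptions that are not in the listing above: that lexical.rs is mounted as the crate's lexical module, and that a free helper named tokenize is a convenient place to wrap the call.

use crate::Source_File;
use crate::lexical::{ Token, Tokenizer };

// Hypothetical wrapper: the name `tokenize` and the module path are
// illustrative assumptions, not part of the listing above.
pub fn tokenize(source_file: &Source_File, source_buffer: &[u8]) -> Option<Vec<Token>> {
    let mut tokenizer = Tokenizer::new(source_file, source_buffer);
    tokenizer.run()
}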