cleaned up comments, added lines to readme

Tristan Smith 2024-09-15 16:00:46 -04:00
parent 50020b4813
commit d9ac25fe4a
6 changed files with 27 additions and 7 deletions

hello-world.fddl

@@ -0,0 +1,3 @@
+func main() {
+    print(`hello, world in fddl`);
+}


@@ -32,6 +32,12 @@ cargo run path/to/script.fddl
 ## Examples
+```sh
+func main() {
+    print(`hello, world in fddl`);
+}
+```
 ```sh
 ##! This is a sample module
@@ -56,12 +62,13 @@ This project is licensed under the MIT License.
 ## **Notes and Next Steps**
-- [x] Added first new set of tokens and features, added the first lexer tests.
+- [x] Added first new set of tokens and features, added the first `lexer` tests.
 - [ ] `parser` module is a placeholder.
 - [ ] `interpreter` module is a placeholder.
 - [ ] Implement a more robust error handling mechanism instead of using `stderr`.
 - [ ] Implement string interpolation (backticks with `$variable`); see the sketch below.
 - [ ] Continue to expand tests to cover all new syntax and features.
+- [x] Made a crappy website.
 ---
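The string-interpolation item above is still open in this commit. Purely as an illustration (none of the following is in the repository), a lexer could split the body of a backtick string into literal text and `$variable` pieces roughly as sketched here; the `Segment` enum and `scan_interpolated` function are invented names for the example, not fddl's actual API:

```rust
// Editorial sketch only, not code from this repository. One way a lexer
// could split the body of a backtick string into literal text and
// `$variable` references; `Segment` and `scan_interpolated` are invented
// names for the illustration.
#[derive(Debug)]
enum Segment {
    Literal(String),
    Variable(String),
}

// Scans the contents of a backtick string (backticks already consumed).
fn scan_interpolated(body: &str) -> Vec<Segment> {
    let mut segments = Vec::new();
    let mut literal = String::new();
    let mut chars = body.chars().peekable();

    while let Some(c) = chars.next() {
        if c == '$' {
            // Collect an identifier after `$`; a bare `$` stays literal.
            let mut name = String::new();
            while let Some(&next) = chars.peek() {
                if next.is_alphanumeric() || next == '_' {
                    name.push(next);
                    chars.next();
                } else {
                    break;
                }
            }
            if name.is_empty() {
                literal.push('$');
            } else {
                if !literal.is_empty() {
                    segments.push(Segment::Literal(std::mem::take(&mut literal)));
                }
                segments.push(Segment::Variable(name));
            }
        } else {
            literal.push(c);
        }
    }
    if !literal.is_empty() {
        segments.push(Segment::Literal(literal));
    }
    segments
}

fn main() {
    // `hello, $name` -> [Literal("hello, "), Variable("name")]
    println!("{:?}", scan_interpolated("hello, $name"));
}
```

Running it prints `[Literal("hello, "), Variable("name")]`, which is the shape an interpreter could later stitch back together.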


@@ -139,6 +139,7 @@ impl Lexer {
     }
     // Helper methods
+    // Function to consume the current character
     fn advance(&mut self) -> char {
         let c = if self.is_at_end() {
             '\0'
@@ -149,6 +150,7 @@ impl Lexer {
         c
     }
+    // Function to consume the current character only if it matches the expected character
     fn match_char(&mut self, expected: char) -> bool {
         if self.is_at_end() {
             return false;
@@ -162,6 +164,7 @@ impl Lexer {
         true
     }
+    // Function to return the current character without consuming it
     fn peek(&self) -> char {
         if self.is_at_end() {
             '\0'
@@ -170,6 +173,7 @@ impl Lexer {
         }
     }
+    // Function to return the next character without consuming it
     fn peek_next(&self) -> char {
         if self.current + 1 >= self.source.len() {
             '\0'
@@ -178,10 +182,12 @@
         }
     }
+    // Function to check if we've reached the end of the source
     fn is_at_end(&self) -> bool {
         self.current >= self.source.len()
     }
+    // Functions to handle the different token types
     fn string(&mut self) -> Option<Token> {
         while self.peek() != '"' && !self.is_at_end() {
             if self.peek() == '\n' {
@@ -206,6 +212,7 @@ impl Lexer {
         Some(Token::StringLiteral(value))
     }
+    // Function to handle number literals
     fn number(&mut self) -> Option<Token> {
         while self.peek().is_ascii_digit() {
             self.advance();
@@ -228,6 +235,7 @@ impl Lexer {
         Some(Token::Number(value))
     }
+    // Function to handle identifiers
     fn identifier(&mut self) -> Option<Token> {
         while self.is_alphanumeric(self.peek()) || self.peek() == '_' {
             self.advance();
@@ -275,20 +283,24 @@ impl Lexer {
         Some(token)
     }
+    // Function to check if a character is an alphabetic character or an underscore
     fn is_alpha(&self, c: char) -> bool {
         c.is_alphabetic() || c == '_'
     }
+    // Function to check if a character is an alphanumeric character or an underscore
     fn is_alphanumeric(&self, c: char) -> bool {
         c.is_alphanumeric() || c == '_'
     }
+    // Function to skip a line comment
     fn line_comment(&mut self) {
         while self.peek() != '\n' && !self.is_at_end() {
             self.advance();
         }
     }
+    // Function to skip a block comment
     fn block_comment(&mut self) {
         while !self.is_at_end() {
             if self.peek() == '*' && self.peek_next() == '/' {
@@ -304,6 +316,7 @@ impl Lexer {
         }
     }
+    // Function to handle comments and documentation
     fn handle_comment_or_doc(&mut self) -> Option<Token> {
         // We have matched one '#' character so far
         let mut count = 1;
@@ -348,7 +361,7 @@ impl Lexer {
         }
     }
+    // Function to handle documentation comments
     fn doc_comment(&mut self, _kind: &str) -> Option<Token> {
         let mut comment = String::new();
         while self.peek() != '\n' && !self.is_at_end() {
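The comments added above all describe one cursor pattern: `advance` consumes, `peek`/`peek_next` look ahead, and `is_at_end` guards both. As a stand-alone illustration of how those helpers compose (this `MiniCursor` is a simplified stand-in for the example, not the project's actual `Lexer`), a number scanner built from them looks like this:

```rust
// Editorial sketch only: a stripped-down stand-in for the lexer's cursor,
// not the project's actual `Lexer` struct. It shows how the helpers the
// new comments describe (peek / advance / is_at_end) compose into a
// scanner such as `number()`.
struct MiniCursor {
    source: Vec<char>,
    current: usize,
}

impl MiniCursor {
    // Returns the current character without consuming it; '\0' at the end.
    fn peek(&self) -> char {
        if self.is_at_end() {
            '\0'
        } else {
            self.source[self.current]
        }
    }

    // Consumes the current character and returns it.
    fn advance(&mut self) -> char {
        let c = self.peek();
        if !self.is_at_end() {
            self.current += 1;
        }
        c
    }

    // Checks whether the cursor has run past the input.
    fn is_at_end(&self) -> bool {
        self.current >= self.source.len()
    }

    // A number scanner built only from the helpers above, in the same
    // spirit as the diff's `number()` method.
    fn number(&mut self) -> f64 {
        let start = self.current;
        while self.peek().is_ascii_digit() {
            self.advance();
        }
        self.source[start..self.current]
            .iter()
            .collect::<String>()
            .parse()
            .unwrap_or(0.0)
    }
}

fn main() {
    let mut cursor = MiniCursor { source: "123+4".chars().collect(), current: 0 };
    // Scans "123" and stops at '+', leaving the rest for the next token.
    println!("number: {}", cursor.number());
}
```

Returning `'\0'` at the end of input, as the real `peek` does, lets the scanning loops terminate without a separate bounds check at every call site.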


@@ -1,5 +1,3 @@
 pub mod lexer;
 pub mod parser;
 pub mod interpreter;
-// ohhhhh, this file puts your created files together


@@ -21,7 +21,7 @@ fn main() {
 }
 fn run_repl() {
-    println!("fiddle REPL");
+    println!("fddl REPL");
     loop {
         print!("> ");
         io::stdout().flush().unwrap();
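For context on the loop above: a complete `run_repl` in this style just prints a prompt, reads a line, and hands it to the lexer. The following is an editorial sketch under that assumption, not the file's actual contents; `Lexer::new` and `scan_tokens` appear only in a comment as placeholders for whatever the real lexer module exposes:

```rust
// Editorial sketch only: a minimal REPL loop in the same shape as the
// hunk above. It echoes input instead of lexing it.
use std::io::{self, BufRead, Write};

fn run_repl() {
    println!("fddl REPL");
    let stdin = io::stdin();
    loop {
        print!("> ");
        // Flush so the prompt shows up before we block on input.
        io::stdout().flush().unwrap();

        let mut line = String::new();
        if stdin.lock().read_line(&mut line).unwrap_or(0) == 0 {
            // EOF (Ctrl-D) ends the session.
            break;
        }
        let line = line.trim();
        if line.is_empty() {
            continue;
        }
        // The real binary would hand `line` to the lexer here, something
        // like `Lexer::new(line.to_string()).scan_tokens()`; we just echo.
        println!("{}", line);
    }
}

fn main() {
    run_repl();
}
```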


@@ -1,4 +1,3 @@
 // pub mod ast;
 // pub use ast::*;
-// don't fully understand this re-export
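On the re-export the removed comment wondered about: `pub use ast::*;` takes everything public inside the `ast` submodule and re-exposes it at the parser module's root, so callers can write `parser::Expr` instead of `parser::ast::Expr`. A minimal, self-contained illustration follows; the `Expr` type is made up for the example, and `self::` is spelled out so the sketch compiles on any edition:

```rust
// Editorial sketch: what a `pub use ast::*;` re-export buys.
// `Expr` is an invented stand-in, not the project's actual AST type.
mod parser {
    pub mod ast {
        #[derive(Debug)]
        pub enum Expr {
            Number(f64),
        }
    }

    // Re-export: everything public in `ast` becomes reachable as `parser::...`.
    pub use self::ast::*;
}

fn main() {
    // Without the re-export this would have to be `parser::ast::Expr`.
    let e = parser::Expr::Number(42.0);
    println!("{:?}", e);
}
```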