scanner works

This commit is contained in:
Shautvast 2025-10-14 22:18:48 +02:00
commit a0ec182bc5
17 changed files with 3001 additions and 0 deletions

2
.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
/target
.DS_Store

8
.idea/.gitignore generated vendored Normal file
View file

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

12
.idea/crudlang.iml generated Normal file
View file

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="EMPTY_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

8
.idea/modules.xml generated Normal file
View file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/crudlang.iml" filepath="$PROJECT_DIR$/.idea/crudlang.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml generated Normal file
View file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

2505
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

21
Cargo.toml Normal file
View file

@ -0,0 +1,21 @@
[package]
name = "crudlang"
version = "0.1.0"
edition = "2024"
[dependencies]
axum = "0.8.6"
# log = "0.4"
log4rs = "1.4.0"
serde = { version = "1.0.228", features = ["derive"] }
tokio = { version = "1.47", features = ["full"] }
tokio-postgres = "0.7"
chrono = "0.4.42"
dotenv = "0.15.0"
reqwest = { version = "0.12", features = ["json", "multipart"] }
tower-http = { version = "0.6", features = ["fs"] }
tower = "0.5.0"
tracing = "0.1.41"
tracing-subscriber = "0.3.20"
anyhow = "1.0"
tower-livereload = "0.9.6"

49
README.md Normal file
View file

@ -0,0 +1,49 @@
# crud-lang
_This is now in first-draft phase. Meaning, I just had the idea and I am jotting down very preliminary design decisions._
- an experimental language for CRUD applications (backend only though, I think)
- Enterprise as a first-class citizen
- urls are made up of directories and filenames
- a controller sourcefile is a file with the .ctl extension
- likewise:
- .svc services
- .cl service clients (that call other services)
- .dao database access code (not objects)
- .qc queueconsumers
- .qp queueproducers
- .utl utilities
- there is a strict calling hierarchy. A service can not call a controller. It can only go 'down'.
- Services can not call other services, because that is the recipe for spaghetti. Refactor your logic, abstract and put lower level code in utilities.
- Utilities are allowed to call other utilities. OMG, spaghetti after all! TBD
- It is an interpreter written in rust. OMG!
- And it has everything I like in other languages
- strictly typed
- [] is a list
- {} is a map
- no objects, no inheritance
- structs and duck typing
- everything is an expression
- nice iterators.
- First class functions? Maybe...
- automatic mapping from database to object to json
**types**
- u32, i32
- u64, i64
- f32, f64,
- string, bool, char
- struct enum
- date
**question**
- how to model headers
- middleware, implement later
- JWT tokens, I guess
**the example in /src:**
- a very simple api that listens to GET /api/customers/{id} and returns a customer from the database

View file

@ -0,0 +1,6 @@
///gets the customer
fn get(id: u32) -> Customer:
service.get(id)
fn post(customer: Customer):
service.add(customer)

View file

@ -0,0 +1,5 @@
fn get(id: u32) -> Customer:
select id, first_name, last_name from customers where id = :id
fn save(c: Customer):
insert into customers values(id, first_name, last_name) values(:c.id, :c.first_name, :c.last_name)

View file

@ -0,0 +1,5 @@
struct Customer:
id: u32,
first_name: string,
last_name: string,
date_fetched: date

View file

@ -0,0 +1,6 @@
fn get(id: u32) -> Customer:
let customer = dao.get(id)
customer.date_fetched = current_date
fn add(customer: Customer):
dao.save(customer)

9
src/keywords.rs Normal file
View file

@ -0,0 +1,9 @@
use crate::tokens::TokenType;
/// Resolves a reserved word to its token type.
///
/// Returns `None` when `lexeme` is not a keyword, in which case the
/// scanner treats it as an ordinary identifier.
pub(crate) fn get_keyword(lexeme: &str) -> Option<TokenType> {
    if lexeme == "fn" {
        Some(TokenType::Fn)
    } else if lexeme == "struct" {
        Some(TokenType::Struct)
    } else {
        None
    }
}

3
src/lib.rs Normal file
View file

@ -0,0 +1,3 @@
mod keywords;
mod scanner;
mod tokens;

81
src/main.rs Normal file
View file

@ -0,0 +1,81 @@
use std::sync::Arc;
use axum::{
Json, Router,
extract::{Path, State},
http::StatusCode,
routing::get,
};
use chrono::{DateTime, Days, Timelike, Utc};
use dotenv::dotenv;
use serde::{Deserialize, Serialize};
use tokio_postgres::Client;
#[tokio::main]
/// Application entry point: connects to Postgres, builds the Axum router
/// and serves the customer API on port 3000.
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Load environment variables from a local .env file, if present.
    dotenv().ok();
    // NOTE(review): the connection string (including the password) is
    // hard-coded even though dotenv is loaded above — presumably this should
    // come from the environment; confirm before committing credentials.
    let (client, connection) = tokio_postgres::connect(
        "host=localhost user=postgres password=boompje dbname=postgres",
        tokio_postgres::NoTls,
    )
    .await?;
    // Spawn connection handler
    // (tokio_postgres separates the client from the connection future; the
    // connection must be polled on its own task for queries to progress)
    tokio::spawn(async move {
        if let Err(e) = connection.await {
            eprintln!("connection error: {}", e);
        }
    });
    // Shared state: the Postgres client, cloned cheaply into each handler.
    let state = AppState {
        db: Arc::new(client),
    };
    // Single route: GET /api/customers/{id} -> get_customer
    let app = Router::new()
        .route("/api/customers/{id}", get(get_customer))
        .with_state(state);
    // run our app with hyper, listening globally on port 3000
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
    axum::serve(listener, app).await.unwrap();
    Ok(())
}
/// Shared application state handed to every request handler.
#[derive(Clone)]
struct AppState {
    // Behind an Arc so cloning the state per request is a refcount bump.
    db: Arc<Client>,
}
/// Handler for `GET /api/customers/{id}`: looks up one customer by id.
///
/// Returns `404 NOT FOUND` when no row matches and `500` on any database
/// error (the underlying error is written to the server log rather than
/// leaked to the client).
async fn get_customer(
    Path(id): Path<i32>,
    State(state): State<AppState>,
) -> Result<Json<Customer>, StatusCode> {
    let rows = state
        .db
        .query(
            "SELECT id, first_name, last_name FROM customers WHERE id = $1",
            &[&id],
        )
        .await
        .map_err(|e| {
            // Log the cause instead of silently discarding it.
            eprintln!("database error fetching customer {}: {}", id, e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    // `first()` doubles as the emptiness check and avoids the panic path
    // of indexing `&rows[0]`.
    let row = rows.first().ok_or(StatusCode::NOT_FOUND)?;
    let user = Customer {
        id: row.get(0),
        first_name: row.get(1),
        last_name: row.get(2),
    };
    Ok(Json(user))
}
/// JSON payload mirroring the columns selected from the `customers` table.
#[derive(Serialize, Deserialize)]
struct Customer {
    id: i32,
    first_name: String,
    last_name: String,
}

222
src/scanner.rs Normal file
View file

@ -0,0 +1,222 @@
use crate::{
keywords,
tokens::{
Token,
TokenType::{self},
},
};
/// Tokenizes `source` into a flat list of tokens.
fn scan(source: &str) -> Vec<Token> {
    // Build a fresh scanner positioned at the start of the input and let
    // it consume itself into a token stream.
    Scanner {
        chars: source.chars().collect(),
        start: 0,
        current: 0,
        line: 0,
        tokens: Vec::new(),
    }
    .scan()
}
impl Scanner {
    /// Consumes the scanner and produces the full token stream.
    fn scan(mut self) -> Vec<Token> {
        while !self.is_at_end() {
            // Each token begins where the previous one ended.
            self.start = self.current;
            self.scan_token();
        }
        self.tokens
    }

    /// Scans one token starting at `self.start`.
    fn scan_token(&mut self) {
        let c = self.advance();
        match c {
            '(' => self.add_token(TokenType::LeftParen),
            ')' => self.add_token(TokenType::RightParen),
            '{' => self.add_token(TokenType::LeftBrace),
            '}' => self.add_token(TokenType::RightBrace),
            ',' => self.add_token(TokenType::Comma),
            '.' => self.add_token(TokenType::Dot),
            '-' => self.add_token(TokenType::Minus),
            '+' => self.add_token(TokenType::Plus),
            ':' => self.add_token(TokenType::Colon),
            '*' => self.add_token(TokenType::Star),
            '!' => {
                let t = if self.match_next('=') {
                    TokenType::BangEqual
                } else {
                    TokenType::Bang
                };
                self.add_token(t);
            }
            '=' => {
                let t = if self.match_next('=') {
                    TokenType::EqualEqual
                } else {
                    TokenType::Equal
                };
                self.add_token(t);
            }
            '<' => {
                let t = if self.match_next('=') {
                    TokenType::LessEqual
                } else {
                    TokenType::Less
                };
                self.add_token(t);
            }
            '>' => {
                let t = if self.match_next('=') {
                    TokenType::GreaterEqual
                } else {
                    TokenType::Greater
                };
                self.add_token(t);
            }
            '/' => {
                if self.match_next('/') {
                    // todo make distinction between comment and doc
                    while self.peek() != '\n' && !self.is_at_end() {
                        self.advance();
                    }
                } else {
                    self.add_token(TokenType::Slash);
                }
            }
            '"' => self.string(),
            ' ' | '\t' | '\r' => {}
            '\n' => {
                self.line += 1;
            }
            _ => {
                if is_digit(c) {
                    self.number();
                } else if is_alpha(c) {
                    self.identifier();
                } else {
                    // The offending char is a character, not an identifier;
                    // say which one to make the message actionable.
                    println!("Unexpected character '{}' at line {}", c, self.line);
                }
            }
        }
    }

    /// Scans an identifier or keyword; the first char was already consumed.
    fn identifier(&mut self) {
        while is_alphanumeric(self.peek()) {
            self.advance();
        }
        let value: String = self.chars[self.start..self.current].iter().collect();
        // Keywords win over plain identifiers.
        let tokentype = keywords::get_keyword(&value).unwrap_or(TokenType::Identifier);
        self.add_token_with_value(tokentype, value);
    }

    /// Scans an integer or decimal number literal.
    fn number(&mut self) {
        while is_digit(self.peek()) {
            self.advance();
        }
        // Only consume a '.' when it is followed by a digit, so `1.foo`
        // lexes as `1` `.` `foo`.
        if self.peek() == '.' && is_digit(self.peek_next()) {
            self.advance();
        }
        while is_digit(self.peek()) {
            self.advance();
        }
        let value: String = self.chars[self.start..self.current].iter().collect();
        self.add_token_with_value(TokenType::Number, value);
    }

    /// Scans a double-quoted string literal (multi-line strings allowed).
    fn string(&mut self) {
        while self.peek() != '"' && !self.is_at_end() {
            if self.peek() == '\n' {
                self.line += 1;
            }
            self.advance();
        }
        if self.is_at_end() {
            // Bail out: advancing past the missing closing quote would
            // index beyond the end of `chars`, and the slice below would
            // be wrong for an unterminated literal.
            println!("Unterminated string at {}", self.line);
            return;
        }
        // Consume the closing '"'.
        self.advance();
        // Token value excludes the surrounding quotes.
        let value: String = self.chars[self.start + 1..self.current - 1]
            .iter()
            .collect();
        self.add_token_with_value(TokenType::String, value);
    }

    /// Current character without consuming it; '\0' at end of input so
    /// callers never index out of bounds.
    fn peek(&self) -> char {
        if self.is_at_end() {
            '\0'
        } else {
            self.chars[self.current]
        }
    }

    /// Character after the current one; '\0' when it would be out of bounds.
    fn peek_next(&self) -> char {
        if self.current + 1 >= self.chars.len() {
            '\0'
        } else {
            self.chars[self.current + 1]
        }
    }

    /// Consumes the current char only when it equals `expected`.
    fn match_next(&mut self, expected: char) -> bool {
        if self.is_at_end() {
            false
        } else if self.chars[self.current] != expected {
            false
        } else {
            self.current += 1;
            true
        }
    }

    /// Emits a token that carries no lexeme (punctuation, operators).
    fn add_token(&mut self, tokentype: TokenType) {
        self.tokens
            .push(Token::new(tokentype, "".to_string(), self.line));
    }

    /// Emits a token that carries its source text (identifiers, literals).
    fn add_token_with_value(&mut self, tokentype: TokenType, value: String) {
        self.tokens.push(Token::new(tokentype, value, self.line));
    }

    /// Consumes and returns the current character.
    fn advance(&mut self) -> char {
        self.current += 1;
        self.chars[self.current - 1]
    }

    fn is_at_end(&self) -> bool {
        self.current >= self.chars.len()
    }
}
/// Hand-written single-pass scanner state.
struct Scanner {
    // Input as chars so multi-byte characters index cleanly.
    chars: Vec<char>,
    // Index of the next char to consume.
    current: usize,
    // Index where the token currently being scanned started.
    start: usize,
    // Tokens emitted so far.
    tokens: Vec<Token>,
    // Line counter, bumped on each '\n' (starts at 0 in `scan`).
    line: usize,
}
/// True for the ASCII decimal digits '0'..='9'.
fn is_digit(c: char) -> bool {
    // Idiomatic stdlib form of the manual range comparison.
    c.is_ascii_digit()
}
/// True when `c` may continue an identifier (letter or digit).
fn is_alphanumeric(c: char) -> bool {
    // Delegate to the same predicates the scanner uses elsewhere so the
    // identifier character set stays defined in one place.
    is_digit(c) || is_alpha(c)
}
/// True when `c` may start an identifier: an ASCII letter or underscore.
///
/// Underscore is included so snake_case names such as `first_name` and
/// `date_fetched` (used throughout the crudlang examples) lex as a single
/// identifier instead of triggering the unexpected-character branch.
fn is_alpha(c: char) -> bool {
    c.is_ascii_alphabetic() || c == '_'
}
#[cfg(test)]
mod test {
    use super::*;

    // End-to-end scan of a small controller snippet. The assertion compares
    // the Debug rendering of the whole token stream, so it is sensitive to
    // Token's field order/derive output and to line counting (the leading
    // newline in the raw string puts `fn` on line 2, its body on line 3).
    #[test]
    fn test() {
        let tokens = scan(
            r#"
            ///gets the customer
            fn get(id: u32) -> Customer:
                service.get(id)
            "#,
        );
        let tokenstring = format!("{:?}", tokens);
        assert_eq!(tokenstring,r#"[Token { tokentype: Fn, lexeme: "fn", line: 2 }, Token { tokentype: Identifier, lexeme: "get", line: 2 }, Token { tokentype: LeftParen, lexeme: "", line: 2 }, Token { tokentype: Identifier, lexeme: "id", line: 2 }, Token { tokentype: Colon, lexeme: "", line: 2 }, Token { tokentype: Identifier, lexeme: "u32", line: 2 }, Token { tokentype: RightParen, lexeme: "", line: 2 }, Token { tokentype: Minus, lexeme: "", line: 2 }, Token { tokentype: Greater, lexeme: "", line: 2 }, Token { tokentype: Identifier, lexeme: "Customer", line: 2 }, Token { tokentype: Colon, lexeme: "", line: 2 }, Token { tokentype: Identifier, lexeme: "service", line: 3 }, Token { tokentype: Dot, lexeme: "", line: 3 }, Token { tokentype: Identifier, lexeme: "get", line: 3 }, Token { tokentype: LeftParen, lexeme: "", line: 3 }, Token { tokentype: Identifier, lexeme: "id", line: 3 }, Token { tokentype: RightParen, lexeme: "", line: 3 }]"#)
    }
}

53
src/tokens.rs Normal file
View file

@ -0,0 +1,53 @@
/// A single lexical token produced by the scanner.
#[derive(Debug)]
pub(crate) struct Token {
    tokentype: TokenType,
    // Source text for identifiers/keywords/literals; empty for punctuation.
    lexeme: String,
    // Line number as counted by the scanner at emission time.
    line: usize,
}
impl Token {
    /// Creates a token; `lexeme` may be empty for tokens whose kind alone
    /// identifies them (punctuation, operators).
    pub(crate) fn new(tokentype: TokenType, lexeme: String, line: usize) -> Self {
        Self {
            tokentype,
            lexeme,
            line,
        }
    }
}
/// Literal value carried by a token.
// NOTE(review): currently unused and only has `None` — presumably a
// placeholder to be extended with number/string variants later; confirm
// before building on it.
#[derive(Debug)]
enum Value {
    None,
}
/// Every kind of token the scanner can emit.
// NOTE(review): LeftBracket, RightBracket, Hash and Indent are declared but
// never produced by scanner.rs in this commit — presumably reserved for
// upcoming list/map and indentation handling.
#[derive(Debug)]
pub(crate) enum TokenType {
    // Single-character punctuation
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    LeftBracket,
    RightBracket,
    Colon,
    Comma,
    Dot,
    Star,
    Slash,
    Plus,
    Minus,
    Hash,
    // One- and two-character operators
    Bang,
    BangEqual,
    EqualEqual,
    Equal,
    Greater,
    Less,
    GreaterEqual,
    LessEqual,
    // Layout
    Indent,
    // Literals and names
    Identifier,
    String,
    Number,
    // Keywords
    Fn,
    Struct,
}