first draft after day 1

This commit is contained in:
Shautvast 2025-01-06 22:02:47 +01:00
commit 4534d405da
11 changed files with 676 additions and 0 deletions

1
.gitignore vendored Normal file
View file

@ -0,0 +1 @@
/target

23
Cargo.lock generated Normal file
View file

@ -0,0 +1,23 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "anyhow"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "vis"
version = "0.1.0"
dependencies = [
"anyhow",
"unicode-segmentation",
]

8
Cargo.toml Normal file
View file

@ -0,0 +1,8 @@
[package]
name = "vis"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0"
unicode-segmentation = "1.1"

64
README.md Normal file
View file

@ -0,0 +1,64 @@
Just started
sample vis file:
```
markup {
lanes {
functions {
calc: "Calculation"
acc_interest_calc: "Account interest Calculation"
interest_rates: "Interest Rates"
config: "Configuration"
nob: "NoB" {
nob_execution: "NoB Execution"
coll_reinst_inst: "Collection of Reinstatement instructions"
}
reporting: "Reporting"
}
systems {
bank: "Bank" {
bank_motor: "Bank Motor"
bank_scripts: "Bank Scripts"
bank_client: "Bank Client"
bank_db: "Bank DB"
}
interest_engine: "InterestEngine"
}
}
bank-->calc
bank_scripts--<>bank_db
bank_motor--<>bank_db
interest_engine-->calc
}
styles {
lanes(group) {
type: textnode
orientation: horizontal
shape: rectangle
font-family: arial
border-width: 1px
border-color: gray
}
functions(group) {
background-color: yellow
font-family: arial
border-radius: 20px
border-width: 1px
border-color: gray
}
systems(group) {
background-color: lightblue
}
tag1: "⚭" { // how will this work?
right:0px
top:0px
}
tag2: {
itchy: scratchy
}
}
```
Will have to be turned into an architecture diagram... we'll see how it goes!

56
src/cashpool.vis Normal file
View file

@ -0,0 +1,56 @@
markup {
lanes {
functions {
calc: "Calculation"
acc_interest_calc: "Account interest Calculation"
interest_rates: "Interest Rates"
config: "Configuration"
nob: "NoB" {
nob_execution: "NoB Execution"
coll_reinst_inst: "Collection of Reinstatement instructions"
}
reporting: "Reporting"
}
systems {
bank: "Bank" {
bank_motor: "Bank Motor"
bank_scripts: "Bank Scripts"
bank_client: "Bank Client"
bank_db: "Bank DB"
}
interest_engine: "InterestEngine"
}
}
bank-->calc
bank_scripts--<>bank_db
bank_motor--<>bank_db
interest_engine-->calc
}
styles {
lanes(group) {
type: textnode
orientation: horizontal
shape: rectangle
font-family: arial
border-width: 1px
border-color: gray
}
functions(group) {
background-color: yellow
font-family: arial
border-radius: 20px
border-width: 1px
border-color: gray
}
systems(group) {
background-color: lightblue
}
tag1: "⚭" { // how will this work?
right:0px
top:0px
}
tag2: {
itchy: scratchy
}
}

25
src/grammar.txt Normal file
View file

@ -0,0 +1,25 @@
vis: markup | styles
markup: elements
elements: "{" element* "}"
element: node | edge
node: id (":" title)? elements?
edge: idref arrow idref
arrow: ArrowLeft | ArrowRight | DiamondArrowLeft | DiamondArrowRight
ArrowLeft: "<--"
ArrowRight: "-->"
DiamondArrowLeft: "<>--"
DiamondArrowRight: "--<>"
id: text
title: string
string: '"' text '"'
styles: style*
style: idref attributes? ":" style-elements
style-elements: "{" style-element* "}"
style-element: key ":" value
key: text
value: text
idref: text
attributes: "(" attribute ")"
attribute: text ("," attribute)*

32
src/lib.rs Normal file
View file

@ -0,0 +1,32 @@
use std::collections::HashMap;
use tokens::TokenType;
pub mod parser;
mod scanner;
mod tokens;
/// Root of a parsed `.vis` document: the diagram content plus its styling.
#[derive(Debug)]
pub struct Vis {
    /// Elements from the `markup { ... }` section.
    pub markup: Vec<Element>,
    /// Rules from the `styles { ... }` section (parsing not implemented yet).
    pub styles: Vec<StyleNode>,
}
/// A single entry in the markup tree.
#[derive(Debug)]
pub enum Element {
    /// Identifier, optional quoted title, nested child elements.
    Node(String, Option<String>, Vec<Element>),
    /// From-id, to-id, the arrow token type, optional title.
    Edge(String, String, TokenType, Option<String>),
}
/// One style rule: attributes applied to the element named by `id_ref`.
#[derive(Debug)]
pub struct StyleNode {
    /// Identifier of the markup element this rule targets.
    pub id_ref: String,
    /// Whether the rule addresses a single node or a whole group.
    pub containertype: ContainerType,
    /// Raw key/value style attributes (e.g. "border-width" -> "1px").
    pub attributes: HashMap<String, String>,
}
/// Distinguishes style rules that target one node from rules that target a
/// group — `id(group)` in the styles section of the sample files.
#[derive(Debug)]
pub enum ContainerType {
    Node,
    Group,
}

41
src/main.rs Normal file
View file

@ -0,0 +1,41 @@
use anyhow::anyhow;
use std::env::args;
use std::fs;
/// Entry point: parse the `.vis` file named on the command line and dump
/// the resulting tree with Debug formatting.
fn main() -> anyhow::Result<()> {
    let args: Vec<String> = args().collect();
    // Exactly one argument (the file path) is expected besides argv[0].
    if args.len() != 2 {
        return Err(anyhow!("Usage: vis vis-file"));
    }
    let contents = read_file(&args[1])?;
    let parsed = vis::parser::parse_vis(contents.as_str())?;
    println!("{:?}", parsed);
    Ok(())
}
/// Reads `file_name` into a String, wrapping any I/O failure in an error
/// message that names the offending file.
fn read_file(file_name: &str) -> anyhow::Result<String> {
    match fs::read_to_string(file_name) {
        Ok(contents) => Ok(contents),
        Err(e) => Err(anyhow!("Cannot read file '{}': {}", file_name, e)),
    }
}
//frankensteinwoorden:
//loopbaanrecord
//vriendendienstplicht
//brandweerbericht
//antiloopgraaf
//blikveldslag
//zeurpietenhuis
//krentenbroodmager
//opzijweg
//doorstroomversnelling
//koektrommelvlies
//draaideuropening
//luchtsteuntrekker
//boekhoudhakker
//krantenkopstoot
//prijzenoorlogsverslaggeving
//dwaallichtknop
//slaapwandeltocht
//rampspoedafdeling
//

163
src/parser.rs Normal file
View file

@ -0,0 +1,163 @@
use crate::{
tokens::{
Token,
TokenType::{self, *},
},
Element, StyleNode, Vis,
};
use anyhow::anyhow;
/// Scans and parses a complete `.vis` document into a [`Vis`] tree.
///
/// # Errors
/// Returns an error when the token stream does not match the grammar
/// (unexpected token, missing brace, ...).
pub fn parse_vis(contents: &str) -> anyhow::Result<Vis> {
    let tokens = crate::scanner::scan(contents)?;
    // (debug dump of the token stream removed — it printed on every parse)
    let mut parser = Parser::new(tokens);
    Ok(Vis {
        markup: parser.markup()?,
        styles: parser.styles()?,
    })
}
/// Recursive-descent parser over the scanner's token stream.
struct Parser {
    /// Full token stream; the scanner terminates it with an `Eof` token.
    tokens: Vec<Token>,
    /// Index of the next unconsumed token.
    current: usize,
}
impl Parser {
pub fn new(tokens: Vec<Token>) -> Self {
Self { tokens, current: 0 }
}
fn markup(&mut self) -> anyhow::Result<Vec<Element>> {
if self.match_token(Markup) {
self.nodes()
} else {
Ok(vec![])
}
}
fn nodes(&mut self) -> anyhow::Result<Vec<Element>> {
println!("nodes {:?}", self.peek());
self.consume(LeftBrace, "Expected '{'")?;
let mut nodes = vec![];
while !self.match_token(RightBrace) {
nodes.push(self.node()?);
}
Ok(nodes)
}
fn node(&mut self) -> anyhow::Result<Element> {
println!("node {:?}", self.peek());
let id = self.id()?;
println!("id {}", id);
let current = self.peek().clone();
if self.match_tokens(vec![
ArrowRight,
ArrowLeft,
DiamondArrowRight,
DiamondArrowLeft,
]) {
self.edge(id, current)
} else {
let title = self.title()?;
println!("title {:?}", title);
let children = if self.check(&LeftBrace) {
self.nodes()?
} else {
vec![]
};
Ok(Element::Node(id, title, children))
}
}
fn edge(&mut self, from_id: String, arrow: Token) -> anyhow::Result<Element> {
let to_id = self.id()?;
let title = self.title()?;
Ok(Element::Edge(from_id, to_id, arrow.tokentype, title))
}
fn title(&mut self) -> anyhow::Result<Option<String>> {
if self.check(&Colon) {
self.advance();
Ok(Some(self.string()?))
} else {
Ok(None)
}
}
fn id(&mut self) -> anyhow::Result<String> {
self.text()
}
fn string(&mut self) -> anyhow::Result<String> {
let text = self.peek().clone();
self.consume(Str, "Expected quoted string")?;
Ok(text.lexeme.to_owned())
}
fn text(&mut self) -> anyhow::Result<String> {
let text = self.peek().clone();
self.consume(Identifier, "Expected text")?;
Ok(text.lexeme.to_owned())
}
fn styles(&mut self) -> anyhow::Result<Vec<StyleNode>> {
Ok(vec![])
}
fn consume(&mut self, tokentype: TokenType, expect: &str) -> anyhow::Result<&Token> {
let current = self.peek();
if self.check(&tokentype) {
Ok(self.advance())
} else {
Err(anyhow!("Error: {} on line {}", expect, current.line))
}
}
fn match_tokens(&mut self, tokentypes: Vec<TokenType>) -> bool {
for tokentype in tokentypes.iter() {
if self.check(tokentype) {
self.advance();
return true;
}
}
false
}
fn match_token(&mut self, tokentype: TokenType) -> bool {
if self.check(&tokentype) {
self.advance();
true
} else {
false
}
}
fn check(&self, tokentype: &TokenType) -> bool {
if self.is_at_end() {
false
} else {
&self.peek().tokentype == tokentype
}
}
fn advance(&mut self) -> &Token {
if !self.is_at_end() {
self.current += 1;
}
self.previous()
}
fn previous(&self) -> &Token {
&self.tokens[self.current - 1]
}
fn is_at_end(&self) -> bool {
self.peek().tokentype == TokenType::Eof
}
fn peek(&self) -> &Token {
&self.tokens[self.current]
}
}

209
src/scanner.rs Normal file
View file

@ -0,0 +1,209 @@
use unicode_segmentation::UnicodeSegmentation;
use crate::tokens::{
Token,
TokenType::{self, *},
KEYWORDS,
};
pub fn scan(vis: &str) -> anyhow::Result<Vec<Token>> {
let mut scanner = Scanner::new(vis);
scanner.scan();
Ok(scanner.tokens)
}
/// Hand-written scanner that turns raw source into a flat token list.
struct Scanner<'a> {
    /// Tokens produced so far; `scan` appends a final `Eof`.
    tokens: Vec<Token>,
    /// The input split into extended grapheme clusters.
    chars: Vec<&'a str>,
    /// Index of the next grapheme to consume.
    current_pos: usize,
    /// Line counter used for error reporting.
    current_line: usize,
    /// Start index of the token currently being scanned.
    start_pos: usize,
}
impl<'a> Scanner<'a> {
    /// Creates a scanner over the grapheme clusters of `vis`.
    fn new(vis: &'a str) -> Self {
        Self {
            tokens: vec![],
            chars: UnicodeSegmentation::graphemes(vis, true).collect::<Vec<&str>>(),
            current_pos: 0,
            // Report 1-based line numbers, matching what editors display
            // (was 0, which made "on line 0" errors for the first line).
            current_line: 1,
            start_pos: 0,
        }
    }

    /// Scans the whole input and terminates the stream with an `Eof` token.
    fn scan(&mut self) {
        while !self.is_at_end() {
            self.start_pos = self.current_pos;
            self.scan_token();
        }
        // Reset start_pos so the Eof token carries an empty lexeme instead
        // of duplicating the text of the last scanned token.
        self.start_pos = self.current_pos;
        self.add_token(Eof);
    }

    /// Scans a single token starting at the current position.
    fn scan_token(&mut self) {
        let c = self.advance();
        match c {
            "(" => self.add_token(LeftParen),
            ")" => self.add_token(RightParen),
            "{" => self.add_token(LeftBrace),
            "}" => self.add_token(RightBrace),
            "," => self.add_token(Comma),
            "." => self.add_token(Dot),
            // "-->", "--<>", or a bare minus.
            "-" => {
                if self.match_token("-") {
                    if self.match_token(">") {
                        self.add_token(ArrowRight);
                    } else if self.match_token("<") {
                        if self.match_token(">") {
                            self.add_token(DiamondArrowRight);
                        } else {
                            println!("Wrong arrow at {}", self.current_line);
                        }
                    } else {
                        println!("Wrong arrow at {}", self.current_line);
                    }
                } else {
                    self.add_token(Minus);
                }
            }
            // "<--" or "<>--".
            "<" => {
                if self.match_token("-") {
                    if self.match_token("-") {
                        self.add_token(ArrowLeft);
                    } else {
                        println!("Wrong arrow at {}", self.current_line);
                    }
                } else if self.match_token(">") {
                    if self.match_token("-") {
                        if self.match_token("-") {
                            self.add_token(DiamondArrowLeft);
                        } else {
                            println!("Wrong arrow at {}", self.current_line);
                        }
                    } else {
                        println!("Wrong arrow at {}", self.current_line);
                    }
                }
            }
            ":" => self.add_token(Colon),
            // "//" starts a line comment; a lone "/" is a Slash token.
            "/" => {
                if self.match_token("/") {
                    while self.peek() != "\n" && !self.is_at_end() {
                        self.advance();
                    }
                } else {
                    self.add_token(Slash);
                }
            }
            " " | "\t" | "\r" => {}
            "\n" => {
                self.current_line += 1;
            }
            "\"" => self.string(),
            _ => {
                // A leading "." can never reach this arm (it is consumed as
                // Dot above), so only a digit can start a number here.
                if is_digit(c) {
                    self.number()
                } else if is_alpha(c) {
                    self.identifier()
                } else {
                    // Was silently ignored; report like the other errors.
                    println!("Unexpected character '{}' at {}", c, self.current_line);
                }
            }
        }
    }

    /// The grapheme under the cursor, or "" at end of input.
    fn peek(&self) -> &str {
        if self.current_pos < self.chars.len() {
            self.chars[self.current_pos]
        } else {
            ""
        }
    }

    /// The grapheme after the cursor, or "" when that would run past the
    /// end (the original indexed unchecked and could panic).
    fn peek_next(&self) -> &str {
        if self.current_pos + 1 < self.chars.len() {
            self.chars[self.current_pos + 1]
        } else {
            ""
        }
    }

    /// Scans an identifier or keyword: alphabetic start, then alphanumerics.
    fn identifier(&mut self) {
        while is_alpha(self.peek()) || is_digit(self.peek()) {
            self.advance();
        }
        let text = self.chars[self.start_pos..self.current_pos].concat();
        let tokentype = KEYWORDS.get(text.as_str()).copied().unwrap_or(Identifier);
        self.add_token(tokentype);
    }

    /// Scans a number literal, allowing an embedded decimal point.
    fn number(&mut self) {
        while is_digit(self.peek()) || (self.peek() == "." && is_digit(self.peek_next())) {
            self.advance();
        }
        self.add_token(TokenType::Number);
    }

    /// Scans a double-quoted string; the opening quote is already consumed.
    /// The stored lexeme includes both quote characters.
    fn string(&mut self) {
        while self.peek() != "\"" && !self.is_at_end() {
            if self.peek() == "\n" {
                // Fixed: was `= 1`, which RESET the line counter instead of
                // advancing it for each newline inside the string.
                self.current_line += 1;
            }
            self.advance();
        }
        if self.is_at_end() {
            println!("Unterminated string");
            return;
        }
        // Consume the closing quote.
        self.advance();
        self.add_token(TokenType::Str);
    }

    /// Pushes a token covering the source from `start_pos` to the cursor.
    fn add_token(&mut self, tokentype: TokenType) {
        let token = Token::new(
            tokentype,
            self.chars[self.start_pos..self.current_pos].concat(),
            self.current_line,
        );
        self.tokens.push(token);
    }

    /// Consumes the next grapheme only when it equals `expected`.
    fn match_token(&mut self, expected: &str) -> bool {
        if self.is_at_end() || self.peek() != expected {
            return false;
        }
        self.current_pos += 1;
        true
    }

    /// Consumes and returns the grapheme under the cursor.
    /// Callers must check `is_at_end` first: the original's bounds check
    /// ran *after* the indexing, so it never prevented the panic anyway.
    fn advance(&mut self) -> &str {
        let c = self.chars[self.current_pos];
        self.current_pos += 1;
        c
    }

    /// True when every grapheme has been consumed.
    fn is_at_end(&self) -> bool {
        self.current_pos >= self.chars.len()
    }
}
/// True when `char` is non-empty and its first scalar is an ASCII digit.
/// (Graphemes are passed as `&str`, so "" must be handled explicitly.)
fn is_digit(char: &str) -> bool {
    char.chars().next().is_some_and(|c| c.is_ascii_digit())
}
/// True when `char` is non-empty and its first scalar is alphabetic or '_'
/// — the characters that may start an identifier.
/// Replaces the manual `if ... { true } else { false }` ladder.
fn is_alpha(char: &str) -> bool {
    char.chars()
        .next()
        .is_some_and(|c| c.is_alphabetic() || c == '_')
}

54
src/tokens.rs Normal file
View file

@ -0,0 +1,54 @@
use std::cell::LazyCell;
use std::collections::HashMap;
use std::sync::LazyLock;
use TokenType::*;
pub const KEYWORDS: LazyCell<HashMap<&str, TokenType>> = LazyCell::new(|| {
let mut m = HashMap::new();
m.insert("markup", Markup);
m.insert("styles", Styles);
m.insert("group", Group);
m.insert("px", Px);
m
});
/// A single lexical token produced by the scanner.
#[derive(Debug, Clone)]
pub struct Token {
    pub tokentype: TokenType,
    /// Raw source slice; for `Str` tokens this includes the quotes.
    pub lexeme: String,
    /// Line the token was scanned on, used in parse-error messages.
    pub line: usize,
}
impl Token {
pub(crate) fn new(tokentype: TokenType, lexeme: String, line: usize) -> Self {
Token {
tokentype,
lexeme,
line,
}
}
}
/// Lexical classification of a scanned token.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TokenType {
    // Edge arrows: "-->", "<--", "--<>", "<>--".
    ArrowRight,
    ArrowLeft,
    DiamondArrowRight,
    DiamondArrowLeft,
    // Single-character punctuation.
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    Comma,
    Dot,
    Colon,
    Slash,
    // Literals and identifiers.
    Identifier,
    Str,
    Number,
    Minus,
    // Keywords (see KEYWORDS in this module).
    Markup,
    Styles,
    Group,
    // End-of-input marker appended by the scanner.
    Eof,
    // "px" unit keyword.
    Px,
}