Compare commits

...

7 Commits
master ... rust

Author SHA1 Message Date
Brian Picciano 5ab4694c66 I don't know what I was doing here but I probably won't return to this branch anyway 7 months ago
Brian Picciano 92f93ff076 Implement Decoder 1 year ago
Brian Picciano e06b20b604 Fold graph::Graph into gg::Graph, add TokenKind::End to lexer 1 year ago
Brian Picciano 6b5f2d7e82 Developed gg.bnf 1 year ago
Brian Picciano 20e050438c Finish up lexer, add tests for it 1 year ago
Brian Picciano 2919bcaa77 Implemented lexer 1 year ago
Brian Picciano 0248a11285 Initial implementation of the Graph type in rust, construction functions only 1 year ago
  1. 1
      rust/.envrc
  2. 1
      rust/.gitignore
  3. 84
      rust/Cargo.lock
  4. 11
      rust/Cargo.toml
  5. 77
      rust/flake.lock
  6. 21
      rust/flake.nix
  7. 90
      rust/src/gg.rs
  8. 380
      rust/src/gg/decoder.rs
  9. 24
      rust/src/gg/gg.bnf
  10. 313
      rust/src/gg/lexer.rs
  11. 2
      rust/src/lib.rs
  12. 2
      rust/src/main.rs
  13. 54
      rust/src/vm.rs

@ -0,0 +1 @@
use flake

1
rust/.gitignore vendored

@ -0,0 +1 @@
target/

84
rust/Cargo.lock generated

@ -0,0 +1,84 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "bitmaps"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
dependencies = [
"typenum",
]
[[package]]
name = "char_reader"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37a59b22dec21ca7d6c173bd543eeab4cd2f36cf21f039a4134905034c87ed3a"
[[package]]
name = "ginger"
version = "0.1.0"
dependencies = [
"char_reader",
"im-rc",
"unicode_categories",
]
[[package]]
name = "im-rc"
version = "15.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
dependencies = [
"bitmaps",
"rand_core",
"rand_xoshiro",
"sized-chunks",
"typenum",
"version_check",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
[[package]]
name = "rand_xoshiro"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
dependencies = [
"rand_core",
]
[[package]]
name = "sized-chunks"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
dependencies = [
"bitmaps",
"typenum",
]
[[package]]
name = "typenum"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "unicode_categories"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"

@ -0,0 +1,11 @@
[package]
name = "ginger"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
im-rc = "15.1.0"             # immutable (persistent) collections; Graph stores its edges in an im_rc HashSet
char_reader = "0.1.1"        # char-at-a-time reading over any Read; used by the lexer
unicode_categories = "0.1.1" # unicode category predicates (is_letter/is_mark/...) used when tokenizing

@ -0,0 +1,77 @@
{
"nodes": {
"naersk": {
"inputs": {
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1671096816,
"narHash": "sha256-ezQCsNgmpUHdZANDCILm3RvtO1xH8uujk/+EqNvzIOg=",
"owner": "nix-community",
"repo": "naersk",
"rev": "d998160d6a076cfe8f9741e56aeec7e267e3e114",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "master",
"repo": "naersk",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1671788672,
"narHash": "sha256-tLkPxJuos3jki2f/TZdHn+NuMQAzN9s2E4QudylQLg0=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2c74fcd6c5fc14a61de158fb796243543f46b217",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1671788672,
"narHash": "sha256-tLkPxJuos3jki2f/TZdHn+NuMQAzN9s2E4QudylQLg0=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2c74fcd6c5fc14a61de158fb796243543f46b217",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"naersk": "naersk",
"nixpkgs": "nixpkgs_2",
"utils": "utils"
}
},
"utils": {
"locked": {
"lastModified": 1667395993,
"narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

@ -0,0 +1,21 @@
{
  inputs = {
    # naersk builds Rust crates under nix, driven by the crate's Cargo.lock.
    naersk.url = "github:nix-community/naersk/master";
    nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
    # flake-utils supplies eachDefaultSystem, used below.
    utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, utils, naersk }:
    # Produce the outputs once per default system (x86_64-linux, aarch64-darwin, ...).
    utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs { inherit system; };
        naersk-lib = pkgs.callPackage naersk { };
      in
      {
        # `nix build` compiles the crate in this directory.
        defaultPackage = naersk-lib.buildPackage ./.;
        # `nix develop` drops into a shell with the Rust toolchain and linters.
        devShell = with pkgs; mkShell {
          buildInputs = [ cargo rustc rustfmt pre-commit rustPackages.clippy ];
          # Lets tooling (e.g. rust-analyzer) find the stdlib sources.
          RUST_SRC_PATH = rustPlatform.rustLibSrc;
        };
      });
}

@ -0,0 +1,90 @@
use std::hash::{Hash, Hasher};
use im_rc::{HashSet};
mod lexer;
mod decoder;
pub use decoder::Decoder;
/// An edge value paired with the value at the edge's source end. The graph
/// stores (dst, OpenEdge) pairs, so an OpenEdge is an edge which has not yet
/// been attached to a destination.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(test, derive(Debug))]
pub struct OpenEdge(Value, Value); // edge, src
/// A single value in a graph: a bare name, a 64-bit integer, a tuple of open
/// edges, or a nested graph.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(test, derive(Debug))]
pub enum Value{
    Name(String),
    Number(i64),
    Tuple(Vec<OpenEdge>),
    Graph(Graph),
}

/// The empty tuple. Used throughout the decoder as the "no edge value yet"
/// marker for edges written without an explicit `< value` chain.
pub const ZERO_TUPLE: Value = Value::Tuple(vec![]);
/// An immutable graph: a set of edges, each connecting a source OpenEdge to a
/// destination Value. Hash is implemented manually below, so it is not derived
/// here.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(test, derive(Debug))]
pub struct Graph {
    edges: HashSet<(Value, OpenEdge)>, // dst, src
}
impl Graph {
    /// Returns a graph containing no edges.
    pub fn new() -> Graph {
        Graph { edges: HashSet::new() }
    }

    /// Returns a copy of this graph which additionally contains an edge from
    /// `src_val` to `dst_val`, labeled with `edge_val`. The receiver itself is
    /// left untouched (persistent update via the im_rc set).
    pub fn with(&self, dst_val: Value, edge_val: Value, src_val: Value) -> Self {
        let entry = (dst_val, OpenEdge(edge_val, src_val));
        Graph { edges: self.edges.update(entry) }
    }
}
// The implementation of Hash for im_rc::HashSet does not sort the entries, so
// two equal sets could hash differently. To keep Hash consistent with Eq, the
// edges are copied into a Vec and sorted before being fed to the hasher.
impl Hash for Graph {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut edges = Vec::from_iter(&self.edges);
        edges.sort();
        edges.iter().for_each(|edge| edge.hash(state));
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Shorthand for building a Value::Number in the tests below.
    fn number(i: i64) -> Value {
        Value::Number(i)
    }

    // Graphs compare equal regardless of the order in which their edges were
    // added.
    #[test]
    fn equality() {
        let g1 = Graph::new()
            .with(number(0), number(1), number(2))
            .with(number(3), number(4), number(5));
        let g2 = Graph::new()
            .with(number(3), number(4), number(5))
            .with(number(0), number(1), number(2));
        assert_eq!(g1, g2);
    }

    // Edge-order independence also holds for graphs nested inside values.
    #[test]
    fn deep_equality() {
        let g1 = Graph::new().with(number(-2), ZERO_TUPLE, Value::Graph(Graph::new()
            .with(number(0), number(1), number(2))
            .with(number(3), number(4), number(5)),
        ));
        let g2 = Graph::new().with(number(-2), ZERO_TUPLE, Value::Graph(Graph::new()
            .with(number(3), number(4), number(5))
            .with(number(0), number(1), number(2)),
        ));
        assert_eq!(g1, g2);
    }
}

@ -0,0 +1,380 @@
use std::io::{self, Read};
use super::{Graph, Value, OpenEdge, ZERO_TUPLE};
use super::lexer::{self, Lexer, Token, TokenKind, Location};
// In order to make sense of this file, check out the accompanying gg.bnf, which describes the
// grammar in BNF notation. Each method in the Decoder maps more or less exactly to a state within
// the BNF.
/// Errors which can occur while decoding a graph.
#[cfg_attr(test, derive(Debug))]
pub enum Error {
    /// A syntax error in the input, with the location at which it occurred.
    Decoding(String, Location),
    /// An I/O error from the underlying reader.
    IO(io::Error),
}
impl From<lexer::Error> for Error {
fn from(e: lexer::Error) -> Self {
match e {
lexer::Error::Tokenizing(s, loc) => Error::Decoding(s, loc),
lexer::Error::IO(e) => Error::IO(e)
}
}
}
// The token which terminates the outermost, undelimited graph: end of input.
static OUTER_GRAPH_TERM: Token = Token{
    kind: TokenKind::End,
    value: String::new(),
};

/// Decoder reads gg-formatted graphs from an underlying reader.
pub struct Decoder<R: Read> {
    lexer: Lexer<R>,
}
impl<R: Read> Decoder<R> {
    /// Constructs a Decoder which reads gg-formatted input from r.
    pub fn new(r: R) -> Decoder<R> {
        Decoder{
            lexer: Lexer::new(r),
        }
    }

    /// Decodes a single graph which is expected to span the entire input.
    pub fn decode_undelimited(&mut self) -> Result<Graph, Error> {
        self.outer_graph(Graph::new())
    }

    // Consumes the next token, requiring it to be the punctuation v.
    fn exp_punct(&mut self, v: &'static str) -> Result<(), Error> {
        match self.lexer.next()? {
            (Token{kind: TokenKind::Punctuation, value: v2}, _) if v == v2 => Ok(()),
            (tok, loc) => Err(Error::Decoding(
                format!("expected '{}', found: {}", v, tok),
                loc,
            )),
        }
    }

    // Parses "<name> = <tail>" entries into g until term_tok is seen. Covers
    // both the <graph> and <outer-graph> states of gg.bnf, which differ only
    // in their terminating token ('}' vs end-of-input).
    fn generic_graph(&mut self, term_tok: &Token, g: Graph) -> Result<Graph, Error> {
        match self.lexer.next()? {
            (tok, _) if tok == *term_tok => Ok(g),
            (Token{kind: TokenKind::Name, value: name}, _) => {
                self.exp_punct("=")?;
                let open_edge = self.generic_graph_tail(term_tok, ZERO_TUPLE)?;
                // Recurse to consume the rest of the graph's entries.
                self.generic_graph(term_tok, g.with(
                    Value::Name(name),
                    open_edge.0,
                    open_edge.1,
                ))
            }
            (tok, loc) => Err(Error::Decoding(
                format!("expected name or {}, found: {}", term_tok, tok),
                loc,
            )),
        }
    }

    // Parses the right-hand side of an "=": a value, optionally chained with
    // further values via "<", terminated by ";" or implicitly by any other
    // token (which is pushed back for the caller). edge_val is the edge value
    // accumulated so far; ZERO_TUPLE means "no edge value yet".
    fn generic_graph_tail(&mut self, term_tok: &Token, edge_val: Value) -> Result<OpenEdge, Error> {
        let val = self.value()?;
        match self.lexer.next()? {
            (Token{kind: TokenKind::Punctuation, value: v}, _) if v == ";" =>
                Ok(OpenEdge(edge_val, val)),
            (Token{kind: TokenKind::Punctuation, value: v}, _) if v == "<" =>
                // "a < b < c" nests to the right: once an edge value exists,
                // the remainder of the chain becomes a one-element tuple src.
                if edge_val == ZERO_TUPLE {
                    self.generic_graph_tail(term_tok, val)
                } else {
                    Ok(OpenEdge(edge_val, Value::Tuple(vec![
                        self.generic_graph_tail(term_tok, val)?,
                    ])))
                },
            (tok, loc) => {
                // Token belongs to the enclosing state; un-read it.
                self.lexer.push_next(tok, loc);
                Ok(OpenEdge(edge_val, val))
            },
        }
    }

    // The outermost graph is terminated by end-of-input rather than '}'.
    fn outer_graph(&mut self, g: Graph) -> Result<Graph, Error> {
        self.generic_graph(&OUTER_GRAPH_TERM, g)
    }

    // A nested graph: the opening '{' was already consumed by value(); reads
    // entries until the matching '}'.
    fn graph(&mut self, g: Graph) -> Result<Graph, Error> {
        let term_tok = Token{
            kind: TokenKind::Punctuation,
            value: String::from("}"),
        };
        self.generic_graph(&term_tok, g)
    }

    // Collects comma-separated tuple elements into tuple_vec until ')'. The
    // opening '(' was already consumed by value().
    fn tuple(&mut self, tuple_vec: &mut Vec<OpenEdge>) -> Result<(), Error> {
        loop {
            match self.lexer.next()? {
                (Token{kind: TokenKind::Punctuation, value: v}, _) if v == ")" =>
                    return Ok(()),
                (tok, loc) => {
                    self.lexer.push_next(tok, loc);
                    tuple_vec.push(self.tuple_tail(ZERO_TUPLE)?);
                },
            }
        }
    }

    // Like generic_graph_tail, but for tuple elements: chains on "<" the same
    // way, terminated by ',' instead of ';'.
    fn tuple_tail(&mut self, edge_val: Value) -> Result<OpenEdge, Error> {
        let val = self.value()?;
        match self.lexer.next()? {
            (Token{kind: TokenKind::Punctuation, value: v}, _) if v == "," =>
                Ok(OpenEdge(edge_val, val)),
            (Token{kind: TokenKind::Punctuation, value: v}, _) if v == "<" =>
                if edge_val == ZERO_TUPLE {
                    self.tuple_tail(val)
                } else {
                    Ok(OpenEdge(edge_val, Value::Tuple(vec![
                        self.tuple_tail(val)?,
                    ])))
                },
            (tok, loc) => {
                self.lexer.push_next(tok, loc);
                Ok(OpenEdge(edge_val, val))
            },
        }
    }

    // Parses a single value: a name, a number, a tuple (on '('), or a nested
    // graph (on '{').
    fn value(&mut self) -> Result<Value, Error> {
        match self.lexer.next()? {
            (Token{kind: TokenKind::Name, value: v}, _) =>
                Ok(Value::Name(v)),
            (Token{kind: TokenKind::Number, value: v}, loc) =>
                // The lexer only guarantees digits and '-'s; the parse here
                // catches malformed numbers such as "1-2".
                match v.parse::<i64>() {
                    Ok(n) => Ok(Value::Number(n)),
                    Err(e) => Err(Error::Decoding(
                        format!("parsing {:#?} as integer: {}", v, e),
                        loc,
                    )),
                },
            (Token{kind: TokenKind::Punctuation, value: v}, _) if v == "(" => {
                let mut vec = Vec::new();
                self.tuple(&mut vec)?;
                Ok(Value::Tuple(vec))
            },
            (Token{kind: TokenKind::Punctuation, value: v}, _) if v == "{" =>
                Ok(Value::Graph(self.graph(Graph::new())?)),
            (tok, loc) => Err(Error::Decoding(
                format!("expected name, number, '(', or '{{', found: {}", tok),
                loc,
            )),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Table-driven test: each case decodes an input string and compares the
    // resulting Graph against one built by hand.
    #[test]
    fn decoder() {
        // Shorthands for building expected values.
        fn name(s: &'static str) -> Value {
            Value::Name(s.to_string())
        }
        fn number(i: i64) -> Value {
            Value::Number(i)
        }
        struct Test {
            input: &'static str,
            exp: Graph,
        }
        let tests = vec!{
            Test{
                input: "",
                exp: Graph::new(),
            },
            // Trailing ';' on a graph entry is optional.
            Test{
                input: "out = 1",
                exp: Graph::new().
                    with(name("out"), ZERO_TUPLE, number(1)),
            },
            Test{
                input: "out = 1;",
                exp: Graph::new().
                    with(name("out"), ZERO_TUPLE, number(1)),
            },
            Test{
                input: "out = incr < 1",
                exp: Graph::new().
                    with(name("out"), name("incr"), number(1)),
            },
            Test{
                input: "out = incr < 1;",
                exp: Graph::new().
                    with(name("out"), name("incr"), number(1)),
            },
            // Chained '<' nests rightwards into single-element tuples.
            Test{
                input: "out = a < b < 1",
                exp: Graph::new().with(
                    name("out"),
                    name("a"),
                    Value::Tuple(vec![OpenEdge(name("b"), number(1))]),
                ),
            },
            Test{
                input: "out = a < b < 1;",
                exp: Graph::new().with(
                    name("out"),
                    name("a"),
                    Value::Tuple(vec![OpenEdge(name("b"), number(1))]),
                ),
            },
            // Tuples may mix plain values with their own edge chains.
            Test{
                input: "out = a < b < (1, c < 2, d < e < 3)",
                exp: Graph::new().with(
                    name("out"),
                    name("a"),
                    Value::Tuple(vec![
                        OpenEdge(name("b"), Value::Tuple(vec![
                            OpenEdge(ZERO_TUPLE, number(1)),
                            OpenEdge(name("c"), number(2)),
                            OpenEdge(name("d"), Value::Tuple(vec![
                                OpenEdge(name("e"), number(3)),
                            ])),
                        ])),
                    ]),
                ),
            },
            // Trailing comma inside a tuple is allowed.
            Test{
                input: "out = (c < 2,);",
                exp: Graph::new().with(
                    name("out"),
                    ZERO_TUPLE,
                    Value::Tuple(vec![
                        OpenEdge(name("c"), number(2)),
                    ]),
                ),
            },
            // A tuple can itself be the edge value of a chain.
            Test{
                input: "out = (1, c < 2) < 3;",
                exp: Graph::new().with(
                    name("out"),
                    Value::Tuple(vec![
                        OpenEdge(ZERO_TUPLE, number(1)),
                        OpenEdge(name("c"), number(2)),
                    ]),
                    number(3),
                ),
            },
            Test{
                input: "out = a < b < (1, c < (d < 2, 3))",
                exp: Graph::new().with(
                    name("out"),
                    name("a"),
                    Value::Tuple(vec![
                        OpenEdge(name("b"), Value::Tuple(vec![
                            OpenEdge(ZERO_TUPLE, number(1)),
                            OpenEdge(name("c"), Value::Tuple(vec![
                                OpenEdge(name("d"), number(2)),
                                OpenEdge(ZERO_TUPLE, number(3)),
                            ])),
                        ])),
                    ]),
                ),
            },
            // Nested graphs as values.
            Test{
                input: "out = { a = 1; b = 2 < 3; c = 4 < 5 < 6 }",
                exp: Graph::new().with(
                    name("out"),
                    ZERO_TUPLE,
                    Value::Graph(Graph::new()
                        .with(name("a"), ZERO_TUPLE, number(1))
                        .with(name("b"), number(2), number(3))
                        .with(name("c"), number(4), Value::Tuple(vec![
                            OpenEdge(number(5), number(6)),
                        ])),
                    ),
                ),
            },
            Test{
                input: "out = { a = 1; };",
                exp: Graph::new().with(
                    name("out"),
                    ZERO_TUPLE,
                    Value::Graph(Graph::new()
                        .with(name("a"), ZERO_TUPLE, number(1)),
                    ),
                ),
            },
            // A nested graph used as an edge value.
            Test{
                input: "out = { a = 1; } < 2",
                exp: Graph::new().with(
                    name("out"),
                    Value::Graph(Graph::new()
                        .with(name("a"), ZERO_TUPLE, number(1)),
                    ),
                    number(2),
                ),
            },
            // Multiple entries in the outer graph.
            Test{
                input: "out = { a = 1; } < 2; foo = 5 < 6",
                exp: Graph::new()
                    .with(
                        name("out"),
                        Value::Graph(Graph::new()
                            .with(name("a"), ZERO_TUPLE, number(1)),
                        ),
                        number(2),
                    )
                    .with(name("foo"), number(5), number(6)),
            },
            Test{
                input: "out = { a = 1 } < 2; foo = 5 < 6;",
                exp: Graph::new()
                    .with(
                        name("out"),
                        Value::Graph(Graph::new()
                            .with(name("a"), ZERO_TUPLE, number(1)),
                        ),
                        number(2),
                    )
                    .with(name("foo"), number(5), number(6)),
            },
        };
        for test in tests {
            println!("INPUT: {:#?}", test.input);
            let mut d = Decoder::new(test.input.as_bytes());
            let got = d.decode_undelimited().expect("no errors expected");
            assert_eq!(test.exp, got);
        }
    }
}

@ -0,0 +1,24 @@
<name> ::= <letter> <name-tail>
<name-charset> ::= <letter> | <number> | <mark> | "-"
<name-tail> ::= <name-charset> <name-tail> | ""
<number> ::= "-" <ascii-number> <number-tail>
| <ascii-number> <number-tail>
<number-tail> ::= <ascii-number> <number-tail> | ""
<value> ::= <name> | <number> | "(" <tuple> | "{" <graph>
<tuple> ::= ")" | <tuple-tail> <tuple>
<tuple-tail> ::= <value> ""
| <value> ","
| <value> "<" <tuple-tail>
<graph> ::= "}" | <name> "=" <graph-tail> <graph>
<graph-tail> ::= <value> ""
| <value> ";"
| <value> "<" <graph-tail>
<outer-graph> ::= <end> | <name> "=" <outer-graph-tail> <outer-graph>
<outer-graph-tail> ::= <value> ""
| <value> ";"
| <value> "<" <outer-graph-tail>

@ -0,0 +1,313 @@
use std::fmt;
use std::io::{self, Read, BufReader};
use unicode_categories::UnicodeCategories;
use char_reader::CharReader;
/// A position within the input, as a zero-indexed row and column.
#[derive(Copy, Clone, PartialEq)]
#[cfg_attr(test, derive(Debug))]
pub struct Location {
    pub row: i64,
    pub col: i64,
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}:{}", self.row, self.col)
}
}
/// Errors which can occur while tokenizing.
#[cfg_attr(test, derive(Debug))]
pub enum Error {
    /// An unexpected character, with the location it was found at.
    Tokenizing(String, Location),
    /// An I/O error from the underlying reader.
    IO(io::Error),
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Self {
Error::IO(e)
}
}
/// The category of a token produced by the lexer.
#[derive(PartialEq, Clone)]
#[cfg_attr(test, derive(Debug))]
pub enum TokenKind {
    Name,
    Number,
    Punctuation,
    /// End of input; its value is the empty string.
    End,
}
/// A single token: its kind plus the raw characters which formed it.
#[derive(PartialEq, Clone)]
#[cfg_attr(test, derive(Debug))]
pub struct Token {
    pub kind: TokenKind,
    pub value: String,
}
impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.kind {
TokenKind::Name => write!(f, "{:#?}", self.value),
TokenKind::Number => write!(f, "{}", self.value),
TokenKind::Punctuation => write!(f, "'{}'", self.value),
TokenKind::End => write!(f, "<end>"),
}
}
}
/// Lexer produces a stream of (Token, Location) pairs from an underlying
/// reader, with single-token pushback support.
pub struct Lexer<R: Read> {
    r: CharReader<BufReader<R>>,
    buf: String,                        // scratch buffer reused by collect_token
    next_stack: Vec<(Token, Location)>, // tokens un-read via push_next
    next_loc: Location,                 // location of the next unconsumed char
}
impl<R: Read> Lexer<R>{
    /// Constructs a Lexer reading from r, starting at row 0, column 0.
    pub fn new(r: R) -> Lexer<R> {
        Lexer{
            r: CharReader::new(BufReader::new(r)),
            buf: String::new(),
            next_stack: Vec::new(),
            next_loc: Location{
                row: 0,
                col: 0,
            },
        }
    }

    // Consumes the previously peeked character and advances next_loc: a
    // newline moves to the start of the next row, anything else advances the
    // column. Panics if there is no character to consume, hence the contract
    // that peek must have succeeded first.
    fn discard(&mut self) {
        let c = self.r.next_char().
            expect("discard should only get called after peek").
            expect("discard should only get called after peek");
        if c == '\n' {
            self.next_loc = Location{
                row: self.next_loc.row + 1,
                col: 0
            };
            return;
        }
        self.next_loc = Location{
            row: self.next_loc.row,
            col: self.next_loc.col + 1,
        };
    }

    // Peeks the next character without consuming it. Returns (c, true) when a
    // character is available, or ('0', false) at end of input ('0' is a dummy
    // and must not be used when the bool is false).
    fn peek_a_bool(&mut self) -> Result<(char, bool), Error> {
        if let Some(c) = self.r.peek_char()? {
            Ok((c, true))
        } else {
            Ok(('0', false))
        }
    }

    // Consumes characters for as long as pred holds (or until end of input).
    fn discard_while(&mut self, pred: impl Fn(char) -> bool) -> Result<(), Error> {
        loop {
            let (c, ok) = self.peek_a_bool()?;
            if !ok || !pred(c) {
                return Ok(());
            }
            self.discard();
        }
    }

    // Builds a token of the given kind from the maximal run of characters
    // matching pred, recording the location of the token's first character.
    // Reuses self.buf to avoid a fresh allocation per token.
    fn collect_token(
        &mut self,
        kind: TokenKind,
        pred: impl Fn(char) -> bool,
    ) -> Result<(Token, Location), Error> {
        let loc = self.next_loc;
        self.buf.truncate(0);
        loop {
            let (c, ok) = self.peek_a_bool()?;
            if !ok || !pred(c) {
                return Ok((
                    Token{kind: kind, value: self.buf.clone()},
                    loc
                ))
            }
            self.buf.push(c);
            self.discard();
        }
    }

    // True for characters which may appear in a number token. Note that '-'
    // is accepted at any position here; misplaced minus signs (e.g. "1-2")
    // are rejected later by the decoder's integer parse.
    fn is_number(c: char) -> bool {
        c == '-' || ('0' <= c && c <= '9')
    }

    /// Un-reads a token so that the next call to next() returns it. Tokens
    /// are returned in LIFO order.
    pub fn push_next(&mut self, token: Token, loc: Location) {
        self.next_stack.push((token, loc))
    }

    /// Returns the next token and its location. At end of input it returns a
    /// TokenKind::End token (repeatedly, if called again).
    pub fn next(&mut self) -> Result<(Token, Location), Error> {
        // Pushed-back tokens take priority over the reader.
        if let Some(r) = self.next_stack.pop() {
            return Ok(r);
        }
        loop {
            let (c, ok) = self.peek_a_bool()?;
            if !ok {
                return Ok((
                    Token{kind: TokenKind::End, value: String::new()},
                    self.next_loc,
                ));
            } else if c == '*' {
                // '*' begins a line comment which runs to end of line.
                self.discard_while(|c| c != '\n')?;
                // the terminating newline will be dealt with in the next loop
            } else if c.is_letter() {
                // Names start with a letter and may continue with letters,
                // numbers, unicode marks, or '-'.
                return self.collect_token(
                    TokenKind::Name,
                    |c| c.is_letter() || c.is_number() || c.is_mark() || c == '-',
                );
            } else if Self::is_number(c) {
                return self.collect_token(TokenKind::Number, Self::is_number);
            } else if c.is_punctuation() || c.is_symbol() {
                // Punctuation tokens are always a single character.
                let loc = self.next_loc;
                self.discard();
                return Ok((
                    Token{kind: TokenKind::Punctuation, value: c.to_string()},
                    loc,
                ))
            } else if c.is_ascii_whitespace() {
                self.discard_while(|c| c.is_ascii_whitespace())?;
            } else {
                return Err(Error::Tokenizing(
                    format!("unexpected character: {:#?}", c).to_string(),
                    self.next_loc,
                ));
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Table-driven test: each input is tokenized to exhaustion (through the
    // End token) and the full (Token, Location) stream is compared.
    #[test]
    fn lexer() {
        struct Test {
            input: &'static str,
            exp: Vec<(Token, Location)>,
        }
        // Shorthand for building an expected (Token, Location) pair.
        fn tok(kind: TokenKind, val: &'static str, loc_row: i64, loc_col: i64) -> (Token, Location) {
            (
                Token{kind: kind, value: val.to_string()},
                Location{row: loc_row, col: loc_col},
            )
        }
        let tests = vec![
            Test{
                input: "",
                exp: vec![
                    tok(TokenKind::End, "", 0, 0),
                ],
            },
            // '*' comments run to end of line and produce no tokens.
            Test{
                input: "* foo",
                exp: vec![
                    tok(TokenKind::End, "", 0, 5),
                ],
            },
            Test{
                input: "* foo\n",
                exp: vec![
                    tok(TokenKind::End, "", 1, 0),
                ],
            },
            Test{
                input: "* foo\nbar",
                exp: vec![
                    tok(TokenKind::Name, "bar", 1, 0),
                    tok(TokenKind::End, "", 1, 3),
                ],
            },
            Test{
                input: "* foo\nbar ",
                exp: vec![
                    tok(TokenKind::Name, "bar", 1, 0),
                    tok(TokenKind::End, "", 1, 4),
                ],
            },
            // Names may contain '-' and digits after the leading letter.
            Test{
                input: "foo bar\nf-o f0O Foo",
                exp: vec![
                    tok(TokenKind::Name, "foo", 0, 0),
                    tok(TokenKind::Name, "bar", 0, 4),
                    tok(TokenKind::Name, "f-o", 1, 0),
                    tok(TokenKind::Name, "f0O", 1, 4),
                    tok(TokenKind::Name, "Foo", 1, 8),
                    tok(TokenKind::End, "", 1, 11),
                ],
            },
            Test{
                input: "1 100 -100",
                exp: vec![
                    tok(TokenKind::Number, "1", 0, 0),
                    tok(TokenKind::Number, "100", 0, 2),
                    tok(TokenKind::Number, "-100", 0, 6),
                    tok(TokenKind::End, "", 0, 10),
                ],
            },
            // Punctuation separates adjacent tokens without whitespace.
            Test{
                input: "1<2!-3 ()",
                exp: vec![
                    tok(TokenKind::Number, "1", 0, 0),
                    tok(TokenKind::Punctuation, "<", 0, 1),
                    tok(TokenKind::Number, "2", 0, 2),
                    tok(TokenKind::Punctuation, "!", 0, 3),
                    tok(TokenKind::Number, "-3", 0, 4),
                    tok(TokenKind::Punctuation, "(", 0, 7),
                    tok(TokenKind::Punctuation, ")", 0, 8),
                    tok(TokenKind::End, "", 0, 9),
                ],
            },
        ];
        for test in tests {
            println!("INPUT: {:#?}", test.input);
            let mut l = Lexer::new(test.input.as_bytes());
            let mut res = Vec::new();
            loop {
                let (token, loc) = l.next().expect("no errors expected");
                let is_end = token.kind == TokenKind::End;
                res.push((token, loc));
                if is_end {
                    break;
                }
            }
            assert_eq!(*test.exp, *res.as_slice())
        }
    }
}

@ -0,0 +1,2 @@
pub mod gg;
pub mod vm;

@ -0,0 +1,2 @@
// Binary entry point; currently a stub while the library modules (gg, vm)
// are being developed.
fn main() {
}

@ -0,0 +1,54 @@
use super::gg;
use std::collections::HashMap;
/// Errors which can occur while evaluating a graph in the VM.
///
/// Debug is derived under cfg(test) to match the convention used by the
/// error enums in gg.rs, decoder.rs, and lexer.rs, so test assertions can
/// format these values.
#[cfg_attr(test, derive(Debug))]
pub enum Error{
    /// A function could not be built from a graph; the String describes why.
    FunctionFromGraph(String),
}
// A name resolved within a Scope: either a concrete gg value, or a function
// from value to value (borrowed, so the function itself lives outside the
// scope).
enum ResolvedValue<'a> {
    Value(gg::Value),
    Function(&'a dyn Fn(gg::Value) -> gg::Value),
}
// A chain of name->value bindings: lookups fall back to the parent scope when
// a name is not bound locally, so inner scopes shadow outer ones.
struct Scope<'a> {
    parent: Option<&'a Scope<'a>>,
    values: HashMap<&'a str, ResolvedValue<'a>>,
}
impl<'a> Scope<'a> {
    /// Creates an empty scope, optionally nested inside a parent scope.
    fn new(parent: Option<&'a Scope<'a>>) -> Scope<'a> {
        Scope { parent, values: HashMap::new() }
    }

    /// Looks up a name, checking this scope first and then walking up through
    /// ancestor scopes. Returns None if no scope in the chain binds the name.
    fn resolve(&self, name: &str) -> Option<&ResolvedValue> {
        self.values
            .get(name)
            .or_else(|| self.parent.and_then(|p| p.resolve(name)))
    }

    /// Binds a name in this scope, shadowing any binding in a parent scope.
    fn insert(&mut self, name: &'a str, rval: ResolvedValue<'a>) {
        self.values.insert(name, rval);
    }
}
/// Returns a boxed function which ignores its argument and always yields a
/// copy of the given value.
///
/// The closure must clone `value` on each call: returning the captured
/// (non-Copy) value by move would make the closure implement only FnOnce,
/// which cannot be coerced to the `dyn Fn` trait object in the return type
/// (E0525), so the original `move |_| value` body did not compile.
fn always(value: gg::Value) -> Box<dyn Fn(gg::Value) -> gg::Value> {
    Box::new(move |_: gg::Value| value.clone())
}
//fn function_from_graph<'a>(
// scope: &'a Scope<'a>, g: gg::Graph,
//) -> Result<&'a dyn Fn(gg::Value) -> gg::Value, Error> {
//
//}
Loading…
Cancel
Save