Merge pull request #936 from andrew-johnson-4/lsts-control-flow
Lsts control flow
andrew-johnson-4 authored Nov 15, 2024
2 parents eb3fcad + 56ffb67 commit cde6c6c
Showing 7 changed files with 19,902 additions and 19,480 deletions.
39,196 changes: 19,721 additions & 19,475 deletions BOOTSTRAP/cli.c

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lambda_mountain"
version = "1.19.7"
version = "1.19.8"
authors = ["Andrew <andrew@subarctic.org>"]
license = "MIT"
description = "Typed Macro Assembler (backed by Coq proofs-of-correctness)"
8 changes: 8 additions & 0 deletions EXAMPLES/control-flow.lsts
@@ -0,0 +1,8 @@

import $"LIB/default.lm";

print(if 12 then 34 else 56);

(let xyz = 5; while xyz { print(xyz); xyz = xyz - 1; });

for abc in [3, 2, 1] { print(abc); };
1 change: 1 addition & 0 deletions EXAMPLES/control-flow.lsts.out
@@ -0,0 +1 @@
3454321321
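A note on the expected output: the example prints with no separators, so "3454321321" decomposes as 34 from print(if 12 then 34 else 56) (12 is non-zero, so the then-branch is taken), 54321 from the while loop counting xyz down from 5, and 321 from the for loop over [3, 2, 1].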
2 changes: 1 addition & 1 deletion Makefile
@@ -1,6 +1,6 @@

dev: install-production
lm EXAMPLES/collections.lsts
lm EXAMPLES/control-flow.lsts
cc tmp.c
./a.out

169 changes: 167 additions & 2 deletions PLUGINS/FRONTEND/LSTS/lsts-parse.lm
@@ -22,7 +22,7 @@ lsts-parse := λ(: tokens List<Token>). (: (
(set tokens (tail tokens))
(lsts-parse-expect( '\:_s tokens ))(set tokens (tail tokens))
) (
(let term-rest (lsts-parse-expression tokens))
(let term-rest (lsts-parse-small-expression tokens)) # small expression does not consume semicolons
(let term (.first term-rest))
(set tokens (.second term-rest))
(set ast-parsed-program (Seq(
@@ -76,6 +76,28 @@ lsts-parse-expect := λ(: expect String)(: b U64)(: tokens List<Token>). (: (
) ())
) Nil);

lsts-has-assign := λ(: tokens List<Token>). (: (
(let depth 0_u64)
(let has-assign 0_u64)
(while (non-zero tokens) (
(if (==( (lsts-parse-head tokens) '[_s )) (set depth (+( depth 1_u64 ))) ())
(if (==( (lsts-parse-head tokens) '{_s )) (set depth (+( depth 1_u64 ))) ())
(if (==( (lsts-parse-head tokens) '\[_s )) (set depth (+( depth 1_u64 ))) ())
(if (==( (lsts-parse-head tokens) ']_s )) (set depth (-( depth 1_u64 ))) ())
(if (==( (lsts-parse-head tokens) '}_s )) (set depth (-( depth 1_u64 ))) ())
(if (==( (lsts-parse-head tokens) '\]_s )) (set depth (-( depth 1_u64 ))) ())
(if (&&( (==( (lsts-parse-head tokens) '\:_s )) (<=( depth 0_u64 )) )) (
(set tokens (: LEOF List<Token>))
) ())
(if (&&( (==( (lsts-parse-head tokens) '=_s )) (==( depth 0_u64 )) )) (
(set has-assign 1_u64)
(set tokens (: LEOF List<Token>))
) ())
(if (non-zero tokens) (set tokens (tail tokens)) ())
))
has-assign
) U64);
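The new lsts-has-assign helper scans ahead for a top-level '=' so that lsts-parse-small-expression (below) can tell an assignment statement apart from an ordinary expression. A minimal sketch of the same lookahead in Python, assuming plain string tokens, that the escaped '\: token is the statement separator (';'), and that the three delimiter pairs are brackets, braces, and (escaped) parentheses:

    def has_assign(tokens):
        depth = 0
        for tok in tokens:
            if tok in ("[", "{", "("):        # entering a nested delimiter
                depth += 1
            elif tok in ("]", "}", ")"):      # leaving one
                depth -= 1
            elif tok == ";" and depth <= 0:   # statement ends before any top-level '='
                return False
            elif tok == "=" and depth == 0:   # found a top-level '=': treat as an assignment
                return True
        return False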

lsts-parse-let := λ(: tokens List<Token>). (: (
(lsts-parse-expect( 'let_s tokens )) (set tokens (tail tokens))
(lsts-parse-expect( 'Identifier_s (is-ident-head(lsts-parse-head tokens)) tokens ))
@@ -147,10 +169,153 @@ lsts-parse-let := λ(: tokens List<Token>). (: (
tokens
) List<Token>);

lsts-parse-small-expression := λ(: tokens List<Token>). (: (
(let base ASTEOF)
(if (==( (lsts-parse-head tokens) 'if_s )) (scope(
(let loc (.location(head tokens)))
(set tokens (tail tokens))
(let c-rest (lsts-parse-add tokens))
(let c (.first c-rest))
(set tokens (.second c-rest))
(lsts-parse-expect( 'then_s tokens ))(set tokens (tail tokens))
(let t-rest (lsts-parse-add tokens))
(let t (.first t-rest))
(set tokens (.second t-rest))
(lsts-parse-expect( 'else_s tokens ))(set tokens (tail tokens))
(let f-rest (lsts-parse-add tokens))
(let f (.first f-rest))
(set tokens (.second f-rest))
(set base (App(
(close(App(
(close(App(
(close(Var( 'if_s (with-location( (token::new 'if_s) loc )) )))
(close c)
)))
(close t)
)))
(close f)
)))
)) (
(if (==( (lsts-parse-head tokens) 'let_s )) (scope(
(let loc (.location(head tokens)))
(set tokens (tail tokens))
(let lhs-rest (lsts-parse-lhs tokens))
(let lhs (.first lhs-rest))
(set tokens (.second lhs-rest))
(lsts-parse-expect( '=_s tokens ))(set tokens (tail tokens))
(let rhs-rest (lsts-parse-add tokens))
(let rhs (.first rhs-rest))
(set tokens (.second rhs-rest))
(set base (App(
(close(App(
(close(Var( 'let_s (with-location( (token::new 'let_s) loc )) )))
(close lhs)
)))
(close rhs)
)))
)) (
(if (==( (lsts-parse-head tokens) 'while_s )) (scope(
(let loc (.location(head tokens)))
(set tokens (tail tokens))
(let c-rest (lsts-parse-lhs tokens))
(let c (.first c-rest))
(set tokens (.second c-rest))
(lsts-parse-expect( '{_s tokens ))(set tokens (tail tokens))
(let rhs-rest (lsts-parse-expression tokens))
(let rhs (.first rhs-rest))
(set tokens (.second rhs-rest))
(lsts-parse-expect( '}_s tokens ))(set tokens (tail tokens))
(set base (App(
(close(App(
(close(Var( 'while_s (with-location( (token::new 'while_s) loc )) )))
(close c)
)))
(close rhs)
)))
)) (
(if (==( (lsts-parse-head tokens) 'for_s )) (scope(
(let loc (.location(head tokens)))
(set tokens (tail tokens))
(let lhs-rest (lsts-parse-lhs tokens))
(let lhs (.first lhs-rest))
(set tokens (.second lhs-rest))
(lsts-parse-expect( 'in_s tokens ))(set tokens (tail tokens))
(let iter-rest (lsts-parse-small-expression tokens))
(let iter (.first iter-rest))
(set tokens (.second iter-rest))
(lsts-parse-expect( '{_s tokens ))(set tokens (tail tokens))
(let rhs-rest (lsts-parse-expression tokens))
(let rhs (.first rhs-rest))
(set tokens (.second rhs-rest))
(lsts-parse-expect( '}_s tokens ))(set tokens (tail tokens))
(set base (App(
(close(App(
(close(Var( 'for-each_s (with-location( (token::new 'for-each_s) loc )) )))
(close(App(
(close(App(
(close lhs)
(close(Var( 'in_s (with-location( (token::new 'in_s) loc )) )))
)))
(close iter)
)))
)))
(close rhs)
)))
)) (
(if (lsts-has-assign tokens) (scope(
(let loc (.location(head tokens)))
(let lhs-rest (lsts-parse-lhs tokens))
(let lhs (.first lhs-rest))
(set tokens (.second lhs-rest))
(lsts-parse-expect( '=_s tokens ))(set tokens (tail tokens))
(let rhs-rest (lsts-parse-small-expression tokens))
(let rhs (.first rhs-rest))
(set tokens (.second rhs-rest))
(set base (App(
(close(App(
(close(Var( 'set_s (with-location( (token::new 'set_s) loc )) )))
(close lhs)
)))
(close rhs)
)))
)) (
(let base-rest (lsts-parse-add tokens))
(set base (.first base-rest))
(set tokens (.second base-rest))
))
))
))
))
))
(Tuple( base tokens ))
) Tuple<AST,List<Token>>);
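Tracing the nested App/close constructions above, each new surface form is desugared into a curried application (written schematically, with the keyword as the head variable):

    if c then t else f    →  (((if c) t) f)
    let lhs = rhs         →  ((let lhs) rhs)
    while c { body }      →  ((while c) body)
    for x in xs { body }  →  ((for-each ((x in) xs)) body)
    lhs = rhs             →  ((set lhs) rhs)    when lsts-has-assign finds a top-level '='

Anything that matches none of these falls through to lsts-parse-add.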

lsts-parse-expression := λ(: tokens List<Token>). (: (
(let base-rest (lsts-parse-add tokens))
(let base-rest (lsts-parse-small-expression tokens))
(let base (.first base-rest))
(set tokens (.second base-rest))
(while (==( (lsts-parse-head tokens) '\:_s )) (
(set tokens (tail tokens))
(if (||( (==( (lsts-parse-head tokens) '\]_s )) (==( (lsts-parse-head tokens) '}_s )) )) () (
(set base-rest (lsts-parse-small-expression tokens))
(set base (App(
(close base)
(close(.first base-rest))
)))
(set tokens (.second base-rest))
))
))
(Tuple( base tokens ))
) Tuple<AST,List<Token>>);
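In effect, lsts-parse-expression now folds a separator-delimited sequence of small expressions into left-nested applications (a; b; c becomes ((a b) c)), and a trailing separator immediately before a closing '}' or escaped '\] delimiter is skipped rather than parsed as an empty expression.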

lsts-parse-lhs := λ(: tokens List<Token>). (: (
(let base ASTNil)
(if (is-ident-head(lsts-parse-head tokens)) (
(set base (Var( (lsts-unwrap-identifier(lsts-parse-head tokens)) (head tokens) )))
(set tokens (tail tokens))
) (
(lsts-parse-expect( 'LHS_s 0_u64 tokens ))
))
(Tuple( base tokens ))
) Tuple<AST,List<Token>>);

4 changes: 3 additions & 1 deletion PLUGINS/FRONTEND/LSTS/lsts-tokenize.lm
@@ -96,7 +96,9 @@ lsts-tokenize := λ(: fp String). (: (
(if (has-prefix( ident-text '-_s )) (set tsfx '_i64_s) ())
(if (.contains( ident-text '._s )) (set tsfx '_f64_s) ())
(if (.contains( ident-text 'e_s )) (set tsfx '_f64_s) ())
(set ident-text (+( ident-text tsfx )))
(if (==( ident-text '-_s )) () (
(set ident-text (+( ident-text tsfx )))
))
) ())
(set current-column (+( current-column (.length ident-text) )))
(set tokens (cons(
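The tokenizer tweak appears to stop a bare '-' from being treated as the start of a numeric literal: previously the has-prefix '-' check would suffix a lone minus into '-_i64', while the guarded version only appends the suffix when the token is more than just '-', so the subtraction in xyz = xyz - 1 from the new example stays a plain operator token.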
