changed the way that functions are defined

This commit is contained in:
IgorCielniak
2026-01-08 15:28:10 +01:00
parent d4dc6ceef5
commit b9098d9893
33 changed files with 232 additions and 198 deletions

View File

@@ -23,6 +23,8 @@
- `read-token`: splits the byte stream; default is whitespace delimited with numeric/string literal recognizers.
- `on-token`: user code decides whether to interpret, compile, or treat the token as syntax.
- `lookup`: resolves token → word entry; can be replaced to build new namespaces or module systems.
- **Definition form**: `word <name> ... end` is the required way to declare high-level words. Legacy `: <name> ... ;` definitions are no longer accepted.
- **Text macros**: `macro <name> [param_count] ... ;` records tokens until the closing `;` and registers a macro that performs positional substitution (`$1`, `$2`, ...). The old `macro: ... ;macro` form is removed.
- **Compile vs interpret**: Each word advertises stack effect + immediacy. Immediate words execute during compilation (macro behavior). Others emit code or inline asm.
- **Syntax morphing**: Provide primitives `set-reader`, `with-reader`, and word-lists so layers (e.g., Lisp-like forms) can be composed.
- **Inline Python hooks**: `:py name { ... } ;` executes the enclosed Python block immediately, then registers `name` as a word whose behavior is provided by that block. Define a `macro(ctx)` function to intercept compilation (receiving a `MacroContext` with helpers like `next_token`, `emit_literal`, `new_label`, `inject_tokens`, and direct access to the active parser), and/or an `intrinsic(builder)` function to emit custom assembly. This lets end users extend the language—parsing source, manipulating AST nodes, or writing NASM—without touching the bootstrap source. The standard library's `extend-syntax` and `fn` forms are ordinary `:py` blocks built with these APIs, so users can clone or replace them entirely from L2 source files.
@@ -122,7 +124,7 @@ struct: Point
## 14. Standard Library Sketch
- **Core words**: Arithmetic, logic, stack ops, comparison, memory access, control flow combinators.
- **Return-stack helpers**: `>r`, `r>`, `rdrop`, and `rpick` shuffle values between the data stack and the return stack. They're used by the `fn` sugar but also available to user code for building custom control constructs.
- **Meta words**: Reader management, dictionary inspection, definition forms (`:`, `:noninline`, `:asm`, `immediate`).
- **Meta words**: Reader management, dictionary inspection, definition forms (`word ... end`, `:noninline`, `:asm`, `immediate`).
- **Allocators**: Default bump allocator, arena allocator, and hook to install custom malloc/free pairs.
- **FFI/syscalls**: Thin wrappers plus convenience words for POSIX-level APIs.
- **Diagnostics**: Minimal `type`, `emit`, `cr`, `dump`, and tracing hooks for debugging emitted asm.

4
a.sl
View File

@@ -1,7 +1,7 @@
import stdlib/stdlib.sl
import stdlib/io.sl
: main
word main
"hello world" puts
;
end
compile-time main

View File

@@ -1,10 +1,10 @@
import stdlib/stdlib.sl
import stdlib/io.sl
: main
word main
0 argc for
dup
argv@ dup strlen puts
1 +
end
;
end

View File

@@ -4,10 +4,10 @@ import stdlib/io.sl
extern long labs(long n)
extern void exit(int status)
: main
word main
# Test C-style extern with implicit ABI handling
-10 labs puti cr
# Test extern void
0 exit
;
end

4
f.sl
View File

@@ -4,7 +4,7 @@ import stdlib/float.sl
extern double atan2(double y, double x)
: main
word main
# Basic math
1.5 2.5 f+ fputln # Outputs: 4.000000
@@ -13,5 +13,5 @@ extern double atan2(double y, double x)
4.0 f* fputln # Outputs: 3.141593 (approx pi)
0
;
end

88
fn.sl
View File

@@ -1,4 +1,4 @@
: call-syntax-rewrite # ( fnameToken -- handled )
word call-syntax-rewrite # ( fnameToken -- handled )
dup token-lexeme identifier? 0 == if drop 0 exit end
peek-token dup nil? if drop drop 0 exit end
dup token-lexeme "(" string= 0 == if drop drop 0 exit end
@@ -26,34 +26,34 @@ begin
# default: append tok to cur
list-append
again
;
end
immediate
compile-only
: extend-syntax
word extend-syntax
"call-syntax-rewrite" set-token-hook
;
end
immediate
compile-only
: fn-op-prec
word fn-op-prec
dup "+" string= if drop 1 exit end
dup "-" string= if drop 1 exit end
dup "*" string= if drop 2 exit end
dup "/" string= if drop 2 exit end
dup "%" string= if drop 2 exit end
drop 0
;
end
compile-only
: fn-operator?
word fn-operator?
fn-op-prec 0 >
;
end
compile-only
: fn-check-dup
word fn-check-dup
>r # params (r: name)
0 # params idx
begin
@@ -66,10 +66,10 @@ begin
drop # drop comparison flag when no error
r> 1 + # params idx+1
again
;
end
compile-only
: fn-params
word fn-params
list-new # lexer params
swap # params lexer
>r # params (r: lexer)
@@ -90,17 +90,17 @@ begin
dup ")" string= if drop r> exit end
"expected ',' or ')' in parameter list" parse-error
again
;
end
compile-only
: fn-collect-body
word fn-collect-body
"{" lexer-expect drop # consume opening brace, keep lexer
lexer-collect-brace # lexer bodyTokens
swap drop # bodyTokens
;
end
compile-only
: fn-lexemes-from-tokens
word fn-lexemes-from-tokens
>r # (r: tokens)
list-new # acc
begin
@@ -114,10 +114,10 @@ begin
token-lexeme # acc lex
list-append # acc'
again
;
end
compile-only
: fn-validate-body
word fn-validate-body
dup list-length 0 == if "empty function body" parse-error end
dup 0 list-get token-lexeme "return" string= 0 == if "function body must start with 'return'" parse-error end
dup list-last ";" string= 0 == if "function body must terminate with ';'" parse-error end
@@ -125,11 +125,11 @@ compile-only
list-pop drop # body expr' (trim trailing ';')
list-pop-front drop # body expr (trim leading 'return')
dup list-length 0 == if "missing return expression" parse-error end
;
end
compile-only
: fn-filter-raw-body # bodyLexemes -- tokens
word fn-filter-raw-body # bodyLexemes -- tokens
list-new swap # out body
begin
dup list-empty? if
@@ -152,11 +152,11 @@ begin
r> # out' body'
continue
again
;
end
compile-only
: fn-body->tokens # bodyLexemes -- tokens
word fn-body->tokens # bodyLexemes -- tokens
dup list-length 0 == if "empty function body" parse-error end
dup 0 list-get token-lexeme "return" string= if
fn-validate-body # expr
@@ -165,10 +165,10 @@ compile-only
end
fn-filter-raw-body
dup list-length 0 == if "empty function body" parse-error end
;
end
compile-only
: fn-emit-prologue # params out -- params out
word fn-emit-prologue # params out -- params out
over list-length # params out n
begin
dup 0 > if
@@ -181,10 +181,10 @@ begin
drop # params out
exit
again
;
end
compile-only
: fn-emit-epilogue # params out -- out
word fn-emit-epilogue # params out -- out
over list-length >r # params out (r: n)
begin
r> dup 0 > if
@@ -196,30 +196,30 @@ begin
swap drop # out
exit
again
;
end
compile-only
: fn-translate-prologue-loop # count --
word fn-translate-prologue-loop # count --
dup 0 > if
1 -
0 rpick ">r" list-append drop
fn-translate-prologue-loop
end
drop
;
end
compile-only
: fn-translate-epilogue-loop # count --
word fn-translate-epilogue-loop # count --
dup 0 > if
1 -
0 rpick "rdrop" list-append drop
fn-translate-epilogue-loop
end
drop
;
end
compile-only
: fn-param-index # params name -- params idx flag
word fn-param-index # params name -- params idx flag
>r # params (r: name)
0 # params idx
@@ -238,11 +238,11 @@ begin
drop # params idx
1 + # params idx+1
again
;
end
compile-only
: fn-build-param-map # params -- params map
word fn-build-param-map # params -- params map
map-new # params map
0 # params map idx
begin
@@ -258,11 +258,11 @@ compile-only
r> 1 + # params map' idx'
continue
again
;
end
compile-only
: fn-translate-token # out map tok -- out map
word fn-translate-token # out map tok -- out map
# number?
dup string>number # out map tok num ok
if
@@ -299,11 +299,11 @@ compile-only
swap >r # out tok (r: map)
list-append # out'
r> # out' map
;
end
compile-only
: fn-translate-postfix-loop # map out postfix -- map out
word fn-translate-postfix-loop # map out postfix -- map out
begin
dup list-empty? if
drop
@@ -317,11 +317,11 @@ compile-only
r> # map out postfix'
continue
again
;
end
compile-only
: fn-translate-postfix # postfix params -- out
word fn-translate-postfix # postfix params -- out
swap # params postfix
list-new # params postfix out
@@ -341,15 +341,15 @@ compile-only
# drop map, emit epilogue
swap drop # params out
fn-emit-epilogue # out
;
end
compile-only
: fn-build-body
word fn-build-body
fn-translate-postfix # words
;
end
compile-only
: fn
word fn
"(),{};+-*/%," lexer-new # lexer
dup lexer-pop # lexer nameTok
dup >r # save nameTok
@@ -368,6 +368,6 @@ compile-only
r> drop # drop name string
r> # name token
swap emit-definition
;
end
immediate
compile-only

4
gg.sl
View File

@@ -2,6 +2,6 @@ import stdlib/io.sl
extern long labs(long n)
: main
word main
-3 labs puti
;
end

View File

@@ -1,6 +1,6 @@
import stdlib/stdlib.sl
import stdlib/io.sl
: main
word main
"hello world" puts
;
end

69
main.py
View File

@@ -201,6 +201,7 @@ class Definition:
body: List[Op]
immediate: bool = False
compile_only: bool = False
terminator: str = "end"
@dataclass
@@ -410,11 +411,19 @@ class Parser:
continue
lexeme = token.lexeme
if lexeme == ":":
self._begin_definition(token)
raise ParseError(
f"':' definitions are no longer supported; use 'word <name> ... end' at {token.line}:{token.column}"
)
if lexeme == "word":
self._begin_definition(token, terminator="end")
continue
if lexeme == ";":
self._end_definition(token)
if lexeme == "end":
if self.control_stack:
self._handle_end_control()
continue
if self._try_end_definition(token):
continue
raise ParseError(f"unexpected 'end' at {token.line}:{token.column}")
if lexeme == ":asm":
self._parse_asm_definition(token)
continue
@@ -439,13 +448,13 @@ class Parser:
if lexeme == "do":
self._handle_do_control()
continue
if lexeme == "end":
self._handle_end_control()
continue
if self._maybe_expand_macro(token):
continue
self._handle_token(token)
if self.macro_recording is not None:
raise ParseError("unterminated macro definition (missing ';')")
if len(self.context_stack) != 1:
raise ParseError("unclosed definition at EOF")
if self.control_stack:
@@ -593,7 +602,7 @@ class Parser:
def _handle_macro_recording(self, token: Token) -> bool:
if self.macro_recording is None:
return False
if token.lexeme == ";macro":
if token.lexeme == ";":
self._finish_macro_recording(token)
else:
self.macro_recording.tokens.append(token.lexeme)
@@ -638,7 +647,7 @@ class Parser:
def _finish_macro_recording(self, token: Token) -> None:
if self.macro_recording is None:
raise ParseError(f"unexpected ';macro' at {token.line}:{token.column}")
raise ParseError(f"unexpected ';' closing a macro at {token.line}:{token.column}")
macro_def = self.macro_recording
self.macro_recording = None
word = Word(name=macro_def.name)
@@ -715,11 +724,24 @@ class Parser:
self._append_op(Op(op="branch_zero", data=entry["end"]))
self._push_control(entry)
def _begin_definition(self, token: Token) -> None:
def _try_end_definition(self, token: Token) -> bool:
if len(self.context_stack) <= 1:
return False
ctx = self.context_stack[-1]
if not isinstance(ctx, Definition):
return False
if ctx.terminator != token.lexeme:
return False
self._end_definition(token)
return True
def _begin_definition(self, token: Token, terminator: str = "end") -> None:
if self._eof():
raise ParseError(f"definition name missing after ':' at {token.line}:{token.column}")
raise ParseError(
f"definition name missing after '{token.lexeme}' at {token.line}:{token.column}"
)
name_token = self._consume()
definition = Definition(name=name_token.lexeme, body=[])
definition = Definition(name=name_token.lexeme, body=[], terminator=terminator)
self.context_stack.append(definition)
word = self.dictionary.lookup(definition.name)
if word is None:
@@ -730,10 +752,14 @@ class Parser:
def _end_definition(self, token: Token) -> None:
if len(self.context_stack) <= 1:
raise ParseError(f"unexpected ';' at {token.line}:{token.column}")
raise ParseError(f"unexpected '{token.lexeme}' at {token.line}:{token.column}")
ctx = self.context_stack.pop()
if not isinstance(ctx, Definition):
raise ParseError("';' can only close definitions")
raise ParseError(f"'{token.lexeme}' can only close definitions")
if ctx.terminator != token.lexeme:
raise ParseError(
f"definition '{ctx.name}' expects terminator '{ctx.terminator}' but got '{token.lexeme}'"
)
word = self.definition_stack.pop()
ctx.immediate = word.immediate
ctx.compile_only = word.compile_only
@@ -1836,7 +1862,7 @@ def macro_compile_time(ctx: MacroContext) -> Optional[List[Op]]:
def macro_begin_text_macro(ctx: MacroContext) -> Optional[List[Op]]:
parser = ctx.parser
if parser._eof():
raise ParseError("macro name missing after 'macro:'")
raise ParseError("macro name missing after 'macro'")
name_token = parser.next_token()
param_count = 0
peek = parser.peek_token()
@@ -1850,14 +1876,6 @@ def macro_begin_text_macro(ctx: MacroContext) -> Optional[List[Op]]:
return None
def macro_end_text_macro(ctx: MacroContext) -> Optional[List[Op]]:
parser = ctx.parser
if parser.macro_recording is None:
raise ParseError("';macro' without matching 'macro:'")
# Actual closing handled in parser loop when ';macro' token is seen.
return None
def _struct_emit_definition(tokens: List[Token], template: Token, name: str, body: Sequence[str]) -> None:
def make_token(lexeme: str) -> Token:
return Token(
@@ -1868,11 +1886,11 @@ def _struct_emit_definition(tokens: List[Token], template: Token, name: str, bod
end=template.end,
)
tokens.append(make_token(":"))
tokens.append(make_token("word"))
tokens.append(make_token(name))
for lexeme in body:
tokens.append(make_token(lexeme))
tokens.append(make_token(";"))
tokens.append(make_token("end"))
class SplitLexer:
@@ -2521,8 +2539,7 @@ def bootstrap_dictionary() -> Dictionary:
dictionary.register(Word(name="immediate", immediate=True, macro=macro_immediate))
dictionary.register(Word(name="compile-only", immediate=True, macro=macro_compile_only))
dictionary.register(Word(name="compile-time", immediate=True, macro=macro_compile_time))
dictionary.register(Word(name="macro:", immediate=True, macro=macro_begin_text_macro))
dictionary.register(Word(name=";macro", immediate=True, macro=macro_end_text_macro))
dictionary.register(Word(name="macro", immediate=True, macro=macro_begin_text_macro))
dictionary.register(Word(name="struct:", immediate=True, macro=macro_struct_begin))
dictionary.register(Word(name=";struct", immediate=True, macro=macro_struct_end))
_register_compile_time_primitives(dictionary)

View File

@@ -2,14 +2,14 @@ import stdlib/stdlib.sl
import stdlib/io.sl
import fn.sl
: main
word main
2 40 +
puti cr
extend-syntax
foo(1, 2)
puti cr
0
;
end
fn foo(int a, int b){
return a + b;

View File

@@ -4,7 +4,7 @@
# and prints that much consequent elements
# from the stack while not modifying it
: dump
word dump
1 swap
for
dup pick
@@ -12,7 +12,7 @@
1 +
end
drop
;
end
# : int3 ( -- )
:asm int3 {

View File

@@ -86,10 +86,10 @@
# Output
extern int printf(char* fmt, double x)
: fput
word fput
"%f" drop swap printf drop
;
end
: fputln
word fputln
"%f\n" drop swap printf drop
;
end

View File

@@ -343,5 +343,6 @@
}
;
: cr 10 putc ;
: puts write_buf cr ;
word cr 10 putc end
word puts write_buf cr end

View File

@@ -1,6 +1,6 @@
import stdlib.sl
: alloc
word alloc
0 # addr hint (NULL)
swap # size
3 # prot (PROT_READ | PROT_WRITE)
@@ -8,8 +8,8 @@ import stdlib.sl
-1 # fd
0 # offset
mmap
;
end
: free
word free
munmap drop
;
end

View File

@@ -1,14 +1,14 @@
import stdlib/stdlib.sl
import stdlib/io.sl
: strcmp
word strcmp
3 pick 2 pick @ swap @ ==
;
end
: main
word main
"g" "g"
strcmp
puti cr
puts
puts
;
end

View File

@@ -1,7 +1,7 @@
import stdlib/stdlib.sl
import stdlib/io.sl
: strconcat
word strconcat
0 pick 3 pick +
dup
>r >r >r >r >r >r
@@ -25,9 +25,9 @@ import stdlib/io.sl
rot
drop
rdrop rdrop rdrop
;
end
: alloc
word alloc
0 # addr hint (NULL)
swap # size
3 # prot (PROT_READ | PROT_WRITE)
@@ -35,13 +35,13 @@ import stdlib/io.sl
-1 # fd
0 # offset
mmap
;
end
: free
word free
munmap drop
;
end
: strcpy #(dst_addr src_addr len -- dst_addr len)
word strcpy #(dst_addr src_addr len -- dst_addr len)
dup
>r
swap
@@ -66,10 +66,10 @@ import stdlib/io.sl
swap
nip
r> dup -rot - swap
;
end
: main
word main
"hello world hello world hello " "world hello world hello world"
strconcat
puts
;
end

4
t.sl
View File

@@ -8,9 +8,9 @@ fn foo(int a, int b){
return a b +;
}
: main
word main
extend-syntax
foo(3, 2)
puti cr
0
;
end

View File

@@ -2,22 +2,22 @@ import ../stdlib/stdlib.sl
import ../stdlib/io.sl
import ../stdlib/mem.sl
: test-mem-alloc
word test-mem-alloc
4096 alloc dup 1337 swap ! # allocate 4096 bytes, store 1337 at start
dup @ puti cr # print value at start
4096 free # free the memory
;
end
struct: Point
field x 8
field y 8
;struct
: main
word main
32 alloc # allocate 32 bytes (enough for a Point struct)
dup 111 swap Point.x!
dup 222 swap Point.y!
dup Point.x@ puti cr
Point.y@ puti cr
32 free # free the memory
;
end

View File

@@ -2,14 +2,14 @@ import ../stdlib/stdlib.sl
import ../stdlib/io.sl
import ../fn.sl
: main
word main
2 40 +
puti cr
extend-syntax
foo(1, 2)
puti cr
0
;
end
fn foo(int a, int b){
return a + b;

View File

@@ -2,7 +2,7 @@ import ../stdlib/stdlib.sl
import ../stdlib/io.sl
import ../stdlib/debug.sl
: main
word main
1 1 2dup 2dup puti cr puti cr
+
dup puti cr
@@ -15,12 +15,12 @@ import ../stdlib/debug.sl
r> 3 + puti
" numbers printed from the fibonaci sequence" puts
0
;
end
: main2
word main2
1 2 while over 100 < do
over puti cr
swap over +
end
;
end

View File

@@ -9,21 +9,21 @@ import ../fn.sl
}
;
macro: square
macro square
dup *
;macro
;
macro: defconst 2
: $1
macro defconst 2
word $1
$2
;
;macro
end
;
macro: defadder 3
: $1
macro defadder 3
word $1
$2 $3 +
;
;macro
end
;
defconst MAGIC 99
defadder add13 5 8
@@ -39,46 +39,46 @@ fn fancy_add(int a, int b){
return (a + b) * b;
}
: test-add
word test-add
5 7 + puti cr
;
end
: test-sub
word test-sub
10 3 - puti cr
;
end
: test-mul
word test-mul
6 7 * puti cr
;
end
: test-div
word test-div
84 7 / puti cr
;
end
: test-mod
word test-mod
85 7 % puti cr
;
end
: test-drop
word test-drop
10 20 drop puti cr
;
end
: test-dup
word test-dup
11 dup + puti cr
;
end
: test-swap
word test-swap
2 5 swap - puti cr
;
end
: test-store
word test-store
mem-slot dup
123 swap !
@ puti cr
;
end
: test-mmap
word test-mmap
0 # addr hint (NULL)
4096 # length (page)
3 # prot (PROT_READ | PROT_WRITE)
@@ -91,23 +91,23 @@ fn fancy_add(int a, int b){
dup
@ puti cr
4096 munmap drop
;
end
: test-macro
word test-macro
9 square puti cr
MAGIC puti cr
add13 puti cr
;
end
: test-if
word test-if
5 5 == if
111 puti cr
else
222 puti cr
end
;
end
: test-else-if
word test-else-if
2
dup 1 == if
50 puti cr
@@ -119,34 +119,34 @@ fn fancy_add(int a, int b){
end
end
drop
;
end
: test-for
word test-for
0
5 for
1 +
end
puti cr
;
end
: test-for-zero
word test-for-zero
123
0 for
drop
end
puti cr
;
end
: test-struct
word test-struct
mem-slot
dup 111 swap Point.x!
dup 222 swap Point.y!
dup Point.x@ puti cr
Point.y@ puti cr
Point.size puti cr
;
end
: test-cmp
word test-cmp
5 5 == puti cr
5 4 == puti cr
5 4 != puti cr
@@ -159,16 +159,16 @@ fn fancy_add(int a, int b){
6 5 <= puti cr
5 5 >= puti cr
4 5 >= puti cr
;
end
: test-c-fn
word test-c-fn
3
7
fancy_add()
puti cr
;
end
: main
word main
test-add
test-sub
test-mul
@@ -188,4 +188,4 @@ fn fancy_add(int a, int b){
test-struct
test-c-fn
0
;
end

View File

@@ -1,7 +1,7 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
"/tmp/l2_read_file_test.txt"
"read_file works\n"
write_file drop
@@ -33,4 +33,4 @@ import ../stdlib/io.sl
"unknown read_file failure" puts
dup # file_len file_len file_addr
exit # Exit with returned file_len as the program exit code (debug)
;
end

View File

@@ -1,7 +1,7 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
1024
read_stdin # returns (addr len)
dup 0 > if
@@ -10,4 +10,4 @@ import ../stdlib/io.sl
end
"read_stdin failed" puts
exit
;
end

View File

@@ -1,8 +1,8 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
"hello from write_buf test\n"
write_buf
0
;
end

View File

@@ -1,7 +1,7 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
"/tmp/l2_write_file_test.txt" # path
"hello from write_file test\n" # buffer
write_file
@@ -14,4 +14,4 @@ import ../stdlib/io.sl
"write failed errno=" puts
puti cr
exit
;
end

View File

@@ -1,7 +1,7 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
10
while
dup 0 >
@@ -10,4 +10,4 @@ import ../stdlib/io.sl
1 -
end
drop
;
end

View File

@@ -1,7 +1,7 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
0
5 for
1 +
@@ -10,4 +10,4 @@ import ../stdlib/io.sl
5 5 == puti cr
5 4 == puti cr
0
;
end

View File

@@ -1,9 +1,9 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
mem 5 swap !
mem 8 + 6 swap !
mem @ puti cr
mem 8 + @ puti cr
;
end

View File

@@ -1,12 +1,12 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: dup
word dup
6
;
end
compile-only
: emit-overridden
word emit-overridden
"dup" use-l2-ct
42
dup
@@ -17,12 +17,12 @@ compile-only
swap
list-append
inject-tokens
;
end
immediate
compile-only
: main
word main
emit-overridden
puti cr
0
;
end

View File

@@ -1,9 +1,9 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
: main
word main
"hello world" puts
"line1\nline2" puts
"" puts
0
;
end

View File

@@ -0,0 +1 @@
7

12
tests/word_syntax.sl Normal file
View File

@@ -0,0 +1,12 @@
import ../stdlib/stdlib.sl
import ../stdlib/io.sl
word add-two
+
end
word main
3 4 add-two
puti cr
0
end

1
tests/word_syntax.test Normal file
View File

@@ -0,0 +1 @@
python main.py tests/word_syntax.sl -o /tmp/word_syntax > /dev/null && /tmp/word_syntax