commit
This commit is contained in:
221
stdlib.sl
221
stdlib.sl
@@ -47,211 +47,6 @@ puts_finish_digits:
|
||||
}
|
||||
;
|
||||
|
||||
: extend-syntax
|
||||
enable-call-syntax
|
||||
;
|
||||
immediate
|
||||
compile-only
|
||||
|
||||
:py fn {
|
||||
# Characters that terminate an identifier/number and become their own
# one-character tokens inside a 'fn' definition.  (The original literal
# listed ',' twice; frozenset also guards against accidental mutation.)
FN_SPLIT_CHARS = frozenset("(){};,+-*/%")
|
||||
|
||||
def split_token(token):
    """Split *token* into sub-tokens at FN_SPLIT_CHARS delimiters.

    Each delimiter character becomes its own one-character token; runs of
    text between delimiters become tokens as well.  Position fields
    (line/column/start/end) are offset from the original token so later
    diagnostics still point at the right source location.
    """
    text = token.lexeme
    pieces = []

    def emit(begin, stop):
        # Build a sub-token covering text[begin:stop] at the right offsets.
        pieces.append(Token(
            lexeme=text[begin:stop],
            line=token.line,
            column=token.column + begin,
            start=token.start + begin,
            end=token.start + stop,
        ))

    pos = 0
    length = len(text)
    while pos < length:
        if text[pos] in FN_SPLIT_CHARS:
            emit(pos, pos + 1)
            pos += 1
        else:
            begin = pos
            while pos < length and text[pos] not in FN_SPLIT_CHARS:
                pos += 1
            emit(begin, pos)

    return [piece for piece in pieces if piece.lexeme]
|
||||
|
||||
class FnLexer:
    """Lookahead lexer that re-splits the parser's token stream for 'fn'.

    Tokens pulled from the underlying parser are broken apart on
    FN_SPLIT_CHARS (via split_token) and buffered, so punctuation glued to
    identifiers can be consumed one piece at a time.  Unconsumed pieces can
    be pushed back into the parser when the macro is done.
    """

    def __init__(self, parser):
        self.parser = parser
        self.buffer = []  # pending split tokens, consumed front-first

    def _fill(self):
        # Pull parser tokens until at least one piece is buffered; a token
        # may split into nothing (all-empty lexemes), so keep looping.
        while not self.buffer:
            if self.parser._eof():
                raise ParseError("unexpected EOF inside fn definition")
            self.buffer.extend(split_token(self.parser.next_token()))

    def peek(self):
        """Return the next piece without consuming it."""
        self._fill()
        return self.buffer[0]

    def pop(self):
        """Consume and return the next piece."""
        self._fill()
        return self.buffer.pop(0)

    def expect(self, lexeme):
        """Consume the next piece, which must equal *lexeme*."""
        token = self.pop()
        if token.lexeme != lexeme:
            raise ParseError(f"expected '{lexeme}' but found '{token.lexeme}'")
        return token

    def push_back_remaining(self):
        """Splice any still-buffered pieces back into the parser's stream."""
        if self.buffer:
            pos = self.parser.pos
            self.parser.tokens[pos:pos] = self.buffer
            self.buffer = []

    def collect_block_tokens(self):
        """Consume pieces up to the '}' matching an already-consumed '{'.

        Nested brace pairs are tracked; the final closing '}' itself is
        discarded, everything else is returned in order.
        """
        depth = 1
        collected = []
        while True:
            token = self.pop()
            if token.lexeme == "{":
                depth += 1
            elif token.lexeme == "}":
                depth -= 1
                if depth == 0:
                    return collected
            collected.append(token)
|
||||
|
||||
# Binary operator precedence for the shunting-yard pass; a larger value
# binds more tightly.  All operators are left-associative.
OP_PRECEDENCE = {
    "+": 1,
    "-": 1,
    "*": 2,
    "/": 2,
    "%": 2,
}
|
||||
|
||||
def parse_fn_body(tokens):
    """Validate a fn body and return its return-expression lexemes.

    The body must be exactly ``return <expr> ;``; the lexemes strictly
    between 'return' and the trailing ';' are returned.  Tokens with empty
    lexemes are ignored.

    Raises:
        ParseError: when the body is empty or malformed.
    """
    if not tokens:
        raise ParseError("empty function body")
    lexemes = [tok.lexeme for tok in tokens if tok.lexeme]
    head = lexemes[0] if lexemes else None
    if head != "return":
        raise ParseError("function body must start with 'return'")
    if lexemes[-1] != ";":
        raise ParseError("function body must terminate with ';'")
    expression = lexemes[1:-1]
    if not expression:
        raise ParseError("missing return expression")
    return expression
|
||||
|
||||
def shunting_yard(tokens, precedence=None):
    """Convert an infix lexeme list to postfix (RPN) order.

    Dijkstra's shunting-yard algorithm, restricted to parentheses and
    left-associative binary operators.

    Args:
        tokens: iterable of lexeme strings ("(", ")", operators, operands).
        precedence: optional mapping of operator lexeme -> binding power;
            defaults to the module-level OP_PRECEDENCE table, so existing
            callers are unaffected.

    Returns:
        List of lexemes in postfix order.

    Raises:
        ParseError: on unbalanced parentheses.
    """
    if precedence is None:
        precedence = OP_PRECEDENCE
    output = []
    stack = []
    for token in tokens:
        if token == "(":
            stack.append(token)
        elif token == ")":
            while stack and stack[-1] != "(":
                output.append(stack.pop())
            if not stack:
                raise ParseError("mismatched parentheses in return expression")
            stack.pop()  # discard the matching "("
        elif token in precedence:
            # Left-associative: pop operators of equal or higher precedence.
            while (stack and stack[-1] in precedence
                   and precedence[stack[-1]] >= precedence[token]):
                output.append(stack.pop())
            stack.append(token)
        else:
            output.append(token)
    while stack:
        top = stack.pop()
        if top == "(":
            raise ParseError("mismatched parentheses in return expression")
        output.append(top)
    return output
|
||||
|
||||
def is_int_literal(text):
    """Return True when *text* parses as a Python integer literal.

    Base 0 is used, so decimal, 0x/0o/0b-prefixed, and signed forms all
    count.
    """
    try:
        int(text, 0)
    except ValueError:
        return False
    return True
|
||||
|
||||
def translate_postfix(postfix, params):
    """Translate a postfix lexeme list into stack-machine words.

    Parameter names become ``<index> rpick`` pairs that fetch the argument
    by position; every other token (integer literals and operator words)
    passes through unchanged.

    Args:
        postfix: list of lexeme strings in postfix order.
        params: ordered list of parameter names; position defines the
            rpick index.

    Returns:
        List of word strings for the generated definition body.
    """
    indices = {name: idx for idx, name in enumerate(params)}
    translated = []
    for token in postfix:
        if token in indices:
            translated.append(str(indices[token]))
            translated.append("rpick")
        else:
            # Integer literals and operator words are emitted verbatim.
            # The original code special-cased int literals but emitted
            # them identically to the fall-through, so that branch (and
            # its is_int_literal call) was redundant and is removed.
            translated.append(token)
    return translated
|
||||
|
||||
def macro(ctx):
    """Expand a ``fn name(int a, int b, ...) { return <expr>; }`` definition.

    Parses C-style function syntax out of the live token stream and injects
    an equivalent stack-word definition back into the parser.  Only 'int'
    parameters and a single 'return <infix-expr>;' body are supported.
    """
    parser = ctx.parser
    # 'fn' may only appear at module scope, not inside another definition.
    if not isinstance(parser.context_stack[-1], Module):
        raise ParseError("'fn' definitions must be top-level")
    lexer = FnLexer(parser)
    # Function name comes first.
    name_token = lexer.pop()
    name = name_token.lexeme
    if not is_identifier(name):
        raise ParseError("invalid function name for 'fn'")
    # Parameter list: '(' [int name {',' int name}] ')'.
    lexer.expect("(")
    params = []
    if lexer.peek().lexeme != ")":
        while True:
            type_token = lexer.pop()
            if type_token.lexeme != "int":
                raise ParseError("only 'int' parameters are supported in fn definitions")
            param_token = lexer.pop()
            if not is_identifier(param_token.lexeme):
                raise ParseError("invalid parameter name in fn definition")
            params.append(param_token.lexeme)
            # A comma continues the list; anything else ends it.
            if lexer.peek().lexeme == ",":
                lexer.pop()
                continue
            break
    lexer.expect(")")
    # Body: '{' ... '}' with nested braces handled by the lexer.
    lexer.expect("{")
    body_tokens = lexer.collect_block_tokens()
    # Return any over-read pieces to the parser before generating code.
    lexer.push_back_remaining()
    if len(params) != len(set(params)):
        raise ParseError("duplicate parameter names in fn definition")
    # Body must be exactly 'return <expr> ;'; convert the infix expression
    # to postfix, then to stack words.
    return_tokens = parse_fn_body(body_tokens)
    postfix = shunting_yard(return_tokens)
    body_words = []
    # One '>r' per parameter moves each argument to the return stack,
    # where the 'rpick' words emitted by translate_postfix index them.
    for _ in reversed(params):
        body_words.append(">r")
    body_words.extend(translate_postfix(postfix, params))
    # One 'rdrop' per parameter discards the saved arguments afterwards.
    for _ in params:
        body_words.append("rdrop")
    generated = []
    emit_definition(generated, name_token, name, body_words)
    ctx.inject_token_objects(generated)
|
||||
}
|
||||
;
|
||||
|
||||
:asm dup {
|
||||
mov rax, [r12]
|
||||
sub r12, 8
|
||||
@@ -264,6 +59,13 @@ compile-only
|
||||
}
|
||||
;
|
||||
|
||||
:asm over {
|
||||
mov rax, [r12 + 8]
|
||||
sub r12, 8
|
||||
mov [r12], rax
|
||||
}
|
||||
;
|
||||
|
||||
:asm swap {
|
||||
mov rax, [r12]
|
||||
mov rbx, [r12 + 8]
|
||||
@@ -453,6 +255,15 @@ compile-only
|
||||
}
|
||||
;
|
||||
|
||||
:asm pick {
|
||||
mov rcx, [r12]
|
||||
add r12, 8
|
||||
mov rax, [r12 + rcx * 8]
|
||||
sub r12, 8
|
||||
mov [r12], rax
|
||||
}
|
||||
;
|
||||
|
||||
:asm rpick {
|
||||
mov rcx, [r12]
|
||||
add r12, 8
|
||||
|
||||
Reference in New Issue
Block a user