Simon Génier pushed to branch source-spans at Stefan / Typer
Commits: 252f6038 by Simon Génier at 2021-04-26T16:33:24-04:00 Introduce swappable backends for Typer.
The backends themselves are objects which must implement the new backend abstract class. I also shuffled some code, mostly in the REPL module, so that it is more focused on evaluating interactive code. Finally, I moved code that was more concerned with loading regular Typer code into typer.ml and elab.ml.
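To make the interface concrete, here is a minimal sketch of a backend seen from the outside. Only Backend.backend, process_decls and ldecls come from this patch; the decl_counter class is purely illustrative (the real interactive implementation is the ast_interpreter added to src/eval.ml below).

class decl_counter = object
  inherit Backend.backend

  (* Number of top-level declarations consumed so far. *)
  val mutable count = 0

  (* Consume one block of mutually recursive declarations. *)
  method process_decls (ldecls : Lexp.ldecls) =
    count <- count + List.length ldecls

  method count = count
end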
- - - - - 33b205c5 by Simon Génier at 2021-04-26T16:34:24-04:00 Add tests for the lexer.
In addition to the tests themselves, I added a few functions that were necessary. In particular, there are now Pretoken.equal in Prelexer and Location.equal in Source, which are equality predicates on pretokens and locations.
I also added support in Sexp.sexp_string for printing the location along with its associated sexp. This is useful in tests when we check that the locations are correct; otherwise we would print two identical sexps!
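As an illustration, a lexer test can now compare results location by location and still print a readable failure message. Pretoken.equal, Location.equal, sexp_location and sexp_string ~print_locations come from this push; sexp_equal is assumed to be the existing structural comparison, and the two helpers themselves are only a sketch.

let check_pretoken expected actual =
  if not (Prelexer.Pretoken.equal expected actual)
  then failwith "pretokens differ"

let check_sexp expected actual =
  let same_sexp = Sexp.sexp_equal expected actual in
  let same_location =
    Source.Location.equal
      (Sexp.sexp_location expected)
      (Sexp.sexp_location actual)
  in
  if not (same_sexp && same_location)
  then
    failwith
      (Printf.sprintf "expected %s but got %s"
         (Sexp.sexp_string ~print_locations:true expected)
         (Sexp.sexp_string ~print_locations:true actual))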
- - - - - ea5cae0f by Simon Génier at 2021-04-28T13:29:47-04:00 Merge branch 'lexer-tests'
- - - - - 9723f3cf by Simon Génier at 2021-04-28T13:30:29-04:00 Merge branch 'backend-object'
- - - - - ac6a4e23 by Simon Génier at 2021-04-28T13:38:26-04:00 Consolidate location related code in Source.
The purpose of this changeset is to take all the location-related code and move it into the Source module. There are many changed lines, but most of them are simple renamings. I rewrote a few places to use printf instead of a series of prints, which I think is clearer. The only externally visible change is that I merged two functions that printed locations in different ways: some debug code used to print something like "Ln 32, cl 1", but now prints "foo.typer:32:1" like everywhere else.
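The merged printer amounts to something along these lines (a sketch only; the exact record fields at this point in the series are an assumption, since an end point is only added a few commits later):

let to_string (location : t) : string =
  Printf.sprintf "%s:%d:%d" location.file location.line location.column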
- - - - - d1365c0a by Simon Génier at 2021-04-28T13:38:26-04:00 Have the source tell its own location.
Move the code that keeps track of the current line and column inside of the source object. Not only does this make sense conceptually since the source is a kind of cursor, but it will allow us to simplify the lexer code.
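The source thus behaves like a cursor: callers save a point, advance over characters, and ask for the slice between the two, which comes back with its location already computed. A rough sketch of a consumer, using the method names as they appear by the end of this series (the word-reading logic itself is illustrative):

let read_word (source : #Source.t) : string * Source.Location.t =
  (* Remember where the word starts, advance over its characters, then let
     the source compute both the text and its location. *)
  let start_point = source#point in
  let rec loop () =
    match source#peek with
    | Some c when c > ' ' -> source#advance; loop ()
    | _ -> source#slice start_point
  in
  loop ()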
- - - - - 11cc9eea by Simon Génier at 2021-04-28T13:38:26-04:00 Rename Pretoken -> Presymbol.
- - - - - d1a89df5 by Simon Génier at 2021-04-28T13:38:26-04:00 Rewrite the Lexer module in terms of source objects.
This might not look useful, but it will greatly simplify our next change, which adds an end point to location records, since all the location-related code will then be in the source objects.
I ended up rewriting most of the Lexer. I tried to avoid doing that, but the offset-handling code was so mixed up with the rest that doing so gave worse code and a diff that was just as unreadable. I'm pretty confident that the changes are OK, since we are still able to load pervasives.typer and I wrote a few tests for the trickier inner operators.
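One of those inner-operator tests might look roughly like this; the lex_string helper is hypothetical, but the pipeline it wraps (source_string, prelex, then lex with the default token environment) is the one used by the new code:

open Sexp

let lex_string (input : string) : sexp list =
  let source = new Source.source_string input in
  Lexer.lex Grammar.default_stt (Prelexer.prelex source)

let test_inner_operator () =
  match lex_string "Module.elem" with
  | [Node (Symbol (_, "__.__"), [Symbol (_, "Module"); Symbol (_, "elem")])]
    -> ()
  | _ -> failwith "expected (__.__ Module elem)"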
- - - - - 5a115607 by Simon Génier at 2021-04-28T13:38:26-04:00 Shorten the names of a few methods on Source.t.
- - - - - 3b64afeb by Simon Génier at 2021-04-28T13:38:26-04:00 Replace raw offsets with points which also track line and column.
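The Point.t record itself does not appear in this diff; given how points are created and compared in the lexer changes below, a plausible shape (an assumption, not the actual definition) is:

module Point = struct
  type t =
    { offset : int    (* byte offset into the source *)
    ; line : int
    ; column : int
    }

  let equal (l : t) (r : t) : bool =
    l.offset = r.offset && l.line = r.line && l.column = r.column
end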
- - - - - 2c1bec80 by Simon Génier at 2021-04-28T13:38:26-04:00 Compute location inside the source from the given point.
- - - - - 7c2bfa89 by Simon Génier at 2021-04-28T13:38:26-04:00 Documentation comments are now an option.
- - - - - 5bb2dddd by Simon Génier at 2021-04-28T13:38:26-04:00 Locations track their end point.
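The resulting location record uses the field names visible throughout the diff below; it is sketched here for reference only, since the real definition lives in Source together with Location.dummy, Location.equal and Location.to_string:

module Location = struct
  type t =
    { file : string
    ; start_line : int
    ; start_column : int
    ; end_line : int
    ; end_column : int
    ; doc : string option
    }
end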
- - - - -
26 changed files:
- debug_util.ml
- src/REPL.ml
- + src/backend.ml
- src/debruijn.ml
- src/debug.ml
- src/elab.ml
- src/elexp.ml
- src/env.ml
- src/eval.ml
- src/heap.ml
- src/inverse_subst.ml
- src/lexer.ml
- src/lexp.ml
- src/listx.ml
- src/log.ml
- src/opslexp.ml
- src/option.ml
- src/prelexer.ml
- src/sexp.ml
- src/source.ml
- src/unification.ml
- src/util.ml
- tests/dune
- + tests/lexer_test.ml
- tests/unify_test.ml
- typer.ml
Changes:
===================================== debug_util.ml ===================================== @@ -33,7 +33,6 @@ open Typerlib
(* Utilities *) -open Util open Fmt open Debug
@@ -56,7 +55,7 @@ open Builtin open Debruijn open Env
-let dloc = dummy_location +let dloc = Source.Location.dummy let dummy_decl = Imm(String(dloc, "Dummy"))
let discard _v = ()
===================================== src/REPL.ml ===================================== @@ -36,20 +36,16 @@ this program. If not, see http://www.gnu.org/licenses/. *) * * -------------------------------------------------------------------------- *)
-open Util +open Backend +open Debruijn +open Eval open Fmt - -open Prelexer open Lexer -open Sexp open Lexp +open Prelexer +open Sexp +open Util
-open Eval - -open Grammar - -open Env -open Debruijn module OL = Opslexp module EL = Elexp
@@ -58,7 +54,7 @@ let print_input_line i = ralign_print_int i 2; print_string "] >> "
-let ieval_error = Log.log_error ~section:"IEVAL" +let error = Log.log_error ~section:"REPL"
let print_and_clear_log () = if (not Log.typer_log_config.Log.print_at_log) then @@ -98,84 +94,6 @@ let rec read_input i =
loop "" i
-(* Interactive mode is not usual typer - It makes things easier to test out code *) -type lexpr = lexp - -(* Grouping declaration together will enable us to support mutually recursive - * declarations while bringing us closer to normal typer *) -let ipexp_parse (sxps: sexp list): (sexp list * sexp list) = - let rec pxp_parse sxps dacc pacc = - match sxps with - | [] -> (List.rev dacc), (List.rev pacc) - | sxp::tl -> match sxp with - (* Declaration *) - | Node (Symbol (_, ("_=_" | "_:_")), [Symbol _s; _t]) -> - pxp_parse tl (sxp :: dacc) pacc - - (* f arg1 arg2 = function body; *) - | Node (Symbol (_, "_=_"), [Node (Symbol _s, _args); _t]) -> - pxp_parse tl (sxp :: dacc) pacc - - (* Expression *) - | _ -> pxp_parse tl dacc (sxp::pacc) in - pxp_parse sxps [] [] - - -let ierase_type - (lctx : DB.lexp_context) - (ldecls, lexprs : ldecl list list * lexpr list) - : (vname * EL.elexp) list list * EL.elexp list = - - let lctx', eldecls = Listx.fold_left_map OL.clean_decls lctx ldecls in - let elexprs = List.map (OL.erase_type lctx') lexprs in - eldecls, elexprs - -let ilexp_parse pexps lctx: ((ldecl list list * lexpr list) * elab_context) = - let pdecls, pexprs = pexps in - (* FIXME We take the parsed input here but we should take the - unparsed tokens directly instead *) - let ldecls, lctx = Elab.lexp_p_decls pdecls [] lctx in - let lexprs = Elab.lexp_parse_all pexprs lctx in - List.iter (fun lxp -> ignore (OL.check (ectx_to_lctx lctx) lxp)) - lexprs; - (ldecls, lexprs), lctx - -let ieval source ectx rctx = - let ieval' lexps rctx = - let (ldecls, lexprs) = lexps in - let rctx = eval_decls_toplevel ldecls rctx in - let vals = eval_all lexprs rctx false in - vals, rctx in - - let pres = prelex source in - let sxps = lex default_stt pres in - (* FIXME: This is too eager: it prevents one declaration from changing - * the grammar used in subsequent declarations. *) - let nods = sexp_parse_all_to_list (ectx_to_grm ectx) sxps (Some ";") in - - (* Different from usual typer *) - let pxps = ipexp_parse nods in - let lxps, ectx' = ilexp_parse pxps ectx in - let elxps = ierase_type (ectx_to_lctx ectx) lxps in - let v, rctx = ieval' elxps rctx in - v, ectx', rctx - -let raw_eval source ectx rctx = - let pres = prelex source in - let sxps = lex default_stt pres in - let lxps, ectx' = Elab.lexp_p_decls [] sxps ectx in - let _, elxps = Listx.fold_left_map OL.clean_decls (ectx_to_lctx ectx) lxps in - (* At this point, `elxps` is a `(vname * elexp) list list`, where: - * - each `(vname * elexp)` is a definition - * - each `(vname * elexp) list` is a list of definitions which can - * refer to each other (i.e. they can be mutually recursive). - * - hence the overall "list of lists" is a sequence of such - * blocs of mutually-recursive definitions. *) - let rctx = eval_decls_toplevel elxps rctx in - (* This is for consistency with ieval *) - [], ectx', rctx - let help_msg = " %quit (%q) : leave REPL %who (%w) : print runtime environment @@ -187,80 +105,103 @@ let help_msg = %help (%h) : print help "
- -let readfiles files (i, lctx, rctx) prt = - (* Read specified files *) - List.fold_left (fun (i, lctx, rctx) file -> - - (if prt then ( - print_string " In["; ralign_print_int i 2; print_string "] >> "; - print_string ("%readfile " ^ file); print_string "\n";)); - - try - let source = new Source.source_file file in - let (ret, lctx, rctx) = raw_eval source lctx rctx in - (List.iter (print_eval_result i) ret; (i + 1, lctx, rctx)) - with - Sys_error _ -> ( - ieval_error ("file "" ^ file ^ "" does not exist."); - (i, lctx, rctx)) - ) - (i, lctx, rctx) files - +(* Elaborate Typer source from an interactive session. Like "normal" Typer, + declarations are grouped in blocks that are mutually recursive, but unlike + "normal" Typer, expressions are also accepted in addition to declarations. + + Note that all declarations are pulled before expressions, i.e. an expression + can refer to variable introduced by later declarations. *) +let eval_interactive + (interactive : #interactive_backend) + (i : int) + (ectx : elab_context) + (input : string) + : elab_context = + + let classify_sexps nodes = + let rec loop sexps decls exprs = + match sexps with + | [] -> (List.rev decls, List.rev exprs) + | sexp :: sexps + -> (match sexp with + | Node (Symbol (_, ("_=_" | "_:_")), [Symbol _; _]) + -> loop sexps (sexp :: decls) exprs + | Node (Symbol (_, ("_=_")), [Node _; _]) + -> loop sexps (sexp :: decls) exprs + | _ + -> loop sexps decls (sexp :: exprs)) + in loop nodes [] [] + in + + let source = new Source.source_string input in + let pretokens = prelex source in + let tokens = lex Grammar.default_stt pretokens in + (* FIXME: This is too eager: it prevents one declaration from changing the + grammar used in subsequent declarations. *) + let sexps = sexp_parse_all_to_list (ectx_to_grm ectx) tokens (Some ";") in + let decls, exprs = classify_sexps sexps in + + (* FIXME We take the parsed input here but we should take the unparsed tokens + directly instead *) + let ldecls, ectx' = Elab.lexp_p_decls decls [] ectx in + + let lexprs = Elab.lexp_parse_all exprs ectx' in + List.iter (fun lexpr -> ignore (OL.check (ectx_to_lctx ectx') lexpr)) lexprs; + + List.iter interactive#process_decls ldecls; + print_and_clear_log (); + + let values = List.map interactive#eval_expr lexprs in + List.iter (Eval.print_eval_result i) values; + print_and_clear_log (); + + ectx'
(* Specials commands %[command-name] [args] *) -let rec repl i clxp rctx = - let repl = repl (i + 1) in +let rec repl i (interactive : #interactive_backend) (ectx : elab_context) = let ipt = try read_input i with End_of_file -> "%quit" in + let ectx = match ipt with (* Check special keywords *) - | "%quit" | "%q" -> () - | "%help" | "%h" -> (print_string help_msg; repl clxp rctx) - | "%calltrace" | "%ct" -> (print_eval_trace None; repl clxp rctx) - | "%typertrace" | "%tt" -> (print_typer_trace None; repl clxp rctx) - | "%lcollisions" | "%cl" -> (get_stats_hashtbl (WHC.stats hc_table)) + | "%quit" | "%q" -> exit 0 + | "%help" | "%h" -> (print_string help_msg; ectx) + | "%calltrace" | "%ct" -> (print_eval_trace None; ectx) + | "%typertrace" | "%tt" -> (print_typer_trace None; ectx) + | "%lcollisions" | "%cl" -> (get_stats_hashtbl (WHC.stats hc_table); ectx)
(* command with arguments *) | _ when (ipt.[0] = '%' && ipt.[1] != ' ') -> ( match (str_split ipt ' ') with | "%readfile"::args -> - let (_i, clxp, rctx) = - try - readfiles args (i, clxp, rctx) false - with Log.Stop_Compilation msg -> - (handle_stopped_compilation msg; (i,clxp,rctx)) - in - repl clxp rctx; + let ectx = List.fold_left (Elab.eval_file interactive) ectx args in + print_and_clear_log (); + ectx | "%who"::args | "%w"::args -> ( let _ = match args with - | ["all"] -> dump_rte_ctx rctx - | _ -> print_rte_ctx rctx in - repl clxp rctx) + | ["all"] -> interactive#dump_rte_ctx + | _ -> interactive#print_rte_ctx in + ectx) | "%info"::args | "%i"::args -> ( let _ = match args with - | ["all"] -> dump_lexp_ctx (ectx_to_lctx clxp) - | _ -> print_lexp_ctx (ectx_to_lctx clxp) in - repl clxp rctx) + | ["all"] -> dump_lexp_ctx (ectx_to_lctx ectx) + | _ -> print_lexp_ctx (ectx_to_lctx ectx) in + ectx)
| cmd::_ -> - ieval_error (" \"" ^ cmd ^ "\" is not a correct repl command"); - repl clxp rctx - | _ -> repl clxp rctx) + error (" \"" ^ cmd ^ "\" is not a correct repl command"); + ectx + | _ -> ectx)
(* eval input *) | _ -> - try - let source = new Source.source_string ipt in - let (ret, clxp, rctx) = (ieval source clxp rctx) in - print_and_clear_log (); - List.iter (print_eval_result i) ret; - repl clxp rctx - with + try eval_interactive interactive i ectx ipt with | Log.Stop_Compilation msg -> - (handle_stopped_compilation msg; repl clxp rctx) + (handle_stopped_compilation msg; ectx) | Log.Internal_error msg -> (handle_stopped_compilation ("Internal error: " ^ msg); - repl clxp rctx) + ectx) | Log.User_error msg -> (handle_stopped_compilation ("Fatal user error: " ^ msg); - repl clxp rctx) + ectx) + in + repl (i + 1) interactive ectx
===================================== src/backend.ml ===================================== @@ -0,0 +1,48 @@ +(* Copyright (C) 2021 Free Software Foundation, Inc. + * + * Author: Simon Génier simon.genier@umontreal.ca + * Keywords: languages, lisp, dependent types. + * + * This file is part of Typer. + * + * Typer is free software; you can redistribute it and/or modify it under the + * terms of the GNU General Public License as published by the Free Software + * Foundation, either version 3 of the License, or (at your option) any later + * version. + * + * Typer is distributed in the hope that it will be useful, but WITHOUT ANY + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more + * details. + * + * You should have received a copy of the GNU General Public License along + * with this program. If not, see http://www.gnu.org/licenses/. *) + +(** A backend is a consumer of fully elaborated and checked lexps. This module + provides a superclass for all backends. *) + +open Lexp + +type value = Env.value_type + +class virtual backend = object + (* Processes (interpret or compile, depending on the backend) a block of + mutually recursive declarations. *) + method virtual process_decls : ldecls -> unit + + method interactive : interactive_backend option = None +end + +(* Backends may optionally allow interactive evaluation, i.e. lexps can be + immediately evaluated as opposed to compiled for evaluation later. *) +and virtual interactive_backend = object (self) + inherit backend + + method! interactive = Some (self :> interactive_backend) + + method virtual eval_expr : lexp -> value + + method virtual print_rte_ctx : unit + + method virtual dump_rte_ctx : unit +end
===================================== src/debruijn.ml ===================================== @@ -81,7 +81,7 @@ let fatal = Log.log_fatal ~section:"DEBRUIJN" (* Type definitions * ---------------------------------- *)
-let dloc = dummy_location +let dloc = Source.Location.dummy let type_level_sort = mkSort (dloc, StypeLevel) let sort_omega = mkSort (dloc, StypeOmega) let type_level = mkBuiltin ((dloc, "TypeLevel"), type_level_sort)
===================================== src/debug.ml ===================================== @@ -44,17 +44,17 @@ open Lexp let debug_pretokens_print pretoken = print_string " "; let print_info msg loc = - print_string msg; - print_string "["; loc_print loc; print_string "]\t" in + Printf.printf "%s[%s]\t" msg (Source.Location.to_string loc); + in
match pretoken with - | Preblock(loc, pts,_) + | Preblock(loc, pts) -> print_info "Preblock: " loc; print_string "{"; pretokens_print pts; print_string " }"
- | Pretoken(loc, str) - -> print_info "Pretoken: " loc; - print_string ("'" ^ str ^ "'"); + | Presymbol (loc, name) + -> print_info "Pretoken: " loc; + print_string ("'" ^ name ^ "'");
| Prestring(loc, str) -> print_info "Prestring: " loc; @@ -67,13 +67,13 @@ let debug_pretokens_print_all pretokens = (* Sexp Print *) let debug_sexp_print sexp = let print_info msg loc = - print_string msg; - print_string "["; loc_print loc; print_string "]\t" in + Printf.printf "%s[%s]\t" msg (Source.Location.to_string loc); + in match sexp with | Symbol(_, "") -> print_string "Epsilon " (* "ε" *)
- | Block(loc, pts, _) + | Block(loc, pts) -> print_info "Block: " loc; print_string "{"; pretokens_print pts; print_string " }"
@@ -111,10 +111,8 @@ let debug_pexp_print ptop = print_string " "; let l = sexp_location ptop in let print_info msg loc pex = - print_string msg; print_string "["; - loc_print loc; - print_string "]\t"; - sexp_print pex in + Printf.printf "%s[%s]\t" msg (Source.Location.to_string loc); + sexp_print pex in print_info (sexp_name ptop) l ptop
let debug_lexp_decls decls = @@ -124,7 +122,7 @@ let debug_lexp_decls decls =
print_string " "; lalign_print_string (lexp_name lxp) 15; - print_string "["; loc_print loc; print_string "]"; + Printf.printf "[%s]" (Source.Location.to_string loc);
let str = lexp_str_decls (!debug_ppctx) [e] in
===================================== src/elab.ml ===================================== @@ -61,7 +61,7 @@ module OL = Opslexp module EL = Elexp
(* dummies *) -let dloc = dummy_location +let dloc = Source.Location.dummy
let parsing_internals = ref false let btl_folder = @@ -98,7 +98,7 @@ type sform_type = | Lazy (* Hasn't looked as the requested type, nor inferred a type. *)
type special_forms_map = - (elab_context -> location -> sexp list -> ltype option + (elab_context -> Source.Location.t -> sexp list -> ltype option -> (lexp * sform_type)) SMap.t
let special_forms : special_forms_map ref = ref SMap.empty @@ -1008,10 +1008,11 @@ and lexp_parse_inductive ctors ctx = * The actual expression doesn't matter, as long as its * scoping is right: we only use it so we can pass it to * things like `fv` and `meta_to_var`. *) - let altacc = List.fold_right - (fun (ak, n, t) aa - -> mkArrow (ak, n, t, dummy_location, aa)) - acc impossible in + let altacc = + List.fold_right + (fun (ak, n, t) aa + -> mkArrow (ak, n, t, Source.Location.dummy, aa)) + acc impossible in let g = generalize nctx altacc in let altacc' = g (fun _ne vname t l e -> mkArrow (Aerasable, vname, t, l, e)) @@ -1208,7 +1209,7 @@ and lexp_decls_1 (tokens : token list) (* Rest of input *) (ectx : elab_context) (* External ctx. *) (nctx : elab_context) (* New context. *) - (pending_decls : location SMap.t) (* Pending type decls. *) + (pending_decls : Source.Location.t SMap.t) (* Pending type decls. *) (pending_defs : (symbol * sexp) list) (* Pending definitions. *) : (vname * lexp * ltype) list * sexp list * token list * elab_context =
@@ -1288,12 +1289,16 @@ and lexp_decls_1 | [Symbol (l, vname); sexp] -> if SMap.mem vname pending_decls then let decl_loc = SMap.find vname pending_decls in - let v = ({file = l.file; - line = l.line; - column = l.column; - docstr = String.concat "\n" [decl_loc.docstr; - l.docstr]}, - vname) in + let v = + let doc = + match decl_loc.doc, l.doc with + | Some (doc), Some (doc') + -> Some (Printf.sprintf "%s\n%s" doc doc') + | Some (doc), None | None, Some (doc) -> Some (doc) + | None, None -> None; + in + ({l with doc}, vname) + in let pending_decls = SMap.remove vname pending_decls in let pending_defs = ((v, sexp) :: pending_defs) in if SMap.is_empty pending_decls then @@ -1338,6 +1343,12 @@ and lexp_decls_1 pending_decls pending_defs in (Log.stop_on_error (); res))
+(* Why is the return value a list of list? + - each `(vname * lexp * ltype)` is a definition + - each `(vname * lexp * ltype) list` is a list of definitions which can refer + to each other (i.e. they can be mutually recursive). + - hence the overall "list of lists" is a sequence of such blocs of + mutually-recursive definitions. *) and lexp_p_decls (sdecls : sexp list) (tokens : token list) (ctx : elab_context) : ((vname * lexp * ltype) list list * elab_context) = let rec impl sdecls tokens ctx = @@ -1512,7 +1523,7 @@ let sform_immediate ctx loc sargs ot = | [(String _) as se] -> mkImm (se), Inferred DB.type_string | [(Integer _) as se] -> mkImm (se), Inferred DB.type_int | [(Float _) as se] -> mkImm (se), Inferred DB.type_float - | [Block (_sl, pts, _el)] + | [Block (_sl, pts)] -> let grm = ectx_get_grammar ctx in let tokens = lex default_stt pts in let (se, _) = sexp_parse_all grm tokens None in @@ -1605,7 +1616,7 @@ let rec sform_lambda kind ctx loc sargs ot = | Symbol arg -> (elab_p_id arg, None) | _ -> sexp_error (sexp_location sarg) "Unrecognized lambda argument"; - ((dummy_location, None), None) in + ((Source.Location.dummy, None), None) in
let olt1 = match ost1 with | Some st -> Some (infer_type st ctx arg) @@ -1943,3 +1954,21 @@ let eval_decl_str str ectx rctx = let lctx = ectx_to_lctx ectx in let _, elxps = Listx.fold_left_map (OL.clean_decls) lctx lxps in (EV.eval_decls_toplevel elxps rctx), ectx' + +let eval_file + (backend : #Backend.backend) + (ectx : elab_context) + (file_name : string) + : elab_context = + + try + let source = new Source.source_file file_name in + let pretokens = prelex source in + let tokens = lex Grammar.default_stt pretokens in + let ldecls, ectx' = lexp_p_decls [] tokens ectx in + List.iter backend#process_decls ldecls; + ectx' + with + | Sys_error _ + -> (error (Printf.sprintf {|file %s does not exist.|} file_name); + ectx)
===================================== src/elexp.ml ===================================== @@ -33,14 +33,13 @@
open Sexp (* Sexp type *)
-module U = Util module L = Lexp
-type vname = U.vname -type vref = U.vref +type vname = Util.vname +type vref = Util.vref type label = symbol
-module SMap = U.SMap +module SMap = Util.SMap
type elexp = (* A constant, either string, integer, or float. *) @@ -53,7 +52,7 @@ type elexp = | Var of vref
(* Recursive `let` binding. *) - | Let of U.location * (vname * elexp) list * elexp + | Let of Source.Location.t * (vname * elexp) list * elexp
(* An anonymous function. *) | Lambda of vname * elexp @@ -71,8 +70,8 @@ type elexp = * Case (l, e, branches, default) * tests the value of `e`, and either selects the corresponding branch * in `branches` or branches to the `default`. *) - | Case of U.location * elexp - * (U.location * vname list * elexp) SMap.t + | Case of Source.Location.t * elexp + * (Source.Location.t * vname list * elexp) SMap.t * (vname * elexp) option
(* A Type expression. There's no useful operation we can apply to it,
===================================== src/env.ml ===================================== @@ -40,7 +40,7 @@ module L = Lexp module BI = Z (* Was Big_int *) module DB = Debruijn
-let dloc = Util.dummy_location +let dloc = Source.Location.dummy
let fatal = Log.log_fatal ~section:"ENV" let warning = Log.log_warning ~section:"ENV"
===================================== src/eval.ml ===================================== @@ -51,7 +51,7 @@ module Prelexer = Prelexer (* prelex_string *)
type eval_debug_info = elexp list * elexp list
-let dloc = dummy_location +let dloc = Source.Location.dummy let global_eval_trace = ref ([], []) let global_eval_ctx = ref make_runtime_ctx (* let eval_max_recursion_depth = ref 23000 *) @@ -62,7 +62,7 @@ let error loc ?print_action msg = Log.internal_error msg
type builtin_function = - location -> eval_debug_info -> value_type list -> value_type + Source.Location.t -> eval_debug_info -> value_type list -> value_type
(** A map of the function name to its implementation and arity. *) let builtin_functions : (builtin_function * int) SMap.t ref = ref SMap.empty @@ -355,11 +355,11 @@ let make_block loc _depth args_val = match args_val with (* From what would we like to make a block? *) | [Vstring str] -> let source = new Source.source_string str in - Vsexp (Block (loc, (Prelexer.prelex source), loc)) + Vsexp (Block (loc, (Prelexer.prelex source))) | _ -> error loc "Sexp.block expects one string as argument"
let reader_parse loc _depth args_val = match args_val with - | [Velabctx ectx; Vsexp (Block (_,toks,_))] -> + | [Velabctx ectx; Vsexp (Block (_, toks))] -> let grm = ectx_get_grammar ectx in o2v_list (sexp_parse_all_to_list grm (Lexer.lex default_stt toks) (Some ";")) | [Velabctx _; s] -> (warning loc "Reader.parse do nothing without a Block"; s) @@ -632,7 +632,7 @@ and sexp_dispatch loc depth args = | String (_ , s) -> eval_call str [Vstring s] | Integer (_ , i) -> eval_call it [Vinteger i] | Float (_ , f) -> eval_call flt [Vfloat f] - | Block (_ , _, _) as b -> + | Block (_ , _) as b -> (* I think this code breaks what Blocks are. *) (* We delay parsing but parse with default_stt and default_grammar... *) (*let toks = Lexer.lex default_stt s in @@ -678,7 +678,7 @@ and print_trace title trace default = (* Now eval trace and elab trace are the same *) let print_trace = (fun type_name type_string type_loc i expr -> (* Print location info *) - print_string (" [" ^ (loc_string (type_loc expr)) ^ "] "); + Printf.printf " [%s] " (Source.Location.to_string (type_loc expr));
(* Print call trace visualization *) Fmt.print_ct_tree i; print_string "+- "; @@ -784,9 +784,9 @@ let debug_doc loc _depth args_val = match args_val with | [Vstring name; Velabctx ectx] -> (try let idx = senv_lookup name ectx in let elem = lctx_lookup (ectx_to_lctx ectx) - (((dummy_location, Some name), idx)) in + (((Source.Location.dummy, Some name), idx)) in match elem with - | ((l,_),_,_) -> Vstring (l.docstr) + | ((l,_),_,_) -> Vstring (Option.value ~default:"" l.doc) with _ -> Vstring "element not found") | _ -> error loc "Elab.debug_doc takes a String and an Elab_Context as arguments"
@@ -797,13 +797,15 @@ let is_bound loc _depth args_val = match args_val with | _ -> error loc "Elab.isbound takes an Elab_Context and a String as arguments"
let constructor_p name ectx = - try let idx = senv_lookup name ectx in - (* Use `lexp_whnf` so that `name` can be indirectly - * defined as a constructor - * (e.g. as in `let foo = cons in case foo x xs | ...` *) - match OL.lexp'_whnf (mkVar ((dummy_location, Some name), idx)) (ectx_to_lctx ectx) with - | Cons _ -> true (* It's indeed a constructor! *) - | _ -> false + try + let idx = senv_lookup name ectx in + (* Use `lexp_whnf` so that `name` can be indirectly + * defined as a constructor + * (e.g. as in `let foo = cons in case foo x xs | ...` *) + let var = mkVar ((Source.Location.dummy, Some name), idx) in + match OL.lexp'_whnf var (ectx_to_lctx ectx) with + | Cons _ -> true (* It's indeed a constructor! *) + | _ -> false with Senv_Lookup_Fail _ -> false
let erasable_p name nth ectx = @@ -814,14 +816,16 @@ let erasable_p name nth ectx = | (k, _, _) -> k = Aerasable ) else false | _ -> false in - try let idx = senv_lookup name ectx in - match OL.lexp'_whnf (mkVar ((dummy_location, Some name), idx)) (ectx_to_lctx ectx) with - | Cons (e, _) when is_var e - -> (match (env_lookup_expr ectx (get_var e)) with - | Some i when is_inductive i - -> is_erasable (get_inductive_ctor (get_inductive i)) - | _ -> false) - | _ -> false + try + let idx = senv_lookup name ectx in + let var = mkVar ((Source.Location.dummy, Some name), idx) in + match OL.lexp'_whnf var (ectx_to_lctx ectx) with + | Cons (e, _) when is_var e + -> (match (env_lookup_expr ectx (get_var e)) with + | Some i when is_inductive i + -> is_erasable (get_inductive_ctor (get_inductive i)) + | _ -> false) + | _ -> false with Senv_Lookup_Fail _ -> false
let erasable_p2 t name ectx = @@ -835,14 +839,16 @@ let erasable_p2 t name ectx = | _ -> false) args) | _ -> false in - try let idx = senv_lookup t ectx in - match OL.lexp'_whnf (mkVar ((dummy_location, Some t), idx)) (ectx_to_lctx ectx) with - | Cons (e, _) when is_var e - -> (match (env_lookup_expr ectx (get_var e)) with - | Some i when is_inductive i - -> is_erasable (get_inductive_ctor (get_inductive i)) - | _ -> false) - | _ -> false + try + let idx = senv_lookup t ectx in + let var = mkVar ((Source.Location.dummy, Some t), idx) in + match OL.lexp'_whnf var (ectx_to_lctx ectx) with + | Cons (e, _) when is_var e + -> (match (env_lookup_expr ectx (get_var e)) with + | Some i when is_inductive i + -> is_erasable (get_inductive_ctor (get_inductive i)) + | _ -> false) + | _ -> false with Senv_Lookup_Fail _ -> false
let nth_ctor_arg name nth ectx = @@ -853,14 +859,16 @@ let nth_ctor_arg name nth ectx = | _ -> "_" | exception (Failure _) -> "_" ) | _ -> "_" in - try let idx = senv_lookup name ectx in - match OL.lexp'_whnf (mkVar ((dummy_location, Some name), idx)) (ectx_to_lctx ectx) with - | Cons (e, _) when is_var e - -> (match (env_lookup_expr ectx (get_var e)) with - | Some i when is_inductive i - -> find_nth (get_inductive_ctor (get_inductive i)) - | _ -> "_") - | _ -> "_" + try + let idx = senv_lookup name ectx in + let var = mkVar ((Source.Location.dummy, Some name), idx) in + match OL.lexp'_whnf var (ectx_to_lctx ectx) with + | Cons (e, _) when is_var e + -> (match (env_lookup_expr ectx (get_var e)) with + | Some i when is_inductive i + -> find_nth (get_inductive_ctor (get_inductive i)) + | _ -> "_") + | _ -> "_" with Senv_Lookup_Fail _ -> "_"
let ctor_arg_pos name arg ectx = @@ -874,14 +882,16 @@ let ctor_arg_pos name arg ectx = | None -> (-1) | Some n -> n ) | _ -> (-1) in - try let idx = senv_lookup name ectx in - match OL.lexp'_whnf (mkVar ((dummy_location, Some name), idx)) (ectx_to_lctx ectx) with - | Cons (e, _) when is_var e - -> (match (env_lookup_expr ectx (get_var e)) with - | Some i when is_inductive i - -> find_arg (get_inductive_ctor (get_inductive i)) - | _ -> (-1)) - | _ -> (-1) + try + let idx = senv_lookup name ectx in + let var = mkVar ((Source.Location.dummy, Some name), idx) in + match OL.lexp'_whnf var (ectx_to_lctx ectx) with + | Cons (e, _) when is_var e + -> (match (env_lookup_expr ectx (get_var e)) with + | Some i when is_inductive i + -> find_arg (get_inductive_ctor (get_inductive i)) + | _ -> (-1)) + | _ -> (-1) with Senv_Lookup_Fail _ -> (-1)
let is_constructor loc _depth args_val = match args_val with @@ -969,8 +979,8 @@ let test_info loc _depth args_val = match args_val with | _ -> error loc "Test.info takes two String as argument"
let test_location loc _depth args_val = match args_val with - | [_] -> Vstring (loc.file ^ ":" ^ string_of_int loc.line - ^ ":" ^ string_of_int loc.column) + | [_] + -> Vstring (Source.Location.to_string loc) | _ -> error loc "Test.location takes a Unit as argument"
let test_true loc _depth args_val = match args_val with @@ -1187,3 +1197,23 @@ let from_lctx (lctx: lexp_context): runtime_env = (* build a rctx from a ectx. *) let from_ectx (ctx: elab_context): runtime_env = from_lctx (ectx_to_lctx ctx) + +class ast_interpreter lctx = object + inherit Backend.interactive_backend + + val mutable rctx = from_lctx lctx + val mutable lctx = lctx + + method process_decls ldecls = + let lctx', eldecls = Opslexp.clean_decls lctx ldecls in + rctx <- eval_decls eldecls rctx; + lctx <- lctx' + + method eval_expr lexp = + let elexp = Opslexp.erase_type lctx lexp in + debug_eval elexp rctx + + method print_rte_ctx = Env.print_rte_ctx rctx + + method dump_rte_ctx = Env.dump_rte_ctx rctx +end
===================================== src/heap.ml ===================================== @@ -27,7 +27,7 @@ open Lexp
module IMap = Util.IMap
-type location = Util.location +type location = Source.Location.t type symbol = Sexp.symbol type value = Env.value_type
@@ -37,7 +37,7 @@ type addr = int let error ~(location : location) (message : string) : 'a = Log.log_fatal ~section:"HEAP" ~loc:location message
-let dloc = Util.dummy_location +let dloc = Source.Location.dummy let type0 = Debruijn.type0 let type_datacons_label = mkBuiltin ((dloc, "DataconsLabel"), type0) let type_heap = mkBuiltin ((dloc, "Heap"), type_arrow_0)
===================================== src/inverse_subst.ml ===================================== @@ -94,7 +94,7 @@ let sizeOf (s: (int * int) list): int = List.length s let counter = ref 0 let mkVar (idx: int) : lexp = counter := !counter + 1; - mkVar ((U.dummy_location, None), idx) + mkVar ((Source.Location.dummy, None), idx)
(** Fill the gap between e_i in the list of couple (e_i, i) by adding dummy variables.
===================================== src/lexer.ml ===================================== @@ -27,12 +27,6 @@ open Grammar
(*************** The Lexer phase *********************)
-let digit_p char = - let code = Char.code char - in Char.code '0' <= code && code <= Char.code '9' - -type num_part = | NPint | NPfrac | NPexp - let unescape str = let rec split b = if b >= String.length str then [] @@ -41,108 +35,198 @@ let unescape str = string_sub str b e :: split (e + 1) in String.concat "" (split 0)
-let nexttoken (stt : token_env) (pts : pretoken list) bpos cpos - (* The returned Sexp may not be a Node. *) - : sexp * pretoken list * bytepos * charpos = - match pts with - | [] -> (Log.internal_error "No next token!") - | (Preblock (sl, bpts, el) :: pts) -> (Block (sl, bpts, el), pts, 0, 0) - | (Prestring (loc, str) :: pts) -> (String (loc, str), pts, 0, 0) - | (Pretoken ({file;line;column;docstr}, name) :: pts') - -> let char = name.[bpos] in - if digit_p char - || (char = '-' (* FIXME: Handle '+' as well! *) - && bpos + 1 < String.length name - && digit_p (name.[bpos + 1])) then - let rec lexnum bp cp (np : num_part) = - if bp >= String.length name then - ((if np == NPint then - Integer ({file;line;column=column+cpos;docstr=docstr}, - Z.of_string (string_sub name bpos bp)) - else - Float ({file;line;column=column+cpos;docstr=docstr}, - float_of_string (string_sub name bpos bp))), - pts', 0, 0) - else - match name.[bp] with - | ('0'|'1'|'2'|'3'|'4'|'5'|'6'|'7'|'8'|'9') - -> lexnum (bp+1) (cp+1) np - | '.' when np == NPint -> lexnum (bp+1) (cp+1) NPfrac - | ('e'|'E') when not (np == NPexp) -> lexnum (bp+1) (cp+1) NPexp - | ('+'|'-') - when np == NPexp && (name.[bp-1] == 'e' || name.[bp-1] == 'E') - -> lexnum (bp+1) (cp+1) NPexp - | _ - -> ((if np == NPint then - Integer ({file;line;column=column+cpos;docstr=docstr}, - Z.of_string (string_sub name bpos bp)) - else - Float ({file;line;column=column+cpos;docstr=docstr}, - float_of_string (string_sub name bpos bp))), - pts, bp, cp) - in lexnum (bpos+1) (cpos+1) NPint - else if bpos + 1 >= String.length name then - (hSymbol ({file;line;column=column+cpos;docstr=docstr}, - string_sub name bpos (String.length name)), - pts', 0, 0) - else if stt.(Char.code name.[bpos]) = CKseparate then - (hSymbol ({file;line;column=column+cpos;docstr=docstr}, - string_sub name bpos (bpos + 1)), - pts, bpos+1, cpos+1) - else - let rec lexsym bpos cpos = - let mksym epos escaped - = if epos = bpos then epsilon {file;line;column=column+cpos;docstr=docstr} else - let rawstr = string_sub name bpos epos in - let str = if escaped then unescape rawstr else rawstr in - hSymbol ({file;line;column=column+cpos;docstr=docstr}, str) in - let rec lexsym' prec lf bp cp escaped = - if bp >= String.length name then - (lf (mksym (String.length name) escaped), pts', 0, 0) - else - let char = name.[bp] in - let bp' = bp + 1 in - let is_last = bp' >= String.length name in - match stt.(Char.code char) with - | _ when char == '\' && not is_last - (* Skip next char, in case it's a special token. *) - (* For utf-8, this cp+2 is risky but actually works: \ counts - * as 1 and if the input is valid utf-8 the next byte has to - * be a leading byte, so it has to count as 1 as well ;-) *) - -> lexsym' prec lf (bp+2) (cp+2) true - | CKseparate -> (lf (mksym bp escaped), pts, bp, cp) - (* Turn `inner` infix operators, such as the "." of - * "Module.elem" into the equivalent of (__.__ Module elem). *) - | CKinner prec' - (* To be considered `inner`, an operator char needs to have - * something on its LHS or its RHS, otherwise, treat it as - * a normal char, `.` can be a normal operator as well. 
*) - when bpos != bp - || (not is_last - && CKseparate != (stt.(Char.code name.[bp']))) - || not (lf dummy_epsilon = dummy_epsilon) - -> let left = mksym bp escaped in - let op = hSymbol ({file;line;column=column+cp;docstr=docstr}, - "__" ^ String.sub name bp 1 ^ "__") in - let bpos = bp' in - let cpos = inc_cp cp char in - let lf' = if prec' > prec then - (fun s -> lf (Node (op, [left; s]))) - else - (fun s -> Node (op, [lf left; s])) in - lexsym bpos cpos prec' lf' - bpos cpos false - | _ -> lexsym' prec lf (bp+1) (inc_cp cp char) escaped - in lexsym' - in lexsym bpos cpos 0 (fun s -> s) bpos cpos false - -let lex tenv (pts : pretoken list) : sexp list = - let rec gettokens pts bpos cpos acc = - match pts with +let lex_number + (source : #Source.t) + (start_point : Source.Point.t) + (doc : string option) + : sexp = + + let rec lex_exponent () = + match source#peek with + | Some ('0' .. '9') + -> source#advance; + lex_exponent () + + | _ + -> let source, location = source#slice ?doc start_point in + Float (location, float_of_string source) + in + + let lex_exponent_sign () = + match source#peek with + | Some ('0' .. '9' | '+' | '-') + -> source#advance; + lex_exponent () + + (* TODO: should it be an error when there are no signs or digits after the + 'e'? *) + | _ + -> let source, location = source#slice ?doc start_point in + Float (location, float_of_string source) + in + + let rec lex_fractional () = + match source#peek with + | Some ('0' .. '9') + -> source#advance; + lex_fractional () + + | Some ('e' | 'E') + -> source#advance; + lex_exponent_sign () + + | _ + -> let source, location = source#slice ?doc start_point in + Float (location, float_of_string source) + in + + let rec lex_integer () = + match source#peek with + | Some ('0' .. '9') + -> source#advance; + lex_integer () + + | Some ('.') + -> source#advance; + lex_fractional () + + | Some ('e' | 'E') + -> source#advance; + lex_exponent_sign () + + | _ + -> let source, location = source#slice ?doc start_point in + Integer (location, Z.of_string source) + in + + lex_integer () + +(* Splits one symbol from the beginning of the source. *) +let lex_symbol + (token_env : token_env) + (source : #Source.t) + (doc : string option) + (start_point : Source.Point.t) + : sexp = + + let mksym start_point escaped = + if Source.Point.equal start_point source#point + then epsilon (Source.Location.of_point source#file_name start_point ?doc) + else + let raw_name, location = source#slice start_point ?doc in + let name = if escaped then unescape raw_name else raw_name in + hSymbol (location, name) + in + + let rec loop start_point prec lf (escaped : bool) = + match source#peek with + | None + -> lf (mksym start_point escaped) + + | Some c + -> (match token_env.(Char.code c) with + | CKseparate + -> lf (mksym start_point escaped) + + | CKinner prec' + -> let left = mksym start_point escaped in + let op_start_point = source#point in + + (* To be considered `inner`, an operator char needs to have + something on its LHS or its RHS, otherwise, treat it as a + normal char. `.` can be the normal function composition + operator as well. 
*) + let lhs_p = not (Source.Point.equal start_point source#point) in + source#advance; + let rhs_p = + (match source#peek with + | None -> false + | Some c' -> CKseparate != token_env.(Char.code c')) + in + if lhs_p || rhs_p || lf dummy_epsilon <> dummy_epsilon + then + let op_text, op_location = source#slice ?doc op_start_point in + let op = hSymbol (op_location, Printf.sprintf "__%s__" op_text) in + let lf' = + if prec' > prec + then fun s -> lf (Node (op, [left; s])) + else fun s -> Node (op, [lf left; s]) + in + loop source#point prec' lf' false + else + loop start_point prec lf escaped + + | CKnormal + -> (source#advance; + let is_last = Option.is_none source#peek in + match c with + (* Skip next char, in case it's a special token. For utf-8, + simply advancing is risky but actually works: '' counts as 1 + and if the input is valid utf-8 the next byte has to be a + leading byte, so it has to count as 1 as well ;-) *) + | '\' when not is_last + -> source#advance; + loop start_point prec lf true + + | _ -> loop start_point prec lf escaped)) + in + loop start_point 0 (fun s -> s) false + +let split_presymbol + (token_env : token_env) + (source : #Source.t) + (doc : string option) + : sexp list = + + let rec loop (acc : sexp list) : sexp list = + match source#peek with + | None -> List.rev acc + | Some (c) + -> let start_point = source#point in + let token = + match c with + | '-' + -> (* Could be a negative number literal, or a symbol starting with + '-'. *) + (source#advance; + match source#peek with + | Some ('0' .. '9') + -> lex_number source start_point doc + | _ -> lex_symbol token_env source doc start_point) + + | '0' .. '9' + -> (source#advance; + lex_number source start_point doc) + + | _ when token_env.(Char.code c) = CKseparate + -> source#advance; + let name, location = source#slice ?doc start_point in + hSymbol (location, name) + + | _ + -> lex_symbol token_env source doc start_point + in + loop (token :: acc) + in + loop [] + +let lex (token_env : token_env) (pretokens : pretoken list) : sexp list = + let rec loop pretokens acc = + match pretokens with | [] -> List.rev acc - | _ -> let (tok, pts, bpos, cpos) = nexttoken tenv pts bpos cpos - in gettokens pts bpos cpos (tok :: acc) in - gettokens pts 0 0 [] + + | Preblock (location, block_pretokens) :: pretokens' + -> loop pretokens' (Block (location, block_pretokens) :: acc) + + | Prestring (location, text) :: pretokens' + -> loop pretokens' (String (location, text) :: acc) + + | Presymbol ({file; start_line; start_column; doc; _}, name) :: pretokens' + -> let source = new Source.source_string ~file ~line:start_line ~column:start_column name in + let tokens = split_presymbol token_env source doc in + loop pretokens' (List.rev_append tokens acc) + in + loop pretokens []
let lex_source (source : #Source.t) tenv = let pretoks = prelex source in @@ -151,4 +235,3 @@ let lex_source (source : #Source.t) tenv = let sexp_parse_source (source : #Source.t) tenv grm limit = let toks = lex_source source tenv in sexp_parse_all_to_list grm toks limit -
===================================== src/lexp.ml ===================================== @@ -63,22 +63,22 @@ type ltype = lexp and lexp' = | Imm of sexp (* Used for strings, ... *) | SortLevel of sort_level - | Sort of U.location * sort + | Sort of Source.Location.t * sort | Builtin of symbol * ltype | Var of vref | Susp of lexp * subst (* Lazy explicit substitution: e[σ]. *) (* This "Let" allows recursion. *) - | Let of U.location * (vname * lexp * ltype) list * lexp - | Arrow of arg_kind * vname * ltype * U.location * ltype + | Let of Source.Location.t * (vname * lexp * ltype) list * lexp + | Arrow of arg_kind * vname * ltype * Source.Location.t * ltype | Lambda of arg_kind * vname * ltype * lexp | Call of lexp * (arg_kind * lexp) list (* Curried call. *) - | Inductive of U.location * label + | Inductive of Source.Location.t * label * ((arg_kind * vname * ltype) list) (* formal Args *) * ((arg_kind * vname * ltype) list) SMap.t | Cons of lexp * symbol (* = Type info * ctor_name *) - | Case of U.location * lexp + | Case of Source.Location.t * lexp * ltype (* The type of the return value of all branches *) - * (U.location * (arg_kind * vname) list * lexp) SMap.t + * (Source.Location.t * (arg_kind * vname) list * lexp) SMap.t * (vname * lexp) option (* Default. *) (* The `subst` will be applied to the the metavar's value when it * gets instantiated. *) @@ -118,6 +118,7 @@ type ltype = lexp | SLlub of lexp * lexp
type ldecl = vname * lexp * ltype +type ldecls = ldecl list
type varbind = | Variable @@ -505,7 +506,7 @@ let rec lexp_location e = | Sort (l,_) -> l | SortLevel (SLsucc e) -> lexp_location e | SortLevel (SLlub (e, _)) -> lexp_location e - | SortLevel SLz -> U.dummy_location + | SortLevel SLz -> Source.Location.dummy | Imm s -> sexp_location s | Var ((l,_),_) -> l | Builtin ((l, _), _) -> l @@ -523,7 +524,7 @@ let rec lexp_location e =
(********* Normalizing a term *********)
-let vdummy = (U.dummy_location, None) +let vdummy = (Source.Location.dummy, None) let maybename n = match n with None -> "<anon>" | Some v -> v let sname (l,n) = (l, maybename n)
@@ -655,8 +656,8 @@ let clean e = | _ -> mkMetavar (idx, s, name) in clean S.identity e
-let sdatacons = Symbol (U.dummy_location, "##datacons") -let stypecons = Symbol (U.dummy_location, "##typecons") +let sdatacons = Symbol (Source.Location.dummy, "##datacons") +let stypecons = Symbol (Source.Location.dummy, "##typecons")
(* ugly printing (sexp_print (pexp_unparse (lexp_unparse e))) *) let rec lexp_unparse lxp = @@ -692,14 +693,14 @@ let rec lexp_unparse lxp = -> (* (vdef * lexp * ltype) list *) let sdecls = List.fold_left (fun acc (vdef, lxp, ltp) - -> Node (Symbol (U.dummy_location, "_=_"), + -> Node (Symbol (Source.Location.dummy, "_=_"), [Symbol (sname vdef); lexp_unparse ltp]) - :: Node (Symbol (U.dummy_location, "_=_"), + :: Node (Symbol (Source.Location.dummy, "_=_"), [Symbol (sname vdef); lexp_unparse lxp]) :: acc) [] ldecls in Node (Symbol (loc, "let_in_"), - [Node (Symbol (U.dummy_location, "_;_"), sdecls); + [Node (Symbol (Source.Location.dummy, "_;_"), sdecls); lexp_unparse body])
| Call(lxp, largs) -> (* (arg_kind * lexp) list *) @@ -768,7 +769,7 @@ let rec lexp_unparse lxp = -> Symbol (loc, "?" ^ (maybename name) ^ "-" ^ string_of_int idx ^ "[" ^ subst_string subst ^ "]")
- | SortLevel (SLz) -> Symbol (U.dummy_location, "##TypeLevel.z") + | SortLevel (SLz) -> Symbol (Source.Location.dummy, "##TypeLevel.z") | SortLevel (SLsucc l) -> Node (Symbol (lexp_location l, "##TypeLevel.succ"), [lexp_unparse l])
===================================== src/listx.ml ===================================== @@ -44,3 +44,10 @@ let fold_left_map loop fold_acc (new_element :: map_acc) bs in loop i [] bs + +(* Backport from 4.12. *) +let rec equal (p : 'a -> 'a -> bool) (ls : 'a list) (rs : 'a list) : bool = + match ls, rs with + | l :: ls', r :: rs' -> p l r && equal p ls' rs' + | _ :: _, [] | [], _ :: _ -> false + | [], [] -> true
===================================== src/log.ml ===================================== @@ -88,7 +88,7 @@ type log_entry = { kind : string option; section : string option; print_action : (unit -> unit) option; - loc : location option; + loc : Source.Location.t option; msg : string; }
@@ -106,11 +106,6 @@ let mkEntry level ?kind ?section ?print_action ?loc msg = let log_push (entry : log_entry) = typer_log := entry::(!typer_log)
-let string_of_location (loc : location) = - (loc.file ^ ":" - ^ string_of_int loc.line ^ ":" - ^ string_of_int loc.column ^ ":") - let maybe_color_string color_opt str = match color_opt with | Some color -> Fmt.color_string color str @@ -119,7 +114,7 @@ let maybe_color_string color_opt str = let string_of_log_entry {level; kind; section; loc; msg; _} = let color = if typer_log_config.color then (level_color level) else None in let parens s = "(" ^ s ^ ")" in - (option_default "" (option_map string_of_location loc) + (option_default "" (option_map Source.Location.to_string loc) ^ maybe_color_string color (option_default (string_of_level level) kind) ^ ":" ^ option_default "" (option_map parens section) ^ " " ^ msg)
===================================== src/opslexp.ml ===================================== @@ -601,7 +601,7 @@ and check'' erased ctx e = | Imm (Float (_, _)) -> DB.type_float | Imm (Integer (_, _)) -> DB.type_int | Imm (String (_, _)) -> DB.type_string - | Imm (Block (_, _, _) | Symbol _ | Node (_, _)) + | Imm (Block (_, _) | Symbol _ | Node (_, _)) -> (error_tc ~loc:(lexp_location e) "Unsupported immediate value!"; DB.type_int) | SortLevel SLz -> DB.type_level @@ -1005,7 +1005,7 @@ and get_type ctx e = | Imm (Float (_, _)) -> DB.type_float | Imm (Integer (_, _)) -> DB.type_int | Imm (String (_, _)) -> DB.type_string - | Imm (Block (_, _, _) | Symbol _ | Node (_, _)) -> DB.type_int + | Imm (Block (_, _) | Symbol _ | Node (_, _)) -> DB.type_int | Builtin (_, t) -> t | SortLevel _ -> DB.type_level | Sort (l, Stype e) -> mkSort (l, Stype (mkSortLevel (mkSLsucc e))) @@ -1226,7 +1226,7 @@ let erase_type lctx lxp = Log.log_fatal ~print_action:(fun () -> IMap.iter (fun i (_, t, _, (l, n)) -> - print_endline ("\t" ^ (Log.string_of_location l) + print_endline ("\t" ^ (Source.Location.to_string l) ^ " ?" ^ (U.option_default "" n) ^ "[" ^ (string_of_int i) ^ "] : " ^ (lexp_string t)) ) mvs) @@ -1256,7 +1256,7 @@ let ctx2tup ctx nctx = | DB.CVfix (_, nctx) as bloc -> get_blocs nctx (bloc :: blocs) | _ -> assert false in let rec mk_lets_and_tup blocs types = - let loc = U.dummy_location in + let loc = Source.Location.dummy in match blocs with | [] -> let cons_name = "cons" in
===================================== src/option.ml ===================================== @@ -35,3 +35,13 @@ let value ~(default : 'a) (o : 'a option) : 'a = match o with | Some v -> v | None -> default + +let is_some (o : 'a option) : bool = + match o with + | Some _ -> true + | None -> false + +let is_none (o : 'a option) : bool = + match o with + | Some _ -> false + | None -> true
===================================== src/prelexer.ml ===================================== @@ -25,10 +25,24 @@ open Util let prelexer_error loc = Log.log_error ~section:"PRELEXER" ~loc
type pretoken = - | Pretoken of location * string - | Prestring of location * string - (* | Preerror of location * string *) - | Preblock of location * pretoken list * location + | Presymbol of Source.Location.t * string + | Prestring of Source.Location.t * string + | Preblock of Source.Location.t * pretoken list + +module Pretoken = struct + type t = pretoken + + (* Equality up to location, i.e. the location is not considered. *) + let rec equal (l : t) (r : t) = + match l, r with + | Presymbol (_, l_name), Presymbol (_, r_name) + -> String.equal l_name r_name + | Prestring (_, l_text), Prestring (_, r_text) + -> String.equal l_text r_text + | Preblock (_, l_inner), Preblock (_, r_inner) + -> Listx.equal equal l_inner r_inner + | _ -> false +end
(*************** The Pre-Lexer phase *********************)
@@ -46,153 +60,157 @@ type pretoken = (* FIXME: Add syntax for char constants (maybe 'c'). *) (* FIXME: Handle multiline strings. *)
-let inc_cp (cp:charpos) (c:char) = - (* Count char positions in utf-8: don't count the non-leading bytes. *) - if utf8_head_p c then cp+1 else cp - let rec consume_until_newline (source : #Source.t) : unit = - match source#next_char with + match source#next with | None | Some ('\n') -> () | Some _ -> consume_until_newline source
-let rec prelex (source : #Source.t) ln ctx acc (doc : string) - : pretoken list = +(* Splits a Typer source into pretokens, stopping when it is completely + consumed. *) +let prelex (source : #Source.t) : pretoken list = + let rec loop + (ctx : (Source.Point.t * pretoken list) list) + (acc : pretoken list) + (doc : string option) + : pretoken list =
- let nextline = prelex source (ln + 1) in - let rec prelex' ctx (cpos:charpos) acc doc = - let nexttok = prelex' ctx in - match source#peek_char with + match source#peek with | None -> (match ctx with | [] -> List.rev acc - | ((ln, cpos, _) :: _ctx) -> - (prelexer_error {file=source#file_name; line=ln; column=cpos; docstr=""} - "Unmatched opening brace"; List.rev acc)) + | ((brace_start, _) :: _ctx) + -> let location = source#make_location brace_start in + prelexer_error location "Unmatched opening brace"; + List.rev acc)
| Some ('\n') -> source#advance; - nextline ctx acc doc + loop ctx acc doc
| Some (c) when c <= ' ' -> source#advance; - nexttok (cpos+1) acc doc + loop ctx acc doc
(* A comment. *) | Some ('%') -> consume_until_newline source; - nextline ctx acc doc + loop ctx acc doc
(* line's bounds seems ok: String.sub line 1 0 == "" *) | Some ('@') -> source#advance; - let start_offset = source#current_offset in + let doc_start = source#point in consume_until_newline source; - let new_doc = String.trim (source#slice_from_offset start_offset) in - nextline ctx acc (doc ^ "\n" ^ new_doc) + let doc_line, _ = source#slice doc_start in + let doc' = + match doc with + | None -> doc_line + | Some doc -> doc ^ "\n" ^ String.trim doc_line + in + loop ctx acc (Some (doc'))
(* A string. *) | Some ('"') - -> source#advance; - let rec prestring cp chars = - match source#next_char with + -> let string_start = source#point in + source#advance; + let rec prestring chars = + match source#peek with | None | Some ('\n') - -> let location = {file=source#file_name; line=ln; column=cpos; docstr=doc} in + -> source#advance; + let location = source#make_location ?doc string_start in prelexer_error location "Unterminated string"; - nextline ctx (Prestring (location, "") :: acc) "" + loop ctx (Prestring (location, "") :: acc) None
| Some ('"') - -> let location = {file=source#file_name; line=ln; column=cpos; docstr=doc} in - let pretoken = Prestring (location, string_implode (List.rev chars)) in - nexttok (cp + 1) (pretoken :: acc) "" + -> source#advance; + let location = source#make_location ?doc string_start in + let text = string_implode (List.rev chars) in + let pretoken = Prestring (location, text) in + loop ctx (pretoken :: acc) None
         | Some ('\\')
-          -> (match source#next_char with
+          -> (let escape_sequence_start = source#point in
+              source#advance;
+              match source#next with
               | None | Some ('\n')
-                -> let location = {file=source#file_name; line=ln; column=cp; docstr=doc} in
+                -> let location = source#make_location escape_sequence_start in
                    prelexer_error location "Unterminated escape sequence";
-                   nextline ctx (Prestring (location, "") :: acc) ""
-              | Some ('t') -> prestring (cp + 2) ('\t' :: chars)
-              | Some ('n') -> prestring (cp + 2) ('\n' :: chars)
-              | Some ('r') -> prestring (cp + 2) ('\r' :: chars)
+                   loop ctx (Prestring (location, "") :: acc) None
+              | Some ('t') -> prestring ('\t' :: chars)
+              | Some ('n') -> prestring ('\n' :: chars)
+              | Some ('r') -> prestring ('\r' :: chars)
               | Some ('u')
-                -> let location = {file=source#file_name; line=ln; column=cp; docstr=doc} in
+                -> let location = source#make_location escape_sequence_start in
                    prelexer_error location "Unimplemented unicode escape";
-                   prestring (cp + 2) chars
-              | Some (c) -> prestring (cp + 2) (c :: chars))
+                   prestring chars
+              | Some (c) -> prestring (c :: chars))

-        | Some (char) -> prestring (inc_cp cp char) (char :: chars)
-        in prestring (cpos + 1) []
+        | Some (char)
+          -> source#advance;
+             prestring (char :: chars)
+        in prestring []

       | Some ('{')
         -> source#advance;
-           prelex' ((ln, cpos, acc) :: ctx) (cpos + 1) [] doc
+           let ctx' = (source#point, acc) :: ctx in
+           loop ctx' [] doc

       | Some ('}')
-        -> source#advance;
+        -> let brace_start = source#point in
+           source#advance;
            (match ctx with
-            | ((sln, scpos, sacc) :: ctx) ->
-              prelex' ctx (cpos+1)
-                      (Preblock ({file=source#file_name; line=sln; column=scpos; docstr=doc},
-                                 List.rev acc,
-                                 {file=source#file_name; line=ln; column=(cpos + 1); docstr=doc})
-                       :: sacc) ""
-            | _ -> (prelexer_error {file=source#file_name; line=ln; column=cpos; docstr=doc}
-                                   "Unmatched closing brace";
-                    prelex' ctx (cpos + 1) acc doc))
+            | ((block_start, sacc) :: ctx)
+              -> let location = source#make_location ?doc block_start in
+                 let preblock = Preblock (location, List.rev acc) in
+                 loop ctx (preblock :: sacc) None
+            | _
+              -> let location = source#make_location brace_start in
+                 prelexer_error location "Unmatched closing brace";
+                 loop ctx acc doc)

       (* A pretoken. *)
-      | Some (c)
-        -> let start_offset = source#current_offset in
+      | Some _
+        -> let pretoken_start = source#point in
           source#advance;
-          let rec pretok cp =
-            match source#peek_char with
+          let rec pretok () =
+            match source#peek with
             | None | Some (' ' | '\t' | '%' | '"' | '{' | '}')
-              -> let location = {file=source#file_name; line=ln; column=cpos; docstr=doc} in
-                 let text = source#slice_from_offset start_offset in
-                 nexttok cp (Pretoken (location, text) :: acc) ""
+              -> let text, location = source#slice ?doc pretoken_start in
+                 loop ctx (Presymbol (location, text) :: acc) None

             | Some ('\n')
-              -> let location = {file=source#file_name; line=ln; column=cpos; docstr=doc} in
-                 let text = source#slice_from_offset start_offset in
+              -> let text, location = source#slice ?doc pretoken_start in
                  source#advance;
-                 nextline ctx (Pretoken (location, text) :: acc) ""
+                 loop ctx (Presymbol (location, text) :: acc) None

             | Some ('\\')
-              -> source#advance;
-                 (match source#next_char with
-                  | Some (char) -> pretok (1 + inc_cp cp char)
+              -> let backslash_start = source#point in
+                 source#advance;
+                 (match source#next with
+                  | Some _ -> pretok ()
                   | None
-                    -> let location = {file=source#file_name; line=ln; column=cpos; docstr=doc} in
+                    -> let error_location = source#make_location backslash_start in
                        source#advance;
-                       let text = source#slice_from_offset start_offset in
-                       prelexer_error location ("Unterminated escape sequence in: " ^ text);
-                       nexttok (cp + 1) (Pretoken (location, text) :: acc) "")
+                       let text, location = source#slice ?doc pretoken_start in
+                       prelexer_error
+                         error_location
+                         ("Unterminated escape sequence in: " ^ text);
+                       loop ctx (Presymbol (location, text) :: acc) None)

-            | Some (c)
+            | Some _
               -> source#advance;
-                 pretok (inc_cp cp c)
-          in pretok (inc_cp cpos c)
+                 pretok ()
+          in pretok ()
   in
-  (* Traditionally, column numbers start at 1 :-( *)
-  prelex' ctx 1 acc doc
-
-let prelex (source : #Source.t) : pretoken list =
-  (* Traditionally, line numbers start at 1 :-( *)
-  prelex source 1 [] [] ""
-
-let pretoken_name pretok =
-  match pretok with
-    | Pretoken _ -> "Pretoken"
-    | Prestring _ -> "Prestring"
-    | Preblock _ -> "Preblock"
+  loop [] [] None

 let rec pretoken_string pretok =
-  match pretok with
-    | Preblock(_,pts,_) -> "{" ^ (
-      List.fold_left (fun str pts -> str ^ " " ^ (pretoken_string pts))
-        "" pts) ^ " }"
-    | Pretoken(_, str) -> str
-    | Prestring(_, str) -> "\"" ^ str ^ "\""
+  match pretok with
+  | Preblock (_, pts) -> "{" ^ (
+    List.fold_left (fun str pts -> str ^ " " ^ (pretoken_string pts))
+      "" pts) ^ " }"
+  | Presymbol (_, name) -> name
+  | Prestring (_, str) -> "\"" ^ str ^ "\""

 let pretokens_string pretokens =
   List.fold_left (fun str pt -> str ^ (pretoken_string pt)) "" pretokens
@@ -203,9 +221,9 @@ let pretokens_print p = print_string (pretokens_string p)

 (* Prelexer comparison, ignoring source-line-number info, used for tests. *)
 let rec pretokens_equal p1 p2 = match p1, p2 with
-  | Pretoken (_, s1), Pretoken (_, s2) -> s1 = s2
+  | Presymbol (_, s1), Presymbol (_, s2) -> s1 = s2
   | Prestring (_, s1), Prestring (_, s2) -> s1 = s2
-  | Preblock (_, ps1, _), Preblock (_, ps2, _) ->
+  | Preblock (_, ps1), Preblock (_, ps2) ->
     pretokens_eq_list ps1 ps2
   | _ -> false
 and pretokens_eq_list ps1 ps2 = match ps1, ps2 with
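As an illustration (not part of the patch), here is how the reshaped pretoken constructors compose, assuming Prelexer and Source are open; the location is just Source.Location.dummy since pretokens_equal ignores it:

    (* Preblock now carries one Location.t spanning both braces, and the old
       Pretoken constructor has become Presymbol. *)
    let () =
      let l = Source.Location.dummy in
      let block = Preblock (l, [Presymbol (l, "a"); Prestring (l, "b")]) in
      (* pretoken_string renders this as: { a "b" } *)
      print_endline (pretoken_string block);
      (* pretokens_equal still ignores locations when comparing. *)
      assert (pretokens_equal block (Preblock (l, [Presymbol (l, "a"); Prestring (l, "b")])))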
===================================== src/sexp.ml =====================================
@@ -28,19 +28,20 @@ let sexp_error ?print_action loc msg =
   Log.log_error ~section:"SEXP" ?print_action ~loc msg

 type integer = Z.t
-type symbol = location * string
+type symbol = Source.Location.t * string

-type sexp = (* Syntactic expression, kind of like Lisp. *)
-  | Block of location * pretoken list * location
+(* Syntactic expression, kind of like Lisp. *)
+type sexp =
+  | Block of Source.Location.t * pretoken list
   | Symbol of symbol
-  | String of location * string
-  | Integer of location * integer
-  | Float of location * float
+  | String of Source.Location.t * string
+  | Integer of Source.Location.t * integer
+  | Float of Source.Location.t * float
   | Node of sexp * sexp list
 type token = sexp

 let epsilon l = Symbol (l, "")
-let dummy_epsilon = epsilon dummy_location
+let dummy_epsilon = epsilon Source.Location.dummy
(********************** Sexp tests **********************)
@@ -67,30 +68,44 @@ let emptyString = hString ""
(*************** The Sexp Printer *********************)
-let rec sexp_string sexp =
-  match sexp with
-    | Block(_,pts,_) -> "{" ^ (pretokens_string pts) ^ " }"
+let rec sexp_location (s : sexp) : Source.Location.t =
+  match s with
+  | Block (l, _) -> l
+  | Symbol (l, _) -> l
+  | String (l, _) -> l
+  | Integer (l, _) -> l
+  | Float (l, _) -> l
+  | Node (s, _) -> sexp_location s
+
+(* Converts a sexp to a string, optionally printing locations as a list preceded
+   by a Racket-style reader comment (#;). *)
+let rec sexp_string ?(print_locations = false) sexp =
+  (if print_locations
+   then
+     let open Source.Location in
+     let location = sexp_location sexp in
+     Printf.sprintf
+       "#;(\"%s\" %d %d %d %d) "
+       location.file
+       location.start_line
+       location.start_column
+       location.end_line
+       location.end_column
+   else "")
+  ^ match sexp with
+    | Block (_, pts) -> "{" ^ (pretokens_string pts) ^ " }"
     | Symbol(_, "") -> "()" (* Epsilon *)
     | Symbol(_, name) -> name
     | String(_, str) -> "\"" ^ str ^ "\""
     | Integer(_, n) -> Z.to_string n
     | Float(_, x) -> string_of_float x
     | Node(f, args) ->
-      let str = "(" ^ (sexp_string f) in
-      (List.fold_left (fun str sxp ->
-        str ^ " " ^ (sexp_string sxp)) str args) ^ ")"
+      let str = "(" ^ (sexp_string ~print_locations f) in
+      (List.fold_left (fun str sxp ->
+        str ^ " " ^ (sexp_string ~print_locations sxp)) str args) ^ ")"
let sexp_print sexp = print_string (sexp_string sexp)
-let rec sexp_location s =
-  match s with
-    | Block (l, _, _) -> l
-    | Symbol (l, _) -> l
-    | String (l, _) -> l
-    | Integer (l, _) -> l
-    | Float (l, _) -> l
-    | Node (s, _) -> sexp_location s
-
 let sexp_name s =
   match s with
     | Block _ -> "Block"
@@ -203,7 +218,7 @@ let rec sexp_parse (g : grammar) (rest : sexp list)
               sexp_parse rest' level op largs (e::rargs))
         | e::rest -> sexp_parse rest level op largs (e::rargs)
         | [] -> (mk_node (match rargs with [] -> op
-                                         | _ -> ((dummy_location,"")::op))
+                                         | _ -> ((Source.Location.dummy, "")::op))
                          largs rargs false, [])

@@ -255,7 +270,7 @@ let sexp_parse_all_to_list grm tokens limit : sexp list =

 (* Sexp comparison, ignoring source-line-number info, used for tests. *)
 let rec sexp_equal s1 s2 = match s1, s2 with
-  | Block (_, ps1, _), Block (_, ps2, _) -> pretokens_eq_list ps1 ps2
+  | Block (_, ps1), Block (_, ps2) -> pretokens_eq_list ps1 ps2
   | Symbol (_, s1), Symbol (_, s2) -> s1 = s2
   | String (_, s1), String (_, s2) -> s1 = s2
   | Integer (_, n1), Integer (_, n2) -> n1 = n2
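For illustration only (not part of the patch), the new ~print_locations flag of sexp_string prefixes each sexp with its span as a #; reader comment; the file name and coordinates below are made up, and Sexp and Source are assumed to be open:

    let () =
      let l : Source.Location.t =
        { file = "foo.typer"; start_line = 3; start_column = 1;
          end_line = 3; end_column = 4; doc = None } in
      (* Prints: #;("foo.typer" 3 1 3 4) foo *)
      print_endline (sexp_string ~print_locations:true (Symbol (l, "foo")))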
===================================== src/source.ml =====================================
@@ -18,35 +18,161 @@
  * You should have received a copy of the GNU General Public License along
  * with this program. If not, see <http://www.gnu.org/licenses/>. *)
-type location = Util.location
+module Point = struct
+  (* offset * line * column *)
+  type t = int * int * int
+
+  let equal (l, _, _ : t) (r, _, _ : t) : bool =
+    (* The line and column are metadata, only the offset is necessary for
+       determining equality. *)
+    Int.equal l r
+end
+
+module Location = struct
+  type t =
+    {
+      file : string;
+      start_line : int;
+      start_column : int;
+      end_line : int;
+      end_column : int;
+      doc : string option;
+    }
+
+  let dummy =
+    {
+      file = "";
+      start_line = 0;
+      start_column = 0;
+      end_line = 0;
+      end_column = 0;
+      doc = None;
+    }
+
+  (* Creates a zero-width location around the given point. *)
+  let of_point
+    ?(doc : string option)
+    (file : string)
+    (_, line, column : Point.t)
+    : t =
+
+    {
+      file;
+      start_line = line;
+      start_column = column;
+      end_line = line;
+      end_column = column;
+      doc;
+    }
+
+  let to_string ({file; start_line; start_column; _} : t) : string =
+    Printf.sprintf "%s:%d:%d" file start_line start_column
+
+  let equal (l : t) (r : t) =
+    let
+      {
+        file = l_file;
+        start_line = l_start_line;
+        start_column = l_start_column;
+        end_line = l_end_line;
+        end_column = l_end_column;
+        doc = l_doc;
+      } = l
+    in
+    let
+      {
+        file = r_file;
+        start_line = r_start_line;
+        start_column = r_start_column;
+        end_line = r_end_line;
+        end_column = r_end_column;
+        doc = r_doc;
+      } = r
+    in
+    String.equal l_file r_file
+    && Int.equal l_start_line r_start_line
+    && Int.equal l_start_column r_start_column
+    && Int.equal l_end_line r_end_line
+    && Int.equal l_end_column r_end_column
+    && Option.equal String.equal l_doc r_doc
+end
+
+(* Traditionally, line numbers start at 1… *)
+let first_line_of_file = 1
+(* … and so do column numbers :-( *)
+let first_column_of_file = 1

 (* A source object is text paired with a cursor. The text can be lazily loaded
    as it is accessed byte by byte, but it must be retained for future reference
    by error messages. *)
-class virtual t = object (self)
+class virtual t (base_line : int) (base_column : int) (file_name : string) =
+object (self)
+  val mutable line = base_line
+  val mutable column = base_column
+
   (* A path if the text comes from a file, otherwise a meaningful identifier. *)
-  method virtual file_name : string
+  method file_name : string = file_name

   (* Return the byte at the cursor, or None if the cursor is at the end of the
      text. *)
-  method virtual peek_char : char option
+  method virtual peek : char option

-  (* The current offset of the cursor in the file, in bytes. *)
-  method virtual current_offset : int
+  method point : Point.t =
+    (self#offset, line, column)
-  (* Slices the text, from (and including) a starting offset and to (and
+  (* The current offset of the cursor in the file, in bytes. *)
+  method virtual private offset : int
+
+  (* Makes a location starting at the given point and ending at the current
+     cursor position. *)
+  method make_location
+    ?(doc : string option)
+    (_, start_line, start_column : Point.t)
+    : Location.t =
+
+    let open Location in
+    {
+      file = file_name;
+      start_line;
+      start_column;
+      end_line = line;
+      end_column = column;
+      doc;
+    }
+
+  (* Slices the text, from (and including) a starting point and to (and
     excluding) the current cursor offset.
     Note that the source is required only to buffer the last line read and
     may raise an Invalid_argument if the slice extends before the start of
     the line. *)
-  method virtual slice_from_offset : int -> string
+  method slice
+    ?(doc : string option)
+    (offset, _, _ as point : Point.t)
+    : string * Location.t =

-  (* Moves the cursor forward one byte *)
-  method virtual advance : unit
+    (self#slice_impl offset, self#make_location ?doc point)
+
+  method virtual private slice_impl : int -> string

-  method next_char : char option =
-    let c = self#peek_char in
+  (* Moves the cursor forward one byte *)
+  method advance : unit =
+    let is_utf8_head c = Char.code c < 128 || Char.code c >= 192 in
+    let c = self#peek in
+    self#advance_impl;
+    match c with
+    | Some '\n'
+      -> line <- line + 1;
+         column <- 0;
+    | Some c when is_utf8_head c
+      -> column <- column + 1
+    | _ -> ()
+
+  method private virtual advance_impl : unit
+
+  (* Returns the char at the cursor, then advances it forward. *)
+  method next : char option =
+    let c = self#peek in
     self#advance;
     c
 end
@@ -54,60 +180,62 @@ end
 let read_buffer_length = 4096
 class source_file file_name = object (self)
-  inherit t
+  inherit t first_line_of_file first_column_of_file file_name

   val in_channel = open_in file_name
   val mutable end_of_line = false
-  val mutable line = ""
+  val mutable source_line = ""
   val mutable line_offset = 0
   val mutable offset = 0

-  method private peek_char_unchecked =
-    if offset < String.length line
-    then line.[offset]
+  method private peek_unchecked =
+    if offset < String.length source_line
+    then source_line.[offset]
     else '\n'

-  method peek_char =
-    if offset <= String.length line
-    then Some self#peek_char_unchecked
+  method peek =
+    if offset <= String.length source_line
+    then Some self#peek_unchecked
     else
       try
-        line_offset <- line_offset + 1 + String.length line;
-        line <- input_line in_channel;
+        line_offset <- line_offset + 1 + String.length source_line;
+        source_line <- input_line in_channel;
         offset <- 0;
-        Some self#peek_char_unchecked
+        Some self#peek_unchecked
       with
      | End_of_file -> None

-  method advance =
+  method private advance_impl =
     offset <- offset + 1

-  method current_offset = line_offset + offset
+  method private offset = line_offset + offset

-  method slice_from_offset start_offset =
+  method private slice_impl start_offset =
     let relative_start_offset = start_offset - line_offset in
-    String.sub line relative_start_offset (offset - relative_start_offset)
-
-  method file_name = file_name
+    String.sub source_line relative_start_offset (offset - relative_start_offset)
 end
-class source_string source = object
-  inherit t
+class source_string
+  ?(file : string = "<string>")
+  ?(line : int = first_line_of_file)
+  ?(column : int = first_column_of_file)
+  source
+  =
+object
+  inherit t line column file
val mutable offset = 0
-  method peek_char =
+  method peek =
     if offset < String.length source
     then Some (source.[offset])
    else None

-  method advance =
+  method private advance_impl =
     offset <- offset + 1

-  method current_offset = offset
+  method private offset = offset

-  method slice_from_offset start_offset =
+  method private slice_impl start_offset =
     String.sub source start_offset (offset - start_offset)
-
-  method file_name = "<string>"
 end
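A rough usage sketch (not part of the patch) of the new cursor API, assuming Source is open; the file name and input string are made up:

    let () =
      let source = new source_string ~file:"example.typer" "hello world" in
      let start = source#point in
      (* Advance over the five bytes of "hello"; the source updates its own
         line and column counters as it goes. *)
      for _ = 1 to 5 do source#advance done;
      let text, location = source#slice start in
      (* Prints: hello at example.typer:1:1, since to_string only shows the
         start point; the location also records end_line 1 and end_column 6. *)
      Printf.printf "%s at %s\n" text (Location.to_string location)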
===================================== src/unification.ml =====================================
@@ -148,11 +148,11 @@ let common_subset ctx s1 s2 =
      *       Identity 0 = #0 · #1 · #2 ... = #0 · (Identity 1)  *)
     | (S.Cons _, S.Identity o2')
-      -> loop s1 (S.Cons (mkVar ((U.dummy_location, None), 0),
+      -> loop s1 (S.Cons (mkVar ((Source.Location.dummy, None), 0),
                           S.Identity 1, o2'))
              o1 o2 o
     | (S.Identity o1', S.Cons _)
-      -> loop (S.Cons (mkVar ((U.dummy_location, None), 0),
+      -> loop (S.Cons (mkVar ((Source.Location.dummy, None), 0),
                        S.Identity 1, o1'))
             s2 o1 o2 o
     | (S.Identity o1', S.Identity o2')
===================================== src/util.ml =====================================
@@ -36,17 +36,12 @@ end
 module SMap = UPDATABLE(Map.Make(String))
 module IMap = UPDATABLE(Map.Make(Int))

-type charpos = int
-type bytepos = int
-type location = { file : string; line : int; column : charpos; docstr : string; }
-let dummy_location = {file=""; line=0; column=0; docstr=""}
-
 (*************** DeBruijn indices for variables *********************)

 (* Occurrence of a variable's symbol: we use DeBruijn index, and for
  * debugging purposes, we remember the name that was used in the source
  * code. *)
-type vname = location * string option
+type vname = Source.Location.t * string option
 type db_index = int (* DeBruijn index. *)
 type db_offset = int (* DeBruijn index offset. *)
 type db_revindex = int (* DeBruijn index counting from the root. *)
@@ -63,12 +58,6 @@ let get_vname_name vname =
   | Some n -> n
   | _ -> "" (* FIXME: Replace with dummy_name ? *)

-(* print debug info *)
-let loc_string loc =
-  "Ln " ^ (Fmt.ralign_int loc.line 3) ^ ", cl " ^ (Fmt.ralign_int loc.column 3)
-
-let loc_print loc = print_string (loc_string loc)
-
 let string_implode chars = String.concat "" (List.map (String.make 1) chars)
 let string_sub str b e = String.sub str b (e - b)
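A small illustrative consequence (not from the patch): placeholder variable names are now built from Source.Location.dummy rather than the removed dummy_location, as in the unification.ml hunk above:

    let anonymous_var : Util.vname = (Source.Location.dummy, None)
    let named_var : Util.vname = (Source.Location.dummy, Some "x")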
===================================== tests/dune =====================================
@@ -6,6 +6,7 @@
    eval_test
    inverse_test
    lexp_test
+   lexer_test
    macro_test
    sexp_test
    unify_test)
===================================== tests/lexer_test.ml =====================================
@@ -0,0 +1,108 @@
+(* Copyright (C) 2021 Free Software Foundation, Inc.
+ *
+ * Author: Simon Génier <simon.genier@umontreal.ca>
+ * Keywords: languages, lisp, dependent types.
+ *
+ * This file is part of Typer.
+ *
+ * Typer is free software; you can redistribute it and/or modify it under the
+ * terms of the GNU General Public License as published by the Free Software
+ * Foundation, either version 3 of the License, or (at your option) any later
+ * version.
+ *
+ * Typer is distributed in the hope that it will be useful, but WITHOUT ANY
+ * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program. If not, see <http://www.gnu.org/licenses/>. *)
+
+open Typerlib
+
+open Utest_lib
+
+open Prelexer
+open Sexp
+open Source
+
+let test_lex name pretokens expected =
+  (* Unlike the version in the Sexp module, this version of equality considers
+     locations. *)
+  let rec sexp_equal actual expected =
+    match actual, expected with
+    | Sexp.Block (actual_location, actual_pretokens),
+      Sexp.Block (expected_location, expected_pretokens)
+      -> Location.equal actual_location expected_location
+         && Listx.equal Pretoken.equal actual_pretokens expected_pretokens
+    | Sexp.Symbol (actual_location, actual_name),
+      Sexp.Symbol (expected_location, expected_name)
+      -> Location.equal actual_location expected_location
+         && String.equal actual_name expected_name
+    | Sexp.Node (actual_head, actual_tail),
+      Sexp.Node (expected_head, expected_tail)
+      -> sexp_equal actual_head expected_head
+         && Listx.equal sexp_equal actual_tail expected_tail
+    | _, _ -> false
+  in
+  let lex () =
+    let actual = Lexer.lex Grammar.default_stt pretokens in
+    if Listx.equal sexp_equal actual expected
+    then success
+    else
+      ((* Only print locations if the Sexp are otherwise identical so it's
+          easier to spot the problem. *)
+       let print_locations =
+         Listx.equal Sexp.sexp_equal actual expected
+       in
+       let print_token token =
+         Printf.printf "%s\n" (sexp_string ~print_locations token)
+       in
+       Printf.printf "%sExpected:%s\n" Fmt.red Fmt.reset;
+       List.iter print_token expected;
+       Printf.printf "%sActual:%s\n" Fmt.red Fmt.reset;
+       List.iter print_token actual;
+       failure)
+  in
+  add_test "LEXER" name lex
+
+let l : Source.Location.t =
+  {
+    file = "test.typer";
+    start_line = 1;
+    start_column = 1;
+    end_line = 1;
+    end_column = 1;
+    doc = None;
+  }
+
+let () =
+  test_lex
+    "Inner operator inside a presymbol"
+    [Presymbol (l, "a.b")]
+    [Node (Symbol ({l with start_column = 2; end_column = 3}, "__.__"),
+           [Symbol ({l with start_column = 1; end_column = 2}, "a");
+            Symbol ({l with start_column = 3; end_column = 4}, "b")])]
+
+let () =
+  test_lex
+    "Inner operators at the beginning of a presymbol"
+    [Presymbol (l, ".b")]
+    [Node (Symbol ({l with start_column = 1; end_column = 2}, "__.__"),
+           [epsilon {l with start_column = 1; end_column = 1};
+            Symbol ({l with start_column = 2; end_column = 3}, "b")])]
+
+let () =
+  test_lex
+    "Inner operators at the end of a presymbol"
+    [Presymbol (l, "a.")]
+    [Node (Symbol ({l with start_column = 2; end_column = 3}, "__.__"),
+           [Symbol ({l with start_column = 1; end_column = 2}, "a");
+            epsilon {l with start_column = 3; end_column = 3}])]
+
+let () =
+  test_lex
+    "An inner operator by itself is a simple symbol"
+    [Presymbol (l, ".")]
+    [Symbol ({l with start_column = 1; end_column = 2}, ".")]
+
+let () = run_all ()
===================================== tests/unify_test.ml =====================================
@@ -91,7 +91,7 @@ let _ =
       {| type Nat | Z | S (Nat); |}
       ectx
   in
-  let dloc = U.dummy_location in
+  let dloc = Source.Location.dummy in
   let nat = mkVar ((dloc, Some "Nat"), 2) in
   let shift l i = mkSusp l (S.shift i) in
   let ectx, _ =
===================================== typer.ml =====================================
@@ -51,22 +51,34 @@ let arg_defs =
 let main () =
   let usage = Sys.executable_name ^ " [options] <file1> [<file2>] …" in
   Arg.parse arg_defs add_input_file usage;
-  let files = List.rev !arg_files in
-  let is_interactive = not !arg_batch in

-  if is_interactive
-  then
-    (print_string (Fmt.make_title " TYPER REPL ");
-     print_string welcome_msg;
-     print_string (Fmt.make_sep '-');
-     flush stdout);
+  let ectx = Elab.default_ectx in
+  let backend = new Eval.ast_interpreter (Debruijn.ectx_to_lctx ectx) in
+  let interactive = backend#interactive in
+  let file_names = List.rev !arg_files in
+  let is_interactive = not !arg_batch && Option.is_some interactive in

-  let i, ectx, rctx =
+  let process_file =
+    if is_interactive
+    then
+      (print_string (Fmt.make_title " TYPER REPL ");
+       print_string welcome_msg;
+       print_string (Fmt.make_sep '-');
+       flush stdout;
+       fun ectx i file_name
+       -> Printf.printf " In[%02d] >> %%readfile %s\n" i file_name;
+          Elab.eval_file backend ectx file_name)
+    else
+      fun ectx _ file_name -> Elab.eval_file backend ectx file_name
+  in
+
+  let ectx, i =
     try
-      let ectx = Elab.default_ectx in
-      let rctx = Elab.default_rctx in
      let res =
-        REPL.readfiles files (1, ectx, rctx) is_interactive
+        List.fold_left
+          (fun (ectx, i) file_name -> (process_file ectx i file_name, i + 1))
+          (ectx, 0)
+          file_names
      in
      REPL.print_and_clear_log ();
      res
@@ -82,7 +94,8 @@ let main () =
      exit 1
  in

-  if is_interactive
-  then REPL.repl i ectx rctx
+  match is_interactive, interactive with
+  | true, Some (interactive) -> REPL.repl i interactive ectx
+  | _ -> ()
let () = main ()
View it on GitLab: https://gitlab.com/monnier/typer/-/compare/3818bc2a8194d13efbd7d0d27e7e0a738...