BONNEVALLE Vincent pushed to branch unification at Stefan / Typer
Commits:

4ef9006d by n3f4s at 2016-08-22T17:07:33+02:00
fix throw exception for partial application

- - - - -
d00bf033 by n3f4s at 2016-08-22T20:17:04+02:00
remove useless printing

- - - - -
da4bebac by n3f4s at 2016-08-23T23:15:52+02:00
remove debug printing

- - - - -
343fb9c2 by Stefan Monnier at 2016-08-23T20:26:42-04:00
* tests/sexp_test.ml (test_sexp_add): New function. Use it.

- - - - -
2331f785 by Stefan Monnier at 2016-08-23T20:28:44-04:00
* tests/sexp_test.ml (test_sexp_eqv): New fun. Add some tests
* src/sexp.ml (sexp_equal, sexp_eq_list): New funcs.
* src/prelexer.ml (pretokens_equal, pretokens_eq_list): New funs.

- - - - -
c9bd8ae0 by Stefan Monnier at 2016-08-23T20:30:22-04:00
* src/lexer.ml (nexttoken.lexsym): Fix __<something> handling

- - - - -
e54bbd6d by Stefan Monnier at 2016-08-23T20:34:24-04:00
* src/grammar.ml (char_kind): New type
(token_env): Move here; use it.
(default_stt): Adjust accordingly. Add entry for '.' tho not currently obeyed.
* src/lexer.ml (token_env): Move to util.ml.
(nexttoken, nexttoken.lexsym): Adjust to new token_env type.
* src/lparse.ml (_lexp_expr_str):
* src/pexp.ml (_pexp_expr_str): Use `token_type'.
* src/debruijn.ml (has_property): Remove unused var.

- - - - -
85c06461 by n3f4s at 2016-08-25T17:30:34+02:00
Merge branch 'master' into unification

- - - - -
9 changed files:

- DESIGN
- src/debruijn.ml
- src/grammar.ml
- src/lexer.ml
- src/lparse.ml
- src/pexp.ml
- src/prelexer.ml
- src/sexp.ml
- tests/sexp_test.ml
Changes:
===================================== DESIGN =====================================
--- a/DESIGN
+++ b/DESIGN
@@ -1,5 +1,12 @@
 -*- outline -*-
 
+New:
+- extend the stt table to give precedence for "inner op chars" to use within
+  identifiers (i.e. the "." for module naming).
+- "Confusion is power: blurring the line between Bool and Type".
+  Maybe have "if" use a "Decidable" type class, or maybe automatically
+  turn "e : t : Bool" into "e : t = true : Type"?
+
 * Categories of arguments
 We want different categories of arguments.  I'm thinking here of the
 following categories, which are not necessarily mutually-exclusive:
@@ -841,16 +848,6 @@ univalence n = ua ¹ n , ua n , ua- n , ua- n , ua- n
-* Modules and macros
-If we want to be able to call a macro from another module (via
-"Module.macro <args>"), then we have to allow macro calls to have a "name"
-that's not just a symbol but a module-ref (i.e. a potentially complex
-expression), so it's not obvious how to distinguish macro calls from
-function calls, short of normalizing Pcall's function argument.
-
-Solution: macros have a different type, so if M.x has type "macro", we know
-we should normalize it to get the actual macro.
-
 * Modules
 
 Modules should be able to hold definitions which are universe-polymorphic,
@@ -1003,32 +1000,10 @@ Tokens will be made of any sequence of letters other than blanks/comments.
 special tokens: a(b are 3 tokens if ( is special, but a_(_b is a single
 (magic) token.
 
-** To affect lexing and parsing, declarations have to take effect before the
-   whole file is parsed.  At first we could say that parsing stops (to allow
-   evaluation) at every top-level semi-colon, or after every top-level
-   closer.  But what about
-
-     module Toto = {
-       define _::_ a b = ...
-       define foo = 1 :: 2
-     }
-
-   This seems very reasonable, but requires evaluation in the middle of the
-   parse of a top-level construct.  So we have to handle some top-level
-   elements (like "module = ...") specially.
-
-   E.g.:
-   - make it imperative: "module toto;" is a self-contained top-level
-     expression which affects the interpretation of subsequent expressions
-     to be considered as within `toto'.
-   - if we disallow declarations of new special tokens within such a module,
-     then we could re-parse the remaining expressions after finding the
-     definition for _::_.
-   - make it possible for the evaluation of expressions to read the rest
-     of the input (and hence lex&parse it any way it wants).
-   - define special "reader macros", so "module".
-   - define {...} as special so it gets read and passed to `module'
-     without parsing.
+** Structured identifiers
+We want to support identifiers with inner structure, such as "foo.bar.baz".
+
+This could be lexed into a Node identical to that of ( __.__ foo bar baz)
 
 ** Provide syntax for backquote&unquote
 Since "," is to be used as separator, maybe (,e) and (`e) could be used?
@@ -1379,6 +1354,7 @@ when they're only used in irrelevant args might relieve the tension.
 ** Papers
 *** occurrence checks: [Reed, LFMTP 2009; Abel & Pientka, TLCA 2011]
 *** pruning: [Pientka, PhD, Sec. 3.1.2; Abel & Pientka, TLCA 2011]
+*** http://www.cs.cmu.edu/~fp/papers/jicslp96.pdf
 
 * Universes
 ** Universe polymorphism
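The "Structured identifiers" note above proposes representing "foo.bar.baz" as a node headed by "__.__". For illustration only (not part of this patch), here is a minimal, self-contained OCaml sketch of that target shape; the simplified sexp type and the lex_structured helper are hypothetical stand-ins for the real Sexp/Lexer machinery, which also carries source locations:

    (* Hypothetical sketch: give "foo.bar.baz" the same shape as
     * ( __.__ foo bar baz ), as suggested by the DESIGN note above.
     * The sexp type is simplified: no source locations, no literals. *)
    type sexp =
      | Symbol of string
      | Node of sexp * sexp list

    let lex_structured (name : string) : sexp =
      match String.split_on_char '.' name with
      | [single] -> Symbol single            (* no inner structure *)
      | parts    -> Node (Symbol "__.__",
                          List.map (fun p -> Symbol p) parts)

    let () =
      assert (lex_structured "foo.bar.baz"
              = Node (Symbol "__.__",
                      [Symbol "foo"; Symbol "bar"; Symbol "baz"]));
      assert (lex_structured "foo" = Symbol "foo")

The precedence carried by the CKinner entry in src/grammar.ml below is, per the commit message, not obeyed yet.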
===================================== src/debruijn.ml =====================================
--- a/src/debruijn.ml
+++ b/src/debruijn.ml
@@ -233,8 +233,8 @@ let has_property ctx (var_i, var_n) (att_i, att_n): bool =
     try let pmap = PropertyMap.find (n - var_i, var_n) property_map in
-        let prop = PropertyMap.find (n - att_i, att_n) pmap in
-        true
+        let _ = PropertyMap.find (n - att_i, att_n) pmap in
+        true
     with Not_found -> false
===================================== src/grammar.ml =====================================
--- a/src/grammar.ml
+++ b/src/grammar.ml
@@ -38,12 +38,20 @@ open Util
 
 type grammar = (int option * int option) SMap.t
 
-let default_stt =
-  let stt = Array.make 256 false
-  in stt.(Char.code ';') <- true;
-     stt.(Char.code ',') <- true;
-     stt.(Char.code '(') <- true;
-     stt.(Char.code ')') <- true;
+(* A token_end array indicates the few chars which are separate tokens,
+ * even if not surrounded by spaces, such as '(', ')', and ';'.
+ * It also indicates which chars are "inner" operators, i.e. those chars
+ * that make up the inner structure of structured identifiers such as
+ * foo.bar.baz. *)
+type char_kind = | CKnormal | CKseparate | CKinner of int
+type token_env = char_kind array
+let default_stt : token_env =
+  let stt = Array.make 256 CKnormal
+  in stt.(Char.code ';') <- CKseparate;
+     stt.(Char.code ',') <- CKseparate;
+     stt.(Char.code '(') <- CKseparate;
+     stt.(Char.code ')') <- CKseparate;
+     stt.(Char.code '.') <- CKinner 5;
     stt
 
 (* default_grammar is auto-generated from typer-smie-grammar via:
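As a reading aid (not part of this patch): the new table distinguishes chars that always end a token (CKseparate) from chars that stay inside one (CKinner), matching the DESIGN remark that "a(b" is three tokens when '(' is special. A self-contained OCaml sketch of that behaviour, where split_tokens is a hypothetical simplification of the real lexer:

    (* Hypothetical sketch: CKseparate chars become tokens of their own
     * ("a(b" -> "a", "(", "b"), while CKinner chars such as '.' are kept
     * inside the token ("a.b" stays "a.b"). *)
    type char_kind = CKnormal | CKseparate | CKinner of int

    let default_stt : char_kind array =
      let stt = Array.make 256 CKnormal in
      List.iter (fun c -> stt.(Char.code c) <- CKseparate) [';'; ','; '('; ')'];
      stt.(Char.code '.') <- CKinner 5;
      stt

    let split_tokens (stt : char_kind array) (s : string) : string list =
      let n = String.length s in
      let flush start i acc =
        if i > start then String.sub s start (i - start) :: acc else acc in
      let rec go start i acc =
        if i >= n then List.rev (flush start i acc)
        else if stt.(Char.code s.[i]) = CKseparate
        then go (i + 1) (i + 1) (String.make 1 s.[i] :: flush start i acc)
        else go start (i + 1) acc
      in go 0 0 []

    let () =
      assert (split_tokens default_stt "a(b" = ["a"; "("; "b"]);
      assert (split_tokens default_stt "a.b" = ["a.b"])

The real lexer (src/lexer.ml below) additionally honours the "_" escape, so "a_(_b" remains a single token.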
===================================== src/lexer.ml =====================================
--- a/src/lexer.ml
+++ b/src/lexer.ml
@@ -27,17 +27,6 @@ open Grammar
 
 (*************** The Lexer phase *********************)
 
-(* FIXME: if we want to handle mixfix declarations such as "let _$_ x y =
-   toto in a+1 $ b-2" we could make the "_$_" token (i.e. a simple lexical
-   criteria) trigger the addition of corresponding syntax rules, but
-   it seems hard to extend it so the precedence can also be specified
-   because it is awkward to include the info in the lexical part, and as
-   soon as we move into the syntactical part we get bitten by the fact that
-   we don't know what the syntax means until we perform macroexpansion
-   (i.e. much later). *)
-
-type token_env = bool array
-
 let digit_p char =
   let code = Char.code char
   in Char.code '0' <= code && code <= Char.code '9'
@@ -84,7 +73,7 @@ let nexttoken (stt : token_env) (pts : pretoken list) bpos cpos
             (hSymbol ({file;line;column=column+bpos},
                       string_sub name bpos (String.length name)),
              pts', 0, 0)
-          else if stt.(Char.code name.[bpos])
+          else if stt.(Char.code name.[bpos]) = CKseparate
                   && not (name.[bpos+1] == '_') then
             (hSymbol ({file;line;column=column+bpos},
                       string_sub name bpos (bpos + 1)),
@@ -97,14 +86,17 @@ let nexttoken (stt : token_env) (pts : pretoken list) bpos cpos
              pts', 0, 0)
           else
             let char = name.[bp] in
-            if char == '_' then
+            if char == '_'
+               && bp + 1 < String.length name
+               && name.[bp + 1] != '_' then
              (* Skip next char, in case it's a special token. *)
              (* For utf-8, this cp+2 is risky but actually works: _ counts
-                as 1 and if the input is valid utf-8 the next byte has to
-                be a leading byte, so it has to count as 1 as well ;-) *)
+              * as 1 and if the input is valid utf-8 the next byte has to
+              * be a leading byte, so it has to count as 1 as well ;-) *)
               lexsym (bp+2) (cp+2)
-            else if stt.(Char.code name.[bp]) && (bp + 1 >= String.length name
-                                                 || not (name.[bp+1] == '_')) then
+            else if stt.(Char.code name.[bp]) = CKseparate
+                    && (bp + 1 >= String.length name
+                        || not (name.[bp+1] == '_')) then
              (hSymbol ({file;line;column=column+bpos},
                        string_sub name bpos bp),
               pts, bp, cp)
===================================== src/lparse.ml =====================================
--- a/src/lparse.ml
+++ b/src/lparse.ml
@@ -448,19 +448,9 @@ and lexp_call (fun_name: pexp) (sargs: sexp list) ctx i =
      * Anonymous : lambda *)
 
   let rec infer_implicit_arg ltp largs nargs depth =
-    (* TODO : check arg number ?*)
-    (* FIXME error management *)
     match nosusp ltp with
     (* Error case *)
-    | Arrow (Aexplicit, _, _, _, _) when (List.length largs <= 0) ->
-      Debug_fun.debug_print_no_buff ( "<LPARSE.LEXP_CALL>(infer_implicit_arg) Not enough arguments : \n\tltp = "
-                                      ^ Fmt_lexp.string_of_lxp ltp
-                                      ^ " ,\n\tlargs = ["
-                                      ^ (List.fold_left (fun a e -> a ^ ", " ^ (Fmt_lexp.string_of_lxp e)) "" largs)
-                                      ^ "],\n\tfun_name = "
-                                      ^ Fmt_lexp.string_of_pexp fun_name);
-      Debug_fun.do_debug (fun () -> prerr_newline (); ());
-      assert false
+    | Arrow (Aexplicit, _, _, _, _) when (List.length largs <= 0) -> []
 
     (* Explicit parameter *)
     | Arrow (Aexplicit, _, ltp_arg, _, ltp_ret) ->
@@ -472,32 +462,20 @@ and lexp_call (fun_name: pexp) (sargs: sexp list) ctx i =
       (if List.length largs >0
        then let head, tail = List.hd largs, List.tl largs in
             (Aexplicit, head)::(infer_implicit_arg ltp_ret tail (nargs - 1) (depth - 1))
-       else assert false)
+       else [])
 
     (* Implicit paramter not given*)
     | Arrow (kind, _, ltp_arg, _, ltp_ret) ->
       (kind, mkMetavar ())::(infer_implicit_arg ltp_ret largs nargs (depth - 1))
 
    (* "End" of the arrow "list"*)
-    | _ -> Debug_fun.debug_print_no_buff ( "<LPARSE.LEXP_CALL>(infer_implicit_arg) ltp = "
-                                           ^ Fmt_lexp.string_of_lxp ltp
-                                           ^ " ,\n\tlargs = ["
-                                           ^ (List.fold_left (fun a e -> a ^ ", " ^ (Fmt_lexp.string_of_lxp e)) "" largs)
-                                           ^ "],\n\tfun_name = "
-                                           ^ Fmt_lexp.string_of_pexp fun_name
-                                           ^ "\n");
-           (* Cause the throw in builtin*)
-           (* [] *)
-           List.map (fun g -> Aexplicit, g) largs
-           (* cause the too many args *)
+    | _ -> List.map (fun g -> Aexplicit, g) largs
   in
  (* retrieve function's body *)
  let body, ltp = _lexp_p_infer fun_name ctx (i + 1) in
  let ltp = nosusp ltp in
 
  let handle_named_call (loc, name) =
-    Debug_fun.debug_print_no_buff ("<LPARSE.LEXP_CALL>handle_named_call (?loc?, " ^ name ^ ")\n");
    (* Process Arguments *)
-    Debug_fun.do_debug (fun () -> prerr_newline (); ());
    let pargs = List.map pexp_parse sargs in
    let largs = _lexp_parse_all pargs ctx i in
    let rec depth_of = function
@@ -534,14 +512,7 @@ and lexp_call (fun_name: pexp) (sargs: sexp list) ctx i =
    let subst, _ = !global_substitution in
    let ret_type = get_return_type name 0 ltp new_args subst ctx in
    let call = Call(vf, new_args)
-    in
-    Debug_fun.debug_print_no_buff "<LPARSE.lexp_call>(new_args) "; (* debug printing remove ASAP*)
-    Debug_fun.do_debug (fun () ->
-        List.iter (fun (_, l) -> Debug_fun.debug_print_lexp l; prerr_string ", "; ()) new_args; ()
-      );
-    Debug_fun.debug_print_lexp call;
-    Debug_fun.debug_print_no_buff "\n";
-    call, ret_type
+    in call, ret_type
 
    with Not_found ->
      Debug_fun.debug_print_no_buff ("==== <LPARSE.LEXP_CALL>handle_named_call (?loc?, " ^ name ^ ") ====\n");
@@ -1128,7 +1099,7 @@ let default_rctx =
  * --------------------------------------------------------- *)
 
 (* Lexp helper *)
-let _lexp_expr_str (str: string) (tenv: bool array)
+let _lexp_expr_str (str: string) (tenv: token_env)
                    (grm: grammar) (limit: string option) (ctx: lexp_context) =
     let pxps = _pexp_expr_str str tenv grm limit in
         lexp_parse_all pxps ctx
===================================== src/pexp.ml =====================================
--- a/src/pexp.ml
+++ b/src/pexp.ml
@@ -389,7 +389,7 @@ let pexp_decls_all (nodes: sexp list): pdecl list =
  * --------------------------------------------------------- *)
 
 (* Lexp helper *)
-let _pexp_expr_str (str: string) (tenv: bool array)
+let _pexp_expr_str (str: string) (tenv: token_env)
                    (grm: grammar) (limit: string option) =
     let sxps = _sexp_parse_str str tenv grm limit in
         pexp_parse_all sxps
@@ -419,5 +419,3 @@ let pexp_to_string e =
   | Pinductive ((_,_), _, _) -> "Pinductive"
   | Pcons ((_,_),_) -> "Pcons"
   | Pcase (_, _, _) -> "Pcase"
-
-
===================================== src/prelexer.ml =====================================
--- a/src/prelexer.ml
+++ b/src/prelexer.ml
@@ -172,3 +172,15 @@ let pretokens_to_str pretokens =
 
 let pretokens_print p = print_string (pretokens_to_str p)
 
+(* Prelexer comparison, ignoring source-line-number info, used for tests. *)
+let rec pretokens_equal p1 p2 = match p1, p2 with
+  | Pretoken (_, s1), Pretoken (_, s2) -> s1 = s2
+  | Prestring (_, s1), Prestring (_, s2) -> s1 = s2
+  | Preblock (_, ps1, _), Preblock (_, ps2, _) ->
+    pretokens_eq_list ps1 ps2
+  | _ -> false
+and pretokens_eq_list ps1 ps2 = match ps1, ps2 with
+  | [], [] -> true
+  | (p1 :: ps1), (p2 :: ps2) ->
+    pretokens_equal p1 p2 && pretokens_eq_list ps1 ps2
+  | _ -> false
===================================== src/sexp.ml =====================================
--- a/src/sexp.ml
+++ b/src/sexp.ml
@@ -231,3 +231,21 @@ let sexp_parse_all_to_list grm tokens limit =
       | _ -> let (sxp, rest) = sexp_parse_all grm tokens limit in
              sexp_parse_impl grm rest limit (sxp :: acc)
   in sexp_parse_impl grm tokens limit []
+
+(* Sexp comparison, ignoring source-line-number info, used for tests. *)
+let rec sexp_equal s1 s2 = match s1, s2 with
+  | Epsilon, Epsilon -> true
+  | Block (_, ps1, _), Block (_, ps2, _) -> pretokens_eq_list ps1 ps2
+  | Symbol (_, s1), Symbol (_, s2) -> s1 = s2
+  | String (_, s1), String (_, s2) -> s1 = s2
+  | Integer (_, n1), Integer (_, n2) -> n1 = n2
+  | Float (_, n1), Float (_, n2) -> n1 = n2
+  | Node (s1, ss1), Node (s2, ss2) ->
+    sexp_equal s1 s2 && sexp_eq_list ss1 ss2
+  | _ -> false
+
+and sexp_eq_list ss1 ss2 = match ss1, ss2 with
+  | [], [] -> true
+  | (s1 :: ss1), (s2 :: ss2) ->
+    sexp_equal s1 s2 && sexp_eq_list ss1 ss2
+  | _ -> false
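For context (not in the patch): with sexp_equal and sexp_eq_list available, a test can assert that two different surface syntaxes parse to the same tree while ignoring source locations. A minimal usage sketch, assuming the sexp_parse_str helper and the open modules used in tests/sexp_test.ml below:

    (* Hypothetical helper: true when two pieces of source text parse to
     * structurally equal sexps, locations ignored.  Assumes Sexp, Lexer
     * and Utest_lib are open, as in tests/sexp_test.ml. *)
    let same_parse (code1 : string) (code2 : string) : bool =
      sexp_eq_list (sexp_parse_str code1) (sexp_parse_str code2)

    (* e.g. same_parse "(x + y)" "_+_ x y" is expected to hold under the
     * default grammar, mirroring the test_sexp_eqv cases below. *)

This is essentially what the new test_sexp_eqv in tests/sexp_test.ml does, with failure reporting added.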
===================================== tests/sexp_test.ml =====================================
--- a/tests/sexp_test.ml
+++ b/tests/sexp_test.ml
@@ -2,38 +2,43 @@ open Sexp
 open Lexer
 open Utest_lib
 
-let _ = (add_test "SEXP" "lambda x -> x + x" (fun () ->
+let test_sexp_add dcode testfun =
+  add_test "SEXP" dcode (fun () -> testfun (sexp_parse_str dcode))
 
-  let dcode = "lambda x -> x + x;" in
-
-  let ret = sexp_parse_str dcode in
+let _ = test_sexp_add "lambda x -> x + x" (fun ret ->
   match ret with
-    | [Node(lbd, [x; add])] -> (match lbd, x, add with
-        | (Symbol(_, "lambda_->_"), Symbol(_, "x"),
-           Node(Symbol(_, "_+_"), [Symbol(_, "x"); Symbol(_, "x")])) -> success ()
-        | _ -> failure ())
-    | _ -> failure ()
-))
-
-let _ = (add_test "SEXP" "x * x * x" (fun () ->
-
-  let dcode = "x * x * x;" in
-
-  let ret = sexp_parse_str dcode in
-  match ret with
-    | [n] ->(match n with
-        | Node(Symbol(_, "_*_"),
-               [Node(Symbol(_, "_*_"), [Symbol(_, "x"); Symbol(_, "x")]); Symbol(_, "x")])
-          -> success ()
-
-        | Node(Symbol(_, "_*_"),
-               [Symbol(_, "x"); Node(Symbol(_, "_*_"), [Symbol(_, "x")]); Symbol(_, "x")])
-          -> success ()
-
-        | _ -> failure ())
-    | _ -> failure ()
-
-))
+  | [Node(Symbol(_, "lambda_->_"),
+          [Symbol(_, "x");
+           Node(Symbol(_, "_+_"), [Symbol(_, "x"); Symbol(_, "x")])])]
+    -> success ()
+  | _ -> failure ()
+)
+
+let _ = test_sexp_add "x * x * x" (fun ret ->
+  match ret with
+  | [Node(Symbol(_, "_*_"),
+          [Node(Symbol(_, "_*_"), [Symbol(_, "x"); Symbol(_, "x")]);
+           Symbol(_, "x")])]
+    -> success ()
+  | _ -> failure ()
+)
+
+let test_sexp_eqv dcode1 dcode2 =
+  add_test "SEXP" dcode1
+           (fun () ->
+             let s1 = sexp_parse_str dcode1 in
+             let s2 = sexp_parse_str dcode2 in
+             if sexp_eq_list s1 s2
+             then success ()
+             else (sexp_print (List.hd s1);
+                   sexp_print (List.hd s2);
+                   failure ()))
+
+let _ = test_sexp_eqv "((a) ((1.00)))" "a 1.0"
+let _ = test_sexp_eqv "(x + y)" "_+_ x y"
+let _ = test_sexp_eqv "f __; y" "(f ( __; ) y)"
+let _ = test_sexp_eqv "case e | p1 => e1 | p2 => e2"
+                      "case_ ( _|_ e ( _=>_ p1 e1) ( _=>_ p2 e2))"
 
 (* run all tests *)
 let _ = run_all ()
View it on GitLab: https://gitlab.com/monnier/typer/compare/80547416749c74d44c42a4b8aadfb51c907...