structure parse_term :> parse_term =
struct

open Lib
open errormonad term_tokens term_grammar HOLgrammars
open GrammarSpecials PrecAnalysis
open parse_term_dtype

infix >> >- ++ >->

(* when true, syntax errors are reported through HOL_WARNING; togglable via
   the "syntax_error" boolean trace *)
val syntax_error_trace = ref true
val _ = Feedback.register_btrace ("syntax_error", syntax_error_trace)

(* warning emitters (with and without a location) that respect the trace *)
fun WARN f msg =
    if !syntax_error_trace then Feedback.HOL_WARNING "parse_term" f msg
    else ()
fun WARNloc f loc msg =
    if !syntax_error_trace then Feedback.HOL_WARNINGloc "parse_term" f loc msg
    else ()

(* attach a dummy location to a string *)
fun noloc s = (s, locn.Loc_None)
fun qfail x = error (noloc "") x
fun WARNloc_string loc s = (s, loc)

fun PRINT s = print (s ^ "\n")

exception Failloc of (locn.locn * string)
fun FAILloc locn s = raise Failloc (locn, s)

(* map the value component of a located value, keeping the location *)
fun liftlocn f (x,locn) = (f x,locn)

exception ParseTermError of string locn.located
type 'a token = 'a term_tokens.term_token
type 'a qbuf = 'a qbuf.qbuf

(* insert a key/value pair into an association list kept sorted by key;
   an equal key is placed after existing entries with that key *)
fun insert_sorted (k, v) [] = [(k, v)]
  | insert_sorted (kv1 as (k1, _)) (wholething as ((kv2 as (k2, _)) :: rest)) =
      if k1 < k2 then kv1 :: wholething
      else kv2 :: insert_sorted kv1 rest

(* render a list as "[x1, x2, ...]", using f to show each element *)
fun listtoString f [] = "[]"
  | listtoString f xs =
    let
      fun commafy [x] = f x
        | commafy (x::rest) = f x ^ ", " ^ commafy rest
        | commafy _ = raise Fail "Can't happen"
    in
      "[" ^ commafy xs ^ "]"
    end

fun fragtoString (QUOTE s) = s
  | fragtoString (ANTIQUOTE _) = " ^... 
"
fun quotetoString [] = ""
  | quotetoString (x::xs) = fragtoString x ^ quotetoString xs

(* abstract syntax produced by the parser: binding structures and preterms,
   every subcomponent carrying its source location *)
datatype 'a varstruct
  = SIMPLE of string
  | VPAIR of ('a varstruct locn.located * 'a varstruct locn.located)
  | TYPEDV of 'a varstruct locn.located * Pretype.pretype locn.located
  | RESTYPEDV of 'a varstruct locn.located * 'a preterm locn.located
  | VS_AQ of 'a
and 'a preterm
  = COMB of ('a preterm locn.located * 'a preterm locn.located)
  | VAR of string
  | QIDENT of string * string
  | ABS of ('a varstruct locn.located * 'a preterm locn.located)
  | AQ of 'a
  | TYPED of ('a preterm locn.located * Pretype.pretype locn.located)

infix --
(* list difference: drop from l1 every element of the second list *)
fun (l1 -- []) = l1
  | (l1 -- (x::xs)) = List.filter (fn y => y <> x) l1 -- xs

(* all stack terminals for a grammar: its concrete tokens plus the
   built-in pseudo-tokens *)
fun all_tokens strlist =
    map STD_HOL_TOK strlist @ [BOS, EOS, Id, TypeColon, TypeTok, EndBinding]

exception PrecConflict of stack_terminal * stack_terminal

val complained_already = ref false;

(* a Polyhash-style interface realised over an updatable Binarymap ref *)
structure Polyhash =
struct
  open Uref
  fun peek dict k = Binarymap.peek(!dict, k)
  (* insert only if absent; return the previous binding when there was one *)
  fun peekInsert r (k,v) =
      let
        val dict = !r
      in
        case Binarymap.peek(dict, k) of
            NONE => (r := Binarymap.insert(dict, k, v); NONE)
          | existing => existing
      end
  fun insert r (k,v) = r := Binarymap.insert(!r, k, v)
  fun listItems dict = Binarymap.listItems (!dict)
  fun mkDict cmp = Uref.new (Binarymap.mkDict cmp)
end

local
  val rule_elements = term_grammar.rule_elements o #elements
in
(* the terminal a rule starts with; rules given here must begin with a TOK *)
fun rule_left (rr: rule_record) =
    case hd (rule_elements rr) of
        TOK s => STD_HOL_TOK s
      | _ => raise Fail ("rule list is bogus for "^ #term_name rr)
end

(* the tokens with which a rule can grab a term to its left *)
fun left_grabbing_elements rule =
    case rule of
        INFIX (STD_infix(rules, _)) => map rule_left rules
      | INFIX RESQUAN_OP => [ResquanOpTok]
      | INFIX (FNAPP rules) => STD_HOL_TOK fnapp_special :: map rule_left rules
      | INFIX VSCONS => [VS_cons]
      | SUFFIX (STD_suffix rules) => map rule_left rules
      | SUFFIX TYPE_annotation => [TypeColon]
      | _ => []

datatype order_compat_verdict = OK | Multify of mx_order | Bad
(* can two precedence-matrix orders coexist?  Equal sources agree (OK),
   differing sources of the same direction merge to a Multi source, and
   opposed directions are Bad unless one side is PM_EQUAL *)
fun order_compat mx1 mx2 =
    case (mx1, mx2) of
        (PM_LESS src1, PM_LESS src2) =>
          if src1 = src2 then OK else Multify (PM_LESS MS_Multi)
      | (PM_GREATER src1, PM_GREATER src2) =>
          if src1 = src2 then OK else Multify (PM_GREATER MS_Multi)
      | (PM_EQUAL, _) => OK
      | (_, PM_EQUAL) => OK
      | (PM_LG i1, PM_LG i2) => if i1 = i2 then OK else Bad
      | (_, _) => Bad

(* collapse an mx_order to a plain order, when it has one direction *)
fun mx_order mxo =
    case mxo of
        PM_LESS _ => SOME LESS
      | PM_GREATER _ => SOME GREATER
      | PM_EQUAL => SOME EQUAL
      | PM_LG _ => NONE

(* turn a rule element list into a list of std_hol_toks *)
val rel_list_to_toklist =
    List.mapPartial (fn TOK s => SOME (STD_HOL_TOK s) | _ => NONE)

(* tokens that can end a complete suffix/closefix phrase (plus Id) *)
fun find_suffix_rhses (G : grammar) = let
  fun select (SUFFIX TYPE_annotation) = [[TypeTok]]
    | select (SUFFIX (STD_suffix rules)) =
        map (rel_list_to_toklist o rule_elements o #elements) rules
    | select (CLOSEFIX rules) =
        map (rel_list_to_toklist o rule_elements o #elements) rules
    | select _ = []
  val suffix_rules = List.concat (map (select o #2) (rules G))
in
  Id :: map List.last suffix_rules
end

(* tokens that can begin a prefix/closefix phrase (plus Id and binders) *)
fun find_prefix_lhses (G : grammar) = let
  fun select x =
      case x of
          PREFIX (STD_prefix rules) =>
            map (rel_list_to_toklist o rule_elements o #elements) rules
        | CLOSEFIX rules =>
            map (rel_list_to_toklist o rule_elements o #elements) rules
        | _ => []
  val prefix_rules = List.concat (map (select o #2) (rules G))
in
  Id :: map STD_HOL_TOK (binders G) @ map hd prefix_rules
end

val ambigrm = ref 1
val _ = Feedback.register_trace ("ambiguous grammar warning", ambigrm, 2)

(* user-readable rendering of a stack terminal *)
fun STtoString (G:grammar) x =
    case x of
        STD_HOL_TOK s => s
      | BOS => "<beginning of input>"
      | EOS => "<end of input>"
      | VS_cons =>
"<gap between varstructs>" 187 | Id => "<identifier>" 188 | TypeColon => #type_intro (specials G) 189 | TypeTok => "<type>" 190 | EndBinding => #endbinding (specials G) ^ " (end binding)" 191 | ResquanOpTok => #res_quanop (specials G)^" (res quan operator)" 192 193fun mk_prec_matrix G = let 194 exception NotFound = Binarymap.NotFound 195 exception BadTokList 196 type dict = ((stack_terminal * bool) * stack_terminal, 197 mx_order) Binarymap.dict Uref.t 198 val {lambda, endbinding, type_intro, restr_binders, ...} = specials G 199 val specs = grammar_tokens G 200 val Grules = term_grammar.grammar_rules G 201 val alltoks = 202 ResquanOpTok :: VS_cons :: STD_HOL_TOK fnapp_special :: all_tokens specs 203 val matrix: dict = 204 Polyhash.mkDict (pair_compare(pair_compare(ST_compare,bool_compare), 205 ST_compare)) 206 (* the bool is true if there is a non-terminal between the two 207 terminal tokens. E.g., 208 x + -y 209 doesn't have such a space between the + and -, whereas 210 x + z - y 211 does (the z will be a non-terminal) 212 *) 213 214 val rule_elements = term_grammar.rule_elements o #elements 215 val complained_this_iteration = Uref.new false 216 fun insert_bail k = 217 (Polyhash.insert matrix (k, PM_LESS MS_Multi); 218 let open Uref in complained_this_iteration := true end; 219 if not (!complained_already) andalso 220 (!Globals.interactive orelse !ambigrm = 2) 221 then let 222 val msg = "Grammar ambiguous on token pair "^ 223 STtoString G (#1 (#1 k)) ^ " and " ^ 224 STtoString G (#2 k) ^ ", and "^ 225 "probably others too" 226 in 227 case !ambigrm of 228 0 => () 229 | 1 => (Feedback.HOL_WARNING "Parse" "Term" msg; 230 complained_already := true) 231 | 2 => raise Feedback.mk_HOL_ERR "parse_term" "mk_prec_matrix" msg 232 | _ => raise Fail "parse_term: matrix construction invariant fail!" 
233 end 234 else 235 ()) 236 fun insert k v = let 237 val insert_result = Polyhash.peekInsert matrix (k, v) 238 val raw_insert = Polyhash.insert matrix 239 in 240 case (insert_result, v) of 241 (SOME PM_EQUAL, _) => () (* ignore this *) 242 | (SOME _, PM_EQUAL) => raw_insert (k,v) (* EQUAL overrides *) 243 | (SOME oldv, _) => let 244 in 245 case order_compat oldv v of 246 OK => () 247 | Multify v' => raw_insert (k, v') 248 | Bad => let 249 nonfix << >> 250 val op >> = GREATER and op << = LESS 251 in 252 case (oldv,v) of 253 (PM_LESS src1, PM_GREATER src2) => let 254 in 255 case (src1, src2) of 256 (Ifx, Pfx) => raw_insert (k, PM_LG {ifx= <<, pfx= >>}) 257 | (Pfx, Ifx) => raw_insert (k, PM_LG {ifx= >>, pfx= <<}) 258 | _ => insert_bail k 259 end 260 | (PM_GREATER src1, PM_LESS src2) => let 261 in 262 case (src1, src2) of 263 (Ifx,Pfx) => raw_insert (k, PM_LG {ifx= >>, pfx= <<}) 264 | (Pfx,Ifx) => raw_insert (k, PM_LG {ifx= <<, pfx= >>}) 265 | _ => insert_bail k 266 end 267 | _ => insert_bail k 268 end 269 end 270 | (NONE, _) => () 271 end 272 fun insert_eqs rule = let 273 fun insert_eqn (tk1,intervening_tm,tk2) = 274 insert((STD_HOL_TOK tk1, intervening_tm), STD_HOL_TOK tk2) PM_EQUAL 275 val insert_eqns = app insert_eqn 276 val equalities = PrecAnalysis.rule_equalities 277 in 278 case rule of 279 PREFIX (STD_prefix rules) => app (insert_eqns o equalities) rules 280 | PREFIX (BINDER slist) => let 281 fun bindertok (BinderString {tok, ...}) = [tok] 282 | bindertok LAMBDA = lambda 283 val btoks = List.concat (map bindertok slist) 284 in 285 app (fn s => insert ((STD_HOL_TOK s, true), EndBinding) PM_EQUAL) btoks 286 end 287 | SUFFIX (STD_suffix rules) => app (insert_eqns o equalities) rules 288 | SUFFIX TYPE_annotation => () 289 | INFIX (STD_infix (rules, _)) => app (insert_eqns o equalities) rules 290 | INFIX RESQUAN_OP => () 291 | INFIX (FNAPP lst) => app (insert_eqns o equalities) lst 292 | INFIX VSCONS => () 293 | CLOSEFIX rules => app (insert_eqns o equalities) 
rules 294 end 295 296 fun bi_insert (t1,t2) order = (insert ((t1,false), t2) order; 297 insert ((t1,true), t2) order) 298 (* the right hand end of a suffix or a closefix is greater than 299 everything *) 300 val closed_rhses = find_suffix_rhses G 301 fun insert_rhs_relns () = let 302 val all = alltoks 303 val rhses = closed_rhses 304 in 305 app (fn rhs => app (fn t => bi_insert (rhs, t) (PM_GREATER MS_Other)) all) 306 rhses 307 end 308 309 (* everything not a suffix rhs is less than the left hand end of a 310 prefix/closefix *) 311 val closed_lhses = find_prefix_lhses G 312 fun insert_lhs_relns () = let 313 val all = alltoks -- (EOS::closed_rhses) 314 in 315 app (fn lhs => app (fn t => insert ((t,false), lhs) (PM_LESS MS_Other)) 316 all) 317 closed_lhses 318 end 319 fun map_rule f [] = [] 320 | map_rule f (x::xs) = let 321 val rest = map_rule f xs 322 val here = 323 case x of 324 INFIX (STD_infix (rules, _)) => map (f o rule_elements) rules 325 | INFIX (FNAPP rules) => map (f o rule_elements) rules 326 | SUFFIX (STD_suffix rules) => map (f o rule_elements) rules 327 | PREFIX (STD_prefix rules) => map (f o rule_elements) rules 328 | CLOSEFIX rules => map (f o rule_elements) rules 329 | _ => [] 330 in 331 here @ rest 332 end 333 val last_rtok = first_rtok o List.rev 334 val all_lhs = 335 TypeColon::BOS::VS_cons::ResquanOpTok::Id:: 336 map STD_HOL_TOK (fnapp_special::(map_rule first_rtok Grules)) 337 val all_rhs = 338 TypeTok::EndBinding::EOS::VS_cons::ResquanOpTok::Id:: 339 map STD_HOL_TOK (fnapp_special::(map_rule last_rtok Grules)) 340 (* Between things that are equal, the thing on the left is less than 341 all possible left hand sides, and the thing on the right is 342 greater than all possible right hand sides. 
*) 343 fun calc_eqpairs() = 344 List.mapPartial 345 (fn (((t1,_),t2), v) => if v = PM_EQUAL then SOME (t1,t2) else NONE) 346 (Polyhash.listItems matrix) 347 fun terms_between_equals (k1, k2) = let 348 in 349 app (fn lhs => bi_insert (k1, lhs) (PM_LESS MS_Other)) all_lhs; 350 app (fn rhs => bi_insert (rhs, k2) (PM_GREATER MS_Other)) all_rhs 351 end 352 353 (* now the tricky stuff where the precedence relation makes a difference *) 354 (* if the rule might grab stuff to its left (is an infix or suffix), 355 then add 356 x > rule's left hand token 357 for all x on right hand side of prefixes and infixes below 358 if the rule might grab stuff to its right (is an infix or prefix) 359 then add 360 rule's right hand token < x 361 for all x on left hand side of suffixes and infixes below *) 362 fun rule_right (rr:rule_record) = 363 case List.last (rule_elements rr) of 364 TOK s => STD_HOL_TOK s 365 | _ => raise Fail ("rule list is bogus for "^ #term_name rr) 366 fun add2 v p = (p,v) 367 fun right_grabbing_elements rule = 368 case rule of 369 INFIX(STD_infix(rules, _)) => map (add2 Ifx o rule_right) rules 370 | INFIX RESQUAN_OP => [(ResquanOpTok,Ifx)] 371 | INFIX(FNAPP rules) => 372 (STD_HOL_TOK fnapp_special,Ifx) :: map (add2 Ifx o rule_right) rules 373 | INFIX VSCONS => [(VS_cons,Ifx)] 374 | PREFIX (BINDER _) => [(EndBinding,Pfx)] 375 | PREFIX (STD_prefix rules) => map (add2 Pfx o rule_right) rules 376 | _ => [] 377 val GREATER' = PM_GREATER MS_Other 378 val LESS' = PM_LESS MS_Other 379 val EQUAL' = PM_EQUAL 380 fun process_rule rule remainder = 381 case rule of 382 INFIX(STD_infix(rules, assoc)) => let 383 val this_level_lefts = map rule_left rules 384 val lower_rights = List.concat (map right_grabbing_elements remainder) 385 val this_level_rights = map rule_right rules 386 val lower_lefts = List.concat (map left_grabbing_elements remainder) 387 (* for infix rules, also need to add precedence relations if there is 388 associativity *) 389 in 390 app (fn lefttok => 391 
app 392 (fn (lower_right,src) => 393 insert ((lower_right,true), lefttok) (PM_GREATER src)) 394 lower_rights) 395 this_level_lefts; 396 app (fn righttok => 397 app 398 (fn lower_left => insert ((righttok,true), lower_left) 399 (PM_LESS Ifx)) 400 lower_lefts) 401 this_level_rights; 402 case assoc of 403 LEFT => let 404 in 405 app (fn right_tok => 406 app (fn left_tok => insert ((right_tok,true), left_tok) 407 (PM_GREATER Ifx)) 408 this_level_lefts) 409 this_level_rights 410 end 411 | RIGHT => let 412 in 413 app (fn right_tok => 414 app (fn left_tok => insert ((right_tok,true), left_tok) 415 (PM_LESS Ifx)) 416 this_level_lefts) 417 this_level_rights 418 end 419 | NONASSOC => () 420 end 421 | INFIX RESQUAN_OP => let 422 val lower_rights = List.concat (map right_grabbing_elements remainder) 423 val lower_lefts = List.concat (map left_grabbing_elements remainder) 424 in 425 app (fn lower_right => insert((#1 lower_right,true), ResquanOpTok) 426 GREATER') 427 lower_rights; 428 app (fn lower_left => insert((ResquanOpTok,true), lower_left) LESS') 429 lower_lefts 430 end 431 | PREFIX (STD_prefix rules) => let 432 val this_level_rights = map rule_right rules 433 val lower_lefts = List.concat (map left_grabbing_elements remainder) 434 in 435 app (fn right_tok => 436 app 437 (fn lower_left => insert ((right_tok,true), lower_left) 438 (PM_LESS Pfx)) 439 lower_lefts) 440 this_level_rights 441 end 442 | PREFIX (BINDER _) => let 443 val lower_lefts = List.concat (map left_grabbing_elements remainder) 444 in 445 app (fn lower_left => insert ((EndBinding,true),lower_left) LESS') 446 lower_lefts 447 end 448 | SUFFIX TYPE_annotation => let 449 val lower_rights = List.concat (map right_grabbing_elements remainder) 450 in 451 app (fn (lower_right,src) => 452 insert ((lower_right,true), TypeColon) (PM_GREATER src)) 453 lower_rights 454 end 455 | SUFFIX (STD_suffix rules) => let 456 val lower_rights = List.concat (map right_grabbing_elements remainder) 457 val lefts = map rule_left rules 
458 in 459 app (fn left_tok => 460 app (fn (lower_right,src) => 461 insert ((lower_right,true), left_tok) (PM_GREATER src)) 462 lower_rights) 463 lefts 464 end 465 | INFIX (FNAPP rules) => let 466 val lower_rights = List.concat (map right_grabbing_elements remainder) 467 val lower_lefts = List.concat (map left_grabbing_elements remainder) 468 val this_level_lefts = map rule_left rules 469 val this_level_rights = map rule_right rules 470 val fnapp = STD_HOL_TOK fnapp_special 471 in 472 app (fn lower_left => insert ((fnapp,true), lower_left) LESS') 473 lower_lefts; 474 app (fn lower_right => insert ((#1 lower_right,true), fnapp) GREATER') 475 lower_rights; 476 (* function application left associative *) 477 insert ((fnapp,true), fnapp) GREATER'; 478 app (fn other => insert ((other,true), fnapp) GREATER') 479 this_level_rights; 480 app (fn other => insert ((fnapp,true), other) GREATER') 481 this_level_lefts; 482 process_rule (INFIX(STD_infix (rules, LEFT))) remainder 483 end 484 | INFIX VSCONS => let 485 val lower_rights = List.concat (map right_grabbing_elements remainder) 486 val lower_lefts = List.concat (map left_grabbing_elements remainder) 487 in 488 app (fn lower_left => insert ((VS_cons,true), lower_left) LESS') 489 lower_lefts; 490 app (fn lower_right => insert ((#1 lower_right,true), VS_cons) GREATER') 491 lower_rights; 492 (* kind of non-associative *) 493 insert ((VS_cons,true), VS_cons) EQUAL' 494 end 495 | _ => () 496 fun apply_them_all f [] = () 497 | apply_them_all f (x::xs) = (f x xs ; apply_them_all f xs) 498in 499 app insert_eqs Grules ; 500 insert ((BOS, true), EOS) EQUAL'; 501 app terms_between_equals (calc_eqpairs()); 502 (* these next equality pairs will never have terms interfering between 503 them, so we can insert the equality relation between them after doing 504 the above *) 505 insert ((TypeColon,false), TypeTok) EQUAL'; 506 app (fn b => insert ((STD_HOL_TOK b,true), EndBinding) EQUAL') (binders G); 507 insert_rhs_relns () ; 508 
insert_lhs_relns () ; 509 apply_them_all process_rule Grules; 510 if (not (Uref.!complained_this_iteration)) then complained_already := false 511 else (); 512 matrix 513end 514 515(* string is name of term; list of pairs, is list of token-pairs between 516 which a list style reduction is required *) 517type mini_lspec = {cons:string,nilstr:string,sep:string} 518datatype rsfixity = rsInfix | rsPrefix | rsClosefix | rsSuffix 519datatype rule_summary = 520 RealRule of rsfixity * string 521 (* keyed on, e.g., [TOK "let"; TM; TOK "in"; TM] *) 522 | ListOnly of mini_lspec 523 (* for lists that appear between tokens that are only a part of 524 the rule. E.g., keyed on [TOK "let"; TOK "in"] *) 525 526fun summary_toString rs = 527 case rs of 528 RealRule (_, s) => s 529 | ListOnly {cons,nilstr=n,...} => "List : {cons = "^cons^", nil = "^n^"}" 530 531 532(* in addition to all of the rules that you'd expect due to the infix, 533 prefix, suffix and closefix operators, we also add rules for the 534 nil singleton, and doubleton cases of the list rules. 
This
   increases efficiency for these relatively common cases, saving us
   an examination of the grammar when we come to do a reduction to
   spot whether a putative rhs is an instance of a list *)

(* scan a rule-element list for TOK/ListTM/TOK triples; each one yields an
   entry keyed on just the two delimiting tokens *)
fun listTM_delimiters rels =
    case rels of
        [] => []
      | [_] => []
      | TOK tk1 :: ListTM lsp :: (rest as (TOK tk2 :: _)) =>
          ([TOK tk1, TOK tk2], ListOnly lsp) :: listTM_delimiters rest
      | _ :: rest => listTM_delimiters rest

(* forget list structure: every ListTM element becomes a plain TM *)
fun de_listTM rels = map (fn ListTM _ => TM | re => re) rels

(* rule-db entry builders for each fixity *)
fun infix_rule (rels, nm) =
    (mkrels_infix (de_listTM rels), RealRule(rsInfix, nm))
fun prefix_rule (rels, nm) =
    (mkrels_prefix (de_listTM rels), RealRule(rsPrefix, nm))
fun closefix_rule (rels, nm) =
    (mkrels_closefix (de_listTM rels), RealRule(rsClosefix, nm))
fun suffix_rule (rels, nm) =
    (mkrels_suffix (de_listTM rels), RealRule(rsSuffix, nm))

(* build the rule database, mapping rule-element lists to rule summaries;
   list delimiters get their own ListOnly entries *)
fun mk_ruledb (G:grammar) = let
  val Grules = term_grammar.grammar_rules G
  val table:(rule_element list, rule_summary)Binarymap.dict Uref.t =
      Polyhash.mkDict (Lib.list_compare RE_compare)
  fun insert_rule mkfix (rr:rule_record) =
      let
        val rels = term_grammar.rule_elements (#elements rr)
        val nm = #term_name rr
      in
        Polyhash.insert table (mkfix (rels, nm));
        List.app (Polyhash.insert table) (listTM_delimiters rels)
      end
  fun addrule rule =
      case rule of
          INFIX (STD_infix(rules, _)) => app (insert_rule infix_rule) rules
        | INFIX RESQUAN_OP => ()
        | INFIX VSCONS => ()
        | INFIX (FNAPP rules) =>
            (* function application acts as an invisible infix token *)
            (Polyhash.insert table (mkrels_infix [TOK fnapp_special],
                                    RealRule(rsInfix, fnapp_special));
             app (insert_rule infix_rule) rules)
        | PREFIX (STD_prefix rules) => app (insert_rule prefix_rule) rules
        | PREFIX (BINDER _) => ()
        | SUFFIX (STD_suffix rules) => app (insert_rule suffix_rule) rules
        | SUFFIX TYPE_annotation => ()
        | CLOSEFIX rules => app (insert_rule closefix_rule) rules
in
app addrule Grules; 590 table 591end 592 593 594fun mwhile B C = 595 B >- (fn b => if b then C >> mwhile B C else ok) 596 597fun mif B C = B >- (fn b => if b then C else ok) 598 599datatype 'a stack_item = 600 Terminal of stack_terminal | 601 NonTerminal of 'a preterm | 602 NonTermVS of 'a varstruct locn.located list 603 604fun is_terminal (Terminal _,_) = true 605 | is_terminal _ = false 606fun dest_terminal (Terminal x,_) = x 607 | dest_terminal _ = raise Fail "dest_terminal not called on terminal" 608fun is_nonterm t = not (is_terminal t) 609 610 611datatype 'a lookahead_item = 612 Token of 'a term_token | PreType of Pretype.pretype | 613 LA_Symbol of stack_terminal 614 615datatype vsres_state = VSRES_Normal | VSRES_VS | VSRES_RESTM 616 617datatype 'a PStack = 618 PStack of {stack : ('a stack_item locn.located * 'a lookahead_item) list, 619 lookahead : 'a lookahead_item locn.located list, 620 in_vstruct : (vsres_state * int) list} 621 622(* dummy lookahead token *) 623val XXX = Token (Ident "XXX") 624 625 626fun pstack (PStack {stack, ...}) = stack 627fun upd_stack (PStack {stack, lookahead, in_vstruct}) new = 628 PStack{stack = new, lookahead = lookahead, in_vstruct = in_vstruct} 629fun pla (PStack{lookahead,...}) = lookahead 630fun upd_la (PStack {stack, lookahead, in_vstruct}) new = 631 PStack{stack = stack, lookahead = new, in_vstruct = in_vstruct} 632fun invstructp0 (PStack{in_vstruct,...}) = in_vstruct 633 634fun fupd_hd f [] = [] 635 | fupd_hd f (x::xs) = f x :: xs 636 637fun fupd_fst f (x,y) = (f x, y) 638fun fupd_snd f (x,y) = (x, f y) 639 640fun fupd_vs0 f (PStack{in_vstruct,lookahead,stack}) = 641 PStack{stack = stack, lookahead = lookahead, in_vstruct = f in_vstruct} 642 643fun invstructp (cs, p) = Some ((cs, p), invstructp0 p) 644 645fun inc_paren (cs, p) = 646 Some ((cs, fupd_vs0 (fupd_hd (fupd_snd (fn n => n + 1))) p), ()) 647fun dec_paren (cs, p) = 648 Some ((cs, fupd_vs0 (fupd_hd (fupd_snd (fn n => n - 1))) p), ()) 649fun enter_binder (cs,p) = 
650 Some ((cs, fupd_vs0 (fn rest => (VSRES_VS, 0)::rest) p), ()) 651fun enter_restm (cs, p) = 652 Some ((cs, fupd_vs0 (fupd_hd (fupd_fst (K VSRES_RESTM))) p), ()) 653fun leave_restm (cs, p) = 654 Some ((cs, fupd_vs0 (fupd_hd (fupd_fst (K VSRES_VS))) p), ()) 655fun leave_binder (cs, p) = Some ((cs, fupd_vs0 tl p), ()) 656 657fun push_pstack p i = upd_stack p (i::pstack p) 658fun top_pstack p = hd (pstack p) 659fun pop_pstack p = upd_stack p (tl (pstack p)) 660 661fun push t (cs, p) = Some ((cs, push_pstack p t), ()) 662fun topterm (cs, p) = 663 case List.find (is_terminal o #1) (pstack p) of 664 NONE => Error (noloc "No terminal in stack") 665 | SOME x => Some((cs,p), x) 666fun pop (cs, p) = 667 Some ((cs, pop_pstack p), top_pstack p) 668 handle Empty => Error (noloc "pop: empty stack") 669fun getstack (cs, p) = Some ((cs, p), pstack p) 670 671 672fun set_la tt (cs, p) = Some ((cs, upd_la p tt), ()) 673fun current_la (cs, p) = Some ((cs, p), pla p) 674 675fun findpos P [] = NONE 676 | findpos P (x::xs) = if P x then SOME(0,x) 677 else Option.map (fn (n,x) => (n + 1, x)) (findpos P xs) 678 679fun get_case_info (G : grammar) = let 680 fun extract_tok(ppel,acc) = 681 case ppel of 682 RE (TOK s) => s :: acc 683 | PPBlock (ppels, _) => List.foldl extract_tok acc ppels 684 | _ => acc 685 fun extract_toks ppels = List.rev (List.foldl extract_tok [] ppels) 686 fun rr_foldthis ({term_name,elements,...}, acc) = 687 if GrammarSpecials.is_case_special term_name then 688 case Binarymap.peek(acc, term_name) of 689 NONE => Binarymap.insert(acc, term_name, [extract_toks elements]) 690 | SOME els => 691 Binarymap.insert(acc, term_name, extract_toks elements :: els) 692 else acc 693 fun foldthis (gr, acc) = 694 case gr of 695 PREFIX (STD_prefix rrs) => List.foldl rr_foldthis acc rrs 696 | _ => acc 697 val candidates = 698 List.foldl foldthis (Binarymap.mkDict String.compare) (grammar_rules G) 699 val error_case = {casebar = NONE, casecase = NONE, caseof = NONE} 700 fun mapthis 
(cspecial, candidates) = 701 case Listsort.sort (flip_cmp (measure_cmp length)) candidates of 702 [] => error_case 703 | toks :: _ => 704 let 705 in 706 if length toks <> 4 orelse last toks <> last (butlast toks) then 707 error_case 708 else {casebar = SOME (last toks), casecase = SOME (hd toks), 709 caseof = SOME (hd (tl toks))} 710 end 711 val specials_to_toks = Binarymap.map mapthis candidates 712 val toks_to_specials = let 713 fun foldthis (k,r,acc) = 714 case #casecase r of 715 NONE => acc 716 | SOME tok => 717 (case Binarymap.peek(acc,tok) of 718 NONE => Binarymap.insert(acc,tok,k) 719 | SOME k' => raise Fail ("Tok \"" ^ tok ^ 720 "\" maps to case specials " ^ 721 valOf (dest_case_special k) ^ " and " ^ 722 valOf (dest_case_special k'))) 723 in 724 Binarymap.foldl foldthis (Binarymap.mkDict String.compare) specials_to_toks 725 end 726in 727 (specials_to_toks, toks_to_specials) 728end 729 730fun parse_term (G : grammar) (typeparser : term qbuf -> Pretype.pretype) = let 731 val Grules = grammar_rules G 732 val {type_intro, lambda, endbinding, restr_binders, res_quanop} = specials G 733 val num_info = numeral_info G 734 val overload_info = overload_info G 735 val (casespec_to_tok, casetok_to_spec) = get_case_info G 736 val closed_lefts = find_prefix_lhses G 737 val left_grabbers = List.concat (map left_grabbing_elements Grules) 738 val fnapp_closed_lefts = Lib.subtract closed_lefts left_grabbers 739 val _ = 740 isSome (findpos (fn (SUFFIX TYPE_annotation) => true | _ => false) 741 Grules) 742 orelse raise Fail "Grammar must allow type annotation" 743 val prec_matrix = mk_prec_matrix G 744 val rule_db = mk_ruledb G 745 val is_binder = is_binder G 746 val binder_table = let 747 fun recurse (r, acc) = 748 case r of 749 PREFIX (BINDER blist) => let 750 fun irec (b, acc) = 751 case b of 752 LAMBDA => acc 753 | BinderString {term_name, tok, ...} => 754 Binarymap.insert(acc,tok,term_name) 755 in 756 List.foldl irec acc blist 757 end 758 | _ => acc 759 in 760 List.foldl 
recurse (Binarymap.mkDict String.compare) Grules 761 end 762 fun term_name_for_binder s = Binarymap.find(binder_table,s) 763 val grammar_tokens = term_grammar.grammar_tokens G 764 val lex = let 765 val specials = endbinding::grammar_tokens @ term_grammar.known_constants G 766 val ttlex = term_tokens.lex specials 767 in 768 fn (qb, ps) => 769 case ttlex qb of 770 NONE => Error (noloc "Token-lexing failed") 771 | SOME t => Some ((qb, ps), t) 772 end 773 fun lifted_typeparser (qb, ps) = Some ((qb, ps), typeparser qb) 774 775 776 val keyword_table = 777 HOLset.addList(HOLset.empty String.compare, grammar_tokens) 778 779 (* transform takes an input token (of the sort that comes out of the 780 lexer), and turns it into a token of the sort used internally by the 781 parser. *) 782 fun transform (in_vs as (vs_state, nparens)) 783 (t:'a lookahead_item locn.located option) = 784 case t of 785 NONE => (EOS, locn.Loc_None, NONE) 786 | SOME (tt as Token x,locn) => let 787 in 788 case x of 789 Ident s => 790 if String.sub(s,0) = #"$" andalso 791 CharVector.exists (not o equal #"$") s 792 then 793 let 794 val locn = locn.move_start 1 locn 795 in 796 (Id, locn, SOME (Token (Ident (String.extract(s,1,NONE))))) 797 end 798 else if s = res_quanop andalso vs_state = VSRES_VS then 799 (ResquanOpTok, locn, SOME tt) 800 else if s = type_intro then (TypeColon, locn, SOME tt) 801 else if s = vs_cons_special then (VS_cons, locn, SOME tt) 802 else if s = endbinding andalso nparens = 0 andalso 803 vs_state <> VSRES_Normal then (EndBinding, locn, SOME tt) 804 else if HOLset.member(keyword_table, s) then 805 (STD_HOL_TOK s, locn, SOME tt) 806 else (Id, locn, SOME tt) 807 | Antiquote _ => (Id, locn, SOME tt) 808 | Numeral _ => (Id, locn, SOME tt) 809 | Fraction _ => (Id, locn, SOME tt) 810 | StrLit _ => (Id, locn, SOME tt) 811 | QIdent _ => (Id, locn, SOME tt) 812 end 813 | SOME (tt as PreType ty,locn) => (TypeTok, locn, SOME tt) 814 | SOME (tt as LA_Symbol st,locn) => (st, locn, SOME tt) 815 
816 (* find the initial segment of the stack such that all of the segment 817 has the equality relation between components and such that the first 818 element not in the segment is less than than the last one in the 819 segment *) 820 (* NB: the FAILloc invocations in this function raise Failloc exceptions that 821 are trapped at the bottom of the perform_reduction function *) 822 fun find_reduction stack = 823 case stack of 824 [] => raise Fail "find_reduction: stack empty!" 825 | [_] => raise Fail "find_reduction: stack singleton!" 826 | ((t1 as ((Terminal x1,x1locn), _))::rest) => let 827 in 828 case rest of 829 [] => FAILloc x1locn "find_reduction : impossible" 830 | (((Terminal x2,x2locn), _)::_) => let 831 val res = valOf (Polyhash.peek prec_matrix ((x2,false),x1)) 832 handle Option => 833 FAILloc (locn.between x2locn x1locn) 834 ("No relation between "^STtoString G x2^" and "^ 835 STtoString G x1) 836 in 837 case res of 838 PM_LESS _ => [t1] 839 | PM_EQUAL => (t1::find_reduction rest) 840 | PM_LG _ => [t1] 841 (* must be a less, because a greater is impossible *) 842 | PM_GREATER _ => FAILloc (locn.between x2locn x1locn) 843 "find_reduction: found a greater on stack" 844 end 845 | ((t2 as ((_,t2locn),_))::rest2) => let 846 (* t2 is a non-terminal, whether VS or not *) 847 in 848 case rest2 of 849 [] => FAILloc t2locn "find_reduction : nonterminal at stack base!" 850 | (((Terminal x2,x2locn), _)::_) => let 851 val res = valOf (Polyhash.peek prec_matrix ((x2,true), x1)) 852 handle Option => 853 FAILloc (locn.between x2locn t2locn) 854 ("No relation between "^STtoString G x2^" and "^ 855 STtoString G x1) 856 in 857 case res of 858 PM_LESS _ => [t1,t2] 859 | PM_EQUAL => t1::t2::find_reduction rest2 860 | PM_LG _ => [t1,t2] 861 | PM_GREATER _ => FAILloc (locn.between x2locn t2locn) 862 "find_reduction: greater on stack!" 863 end 864 | (((_,locn),_)::_) => FAILloc (locn.between locn t2locn) "find_reduction 2 NTs!" 
865 end 866 end 867 | (t1::rest) => t1::find_reduction rest (* t1 a non-terminal *) 868 869 (* given an initial segment of the stack that corresponds to a reduction, 870 determine whether or not this corresponds to a rule, and if it does 871 pull the tokens of the stack and replace them with the right 872 non-terminal *) 873 fun perform_reduction rhs = let 874 fun ok_item ((Terminal (STD_HOL_TOK _),_), _) = true 875 | ok_item ((NonTerminal _,_), _) = true 876 | ok_item _ = false 877 878 (* If the RHS is for a list, then what follows is only called on what 879 will be list RHSes of length greater than two, as smaller lists will 880 have been caught by the insertion of these rules specifically into 881 the DB. *) 882 datatype rule_possibility = Normal of (rsfixity * string) 883 | CaseRule of string 884 885 fun handle_case_reduction lrlocn pattern = let 886 val errmsg = "No rule for "^ listtoString reltoString pattern 887 fun fail() = FAILloc lrlocn errmsg 888 fun badcase() = FAILloc lrlocn "Mal-formed case expression" 889 fun tokstring x = case x of TOK s => SOME s | _ => NONE 890 val poss_left = case hd pattern of TOK x => x | _ => fail() 891 in 892 case Binarymap.peek(casetok_to_spec, poss_left) of 893 SOME cspec => 894 let 895 val {casecase,casebar,caseof} = 896 Binarymap.find(casespec_to_tok,cspec) 897 fun bars_ok l = 898 case l of 899 [TM] => true 900 | TM :: TOK s :: rest => SOME s = casebar andalso bars_ok rest 901 | _ => false 902 fun case_ok l = 903 case l of 904 TM :: TOK ofs :: TOK s :: rest => 905 let 906 in 907 SOME ofs = caseof andalso SOME s = casebar andalso 908 bars_ok rest 909 end 910 | TM :: TOK ofs :: rest => SOME ofs = caseof andalso 911 bars_ok rest 912 | _ => false 913 in 914 if case_ok (tl pattern) then CaseRule cspec else badcase() 915 end 916 | NONE => fail() 917 end 918 fun checkcase r = 919 case r of 920 RealRule (r0 as (fx, s)) => 921 if fx = rsPrefix andalso GrammarSpecials.is_case_special s then 922 CaseRule s 923 else Normal r0 924 | 
ListOnly _ => raise Fail "checkcase: found ListOnly" 925 in 926 if List.all ok_item rhs then let 927 (* it's important to remember that the left end of the possible 928 RHS might have looked like TOK s1 -- TM -- TOK s2, and that 929 in this case there will be s1 < s2 in the precedence matrix, but 930 that TM will always have been popped in this case. 931 932 This may or may not be appropriate. If the RHS is a suffix thing 933 or an infix, then that TM is part of the reduction, otherwise it 934 isn't, and should be left on the stack. 935 936 Below, the variable top_was_tm is true if a TM was popped in this 937 way. *) 938 (* NB: terminology: each stack item is either a TM (=term, i.e., 939 nonterminal) or a TOK (=token, i.e., terminal). *) 940 fun stack_item_to_rule_element (Terminal (STD_HOL_TOK s),_) = TOK s 941 | stack_item_to_rule_element (NonTerminal _,_) = TM 942 | stack_item_to_rule_element (_,locn) = 943 FAILloc locn "perform_reduction: gak!" 944 val ((_,rlocn),_) = List.hd rhs 945 val rhs = List.rev rhs 946 val translated_rhs = map (stack_item_to_rule_element o #1) rhs 947 val ((_,llocn),_) = List.hd rhs 948 val lrlocn = locn.between llocn rlocn 949 val top_was_tm = hd translated_rhs = TM 950 fun lrcheck (s1,s2) = 951 case Polyhash.peek rule_db [TOK s1, TOK s2] of 952 SOME (ListOnly lsp) => SOME lsp 953 | _ => NONE 954 val listredns = check_for_listreductions lrcheck translated_rhs 955 val (listfixed_rhs, lspinfo) = 956 remove_listrels listredns translated_rhs 957 val rule = 958 case Polyhash.peek rule_db listfixed_rhs of 959 NONE => 960 if top_was_tm then 961 let 962 val drop1 = tl listfixed_rhs 963 in 964 case Polyhash.peek rule_db drop1 of 965 NONE => handle_case_reduction lrlocn drop1 966 | SOME r => checkcase r 967 end 968 else 969 handle_case_reduction lrlocn listfixed_rhs 970 | SOME r => checkcase r 971 val ignore_top_item = 972 case rule of 973 Normal (rsInfix, _) => false 974 | Normal (rsSuffix, _) => false 975 | _ => top_was_tm 976 (* rhs' is the 
actual stack segment matched by the rule, and llocn' is 977 its left edge, unlike rhs and llocn which may contain a spurious TM 978 on the left *) 979 val rhs' = if ignore_top_item then tl rhs else rhs 980 val ((_,llocn'),_) = List.hd rhs' 981 val lrlocn' = locn.between llocn' rlocn 982 fun seglocs xs als mal = 983 (* extract TM items, and locations of right edges of 984 maximal initial segments containing them *) 985 case (xs,mal) of 986 ((((NonTerminal p,locn),_)::xs), NONE) => 987 seglocs xs als (SOME((p,locn),locn)) 988 | ((((NonTerminal p,locn),_)::xs), SOME al) => 989 seglocs xs (al::als) (SOME((p,locn),locn)) 990 | ((((_ ,locn),_)::xs), NONE ) => seglocs xs als mal 991 | ((((_ ,locn),_)::xs), SOME (pl,_)) => 992 seglocs xs als (SOME(pl,locn)) 993 | ([], NONE) => List.rev als 994 | ([], SOME al) => List.rev (al::als) 995 val args_w_seglocs0 = seglocs rhs' [] NONE 996 fun CCOMB((x,locn),y) = (COMB(y,x),locn.between (#2 y) locn) 997 fun process_lspinfos A i lspis args = 998 (* let 999 fun pr (VAR s, _) = s 1000 | pr (COMB(t1,t2), _) = pr t1 ^ " $ " ^ pr t2 1001 fun prl p l = "[" ^ String.concatWith "," (map p l) ^ "]" 1002 in 1003 if not (null lspis) then 1004 (print ("\nA = "^prl (pr o #1) A^", i="^Int.toString i^"\n"); 1005 print ("lspinfo = (" ^ #cons (#1 (hd lspis)) ^ "," ^ 1006 #nilstr (#1 (hd lspis)) ^ 1007 "), "); 1008 print ("i = " ^ Int.toString (#2 (hd lspis)) ^ ", "); 1009 print ("c = " ^ Int.toString (#3 (hd lspis)) ^ ", "); 1010 print ("args = " ^ prl (pr o #1) args ^ "\n")) 1011 else (); *) 1012 1013 case lspis of 1014 [] => List.revAppend(A,args) 1015 | ({cons,nilstr,...}, start, cnt) :: more_lsps => 1016 let 1017 fun mk_list [] = ((VAR nilstr,rlocn), rlocn) 1018 | mk_list ((lpt,l)::xs) = 1019 let 1020 val (ptl, locn) = mk_list xs 1021 in 1022 ((COMB((COMB((VAR cons,#2 lpt), lpt), #2 lpt), 1023 ptl), locn), 1024 locn.between (#2 lpt) rlocn) 1025 end 1026 in 1027 if start = i then 1028 let 1029 val (listtms, rest) = Lib.split_after cnt args 1030 
in 1031 process_lspinfos (mk_list listtms :: A) (i + cnt) 1032 more_lsps 1033 rest 1034 end 1035 else process_lspinfos (hd args :: A) (i + 1) lspis (tl args) 1036 end 1037 (* end *) 1038 val args_w_seglocs = process_lspinfos [] 0 lspinfo args_w_seglocs0 1039 val newterm = 1040 case rule of 1041 Normal (_, s) => 1042 if term_name_is_lform s then 1043 if length args_w_seglocs <> 1 then 1044 raise Fail 1045 "seglocs extraction: rule with more than one TM" 1046 else #1 (hd (args_w_seglocs)) 1047 else List.foldl CCOMB (VAR s,llocn') args_w_seglocs 1048 | CaseRule cs => let 1049 fun mkcase1 ((t,loc),_) = (COMB((VAR cs, loc), (t,loc)), loc) 1050 fun mkbar(((t,loc),_),acc) = 1051 (COMB((COMB((QIDENT("bool", 1052 GrammarSpecials.case_split_special), 1053 loc), 1054 (t,loc)), 1055 loc), 1056 acc), 1057 locn.between loc rlocn) 1058 in 1059 (COMB(mkcase1 (hd args_w_seglocs), 1060 List.foldr mkbar 1061 (#1 (last (tl args_w_seglocs))) 1062 (butlast (tl args_w_seglocs))), 1063 lrlocn) 1064 end 1065 in 1066 repeatn (length rhs') pop >> 1067 push ((NonTerminal (#1 newterm),lrlocn'), XXX) 1068 (* lrlocn: force location to entire RHS, including tokens *) 1069 end 1070 else 1071 case rhs of 1072 (((Terminal Id,locn), tt as Token (Antiquote a))::_) => let 1073 in 1074 pop >> invstructp >- 1075 (fn inv => 1076 if #1 (hd inv) = VSRES_VS then 1077 push ((NonTermVS [(VS_AQ a,locn)],locn), tt) 1078 else 1079 push ((NonTerminal (AQ a),locn), tt)) 1080 end 1081 | (((Terminal Id,locn), Token tt)::_) => let 1082 exception Temp of string 1083 val mk_numeral = 1084 Literal.gen_mk_numeral 1085 {mk_comb = fn (x,y) => (COMB(x,y),locn), 1086 ZERO = (QIDENT ("num" , "0" ),locn), 1087 ALT_ZERO = (QIDENT ("arithmetic", "ZERO" ),locn), 1088 NUMERAL = (QIDENT ("arithmetic", "NUMERAL"),locn), 1089 BIT1 = (QIDENT ("arithmetic", "BIT1") ,locn), 1090 BIT2 = (QIDENT ("arithmetic", "BIT2") ,locn)} 1091 val mk_string = Literal.mk_stringlit_term 1092 val mk_char = Literal.mk_charlit_term 1093 fun inject_np NONE t 
= t 1094 | inject_np (SOME s) t = (COMB((VAR s,locn), t),locn) 1095 in 1096 pop >> invstructp >- 1097 (fn inv => let 1098 val thing_to_push = 1099 case (#1 (hd inv), tt) of 1100 (VSRES_VS, Numeral _) => let 1101 in 1102 raise Temp "can't have numerals in binding positions" 1103 end 1104 | (VSRES_VS, Fraction _) => let 1105 in 1106 raise Temp "can't have fractions in binding positions" 1107 end 1108 | (_, Fraction{wholepart,fracpart,places}) => let 1109 val _ = not (null num_info) orelse 1110 raise Temp "No fractions/numerals allowed" 1111 val ten = Arbnum.fromInt 10 1112 val denominator = Arbnum.pow(ten, Arbnum.fromInt places) 1113 val numerator = 1114 Arbnum.+(Arbnum.*(denominator,wholepart), 1115 fracpart) 1116 val mknum = inject_np (SOME fromNum_str) o mk_numeral 1117 in 1118 liftlocn NonTerminal 1119 (COMB((COMB((VAR decimal_fraction_special, 1120 locn.Loc_None), 1121 mknum numerator), locn.Loc_None), 1122 mknum denominator), 1123 locn) 1124 end 1125 | (_, Numeral(dp, copt)) => let 1126 val numeral_part = mk_numeral dp 1127 in 1128 case copt of 1129 SOME c => let 1130 val injector = 1131 List.find (fn (k,v) => k = c) num_info 1132 in 1133 case injector of 1134 NONE => let 1135 in 1136 raise Temp ("Invalid suffix " ^ str c ^ 1137 " for numeral") 1138 end 1139 | SOME (_, strop) => 1140 liftlocn NonTerminal 1141 (inject_np strop numeral_part) 1142 end 1143 | NONE => 1144 if null num_info then 1145 if dp = Arbnum.zero then 1146 (WARN "term_parser" 1147 ("\n 0 treated specially and allowed - "^ 1148 "no other numerals permitted"); 1149 liftlocn NonTerminal 1150 (inject_np NONE numeral_part)) 1151 else 1152 raise Temp "No numerals currently allowed" 1153 else let 1154 val fns = fromNum_str 1155 in 1156 if Overload.is_overloaded overload_info fns then 1157 liftlocn NonTerminal 1158 (inject_np (SOME fns) numeral_part) 1159 else 1160 raise Temp ("No overloadings exist for "^fns^ 1161 ": use character suffix for \ 1162 \numerals") 1163 (* NonTerminal (inject_np (#2 (hd 
num_info))) *) 1164 end 1165 end 1166 | (VSRES_VS, _) => 1167 (NonTermVS [(SIMPLE (token_string tt),locn)],locn) 1168 | (_, QIdent x) => (NonTerminal (QIDENT x),locn) 1169 | (_, StrLit{ldelim,contents}) => 1170 if ldelim = "#\"" then 1171 (NonTerminal (AQ (mk_char (String.sub(contents,0)))), 1172 locn) 1173 else 1174 liftlocn NonTerminal 1175 (inject_np 1176 (SOME (mk_stringinjn_name ldelim)) 1177 (AQ (mk_string contents), locn)) 1178 | _ => (NonTerminal (VAR (token_string tt)),locn) 1179 (* tt is not an antiquote because of the wider context; 1180 antiquotes are dealt with in the wider case statement 1181 above *) 1182 in 1183 push (thing_to_push, Token tt) 1184 end handle Temp s => (WARNloc "parse_term" locn s; 1185 error (WARNloc_string locn s))) 1186 end 1187 | (((Terminal TypeTok,rlocn), PreType ty)::((Terminal TypeColon,_), _):: 1188 ((NonTerminal t,llocn), _)::rest) => let 1189 in 1190 repeatn 3 pop >> 1191 push ((NonTerminal (TYPED ((t,llocn), (ty,rlocn))), 1192 locn.between llocn rlocn), 1193 XXX) 1194 end 1195 | (((Terminal TypeTok,rlocn), PreType ty)::((Terminal TypeColon,_), _):: 1196 ((NonTermVS vsl,llocn), _)::rest) => let 1197 in 1198 repeatn 3 pop >> 1199 push ((NonTermVS 1200 (map (fn (v as (_,locn)) => (TYPEDV(v,(ty,rlocn)),locn)) 1201 vsl), 1202 locn.between llocn rlocn), 1203 XXX) 1204 end 1205 | [((Terminal TypeTok,rlocn), PreType ty), ((Terminal TypeColon,_), _)] => 1206 let 1207 val nonterm0 = (QIDENT("bool", "the_value"), rlocn) 1208 val type_annotation = 1209 (Pretype.Tyop{Thy="bool", Tyop = "itself", Args = [ty]}, 1210 rlocn) 1211 in 1212 pop >> pop >> 1213 push ((NonTerminal (TYPED(nonterm0, type_annotation)), rlocn), XXX) 1214 end 1215 | (((NonTerminal t,rlocn), _)::((Terminal EndBinding,_), _):: 1216 ((NonTermVS vsl,_), _)::((Terminal (STD_HOL_TOK binder),llocn), _):: 1217 rest) => let 1218 exception Urk of string 1219 in 1220 let 1221 fun has_resq (v,_) = 1222 case v of 1223 VPAIR(v1, v2) => has_resq v1 orelse has_resq v2 1224 | 
TYPEDV(v0, ty) => has_resq v0 1225 | RESTYPEDV _ => true 1226 | _ => false 1227 fun has_tupled_resq (VPAIR(v1, v2),_) = 1228 has_resq v1 orelse has_resq v2 1229 | has_tupled_resq (TYPEDV(v0, _),_) = has_tupled_resq v0 1230 | has_tupled_resq (RESTYPEDV(v0, _),_) = has_tupled_resq v0 1231 | has_tupled_resq _ = false 1232 fun ERROR s1 s2 = Urk (s1^": "^s2) 1233 fun extract_resq (v,_) = 1234 case v of 1235 TYPEDV (v0, _) => extract_resq v0 1236 | RESTYPEDV(v0, t) => let 1237 val sub_resq = extract_resq v0 1238 in 1239 case sub_resq of 1240 NONE => SOME(v0, t) 1241 | SOME _ => 1242 raise ERROR "parse_term" 1243 "Can't have double restricted quantification" 1244 end 1245 | _ => NONE 1246 fun comb_abs_fn(v,t) = let 1247 val binder = term_name_for_binder binder 1248 in 1249 if has_tupled_resq v then 1250 raise ERROR "parse_term" 1251 "Can't have restricted quantification on nested \ 1252 \arguments" 1253 else 1254 case extract_resq v of 1255 NONE => (COMB((VAR binder,llocn), 1256 (ABS(v, t),locn.between (#2 v) (#2 t))), 1257 locn.between llocn rlocn) 1258 | SOME (v',P) => let 1259 in 1260 (COMB((COMB((VAR (Lib.assoc (SOME binder) restr_binders), 1261 llocn), 1262 P),locn.between llocn (#2 P)), 1263 (ABS(v', t),locn.between (#2 v') (#2 t))), 1264 locn.between llocn rlocn) 1265 handle Feedback.HOL_ERR _ => 1266 raise ERROR "parse_term" 1267 ("No restricted quantifier associated \ 1268 \with " ^binder) 1269 end 1270 end 1271 fun abs_fn (v,t) = 1272 if has_tupled_resq v then 1273 raise ERROR "parse_term" 1274 "Can't have restricted quantification on \ 1275 \nested arguments" 1276 else 1277 case extract_resq v of 1278 NONE => (ABS(v,t),locn.between llocn rlocn) 1279 | SOME(v', P) => 1280 (COMB((COMB((VAR (Lib.assoc NONE restr_binders),llocn), 1281 P), locn.between llocn (#2 P)), 1282 (ABS(v', t),locn.between (#2 v') (#2 t))), 1283 locn.between llocn rlocn) 1284 handle Feedback.HOL_ERR _ => 1285 raise ERROR "parse_term" 1286 "No restricted quantifier associated \ 1287 \with 
lambda" 1288 val vsl = List.rev vsl 1289 val abs_t = 1290 List.foldr (if mem binder lambda then abs_fn else comb_abs_fn) 1291 (t,rlocn) vsl 1292 in 1293 repeatn 4 pop >> push (liftlocn NonTerminal abs_t, XXX) 1294 end handle Urk s => 1295 (WARNloc "parse_term" (locn.between llocn rlocn) s; 1296 error (WARNloc_string (locn.between llocn rlocn) s)) 1297 end 1298 | (((Terminal(STD_HOL_TOK ")"),rlocn), _)::(vsl as ((NonTermVS _,_),_)):: 1299 ((Terminal(STD_HOL_TOK "("),llocn), _)::rest) => let 1300 in 1301 (* need a rule here because 1302 1. a NonTermVS makes this a non-standard rule; and 1303 2. bracket-removal in the remove_specials code won't see 1304 the parentheses in the binding "var-struct" position 1305 *) 1306 repeatn 3 pop >> 1307 push vsl (* don't bother expanding locn; would add no useful info *) 1308 end 1309 | (((NonTermVS vsl1,rlocn), _)::((Terminal(STD_HOL_TOK ","),_), _):: 1310 ((NonTermVS vsl2,llocn), _)::rest) => let 1311 val lrlocn = locn.between llocn rlocn 1312 in 1313 if length vsl1 <> 1 orelse length vsl2 <> 1 then let 1314 val msg = "Can't have lists of atoms as arguments to , in binder" 1315 in 1316 (WARNloc "parse_term" lrlocn msg; 1317 error (WARNloc_string lrlocn msg)) 1318 end 1319 else 1320 repeatn 3 pop >> 1321 push ((NonTermVS [(VPAIR(hd vsl2, hd vsl1),lrlocn)],lrlocn), XXX) 1322 end 1323 | (((NonTerminal t,rlocn), _)::((Terminal ResquanOpTok,_), _):: 1324 ((NonTermVS vsl,llocn), _)::rest) => let 1325 in 1326 repeatn 3 pop >> 1327 push ((NonTermVS 1328 (map (fn (v as (_,locn))=> 1329 (RESTYPEDV(v,(t,rlocn)),locn)) vsl), 1330 locn.between llocn rlocn), 1331 XXX) >> 1332 leave_restm 1333 end 1334 | _ => let 1335 fun is_vcons_list [] = false 1336 | is_vcons_list [x] = false 1337 | is_vcons_list (((NonTermVS _,_), _)::((Terminal VS_cons,_), _)::rest) = let 1338 in 1339 case rest of 1340 [((NonTermVS _,_),_)] => true 1341 | _ => is_vcons_list rest 1342 end 1343 | is_vcons_list _ = false 1344 fun get_vsls ((NonTermVS vsl,_), _) = SOME vsl | 
get_vsls _ = NONE 1345 val ((_,rlocn),_) = List.hd rhs 1346 val ((_,llocn),_) = List.last rhs 1347 val lrlocn = locn.between llocn rlocn 1348 in 1349 if is_vcons_list rhs then 1350 repeatn (length rhs) pop >> 1351 push ((NonTermVS(List.concat (List.mapPartial get_vsls rhs)), 1352 lrlocn), 1353 XXX) 1354 else 1355 (WARNloc "parse_term" lrlocn "Can't do this sort of reduction"; 1356 error (WARNloc_string lrlocn "Can't do this sort of reduction")) 1357 end 1358 end handle Failloc (loc,s) => 1359 (if !syntax_error_trace then 1360 (print (locn.toString loc); 1361 print ":\n"; 1362 print s; 1363 print "\n") 1364 else (); 1365 error (WARNloc_string loc s)) 1366 1367 val do_reduction = 1368 getstack >- (return o find_reduction) >- perform_reduction 1369 1370 1371 fun el2list x = [x] 1372 (* calls the lexer and puts the result onto the lookahead list *) 1373 val get_item = (lex >- set_la o el2list o liftlocn Token) ++ (set_la []) 1374 1375 (* takes the top (first) terminal from the lookahead list (there is 1376 assumed to be one), and transfers it to the stack. 
If this 1377 empties the lookahead list, then this is replenished by calling 1378 the appropriate lexer *) 1379 1380 val shift = 1381 current_la >- 1382 (fn la => invstructp >- (return o hd) >- 1383 (fn in_vs => 1384 case la of 1385 [] => error (noloc "No lookahead") 1386 | (x0::xs) => let 1387 val (terminal,locn,x) = transform in_vs (SOME x0) 1388 in 1389 push ((Terminal terminal,locn), valOf x) >> 1390 (if null xs then get_item else set_la xs) >> 1391 (case terminal of 1392 STD_HOL_TOK s => 1393 if is_binder s then enter_binder 1394 else if s = "(" andalso #1 in_vs <> VSRES_Normal then 1395 inc_paren 1396 else if s = ")" andalso #1 in_vs <> VSRES_Normal then 1397 dec_paren 1398 else ok 1399 | ResquanOpTok => enter_restm 1400 | EndBinding => leave_binder 1401 | _ => ok) 1402 end)) 1403 1404 1405 fun doit (tt, (top_item, top_token), in_vs) = let 1406 val (input_term, itlocn, _) = transform (hd in_vs) tt 1407 val top = dest_terminal top_item 1408 val toplocn = #2 top_item 1409 val ttt = Terminal input_term 1410 fun check_for_fnapp_insert stack = 1411 (* if the next thing in the lookahead might begin a whole new phrase, 1412 i.e. is a closed lhs, and the top thing on the stack is a non-terminal 1413 then we should insert the magical function application operator *) 1414 case stack of 1415 ((NonTerminal _,locn), _)::_ => let 1416 val fnt = (LA_Symbol (STD_HOL_TOK fnapp_special),locn.Loc_Near locn) 1417 in 1418 if mem input_term fnapp_closed_lefts then 1419 current_la >- (fn oldla => set_la (fnt::oldla)) 1420 else 1421 qfail 1422 end 1423 | ((NonTermVS _,locn), _) :: _ => let 1424 val fnt = (LA_Symbol VS_cons,locn.Loc_Near locn) 1425 in 1426 if mem input_term fnapp_closed_lefts then 1427 current_la >- (fn oldla => set_la (fnt::oldla)) 1428 else 1429 qfail 1430 end 1431 | _ => qfail 1432 1433 (* a "paren escape" is a CAML style escaping of what would otherwise 1434 be a special token by parenthesising it. Thus (/\) instead of $/\. 
1435 The analysis is done at this level rather than inside the lexer to 1436 allow for white-space etc. People will have to write ( * ) to escape 1437 the multiplication symbol, because without the space, it will look like 1438 a comment *) 1439 val check_for_paren_escape = let 1440 fun require (s:string option) = 1441 getstack >- 1442 (fn [] => error (noloc "Stack empty") 1443 | (tt :: _ ) => let 1444 in 1445 case tt of 1446 ((Terminal (STD_HOL_TOK s'),locn), _) => let 1447 in 1448 case s of 1449 NONE => pop >> return (s',locn) 1450 | SOME s'' => 1451 if s' = s'' then pop >> return (s',locn) 1452 else error (WARNloc_string 1453 locn 1454 ("Expected "^s''^", found "^s')) 1455 end 1456 | _ => error (noloc "Expected a terminal") 1457 end) 1458 in 1459 if input_term = STD_HOL_TOK ")" then 1460 require NONE >- 1461 (fn (s,_) => 1462 if s = ")" orelse s = "(" then 1463 qfail (* don't want to paren-escape "())" or "(()" *) 1464 else 1465 require (SOME "(") >- 1466 (fn (_,locn) => let val lrlocn = locn.between locn itlocn 1467 in 1468 shift >> pop >> 1469 (if is_binder s then leave_binder else return ()) >> 1470 invstructp >- (return o #1 o hd) >- 1471 (fn vstate => 1472 if vstate <> VSRES_VS then 1473 push ((NonTerminal (VAR s),lrlocn), XXX) 1474 else 1475 push ((NonTermVS [(SIMPLE s,lrlocn)],lrlocn), XXX)) 1476 end )) 1477 else qfail 1478 end 1479 fun top_might_be_infix stk = 1480 case stk of 1481 ((NonTerminal _, _), _) :: ((Terminal _, _), _) :: 1482 ((NonTerminal _, _), _) :: _ => true 1483 | _ => false 1484 1485 val usual_action = let 1486 fun get_topntp stk = 1487 case stk of 1488 [] => return (false,stk) 1489 | ((Terminal _,_), _) :: _ => return (false,stk) 1490 | _ => return (true,stk) 1491 fun check_order (topntp,stk) = 1492 case Polyhash.peek prec_matrix ((top,topntp), input_term) of 1493 NONE => let 1494 val msg = "Don't expect to find a "^STtoString G input_term^ 1495 " in this position after a "^STtoString G top^"\n"^ 1496 locn.toString itlocn^" and " ^ 1497 
locn.toString toplocn^".\n" 1498 in 1499 if !syntax_error_trace then print msg 1500 else (); 1501 error (msg, toplocn) 1502 end 1503 | SOME order => let 1504 fun byorder GREATER = do_reduction 1505 | byorder _ = shift 1506 in 1507 case mx_order order of 1508 SOME x => byorder x 1509 | NONE => let 1510 val (pfx,ifx) = 1511 case order of 1512 PM_LG {pfx,ifx} => (pfx,ifx) 1513 | _ => raise Fail "parse_term: check_order invariant fail" 1514 in 1515 if top_might_be_infix stk then byorder ifx 1516 else byorder pfx 1517 end 1518 end 1519 in 1520 check_for_paren_escape ++ 1521 (getstack >- check_for_fnapp_insert) ++ 1522 (getstack >- get_topntp >- check_order) 1523 end 1524 in 1525 case input_term of 1526 TypeColon => let 1527 (* behaviour has to be slightly tricksy in this case: 1528 - we need to make sure that the next thing that appears in 1529 the stream of tokens is a complete type. 1530 - The way we do this is by calling the type parser on the 1531 remaining input stream and putting the result into the 1532 lookahead list behind the colon. 
1533 - We only do this once, so the following action checks to 1534 see that the lookahead list is only one long, otherwise 1535 it can do the normal action 1536 *) 1537 fun action_on_la la = 1538 case la of 1539 [x as (_,locn)] => 1540 lifted_typeparser >- (fn ty => set_la [x, (PreType ty,locn.Loc_Near locn)]) 1541 (* TODO: if lifted_typeparser returned a location, use that *) 1542 | _ => usual_action 1543 in 1544 current_la >- action_on_la 1545 end 1546 | STD_HOL_TOK s => usual_action 1547 | _ => usual_action 1548 end 1549 1550 val current_input = 1551 current_la >- (fn lal => (* lookahead list *) 1552 if null lal then return NONE else return (SOME (hd lal))) 1553 1554 1555 val basic_action = 1556 current_input >- (fn tt (* term token *) => 1557 topterm >- (fn top => 1558 invstructp >- (fn invs => 1559 doit (tt, top, invs)))) 1560 fun notBOS (Terminal BOS) = false 1561 | notBOS _ = true 1562 1563in 1564 push ((Terminal BOS,locn.Loc_None), XXX) >> get_item >> 1565 mwhile (current_input >- 1566 (fn optt => if (isSome optt) then return true 1567 else 1568 topterm >- (fn t => return (notBOS (#1 (#1 t)))))) 1569 basic_action 1570end 1571 1572val initial_pstack = PStack {stack = [], lookahead = [], 1573 in_vstruct = [(VSRES_Normal, 0)]} 1574 1575fun is_final_pstack p = 1576 case p of 1577 PStack {stack = [((NonTerminal pt,_), _), ((Terminal BOS,_), _)], 1578 lookahead = [], in_vstruct = [(VSRES_Normal, 0)]} => true 1579 | _ => false 1580 1581val recupd_errstring = 1582 "Record list must have (fld := value) or (fld updated_by f) elements only" 1583 1584fun to_vabsyn vs = 1585 case vs of 1586 (SIMPLE x,locn) => Absyn.VIDENT (locn,x) 1587 | (VPAIR(v1,v2),locn) => Absyn.VPAIR(locn,to_vabsyn v1, to_vabsyn v2) 1588 | (TYPEDV(v,(ty,_)),locn) => Absyn.VTYPED(locn,to_vabsyn v, ty) 1589 | (VS_AQ x,locn) => Absyn.VAQ (locn,x) 1590 | (RESTYPEDV _,locn) => 1591 raise ParseTermError 1592 ("Attempt to convert a vstruct still containing a RESTYPEDV",locn) 1593 1594fun remove_specials 
t = 1595 case #1 t of 1596 COMB (t1, t2) => let 1597 open Absyn 1598 in 1599 case #1 t1 of 1600 VAR s => if s = bracket_special then remove_specials t2 1601 else APP(#2 t, IDENT (#2 t1,s), remove_specials t2) 1602 | COMB ((VAR s,slocn), f) => let 1603 in 1604 if s = fnapp_special then APP(#2 t, remove_specials f, remove_specials t2) 1605 else if s = recsel_special then 1606 case #1 t2 of 1607 VAR fldname => APP(#2 t, IDENT (#2 t2, recsel_special ^ fldname), 1608 remove_specials f) 1609 | _ => raise ParseTermError 1610 ("Record selection must have single id to right \ 1611 \(possibly non-integer numeric literal)",#2 t2) 1612 else if s = reccons_special then 1613 remove_recupdate (#2 t) f t2 (QIDENT (locn.Loc_None,"bool", "ARB")) 1614 else if s = recwith_special then 1615 remove_recupdate' (#2 t) t2 (remove_specials f) 1616 else 1617 if s = recupd_special orelse s = recfupd_special then 1618 raise ParseTermError 1619 ("May not use record update functions at top level \ 1620 \(possibly missing semicolon)",slocn) 1621 else 1622 APP(#2 t, remove_specials t1, remove_specials t2) 1623 end 1624 | _ => APP(#2 t, remove_specials t1, remove_specials t2) 1625 end 1626 | ABS(v, t2) => Absyn.LAM(#2 t, to_vabsyn v, remove_specials t2) 1627 | TYPED(t, (ty,_)) => Absyn.TYPED(#2 t, remove_specials t, ty) 1628 | VAR s => Absyn.IDENT(#2 t, s) 1629 | QIDENT(x,y) => Absyn.QIDENT(#2 t, x, y) 1630 | AQ x => Absyn.AQ(#2 t, x) 1631and remove_recupdate locn upd1 updates bottom = let 1632 open Absyn 1633in 1634 case #1 upd1 of 1635 COMB((COMB((VAR s,slocn), (VAR fld,_)),sflocn), newvalue) => let 1636 in 1637 if s = recfupd_special then 1638 APP(locn, APP(#2 upd1, IDENT (sflocn,s^fld), remove_specials newvalue), 1639 remove_recupdate' (#2 updates) updates bottom) 1640 else if s = recupd_special then 1641 APP(locn, APP(#2 upd1, IDENT (sflocn, recfupd_special^fld), 1642 APP(locn.Loc_None, QIDENT(locn.Loc_None, "combin", "K"), 1643 remove_specials newvalue)), 1644 remove_recupdate' (#2 updates) 
updates bottom) 1645 else raise ParseTermError (recupd_errstring,slocn) 1646 end 1647 | _ => 1648 raise ParseTermError (recupd_errstring,#2 upd1) 1649end 1650and remove_recupdate' locn updatelist bottom = let 1651 open Absyn 1652in 1653 case #1 updatelist of 1654 VAR s => if s = recnil_special then bottom 1655 else raise ParseTermError (recupd_errstring,#2 updatelist) 1656 | COMB((COMB((VAR s,slocn), upd1),sflocn), updates) => let 1657 in 1658 if s = reccons_special then remove_recupdate locn upd1 updates bottom 1659 else 1660 if s = recupd_special orelse s = recfupd_special then 1661 case #1 upd1 of 1662 VAR fldname => 1663 if s = recfupd_special then 1664 APP(locn,APP(#2 upd1, IDENT (sflocn,s^fldname), 1665 remove_specials updates), 1666 bottom) 1667 else 1668 APP(locn,APP(#2 upd1, IDENT (sflocn, recfupd_special^fldname), 1669 APP(locn.Loc_None, 1670 QIDENT(locn.Loc_None, "combin", "K"), 1671 remove_specials updates)), 1672 bottom) 1673 | _ => raise ParseTermError 1674 ("Must have field name as first argument to update operator",#2 upd1) 1675 else 1676 raise ParseTermError (recupd_errstring,slocn) 1677 end 1678 | _ => raise ParseTermError (recupd_errstring,#2 updatelist) 1679end 1680 1681 1682fun top_nonterminal pstack = 1683 case pstack of 1684 PStack {stack = ((NonTerminal pt,locn), _)::_, ...} => remove_specials (pt,locn) 1685 | PStack {stack = ((_,locn),_)::_, ...} => 1686 raise ParseTermError ("No non-terminal on top of stack",locn) 1687 | _ => 1688 raise ParseTermError ("No non-terminal on top of stack",locn.Loc_Unknown(*TODO*)) 1689 1690(* 1691infix Gmerge 1692 1693 1694Useful functions to test with: 1695fun do_parse0 G ty = let 1696 val pt = parse_term G ty 1697in 1698 fn q => let 1699 val ((cs, p), _) = pt (q, PStack {lookahead = [], stack = [], 1700 in_vstruct = false}) 1701 in 1702 case pstack p of 1703 [(NonTerminal pt, _), (Terminal BOS, _)] => remove_specials pt 1704 | _ => raise Fail "Parse failed " 1705 end 1706end 1707 1708Remember to start with 
1709 quotation := true 1710in raw MoscowML sessions 1711 1712fun pp (v : ''a) = let 1713 val p:''a frag list -> ''a preterm = 1714 do_parse0 stdhol (parse_type.parse_type parse_type.empty_grammar) 1715in 1716 p 1717end 1718 1719fun ppt (v: ''a) = let 1720 fun fix_stack stack = 1721 case stack of 1722 ((NonTerminal t, _)::rest) => 1723 pop >> push (NonTerminal(remove_specials t), XXX) 1724 | x => ok 1725 val p:''a frag list -> ((''a frag list * ''a PStack) * unit option) = 1726 fn q => 1727 (parse_term stdhol (parse_type.parse_type parse_type.empty_grammar) >> 1728 getstack >- fix_stack) 1729 (q, PStack{lookahead = [], stack = [], in_vstruct = false}) 1730in 1731 p 1732end 1733 1734val p = pp () 1735*) 1736(* 1737quotation := true; 1738val pType = parse_type.pType 1739fun do_test (q, res) = let 1740 val test = p q 1741in 1742 if test <> res then let 1743 in 1744 print "Failure on: \""; 1745 print (quotetoString q); 1746 print "\" not parsing to "; 1747 Meta.printVal res; 1748 print "\n" 1749 end else () 1750end handle _ => print ("Failure of \""^quotetoString q^"\" to parse at all\n"); 1751 1752val tests : (unit frag list * unit preterm) list = 1753[(`x`, VAR "x"), 1754 (`x'`, VAR "x'"), 1755 (`f x`, COMB (VAR "f", VAR "x")), 1756 (`f x y` , COMB (COMB (VAR "f", VAR "x"), VAR "y")), 1757 (`p /\ q`, COMB (COMB (VAR "/\\", VAR "p"), VAR "q")), 1758 (`p /\ q \/ r`, COMB (COMB(VAR "\\/", COMB(COMB(VAR "/\\", VAR "p"), 1759 VAR "q")), 1760 VAR "r")), 1761 (`f : num`, TYPED(VAR "f", parse_type.pType("num", []))), 1762 (`f x : bool list`,TYPED(COMB(VAR "f", VAR "x"), 1763 pType("list", [pType("bool", [])]))), 1764 (`f (p \/ q)`, COMB(VAR "f", COMB (COMB (VAR "\\/", VAR "p"), VAR "q"))), 1765 (`f ^(())`, COMB(VAR "f", AQ ())), 1766 (`f (p:bool \/ q)`,COMB(VAR "f", 1767 COMB(COMB(VAR "\\/", 1768 TYPED(VAR "p", pType("bool", []))), 1769 VAR "q"))), 1770 (`f x.f1`, COMB(VAR "f", COMB(COMB(VAR rec_special, VAR "x"), 1771 VAR "f1"))), 1772 (`f theni`, COMB(VAR "f", VAR 
"theni")), 1773 (`\x. x`, ABS (SIMPLE "x", VAR "x")), 1774 (`\x y. x`, ABS (SIMPLE "x", ABS (SIMPLE "y", VAR "x"))), 1775 (`?x y. x`, COMB (VAR "?", ABS (SIMPLE "x", 1776 COMB(VAR "?", 1777 ABS(SIMPLE "y", VAR "x"))))), 1778 (`[p; q]`, COMB (COMB(VAR "CONS", VAR "p"), 1779 COMB(COMB(VAR "CONS", VAR "q"), VAR "NIL"))), 1780 (`f [] x`, COMB (COMB (VAR "f", VAR "NIL"), VAR "x")), 1781 (`[[]]`, COMB (COMB (VAR "CONS", VAR "NIL"), VAR "NIL")), 1782 (`\^(()). x`, ABS(VS_AQ(), VAR "x")), 1783 (`f x = if ~f p /\ t then q else r`, 1784 COMB(COMB(VAR "=", COMB(VAR "f", VAR "x")), 1785 COMB(COMB(COMB(VAR "COND", 1786 COMB(COMB(VAR "/\\", 1787 COMB(VAR "~", 1788 COMB(VAR "f",VAR "p"))), 1789 VAR "t")), 1790 VAR "q"), 1791 VAR "r")))]; 1792 1793 1794 1795app do_test tests; 1796 1797*) 1798 1799 1800end 1801