diff --git a/compiler/graindoc/docblock.re b/compiler/graindoc/docblock.re index ace67c35b3..d3f3404189 100644 --- a/compiler/graindoc/docblock.re +++ b/compiler/graindoc/docblock.re @@ -136,7 +136,10 @@ let output_for_history = (~current_version, attr_version, attr_desc) => { let types_for_function = (~ident, vd: Types.value_description) => { switch (Ctype.repr(vd.val_type).desc) { - | TTyArrow(args, returns, _) => (Some(args), Some(returns)) + | TTyArrow(args, returns, _) => ( + Some(List.map(snd, args)), + Some(returns), + ) | _ => (None, None) }; }; diff --git a/compiler/src/formatting/format.re b/compiler/src/formatting/format.re index 09029fc0d4..dfcec7b41f 100644 --- a/compiler/src/formatting/format.re +++ b/compiler/src/formatting/format.re @@ -1762,7 +1762,8 @@ and print_type = Doc.group( switch (types) { | [] => Doc.concat([Doc.lparen, Doc.rparen]) - | [t] => print_type(~original_source, ~comments, t) + | [{ptyp_arg_label: Unlabeled, ptyp_arg_type: t}] => + print_type(~original_source, ~comments, t) | _types => Doc.concat([ Doc.lparen, @@ -1772,7 +1773,29 @@ and print_type = Doc.join( ~sep=Doc.concat([Doc.comma, Doc.line]), List.map( - t => print_type(~original_source, ~comments, t), + ({Parsetree.ptyp_arg_label: label, ptyp_arg_type: t}) => { + let label = + switch (label) { + | Asttypes.Unlabeled => Doc.nil + | Labeled(name) => + Doc.concat([ + Doc.text(name.txt), + Doc.text(":"), + Doc.space, + ]) + | Optional(name) => + Doc.concat([ + Doc.question, + Doc.text(name.txt), + Doc.text(":"), + Doc.space, + ]) + }; + Doc.concat([ + label, + print_type(~original_source, ~comments, t), + ]); + }, types, ), ), @@ -1867,7 +1890,7 @@ and print_type = and print_application = ( ~expression_parent: expression_parent_type, - ~expressions: list(Parsetree.expression), + ~expressions: list(Parsetree.application_argument), ~original_source: array(string), ~comments: list(Parsetree.comment), func: Parsetree.expression, @@ -1897,7 +1920,7 @@ and print_application = and print_infix_application = ( ~expression_parent: expression_parent_type, - ~expressions: list(Parsetree.expression), + ~expressions: list(Parsetree.application_argument), ~original_source: array(string), ~comments: list(Parsetree.comment), func: Parsetree.expression, @@ -1908,12 +1931,12 @@ and print_infix_application = | [first, second] => let next_comments = Comment_utils.get_comments_between_locations( - ~loc1=first.pexp_loc, - ~loc2=second.pexp_loc, + ~loc1=first.paa_loc, + ~loc2=second.paa_loc, comments, ); - let (_, line, _, _) = Locations.get_raw_pos_info(first.pexp_loc.loc_end); + let (_, line, _, _) = Locations.get_raw_pos_info(first.paa_loc.loc_end); let line_comments = Comment_utils.get_comments_on_line(line, next_comments); @@ -1928,20 +1951,20 @@ and print_infix_application = Comment_utils.single_line_of_comments(line_comments); let left_is_if = - switch (first.pexp_desc) { + switch (first.paa_expr.pexp_desc) { | PExpIf(_) => true | _ => false }; let right_is_if = - switch (second.pexp_desc) { + switch (second.paa_expr.pexp_desc) { | PExpIf(_) => true | _ => false }; let parent_prec = op_precedence(function_name); let left_is_leaf = - switch (first.pexp_desc) { + switch (first.paa_expr.pexp_desc) { | PExpApp(fn, expr) => let child_name = get_function_name(fn); let this_prec = op_precedence(child_name); @@ -1951,7 +1974,7 @@ and print_infix_application = }; let right_is_leaf = - switch (second.pexp_desc) { + switch (second.paa_expr.pexp_desc) { | PExpApp(fn, expr) => let child_name = get_function_name(fn); let this_prec 
= op_precedence(child_name); @@ -1961,7 +1984,7 @@ and print_infix_application = }; let left_grouping_required = - switch (first.pexp_desc) { + switch (first.paa_expr.pexp_desc) { | PExpApp(fn1, _) => op_precedence(get_function_name(fn1)) < parent_prec | PExpConstant(PConstNumber(PConstNumberRational(_, _))) => @@ -1972,7 +1995,7 @@ and print_infix_application = let right_grouping_required = // the equality check is needed for the value on the right // as we process from the left by default when the same prededence - switch (second.pexp_desc) { + switch (second.paa_expr.pexp_desc) { | PExpApp(fn1, _) => op_precedence(get_function_name(fn1)) <= parent_prec | PExpConstant(PConstNumber(PConstNumberRational(_, _))) => @@ -1983,7 +2006,7 @@ and print_infix_application = // Put parens around different operators for clarity, except // math and logic operations where precedence is well-known let left_is_different_op = - switch (first.pexp_desc) { + switch (first.paa_expr.pexp_desc) { | PExpApp(fn1, _) => let fn = get_function_name(fn1); if (infixop(fn)) { @@ -2006,7 +2029,7 @@ and print_infix_application = ~expression_parent=GenericExpression, ~original_source, ~comments, - first, + first.paa_expr, ); Doc.concat([ Doc.lparen, @@ -2022,7 +2045,7 @@ and print_infix_application = ~expression_parent, ~original_source, ~comments, - first, + first.paa_expr, ); }; @@ -2035,7 +2058,7 @@ and print_infix_application = ~expression_parent=GenericExpression, ~original_source, ~comments, - second, + second.paa_expr, ), ]), Doc.rparen, @@ -2046,7 +2069,7 @@ and print_infix_application = ~expression_parent, ~original_source, ~comments, - second, + second.paa_expr, ), ]); }; @@ -2096,8 +2119,8 @@ and print_infix_application = } and print_arg_lambda = - (~comments, ~original_source, lambda: Parsetree.expression) => { - switch (lambda.pexp_desc) { + (~comments, ~original_source, lambda: Parsetree.application_argument) => { + switch (lambda.paa_expr.pexp_desc) { | PExpLambda(patterns, expression) => let comments_in_expression = Comment_utils.get_comments_inside_location( @@ -2106,7 +2129,7 @@ and print_arg_lambda = ); let raw_args = - print_patterns( + print_lambda_arguments( ~next_loc=expression.pexp_loc, ~comments, ~original_source, @@ -2114,24 +2137,35 @@ and print_arg_lambda = patterns, ); + let label = + switch (lambda.paa_label) { + | Unlabeled => Doc.nil + | Labeled(name) + | Optional(name) => Doc.concat([Doc.text(name.txt), Doc.equal]) + }; + let args = - Doc.group( - switch (patterns) { - | [] => Doc.concat([Doc.lparen, raw_args, Doc.rparen]) - | [pat] => - switch (pat.ppat_desc) { - | PPatVar(_) => raw_args - | _ => Doc.group(Doc.concat([Doc.lparen, raw_args, Doc.rparen])) - } - | _patterns => - Doc.concat([ - Doc.lparen, - Doc.indent(Doc.concat([Doc.softLine, raw_args])), - Doc.softLine, - Doc.rparen, - ]) - }, - ); + Doc.concat([ + label, + Doc.group( + switch (patterns) { + | [ + { + pla_label: Labeled(name), + pla_pattern: {ppat_desc: PPatVar(var)}, + }, + ] + when name.txt == var.txt => raw_args + | _patterns => + Doc.concat([ + Doc.lparen, + Doc.indent(Doc.concat([Doc.softLine, raw_args])), + Doc.softLine, + Doc.rparen, + ]) + }, + ), + ]); Doc.group( switch (expression.pexp_desc) { @@ -2224,13 +2258,14 @@ and print_arg_lambda = }; } -and print_arg = (~original_source, ~comments, arg: Parsetree.expression) => { - switch (arg.pexp_desc) { +and print_arg = + (~original_source, ~comments, arg: Parsetree.application_argument) => { + switch (arg.paa_expr.pexp_desc) { | PExpLambda(patterns, expression) => 
print_arg_lambda(~comments, ~original_source, arg) | _ => Doc.group( - print_expression( + print_application_argument( ~expression_parent=InfixExpression, ~original_source, ~comments, @@ -2244,12 +2279,12 @@ and print_args_with_comments = ( ~comments: list(Parsetree.comment), ~original_source, - args: list(Parsetree.expression), + args: list(Parsetree.application_argument), ) => { - let get_loc = (e: Parsetree.expression) => e.pexp_loc; - let print_item = (~comments, e: Parsetree.expression) => { + let get_loc = (e: Parsetree.application_argument) => e.paa_loc; + let print_item = (~comments, e: Parsetree.application_argument) => { Doc.group( - print_expression( + print_application_argument( ~expression_parent=InfixExpression, ~original_source, ~comments, @@ -2272,7 +2307,7 @@ and print_args_with_comments = } and print_arguments_with_callback_in_first_position = - (~original_source, ~comments, args: list(Parsetree.expression)) => { + (~original_source, ~comments, args: list(Parsetree.application_argument)) => { switch (args) { | [] => Doc.nil | [callback] => @@ -2323,7 +2358,7 @@ and print_arguments_with_callback_in_first_position = } and print_arguments_with_callback_in_last_position = - (~original_source, ~comments, args: list(Parsetree.expression)) => + (~original_source, ~comments, args: list(Parsetree.application_argument)) => switch (args) { | [] => Doc.nil | [expr, callback] => @@ -2365,7 +2400,7 @@ and print_arguments_with_callback_in_last_position = and print_other_application = ( ~expression_parent: expression_parent_type, - ~expressions: list(Parsetree.expression), + ~expressions: list(Parsetree.application_argument), ~original_source: array(string), ~comments: list(Parsetree.comment), func: Parsetree.expression, @@ -2374,7 +2409,7 @@ and print_other_application = switch (expressions) { | [first] when prefixop(function_name) => - switch (first.pexp_desc) { + switch (first.paa_expr.pexp_desc) { | PExpApp(fn, _) => let inner_fn = get_function_name(fn); if (infixop(inner_fn)) { @@ -2382,7 +2417,7 @@ and print_other_application = Doc.text(function_name), Doc.lparen, Doc.group( - print_expression( + print_application_argument( ~expression_parent, ~original_source, ~comments, @@ -2395,7 +2430,7 @@ and print_other_application = Doc.concat([ Doc.text(function_name), Doc.group( - print_expression( + print_application_argument( ~expression_parent, ~original_source, ~comments, @@ -2409,7 +2444,7 @@ and print_other_application = Doc.concat([ Doc.text(function_name), Doc.group( - print_expression( + print_application_argument( ~expression_parent, ~original_source, ~comments, @@ -2439,7 +2474,7 @@ and print_other_application = func, ), Doc.space, - print_expression( + print_application_argument( ~expression_parent=GenericExpression, ~original_source, ~comments, @@ -2451,7 +2486,7 @@ and print_other_application = // look out for special cases of callbacks in first or last position let first_arg_is_callback = - switch (first_expr.pexp_desc) { + switch (first_expr.paa_expr.pexp_desc) { | PExpLambda(_) => true | _ => false }; @@ -2462,7 +2497,7 @@ and print_other_application = | _ => let last_expression = get_last_item_in_list(expressions); - switch (last_expression.pexp_desc) { + switch (last_expression.paa_expr.pexp_desc) { | PExpLambda(_) => true | _ => false }; @@ -2596,6 +2631,140 @@ and print_patterns = }; } +and print_lambda_arguments = + ( + ~next_loc: Location.t, + ~comments: list(Parsetree.comment), + ~original_source: array(string), + ~followed_by_arrow: option(bool)=?, + arguments: 
list(Parsetree.lambda_argument), + ) => { + let get_loc = (l: Parsetree.lambda_argument) => l.pla_loc; + let print_item = + ( + ~comments, + { + pla_label: label, + pla_pattern: pattern, + pla_default: default, + pla_loc, + }: Parsetree.lambda_argument, + ) => { + let label_doc = + switch (label, pattern.ppat_desc) { + | ( + Asttypes.Unlabeled, + PPatAny | PPatConstraint({ppat_desc: PPatAny}, _), + ) => Doc.nil + | ( + Labeled({txt: label}) | Optional({txt: label}), + PPatVar({txt: name}) | + PPatConstraint({ppat_desc: PPatVar({txt: name})}, _), + ) + when label == name => Doc.nil + | (Unlabeled, _) => Doc.concat([Doc.text("_"), Doc.space]) + | (Labeled(name) | Optional(name), _) => + Doc.concat([Doc.text(name.txt), Doc.space]) + }; + let pattern_doc = + print_pattern(~original_source, ~comments, ~next_loc, pattern); + let default_doc = + switch (default) { + | None => Doc.nil + | Some(expr) => + Doc.concat([ + Doc.equal, + print_expression( + ~expression_parent=GenericExpression, + ~original_source, + ~comments, + expr, + ), + ]) + }; + + Doc.concat([label_doc, pattern_doc, default_doc]); + }; + + let comments_in_scope = + Comment_utils.get_comments_before_location(~location=next_loc, comments); + + switch (arguments) { + | [] => Doc.nil + | _ => + let items = + item_iterator( + ~get_loc, + ~print_item, + ~comments=comments_in_scope, + ~followed_by_arrow?, + ~iterated_item=IteratedPatterns, + arguments, + ); + Doc.join(~sep=Doc.line, items); + }; +} + +and print_application_argument = + ( + ~comments: list(Parsetree.comment), + ~expression_parent: expression_parent_type, + ~original_source: array(string), + argument: Parsetree.application_argument, + ) => { + // FIXME: Comments? + let expr_doc = + print_expression( + ~expression_parent, + ~original_source, + ~comments, + argument.paa_expr, + ); + switch (argument.paa_label, argument.paa_expr.pexp_desc) { + | (Asttypes.Unlabeled, _) => expr_doc + | (Labeled(name) | Optional(name), _) => + Doc.concat([Doc.text(name.txt), Doc.equal, expr_doc]) + }; +} + +and print_application_arguments = + ( + ~next_loc: Location.t, + ~comments: list(Parsetree.comment), + ~expression_parent: expression_parent_type, + ~original_source: array(string), + ~followed_by_arrow: option(bool)=?, + arguments: list(Parsetree.application_argument), + ) => { + let get_loc = (l: Parsetree.application_argument) => l.paa_loc; + let print_item = (~comments, argument: Parsetree.application_argument) => { + print_application_argument( + ~comments, + ~expression_parent, + ~original_source, + argument, + ); + }; + + let comments_in_scope = + Comment_utils.get_comments_before_location(~location=next_loc, comments); + + switch (arguments) { + | [] => Doc.nil + | _ => + let items = + item_iterator( + ~get_loc, + ~print_item, + ~comments=comments_in_scope, + ~followed_by_arrow?, + ~iterated_item=IteratedPatterns, + arguments, + ); + Doc.join(~sep=Doc.line, items); + }; +} + and paren_wrap_patterns = ( ~wrapper: Location.t, @@ -2603,10 +2772,10 @@ and paren_wrap_patterns = ~comments: list(Parsetree.comment), ~original_source: array(string), ~followed_by_arrow: bool, - patterns: list(Parsetree.pattern), + patterns: list(Parsetree.lambda_argument), ) => { let args = - print_patterns( + print_lambda_arguments( ~next_loc, ~comments, ~original_source, @@ -2616,11 +2785,8 @@ and paren_wrap_patterns = switch (patterns) { | [] => Doc.concat([Doc.lparen, args, Doc.rparen]) - | [pat] => - switch (pat.ppat_desc) { - | PPatVar(_) => args - | _ => Doc.concat([Doc.lparen, args, Doc.rparen]) - } + 
| [{pla_label: Labeled(name), pla_pattern: {ppat_desc: PPatVar(var)}}] + when name.txt == var.txt => args | _patterns => let trail_sep = Doc.ifBreaks(Doc.comma, Doc.nil); @@ -3640,6 +3806,17 @@ and print_expression_inner = ~location=expr.pexp_loc, comments, ); + // Treat constructors as function calls + let expressions = + List.map( + expr => + { + Parsetree.paa_label: Unlabeled, + paa_expr: expr, + paa_loc: expr.pexp_loc, + }, + expressions, + ); print_application( ~expression_parent, ~expressions, @@ -3813,7 +3990,7 @@ and print_assignment = (~original_source, ~comments, left, value) => { let left_matches_first = switch (expressions) { | [expr, ...remainder] => - print_expression( + print_application_argument( ~expression_parent=GenericExpression, ~original_source, ~comments, @@ -3840,16 +4017,16 @@ and print_assignment = (~original_source, ~comments, left, value) => { raise(IllegalParse("Sugared op needs at least one expression")) | [expression] => let expr = - print_expression( + print_application_argument( ~expression_parent=GenericExpression, ~original_source, ~comments, expression, ); - switch (expression.pexp_desc) { + switch (expression.paa_expr.pexp_desc) { | PExpIf(_) => Doc.indent( - print_expression( + print_application_argument( ~expression_parent=GenericExpression, ~original_source, ~comments, @@ -3860,16 +4037,16 @@ and print_assignment = (~original_source, ~comments, left, value) => { }; | [expression1, expression2, ...rest] => let expr = - print_expression( + print_application_argument( ~expression_parent=GenericExpression, ~original_source, ~comments, expression2, ); - switch (expression2.pexp_desc) { + switch (expression2.paa_expr.pexp_desc) { | PExpIf(_) => Doc.indent( - print_expression( + print_application_argument( ~expression_parent=GenericExpression, ~original_source, ~comments, diff --git a/compiler/src/middle_end/linearize.re b/compiler/src/middle_end/linearize.re index 656d4c4e8b..588ec02c5b 100644 --- a/compiler/src/middle_end/linearize.re +++ b/compiler/src/middle_end/linearize.re @@ -124,6 +124,31 @@ let convert_binds = anf_binds => { List.fold_left(convert_bind, ans, top_binds); }; +// reorder arguments according to labels +let reorder_arguments = (args, order) => { + let rec reorder = (reordered_args, args, order) => { + let rec extract_label = (l, arg) => { + switch (arg) { + | [] => failwith("Impossible: no argument matching label") + | [(argl, arg), ...rest_args] when Btype.same_label_name(argl, l) => ( + arg, + rest_args, + ) + | [arg, ...rest_args] => + let (res, rest_args) = extract_label(l, rest_args); + (res, [arg, ...rest_args]); + }; + }; + switch (order) { + | [] => reordered_args + | [tyl, ...order] => + let (value, args) = extract_label(tyl, args); + reorder([value, ...reordered_args], args, order); + }; + }; + List.rev(reorder([], args, order)); +}; + let transl_const = (~loc=Location.dummy_loc, ~env=Env.empty, c: Types.constant) : Either.t(imm_expression, (ident, list(anf_bind))) => { @@ -568,28 +593,43 @@ let rec transl_imm = | TExpApp( {exp_desc: TExpIdent(_, _, {val_kind: TValPrim("@throw")})}, _, + _, ) => let (ans, ans_setup) = transl_comp_expression(e); (Imm.trap(~loc, ~env, ()), ans_setup @ [BSeq(ans)]); - | TExpApp({exp_desc: TExpIdent(_, _, {val_kind: TValPrim(prim)})}, args) => + | TExpApp( + {exp_desc: TExpIdent(_, _, {val_kind: TValPrim(prim)})}, + _, + args, + ) => Translprim.( switch (PrimMap.find_opt(prim_map, prim), args) { | (Some(Primitive0(prim)), []) => transl_imm({...e, exp_desc: TExpPrim0(prim)}) - | 
(Some(Primitive1(prim)), [arg]) => + | (Some(Primitive1(prim)), [(_, arg)]) => transl_imm({...e, exp_desc: TExpPrim1(prim, arg)}) - | (Some(Primitive2(prim)), [arg1, arg2]) => + | (Some(Primitive2(prim)), [(_, arg1), (_, arg2)]) => transl_imm({...e, exp_desc: TExpPrim2(prim, arg1, arg2)}) | (Some(PrimitiveN(prim)), args) => - transl_imm({...e, exp_desc: TExpPrimN(prim, args)}) + transl_imm({...e, exp_desc: TExpPrimN(prim, List.map(snd, args))}) | (Some(_), _) => failwith("transl_imm: invalid primitive arity") | (None, _) => failwith("transl_imm: unknown primitive") } ) - | TExpApp(func, args) => + | TExpApp(func, order, args) => let tmp = gensym("app"); let (new_func, func_setup) = transl_imm(func); - let (new_args, new_setup) = List.split(List.map(transl_imm, args)); + let (new_args, new_setup) = + List.split( + List.map( + ((l, arg)) => { + let (arg, setup) = transl_imm(arg); + ((l, arg), setup); + }, + args, + ), + ); + let new_args = reorder_arguments(new_args, order); ( Imm.id(~loc, ~env, tmp), (func_setup @ List.concat(new_setup)) @@ -1300,10 +1340,21 @@ and transl_comp_expression = failwith("transl_comp_expression: NYI: multi-branch lambda") | TExpApp( {exp_desc: TExpIdent(_, _, {val_kind: TValPrim("@throw")})} as func, + order, args, ) => let (new_func, func_setup) = transl_imm(func); - let (new_args, new_setup) = List.split(List.map(transl_imm, args)); + let (new_args, new_setup) = + List.split( + List.map( + ((l, arg)) => { + let (arg, setup) = transl_imm(arg); + ((l, arg), setup); + }, + args, + ), + ); + let new_args = reorder_arguments(new_args, order); let (ans, ans_setup) = ( Comp.app( ~loc, @@ -1319,25 +1370,42 @@ and transl_comp_expression = Comp.imm(~attributes, ~allocation_type, ~env, Imm.trap(~loc, ~env, ())), ans_setup @ [BSeq(ans)], ); - | TExpApp({exp_desc: TExpIdent(_, _, {val_kind: TValPrim(prim)})}, args) => + | TExpApp( + {exp_desc: TExpIdent(_, _, {val_kind: TValPrim(prim)})}, + _, + args, + ) => Translprim.( switch (PrimMap.find_opt(prim_map, prim), args) { | (Some(Primitive0(prim)), []) => transl_comp_expression({...e, exp_desc: TExpPrim0(prim)}) - | (Some(Primitive1(prim)), [arg]) => + | (Some(Primitive1(prim)), [(_, arg)]) => transl_comp_expression({...e, exp_desc: TExpPrim1(prim, arg)}) - | (Some(Primitive2(prim)), [arg1, arg2]) => + | (Some(Primitive2(prim)), [(_, arg1), (_, arg2)]) => transl_comp_expression({...e, exp_desc: TExpPrim2(prim, arg1, arg2)}) | (Some(PrimitiveN(prim)), args) => - transl_comp_expression({...e, exp_desc: TExpPrimN(prim, args)}) + transl_comp_expression({ + ...e, + exp_desc: TExpPrimN(prim, List.map(snd, args)), + }) | (Some(_), _) => failwith("transl_comp_expression: invalid primitive arity") | (None, _) => failwith("transl_comp_expression: unknown primitive") } ) - | TExpApp(func, args) => + | TExpApp(func, order, args) => let (new_func, func_setup) = transl_imm(func); - let (new_args, new_setup) = List.split(List.map(transl_imm, args)); + let (new_args, new_setup) = + List.split( + List.map( + ((l, arg)) => { + let (arg, setup) = transl_imm(arg); + ((l, arg), setup); + }, + args, + ), + ); + let new_args = reorder_arguments(new_args, order); ( Comp.app( ~loc, diff --git a/compiler/src/parsing/ast_helper.re b/compiler/src/parsing/ast_helper.re index f01c7ace99..c3ee650a88 100644 --- a/compiler/src/parsing/ast_helper.re +++ b/compiler/src/parsing/ast_helper.re @@ -334,38 +334,46 @@ module Exp = { // and if you choose to shift then 1 / foo would always be a syntax error // because the parser would expect a number). 
It's easier to just parse it // as division and have this action decide that it's actually a rational. - let binop = (~loc=?, ~attributes=?, a, b) => { + let binop = (~loc=?, ~attributes=?, f, a, b) => { // Locations of nested binops are difficult to compute in the parser so we // just set the location manually here let loc = Location.( Option.map( loc => - switch (b) { - | [{pexp_loc: {loc_start}}, {pexp_loc: {loc_end}}] => { + switch (a, b) { + | ({pexp_loc: {loc_start}}, {pexp_loc: {loc_end}}) => { ...loc, loc_start, loc_end, } - | _ => failwith("Impossible: not a binop") }, loc, ) ); - switch (a, b) { + switch (f, a, b) { | ( {pexp_desc: PExpId({txt: IdentName({txt: "/"})})}, - [ - {pexp_desc: PExpConstant(PConstNumber(PConstNumberInt(x)))}, - {pexp_desc: PExpConstant(PConstNumber(PConstNumberInt(y)))}, - ], + {pexp_desc: PExpConstant(PConstNumber(PConstNumberInt(x)))}, + {pexp_desc: PExpConstant(PConstNumber(PConstNumberInt(y)))}, ) => constant( ~loc?, ~attributes?, PConstNumber(PConstNumberRational(x, y)), ) - | _ => mk(~loc?, ~attributes?, PExpApp(a, b)) + | _ => + mk( + ~loc?, + ~attributes?, + PExpApp( + f, + [ + {paa_label: Unlabeled, paa_expr: a, paa_loc: a.pexp_loc}, + {paa_label: Unlabeled, paa_expr: b, paa_loc: b.pexp_loc}, + ], + ), + ) }; }; let block = (~loc=?, ~attributes=?, a) => @@ -473,6 +481,37 @@ module Imp = { }; }; +module La = { + let mk = (~loc=?, label, pattern, annotation, default) => { + open Asttypes; + let pla_loc = Option.value(~default=Location.dummy_loc, loc); + let pla_label = + switch (label, default) { + | (Unlabeled, Some(_)) => + raise( + SyntaxError(pla_loc, "Optional, default arguments must be named."), + ) + | (Labeled(name) | Optional(name), Some(_)) => Optional(name) + | (_, None) => label + }; + let pla_default = default; + let pattern = + switch (label, pattern) { + | (_, Some(pattern)) => pattern + | (Labeled(name) | Optional(name), None) => + Pat.var(~loc=name.loc, name) + | (Unlabeled, None) => Pat.any(~loc=pla_loc, ()) + }; + let pla_pattern = + switch (annotation) { + | Some(annotation) => + Pat.constraint_(~loc=pattern.ppat_loc, pattern, annotation) + | None => pattern + }; + {pla_label, pla_default, pla_pattern, pla_loc}; + }; +}; + module Ex = { let mk = (~loc=?, exports) => { let loc = Option.value(~default=Location.dummy_loc, loc); diff --git a/compiler/src/parsing/ast_helper.rei b/compiler/src/parsing/ast_helper.rei index 3651b94fcb..bf2a437823 100644 --- a/compiler/src/parsing/ast_helper.rei +++ b/compiler/src/parsing/ast_helper.rei @@ -53,7 +53,8 @@ module Typ: { let mk: (~loc: loc=?, parsed_type_desc) => parsed_type; let any: (~loc: loc=?, unit) => parsed_type; let var: (~loc: loc=?, string) => parsed_type; - let arrow: (~loc: loc=?, list(parsed_type), parsed_type) => parsed_type; + let arrow: + (~loc: loc=?, list(parsed_type_argument), parsed_type) => parsed_type; let tuple: (~loc: loc=?, list(parsed_type)) => parsed_type; let constr: (~loc: loc=?, id, list(parsed_type)) => parsed_type; let poly: (~loc: loc=?, list(str), parsed_type) => parsed_type; @@ -216,10 +217,20 @@ module Exp: { (~loc: loc=?, ~attributes: attributes=?, expression, expression) => expression; let lambda: - (~loc: loc=?, ~attributes: attributes=?, list(pattern), expression) => + ( + ~loc: loc=?, + ~attributes: attributes=?, + list(lambda_argument), + expression + ) => expression; let apply: - (~loc: loc=?, ~attributes: attributes=?, expression, list(expression)) => + ( + ~loc: loc=?, + ~attributes: attributes=?, + expression, + list(application_argument) + ) 
=> expression; let construct: (~loc: loc, ~attributes: attributes=?, id, constructor_expression) => @@ -229,7 +240,13 @@ module Exp: { let record_construct: (~loc: loc, ~attributes: attributes=?, id, list(recorditem)) => expression; let binop: - (~loc: loc=?, ~attributes: attributes=?, expression, list(expression)) => + ( + ~loc: loc=?, + ~attributes: attributes=?, + expression, + expression, + expression + ) => expression; let block: (~loc: loc=?, ~attributes: attributes=?, list(expression)) => expression; @@ -305,6 +322,18 @@ module Imp: { let mk: (~loc: loc=?, list(import_value), str) => import_declaration; }; +module La: { + let mk: + ( + ~loc: loc=?, + Asttypes.argument_label, + option(pattern), + option(parsed_type), + option(expression) + ) => + lambda_argument; +}; + module Ex: { let mk: (~loc: loc=?, list((str, option(str)))) => list(export_declaration); diff --git a/compiler/src/parsing/ast_iterator.re b/compiler/src/parsing/ast_iterator.re index 0b3131fd7c..00c9a43ec4 100644 --- a/compiler/src/parsing/ast_iterator.re +++ b/compiler/src/parsing/ast_iterator.re @@ -132,11 +132,24 @@ module E = { sub.expr(sub, e); sub.typ(sub, t); | PExpLambda(pl, e) => - iter_patterns(sub, pl); + List.iter( + arg => { + sub.pat(sub, arg.pla_pattern); + Option.iter(sub.expr(sub), arg.pla_default); + sub.location(sub, arg.pla_loc); + }, + pl, + ); sub.expr(sub, e); | PExpApp(e, el) => sub.expr(sub, e); - iter_expressions(sub, el); + List.iter( + arg => { + sub.expr(sub, arg.paa_expr); + sub.location(sub, arg.paa_loc); + }, + el, + ); | PExpConstruct(c, e) => iter_ident(sub, c); switch (e) { @@ -247,7 +260,13 @@ module T = { | PTyAny => () | PTyVar(v) => () | PTyArrow(args, ret) => - List.iter(sub.typ(sub), args); + List.iter( + arg => { + sub.typ(sub, arg.ptyp_arg_type); + sub.location(sub, arg.ptyp_arg_loc); + }, + args, + ); sub.typ(sub, ret); | PTyTuple(ts) => List.iter(sub.typ(sub), ts) | PTyConstr(name, ts) => diff --git a/compiler/src/parsing/ast_mapper.re b/compiler/src/parsing/ast_mapper.re index a1523df7fc..454472aa2a 100644 --- a/compiler/src/parsing/ast_mapper.re +++ b/compiler/src/parsing/ast_mapper.re @@ -132,7 +132,16 @@ module E = { lambda( ~loc, ~attributes, - List.map(sub.pat(sub), pl), + List.map( + arg => + { + pla_label: arg.pla_label, + pla_pattern: sub.pat(sub, arg.pla_pattern), + pla_default: Option.map(sub.expr(sub), arg.pla_default), + pla_loc: sub.location(sub, arg.pla_loc), + }, + pl, + ), sub.expr(sub, e), ) | PExpApp(e, el) => @@ -140,7 +149,15 @@ module E = { ~loc, ~attributes, sub.expr(sub, e), - List.map(sub.expr(sub), el), + List.map( + arg => + { + paa_label: arg.paa_label, + paa_expr: sub.expr(sub, arg.paa_expr), + paa_loc: sub.location(sub, arg.paa_loc), + }, + el, + ), ) | PExpConstruct(id, e) => construct( @@ -289,7 +306,19 @@ module T = { | PTyAny => any(~loc, ()) | PTyVar(v) => var(~loc, v) | PTyArrow(args, ret) => - arrow(~loc, List.map(sub.typ(sub), args), sub.typ(sub, ret)) + arrow( + ~loc, + List.map( + arg => + { + ptyp_arg_label: arg.ptyp_arg_label, + ptyp_arg_type: sub.typ(sub, arg.ptyp_arg_type), + ptyp_arg_loc: sub.location(sub, arg.ptyp_arg_loc), + }, + args, + ), + sub.typ(sub, ret), + ) | PTyTuple(ts) => tuple(~loc, List.map(sub.typ(sub), ts)) | PTyConstr(name, ts) => constr(~loc, map_identifier(sub, name), List.map(sub.typ(sub), ts)) diff --git a/compiler/src/parsing/asttypes.re b/compiler/src/parsing/asttypes.re index ce5dd3d485..8d5cbc6e8e 100644 --- a/compiler/src/parsing/asttypes.re +++ b/compiler/src/parsing/asttypes.re @@ -101,3 +101,9 @@ 
let mknoloc = Location.mknoloc; /** Addtional expression information that may affect compilation. */ [@deriving (sexp, yojson)] type attributes = list((loc(string), list(loc(string)))); + +[@deriving (sexp, yojson)] +type argument_label = + | Unlabeled + | Labeled(loc(string)) + | Optional(loc(string)); diff --git a/compiler/src/parsing/lexer.re b/compiler/src/parsing/lexer.re index 2cc48dab87..00f892889a 100644 --- a/compiler/src/parsing/lexer.re +++ b/compiler/src/parsing/lexer.re @@ -259,6 +259,7 @@ let rec token = lexbuf => { | "::" => positioned(COLONCOLON) | ":=" => positioned(GETS) | ":" => positioned(COLON) + | "?" => positioned(QUESTION) | "=" => positioned(EQUAL) | "," => positioned(COMMA) | ";" => positioned(SEMI) diff --git a/compiler/src/parsing/parser.messages b/compiler/src/parsing/parser.messages index 9075b2f37d..cf08a609ac 100644 --- a/compiler/src/parsing/parser.messages +++ b/compiler/src/parsing/parser.messages @@ -2337,81 +2337,6 @@ program: FUN LPAREN RPAREN THICKARROW EOL UNDERSCORE Expected a function body—a block surrounded by `{`/`}` or a single expression. -program: FUN LPAREN WASMI64 COMMA EOL WASMI64 WHILE -## -## Ends in an error in state: 385. -## -## lseparated_nonempty_list_inner(comma,pattern) -> lseparated_nonempty_list_inner(comma,pattern) comma pattern . [ RPAREN RBRACK EOL COMMA ] -## pattern -> pattern . COLON typ [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . COLON eols typ [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . PIPE pattern [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . PIPE eols pattern [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . AS id_str [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . AS eols id_str [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## -## The known suffix of the stack is as follows: -## lseparated_nonempty_list_inner(comma,pattern) comma pattern -## -program: FUN LPAREN WASMI64 WHILE -## -## Ends in an error in state: 363. -## -## lseparated_nonempty_list_inner(comma,pattern) -> pattern . [ RPAREN RBRACK EOL COMMA ] -## pattern -> pattern . COLON typ [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . COLON eols typ [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . PIPE pattern [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . PIPE eols pattern [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . AS id_str [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## pattern -> pattern . AS eols id_str [ RPAREN RBRACK PIPE EOL COMMA COLON AS ] -## -## The known suffix of the stack is as follows: -## pattern -## - -Expected a type annotation, a comma followed by more patterns, `)`, or `]`. - -program: FUN LPAREN WASMI64 COMMA EOL WHILE -## -## Ends in an error in state: 396. -## -## lseparated_nonempty_list_inner(comma,pattern) -> lseparated_nonempty_list_inner(comma,pattern) comma . pattern [ RPAREN RBRACK EOL COMMA ] -## option(comma) -> comma . [ RPAREN RBRACK EOL ] -## -## The known suffix of the stack is as follows: -## lseparated_nonempty_list_inner(comma,pattern) comma -## -## WARNING: This example involves spurious reductions. -## This implies that, although the LR(1) items shown above provide an -## accurate view of the past (what has been recognized so far), they -## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 3, spurious reduction of production nonempty_list(eol) -> EOL -## In state 5, spurious reduction of production eols -> nonempty_list(eol) -## In state 96, spurious reduction of production comma -> COMMA eols -## - -Expected another pattern, `)`, or `]`. - -program: FUN LPAREN WASMI64 RBRACK -## -## Ends in an error in state: 749. -## -## lam_expr -> FUN lparen option(patterns) . rparen thickarrow expr [ THICKARROW STAR SLASH SEMI RPAREN RCARET RBRACK RBRACE PIPE LCARET INFIX_90 INFIX_80 INFIX_70 INFIX_60 INFIX_50 INFIX_40 INFIX_30 INFIX_120 INFIX_110 INFIX_100 EOL EOF ELSE DASH COMMA COLON ] -## -## The known suffix of the stack is as follows: -## FUN lparen option(patterns) -## -## WARNING: This example involves spurious reductions. -## This implies that, although the LR(1) items shown above provide an -## accurate view of the past (what has been recognized so far), they -## may provide an INCOMPLETE view of the future (what was expected next). -## In state 363, spurious reduction of production lseparated_nonempty_list_inner(comma,pattern) -> pattern -## In state 394, spurious reduction of production option(comma) -> -## In state 395, spurious reduction of production patterns -> lseparated_nonempty_list_inner(comma,pattern) option(comma) -## In state 748, spurious reduction of production option(patterns) -> patterns -## - -Expected `)` to complete the function arguments. - program: UIDENT LPAREN COMMA WHILE ## ## Ends in an error in state: 704. diff --git a/compiler/src/parsing/parser.mly b/compiler/src/parsing/parser.mly index 0b5ed3a66c..5aca0b2271 100644 --- a/compiler/src/parsing/parser.mly +++ b/compiler/src/parsing/parser.mly @@ -22,7 +22,7 @@ module Grain_parsing = struct end %token THICKARROW ARROW %token EQUAL GETS %token UNDERSCORE -%token COLON DOT ELLIPSIS +%token COLON QUESTION DOT ELLIPSIS %token ASSERT FAIL EXCEPTION THROW @@ -63,7 +63,7 @@ module Grain_parsing = struct end %left INFIX_110 DASH %left INFIX_120 STAR SLASH -%right SEMI EOL COMMA DOT COLON LPAREN +%right SEMI EOL COMMA DOT COLON LPAREN EQUAL %nonassoc _if %nonassoc ELSE @@ -232,8 +232,8 @@ annotated_expr: | non_binop_expr colon typ { Exp.constraint_ ~loc:(to_loc $loc) $1 $3 } binop_expr: - | non_stmt_expr infix_op opt_eols non_stmt_expr { Exp.binop ~loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) [$1; $4] } - | non_stmt_expr rcaret_rcaret_op opt_eols non_stmt_expr %prec INFIX_100 { Exp.binop ~loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) [$1; $4] } + | non_stmt_expr infix_op opt_eols non_stmt_expr { Exp.binop ~loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) $1 $4 } + | non_stmt_expr rcaret_rcaret_op opt_eols non_stmt_expr %prec INFIX_100 { Exp.binop ~loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) $1 $4 } ellipsis_prefix(X): | ELLIPSIS X {$2} @@ -287,17 +287,25 @@ data_typ: | type_id %prec _below_infix { Typ.constr ~loc:(to_loc $loc) $1 [] } typ: - | data_typ arrow typ { Typ.arrow ~loc:(to_loc $loc) [$1] $3 } - | FUN LIDENT arrow typ { Typ.arrow ~loc:(to_loc $loc) [(Typ.var $2)] $4 } - | FUN lparen typs? rparen arrow typ { Typ.arrow ~loc:(to_loc $loc) (Option.value ~default:[] $3) $6 } + | data_typ arrow typ { Typ.arrow ~loc:(to_loc $loc) [{ptyp_arg_label=Unlabeled; ptyp_arg_type=$1; ptyp_arg_loc=(to_loc $loc($1))}] $3 } + | FUN LIDENT arrow typ { Typ.arrow ~loc:(to_loc $loc) [{ptyp_arg_label=Unlabeled; ptyp_arg_type=Typ.var $2; ptyp_arg_loc=(to_loc $loc($2))}] $4 } + | FUN lparen arg_typs? 
rparen arrow typ { Typ.arrow ~loc:(to_loc $loc) (Option.value ~default:[] $3) $6 } | lparen tuple_typs rparen { Typ.tuple ~loc:(to_loc $loc) $2 } | lparen typ rparen { $2 } | LIDENT { Typ.var ~loc:(to_loc $loc) $1 } | data_typ { $1 } +arg_typ: + | LIDENT colon typ { {ptyp_arg_label=Labeled (mkstr $loc($1) $1); ptyp_arg_type=$3; ptyp_arg_loc=(to_loc $loc)} } + | QUESTION LIDENT colon typ { {ptyp_arg_label=Optional (mkstr $loc($2) $2); ptyp_arg_type=$4; ptyp_arg_loc=(to_loc $loc)} } + | typ { {ptyp_arg_label=Unlabeled; ptyp_arg_type=$1; ptyp_arg_loc=(to_loc $loc)} } + typs: | lseparated_nonempty_list(comma, typ) comma? { $1 } +arg_typs: + | lseparated_nonempty_list(comma, arg_typ) comma? { $1 } + %inline tuple_typ_ending: | ioption(eols) lseparated_nonempty_list(comma, typ) ioption(comma) { $2 } @@ -383,13 +391,17 @@ data_declaration: | RECORD UIDENT id_vec? data_labels { Dat.record ~loc:(to_loc $loc) (mkstr $loc($2) $2) (Option.value ~default:[] $3) $4 } unop_expr: - | prefix_op non_assign_expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) $1]) [$2] } + | prefix_op non_assign_expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) $1]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } paren_expr: | lparen expr rparen { $2 } +app_arg: + | expr { {paa_label=Unlabeled; paa_expr=$1; paa_loc=to_loc $loc} } + | id_str EQUAL expr { {paa_label=(Labeled $1); paa_expr=$3; paa_loc=to_loc $loc} } + app_expr: - | left_accessor_expr lparen lseparated_list(comma, expr) comma? rparen { Exp.apply ~loc:(to_loc $loc) $1 $3 } + | left_accessor_expr lparen lseparated_list(comma, app_arg) comma? rparen { Exp.apply ~loc:(to_loc $loc) $1 $3 } rcaret_rcaret_op: | lnonempty_list(RCARET) RCARET { (String.init (1 + List.length $1) (fun _ -> '>')) } @@ -436,7 +448,7 @@ special_op: | lseparated_nonempty_list(dot, type_id_str) { $1 } non_modid: - | id_str { [$1] } + | id_str %prec EQUAL { [$1] } id: | modid dot non_modid { mkid (List.append $1 $3) (to_loc $loc) } @@ -468,9 +480,28 @@ braced_expr: block: | lbrace block_body rbrace { Exp.block ~loc:(to_loc $loc) $2 } +arg_destructure: + | ioption(pattern) %prec COLON { $1 } + +arg_annotation: + | colon typ { $2 } + +arg_label: + | LIDENT { Labeled (mkstr $loc $1) } + | UNDERSCORE { Unlabeled } + +arg_default: + | EQUAL non_stmt_expr { $2 } + +lam_arg: + | arg_label arg_destructure arg_annotation? arg_default? { La.mk ~loc:(to_loc $loc) $1 $2 $3 $4 } + +lam_args: + | lseparated_nonempty_list(comma, lam_arg) comma? { $1 } + lam_expr: - | FUN lparen patterns? rparen thickarrow expr { Exp.lambda ~loc:(to_loc $loc) (Option.value ~default:[] $3) $6 } - | FUN LIDENT thickarrow expr { Exp.lambda ~loc:(to_loc $loc) [Pat.var ~loc:(to_loc $loc($2)) (mkstr $loc($2) $2)] $4 } + | FUN lparen lam_args? rparen thickarrow expr { Exp.lambda ~loc:(to_loc $loc) (Option.value ~default:[] $3) $6 } + | FUN LIDENT thickarrow expr { Exp.lambda ~loc:(to_loc $loc) [La.mk ~loc:(to_loc $loc($2)) (Labeled (mkstr $loc($2) $2)) None None None] $4 } attribute_argument: | STRING { mkstr $loc $1 } @@ -531,9 +562,9 @@ array_expr: | lbrackrcaret opt_eols lseparated_nonempty_list(comma, expr) comma? 
rbrack { Exp.array ~loc:(to_loc $loc) $3 } stmt_expr: - | THROW expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "throw"]) [$2] } - | ASSERT expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "assert"]) [$2] } - | FAIL expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "fail"]) [$2] } + | THROW expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "throw"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } + | ASSERT expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "assert"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } + | FAIL expr { Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "fail"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } // allow DASH to cause a shift instead of the usual reduction of the left side for subtraction | RETURN ioption(expr) %prec _below_infix { Exp.return ~loc:(to_loc $loc) $2 } | CONTINUE { Exp.continue ~loc:(to_loc $loc) () } @@ -545,7 +576,7 @@ assign_binop_op: assign_expr: | left_accessor_expr GETS opt_eols expr { Exp.box_assign ~loc:(to_loc $loc) $1 $4 } | id_expr equal expr { Exp.assign ~loc:(to_loc $loc) $1 $3 } - | id_expr assign_binop_op opt_eols expr { Exp.assign ~loc:(to_loc $loc) $1 (Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($2) [$2]) [$1; $4]) } + | id_expr assign_binop_op opt_eols expr { Exp.assign ~loc:(to_loc $loc) $1 (Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($2) [$2]) [{paa_label=Unlabeled; paa_expr=$1; paa_loc=(to_loc $loc($1))}; {paa_label=Unlabeled; paa_expr=$4; paa_loc=(to_loc $loc($4))}]) } | record_set { $1 } | array_set { $1 } @@ -589,7 +620,7 @@ record_get: record_set: | left_accessor_expr dot simple_id equal expr { Exp.record_set ~loc:(to_loc $loc) $1 $3 $5 } - | left_accessor_expr dot simple_id assign_binop_op opt_eols expr { Exp.record_set ~loc:(to_loc $loc) $1 $3 (Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($4) [$4]) [Exp.record_get ~loc:(to_loc $loc) $1 $3; $6]) } + | left_accessor_expr dot simple_id assign_binop_op opt_eols expr { Exp.record_set ~loc:(to_loc $loc) $1 $3 (Exp.apply ~loc:(to_loc $loc) (mkid_expr $loc($4) [$4]) [{paa_label=Unlabeled; paa_expr=Exp.record_get ~loc:(to_loc $loc) $1 $3; paa_loc=(to_loc $loc($6))}; {paa_label=Unlabeled; paa_expr=$6; paa_loc=(to_loc $loc($6))}]) } %inline record_field_value: | colon expr {$2} diff --git a/compiler/src/parsing/parsetree.re b/compiler/src/parsing/parsetree.re index 4d7d706574..a89753e7e6 100644 --- a/compiler/src/parsing/parsetree.re +++ b/compiler/src/parsing/parsetree.re @@ -24,7 +24,7 @@ type mut_flag = Asttypes.mut_flag = | Mutable | Immutable; type parsed_type_desc = | PTyAny | PTyVar(string) - | PTyArrow(list(parsed_type), parsed_type) + | PTyArrow(list(parsed_type_argument), parsed_type) | PTyTuple(list(parsed_type)) | PTyConstr(loc(Identifier.t), list(parsed_type)) | PTyPoly(list(loc(string)), parsed_type) @@ -33,6 +33,12 @@ and parsed_type = { ptyp_desc: parsed_type_desc, [@sexp_drop_if sexp_locs_disabled] ptyp_loc: Location.t, +} + +and parsed_type_argument = { + ptyp_arg_label: argument_label, + ptyp_arg_type: parsed_type, + ptyp_arg_loc: Location.t, }; /** Type for fields within a record */ @@ -463,8 +469,8 @@ and expression_desc = | PExpBreak | PExpReturn(option(expression)) | PExpConstraint(expression, parsed_type) - | PExpLambda(list(pattern), expression) - | PExpApp(expression, list(expression)) + | PExpLambda(list(lambda_argument), expression) + | PExpApp(expression, 
list(application_argument)) | PExpConstruct(loc(Identifier.t), constructor_expression) | PExpBlock(list(expression)) | PExpBoxAssign(expression, expression) @@ -477,6 +483,21 @@ and constructor_expression = | PExpConstrTuple(list(expression)) | PExpConstrRecord(list((loc(Identifier.t), expression))) +[@deriving (sexp, yojson)] +and lambda_argument = { + pla_label: argument_label, + pla_pattern: pattern, + pla_default: option(expression), + pla_loc: Location.t, +} + +[@deriving (sexp, yojson)] +and application_argument = { + paa_label: argument_label, + paa_expr: expression, + paa_loc: Location.t, +} + /** let-binding form */ [@deriving (sexp, yojson)] diff --git a/compiler/src/typed/btype.re b/compiler/src/typed/btype.re index 876ccc3ae2..b2c4310c06 100644 --- a/compiler/src/typed/btype.re +++ b/compiler/src/typed/btype.re @@ -130,7 +130,7 @@ let iter_type_expr = (f, ty) => switch (ty.desc) { | TTyVar(_) => () | TTyArrow(args, ret, _) => - List.iter(f, args); + List.iter(((_, arg)) => f(arg), args); f(ret); | TTyTuple(ts) => List.iter(f, ts) | TTyRecord(ts) => List.iter(((_, t)) => f(t), ts) @@ -276,7 +276,11 @@ let rec copy_type_desc = (~keep_names=false, f) => TTyVar(None); } | TTyArrow(tyl, ret, c) => - TTyArrow(List.map(f, tyl), f(ret), copy_commu(c)) + TTyArrow( + List.map(((l, arg)) => (l, f(arg)), tyl), + f(ret), + copy_commu(c), + ) | TTyTuple(l) => TTyTuple(List.map(f, l)) | TTyRecord(l) => TTyRecord(List.map(((name, arg)) => (name, f(arg)), l)) @@ -409,17 +413,39 @@ let is_optional = | Optional(_) => true | _ => false; +let label_equal = (l1, l2) => { + switch (l1, l2) { + | (Unlabeled, Unlabeled) => true + | (Labeled({txt: name1}), Labeled({txt: name2})) + | (Optional({txt: name1}), Optional({txt: name2})) when name1 == name2 => + true + | _ => false + }; +}; + +let same_label_name = (l1, l2) => + switch (l1, l2) { + | (Unlabeled, Unlabeled) => true + | ( + Labeled({txt: name1}) | Optional({txt: name1}), + Labeled({txt: name2}) | Optional({txt: name2}), + ) + when name1 == name2 => + true + | _ => false + }; + let label_name = fun - | Nolabel => "" - | Labelled(s) - | Optional(s) => s; + | Unlabeled => "" + | Labeled(s) + | Optional(s) => s.txt; -let prefixed_label_name = +let qualified_label_name = fun - | Nolabel => "" - | Labelled(s) => "~" ++ s - | Optional(s) => "?" ++ s; + | Unlabeled => "" + | Labeled(s) => s.txt + | Optional(s) => "?" 
++ s.txt; let rec extract_label_aux = (hd, l) => fun diff --git a/compiler/src/typed/btype.rei b/compiler/src/typed/btype.rei index 823c89dd39..2255adfeb1 100644 --- a/compiler/src/typed/btype.rei +++ b/compiler/src/typed/btype.rei @@ -110,6 +110,21 @@ let memorize_abbrev: let forget_abbrev: (ref(abbrev_memo), Path.t) => unit; /* Remove an abbreviation from the cache */ +/**** Utilities for labels ****/ +let is_optional: argument_label => bool; +let label_equal: (argument_label, argument_label) => bool; +let same_label_name: (argument_label, argument_label) => bool; +let label_name: argument_label => label; +let qualified_label_name: argument_label => label; +let extract_label: + (label, list((argument_label, 'a))) => + ( + argument_label, + 'a, + list((argument_label, 'a)), + list((argument_label, 'a)), + ); + /**** Utilities for backtracking ****/ type snapshot; diff --git a/compiler/src/typed/ctype.re b/compiler/src/typed/ctype.re index 17950a6a6e..8155062707 100644 --- a/compiler/src/typed/ctype.re +++ b/compiler/src/typed/ctype.re @@ -415,7 +415,7 @@ let rec generalize_spine = ty => { switch (ty.desc) { | TTyArrow(tyl, ty2, _) => set_level(ty, generic_level); - List.iter(generalize_spine, tyl); + List.iter(((_, ty)) => generalize_spine(ty), tyl); generalize_spine(ty2); | TTyPoly(ty', _) => set_level(ty, generic_level); @@ -555,7 +555,7 @@ let rec generalize_expansive = (env, var_level, visited, ty) => { else generalize_expansive env var_level visited t) variance tyl*/ | TTyArrow(tl, t2, _) => - List.iter(generalize_structure(var_level), tl); + List.iter(((_, ty)) => generalize_structure(var_level, ty), tl); generalize_expansive(env, var_level, visited, t2); | _ => iter_type_expr(generalize_expansive(env, var_level, visited), ty) }; @@ -1981,8 +1981,21 @@ let rec mcomp = (type_pairs, env, t1, t2) => switch (t1'.desc, t2'.desc) { | (TTyVar(_), _) | (_, TTyVar(_)) => () - | (TTyArrow(t1, u1, _), TTyArrow(t2, u2, _)) => - mcomp_list(type_pairs, env, t1, t2); + | (TTyArrow(a1, u1, _), TTyArrow(a2, u2, _)) + when + List.length(a1) == List.length(a2) + && List.for_all2( + ((l1, _), (l2, _)) => + is_optional(l1) == is_optional(l2), + a1, + a2, + ) => + mcomp_list( + type_pairs, + env, + List.map(snd, a1), + List.map(snd, a2), + ); mcomp(type_pairs, env, u1, u2); | (TTyTuple(tl1), TTyTuple(tl2)) => mcomp_list(type_pairs, env, tl1, tl2) @@ -2447,8 +2460,16 @@ and unify3 = (env, t1, t1', t2, t2') => { try( { switch (d1, d2) { - | (TTyArrow(t1, u1, c1), TTyArrow(t2, u2, c2)) => - unify_list(env, t1, t2); + | (TTyArrow(a1, u1, c1), TTyArrow(a2, u2, c2)) + when + List.length(a1) == List.length(a2) + && List.for_all2( + ((l1, _), (l2, _)) => + is_optional(l1) == is_optional(l2), + a1, + a2, + ) => + unify_list(env, List.map(snd, a1), List.map(snd, a2)); unify(env, u1, u2); switch (commu_repr(c1), commu_repr(c2)) { | (TComLink(r), c2) => set_commu(r, c2) @@ -2606,29 +2627,53 @@ let unify = (env, ty1, ty2) => unify_pairs(ref(env), ty1, ty2, []); /**** Special cases of unification ****/ let expand_head_trace = (env, t) => { - let t = expand_head_unif(env, t); - t; + expand_head_unif(env, t); }; -let filter_arrow = (arity, env, t) => { - let t = expand_head_trace(env, t); +type filter_arrow_failure = + | Unification_error(list((type_expr, type_expr))) + | Label_mismatch({ + got: argument_label, + expected: argument_label, + expected_type: type_expr, + }) + | Not_a_function; + +exception Filter_arrow_failed(filter_arrow_failure); + +let filter_arrow = (env, t, labels) => { + let t = + 
try(expand_head_trace(env, t)) { + | Unify(types) => raise(Filter_arrow_failed(Unification_error(types))) + }; switch (t.desc) { | TTyVar(_) => - /*Printf.eprintf "filter_arrow: TTyVar\n";*/ let lv = t.level; - let vars = ref([]); - for (i in 1 to arity) { - vars := [newvar2(lv), ...vars^]; - }; + let vars = List.map(_ => newvar2(lv), labels); + let args = List.combine(labels, vars); let t2 = newvar2(lv); - let t' = newty2(lv, TTyArrow(vars^, t2, TComOk)); + let t' = newty2(lv, TTyArrow(args, t2, TComOk)); link_type(t, t'); - (vars^, t2); - | TTyArrow(t1, t2, _) => - /*Printf.eprintf "filter_arrow: TTyArrow\n";*/ - (t1, t2) - | _ => raise(Unify([])) + (vars, t2); + | TTyArrow(a1, t2, _) => + let types = + List.map2( + (l, (l', ty)) => + if (is_optional(l) == is_optional(l')) { + ty; + } else { + raise( + Filter_arrow_failed( + Label_mismatch({got: l, expected: l', expected_type: ty}), + ), + ); + }, + labels, + a1, + ); + (types, t2); + | _ => raise(Filter_arrow_failed(Not_a_function)) }; }; @@ -2708,8 +2753,14 @@ let rec moregen = (inst_nongen, type_pairs, env, t1, t2) => | (TTyVar(_), _) when may_instantiate(inst_nongen, t1') => moregen_occur(env, t1'.level, t2); link_type(t1', t2); - | (TTyArrow(t1, u1, _), TTyArrow(t2, u2, _)) => - moregen_list(inst_nongen, type_pairs, env, t1, t2); + | (TTyArrow(a1, u1, _), TTyArrow(a2, u2, _)) => + moregen_list( + inst_nongen, + type_pairs, + env, + List.map(snd, a1), + List.map(snd, a2), + ); moregen(inst_nongen, type_pairs, env, u1, u2); | (TTyTuple(tl1), TTyTuple(tl2)) => moregen_list(inst_nongen, type_pairs, env, tl1, tl2) @@ -2928,8 +2979,15 @@ let rec eqtype = (rename, type_pairs, subst, env, t1, t2) => }; subst := [(t1', t2'), ...subst^]; } - | (TTyArrow(t1, u1, _), TTyArrow(t2, u2, _)) => - eqtype_list(rename, type_pairs, subst, env, t1, t2); + | (TTyArrow(a1, u1, _), TTyArrow(a2, u2, _)) => + eqtype_list( + rename, + type_pairs, + subst, + env, + List.map(snd, a1), + List.map(snd, a2), + ); eqtype(rename, type_pairs, subst, env, u1, u2); | (TTyTuple(tl1), TTyTuple(tl2)) => eqtype_list(rename, type_pairs, subst, env, tl1, tl2) diff --git a/compiler/src/typed/ctype.rei b/compiler/src/typed/ctype.rei index 9be76ffdf3..d9b6ec09ab 100644 --- a/compiler/src/typed/ctype.rei +++ b/compiler/src/typed/ctype.rei @@ -159,7 +159,20 @@ let unify_var: (Env.t, type_expr, type_expr) => unit; is a variable. */ let with_passive_variants: ('a => 'b, 'a) => 'b; /* Call [f] in passive_variants mode, for exhaustiveness check. */ -let filter_arrow: (int, Env.t, type_expr) => (list(type_expr), type_expr); + +type filter_arrow_failure = + | Unification_error(list((type_expr, type_expr))) + | Label_mismatch({ + got: argument_label, + expected: argument_label, + expected_type: type_expr, + }) + | Not_a_function; + +exception Filter_arrow_failed(filter_arrow_failure); + +let filter_arrow: + (Env.t, type_expr, list(argument_label)) => (list(type_expr), type_expr); /* A special case of unification (with l:'a -> 'b). 
*/ let occur_in: (Env.t, type_expr, type_expr) => bool; let deep_occur: (type_expr, type_expr) => bool; diff --git a/compiler/src/typed/env.re b/compiler/src/typed/env.re index e6b04b94fe..c8629315c2 100644 --- a/compiler/src/typed/env.re +++ b/compiler/src/typed/env.re @@ -1661,7 +1661,13 @@ and components_of_module_maker = ((env, sub, path, mty)) => switch (desc.cstr_args) { | [] => desc.cstr_res | args => - Btype.newgenty(TTyArrow(args, desc.cstr_res, TComOk)) + Btype.newgenty( + TTyArrow( + List.map(arg => (Unlabeled, arg), args), + desc.cstr_res, + TComOk, + ), + ) }; let val_type = switch (desc.cstr_existentials) { @@ -1728,7 +1734,14 @@ and components_of_module_maker = ((env, sub, path, mty)) => let val_type = switch (desc.cstr_args) { | [] => desc.cstr_res - | args => Btype.newgenty(TTyArrow(args, desc.cstr_res, TComOk)) + | args => + Btype.newgenty( + TTyArrow( + List.map(arg => (Unlabeled, arg), args), + desc.cstr_res, + TComOk, + ), + ) }; let val_type = switch (desc.cstr_existentials) { diff --git a/compiler/src/typed/oprint.re b/compiler/src/typed/oprint.re index bd5136168a..2bf5eb40e1 100644 --- a/compiler/src/typed/oprint.re +++ b/compiler/src/typed/oprint.re @@ -353,13 +353,13 @@ let rec print_out_type = ppf => and print_out_type_1 = ppf => fun - | Otyp_arrow(ty1, ty2) => { - let args_length = List.length(ty1); + | Otyp_arrow(al, ty2) => { + let args_length = List.length(al); pp_open_box(ppf, 1); if (args_length != 1) { pp_print_char(ppf, '('); }; - fprintf(ppf, "@[<0>%a@]", print_typlist(print_out_type_2, ","), ty1); + fprintf(ppf, "@[<0>%a@]", print_argtyplist(print_out_type_2, ","), al); if (args_length != 1) { pp_print_char(ppf, ')'); }; @@ -519,6 +519,26 @@ and print_row_field = (ppf, (l, opt_amp, tyl)) => { tyl, ); } +and print_argtyplist = (print_elem, sep, ppf) => + fun + | [] => () + | [(l, ty)] => { + if (l != "") { + pp_print_string(ppf, l); + pp_print_string(ppf, ": "); + }; + print_elem(ppf, ty); + } + | [(l, ty), ...al] => { + if (l != "") { + pp_print_string(ppf, l); + pp_print_string(ppf, ": "); + }; + print_elem(ppf, ty); + pp_print_string(ppf, sep); + pp_print_space(ppf, ()); + print_argtyplist(print_elem, sep, ppf, al); + } and print_typlist = (print_elem, sep, ppf) => fun | [] => () diff --git a/compiler/src/typed/outcometree.re b/compiler/src/typed/outcometree.re index 06fb71e7f2..3e9cc6704b 100644 --- a/compiler/src/typed/outcometree.re +++ b/compiler/src/typed/outcometree.re @@ -54,7 +54,7 @@ type out_type = | Otyp_abstract | Otyp_open | Otyp_alias(out_type, string) - | Otyp_arrow(list(out_type), out_type) + | Otyp_arrow(list((string, out_type)), out_type) | Otyp_class(bool, out_ident, list(out_type)) | Otyp_constr(out_ident, list(out_type)) | Otyp_manifest(out_type, out_type) diff --git a/compiler/src/typed/printtyp.re b/compiler/src/typed/printtyp.re index 079693ff93..defa49cfec 100644 --- a/compiler/src/typed/printtyp.re +++ b/compiler/src/typed/printtyp.re @@ -152,16 +152,20 @@ let rec raw_type = (ppf, ty) => { ); }; } +and raw_argtype = (ppf, (l, ty)) => { + fprintf(ppf, "@[<1>%s: %a@]", qualified_label_name(l), raw_type, ty); +} and raw_type_list = tl => raw_list(raw_type, tl) +and raw_argtype_list = al => raw_list(raw_argtype, al) and raw_type_desc = ppf => fun | TTyVar(name) => fprintf(ppf, "TTyVar %a", print_name, name) - | TTyArrow(t1, t2, c) => + | TTyArrow(a1, t2, c) => fprintf( ppf, "@[TTyArrow(@,%a,@,%a,@,%s)@]", - raw_type_list, - t1, + raw_argtype_list, + a1, raw_type, t2, safe_commu_repr([], c), @@ -581,8 +585,8 @@ let rec 
mark_loops_rec = (visited, ty) => { let visited = [px, ...visited]; switch (ty.desc) { | TTyVar(_) => add_named_var(ty) - | TTyArrow(ty1, ty2, _) => - List.iter(mark_loops_rec(visited), ty1); + | TTyArrow(a1, ty2, _) => + List.iter(((_, t)) => mark_loops_rec(visited, t), a1); mark_loops_rec(visited, ty2); | TTyTuple(tyl) => List.iter(mark_loops_rec(visited), tyl) | TTyRecord(tyl) => @@ -659,12 +663,12 @@ let rec tree_of_typexp = (sch, ty) => { new_name; }; Otyp_var(non_gen, name_of_type(name_gen, ty)); - | TTyArrow(ty1, ty2, _) => - let pr_arrow = (ty1, ty2) => { - let t1 = tree_of_typlist(sch, ty1); - Otyp_arrow(t1, tree_of_typexp(sch, ty2)); + | TTyArrow(a1, ty2, _) => + let pr_arrow = (a1, ty2) => { + let a1 = tree_of_argtyplist(sch, a1); + Otyp_arrow(a1, tree_of_typexp(sch, ty2)); }; - pr_arrow(ty1, ty2); + pr_arrow(a1, ty2); | TTyTuple(tyl) => Otyp_tuple(tree_of_typlist(sch, tyl)) | TTyRecord(tyl) => Otyp_record( @@ -719,6 +723,23 @@ let rec tree_of_typexp = (sch, ty) => { } and tree_of_typlist = (sch, tyl) => List.map(tree_of_typexp(sch), tyl) +and tree_of_argtyplist = (sch, al) => + List.map( + ((l, ty)) => { + let ty = + switch (l) { + | Optional(_) => + switch (ty.desc) { + | TTyConstr(_, [ty], _) => ty + | _ => + failwith("Impossible: optional argument with non-option type") + } + | _ => ty + }; + (qualified_label_name(l), tree_of_typexp(sch, ty)); + }, + al, + ) and is_non_gen = (sch, ty) => sch && is_Tvar(ty) && ty.level != generic_level diff --git a/compiler/src/typed/translprim.re b/compiler/src/typed/translprim.re index d5566fa836..6122342459 100644 --- a/compiler/src/typed/translprim.re +++ b/compiler/src/typed/translprim.re @@ -1454,6 +1454,13 @@ let transl_prim = (env, desc) => { let disable_gc = [(Location.mknoloc("disableGC"), [])]; + let lambda_arg = pat => { + pla_label: Unlabeled, + pla_pattern: pat, + pla_default: None, + pla_loc: Location.dummy_loc, + }; + // `attrs` are attributes which should be applied to the `let` which gets implicitly generated. 
// // Specifically, consider: @@ -1527,7 +1534,7 @@ let transl_prim = (env, desc) => { Exp.lambda( ~loc, ~attributes=disable_gc, - [pat_a], + [lambda_arg(pat_a)], Exp.prim1(~loc, p, id_a), ), [], @@ -1536,7 +1543,7 @@ let transl_prim = (env, desc) => { // This primitive must always be inlined, so we do not generate a lambda (Exp.constant(PConstVoid), []) | Primitive1(p) => ( - Exp.lambda(~loc, [pat_a], Exp.prim1(~loc, p, id_a)), + Exp.lambda(~loc, [lambda_arg(pat_a)], Exp.prim1(~loc, p, id_a)), [], ) | Primitive2( @@ -1552,13 +1559,17 @@ let transl_prim = (env, desc) => { Exp.lambda( ~loc, ~attributes=disable_gc, - [pat_a, pat_b], + [lambda_arg(pat_a), lambda_arg(pat_b)], Exp.prim2(~loc, p, id_a, id_b), ), [], ) | Primitive2(p) => ( - Exp.lambda(~loc, [pat_a, pat_b], Exp.prim2(~loc, p, id_a, id_b)), + Exp.lambda( + ~loc, + [lambda_arg(pat_a), lambda_arg(pat_b)], + Exp.prim2(~loc, p, id_a, id_b), + ), [], ) | PrimitiveN(WasmMemorySize as p) => ( @@ -1576,7 +1587,7 @@ let transl_prim = (env, desc) => { Exp.lambda( ~loc, ~attributes=disable_gc, - [pat_a, pat_b, pat_c], + [lambda_arg(pat_a), lambda_arg(pat_b), lambda_arg(pat_c)], Exp.primn(~loc, p, [id_a, id_b, id_c]), ), [], diff --git a/compiler/src/typed/translsig.re b/compiler/src/typed/translsig.re index 0f13c03528..06f6672804 100644 --- a/compiler/src/typed/translsig.re +++ b/compiler/src/typed/translsig.re @@ -17,7 +17,7 @@ let rec collect_type_vars = typ => Tbl.add(typ.id, ref([typ]), used_type_variables^) } | TTyArrow(ty_args, ty_res, _) => - List.iter(collect_type_vars, ty_args); + List.iter(((_, arg)) => collect_type_vars(arg), ty_args); collect_type_vars(ty_res); | TTyTuple(ty_args) => List.iter(collect_type_vars, ty_args) | TTyRecord(ty_args) => @@ -46,7 +46,11 @@ let link_type_vars = ty => { | Not_found => ty } | TTyArrow(tyl, ret, c) => - TTyArrow(List.map(link_types, tyl), link_types(ret), c) + TTyArrow( + List.map(((l, arg)) => (l, link_types(arg)), tyl), + link_types(ret), + c, + ) | TTyTuple(l) => TTyTuple(List.map(link_types, l)) | TTyRecord(l) => TTyRecord(List.map(((name, arg)) => (name, link_types(arg)), l)) diff --git a/compiler/src/typed/type_utils.re b/compiler/src/typed/type_utils.re index ca0b3d79a4..d6d6f4f8e7 100644 --- a/compiler/src/typed/type_utils.re +++ b/compiler/src/typed/type_utils.re @@ -24,7 +24,7 @@ let rec get_fn_allocation_type = (env, ty) => { | TTySubst(linked) | TTyLink(linked) => get_fn_allocation_type(env, linked) | TTyArrow(args, ret, _) => ( - List.map(get_allocation_type(env), args), + List.map(((_, arg)) => get_allocation_type(env, arg), args), get_allocation_type(env, ret), ) | TTyConstr(path, args, _) => diff --git a/compiler/src/typed/typecore.re b/compiler/src/typed/typecore.re index 502c0ae4ce..1693387081 100644 --- a/compiler/src/typed/typecore.re +++ b/compiler/src/typed/typecore.re @@ -24,6 +24,9 @@ type error = option(type_forcing_context), ) | Apply_non_function(type_expr) + | Apply_too_many_arguments(type_expr) + | Apply_too_few_arguments(list((argument_label, type_expr))) + | Apply_unknown_label(string, list(string)) | Label_multiply_defined(string) | Label_missing(list(Ident.t)) | Label_not_mutable(Identifier.t) @@ -55,7 +58,14 @@ type error = list((type_expr, type_expr)), bool, ) + | Not_a_function(type_expr, option(type_forcing_context)) | Too_many_arguments(bool, type_expr, option(type_forcing_context)) + | Abstract_wrong_label({ + got: argument_label, + expected: argument_label, + expected_type: type_expr, + explanation: option(type_forcing_context), + }) | 
Scoping_let_module(string, type_expr) | Masked_instance_variable(Identifier.t) | Not_a_variant_type(Identifier.t) @@ -322,11 +332,6 @@ let constant: let constant_or_raise = Checkertypes.constant_or_raise; -/* Specific version of type_option, using newty rather than newgenty */ - -/*let type_option ty = - newty (TTyConstr(Predef.path_option,[ty], ref TMemNil))*/ - let mkexp = (exp_desc, exp_type, exp_loc, exp_env, exp_attributes) => { exp_desc, exp_type, @@ -336,6 +341,50 @@ let mkexp = (exp_desc, exp_type, exp_loc, exp_env, exp_attributes) => { exp_attributes, }; +/* Specific version of type_option, using newty rather than newgenty */ + +let type_option = ty => + newty(TTyConstr(Builtin_types.path_option, [ty], ref(TMemNil))); + +let option_some = (env, texp) => { + let csome = + Env.find_constructor(Path.PIdent(Builtin_types.ident_some_cstr), env); + mkexp( + TExpConstruct( + mknoloc(Identifier.IdentName(mknoloc("Some"))), + csome, + TExpConstrTuple([texp]), + ), + type_option(texp.exp_type), + texp.exp_loc, + texp.exp_env, + [], + ); +}; + +let option_none = (env, ty, loc) => { + let cnone = + Env.find_constructor(Path.PIdent(Builtin_types.ident_none_cstr), env); + mkexp( + TExpConstruct( + mknoloc(Identifier.IdentName(mknoloc("None"))), + cnone, + TExpConstrTuple([]), + ), + type_option(ty), + loc, + env, + [], + ); +}; + +let extract_option_type = (env, ty) => { + switch (expand_head(env, ty).desc) { + | TTyConstr(path, [ty], _) when Path.same(path, Builtin_types.path_option) => ty + | _ => assert(false) + }; +}; + /* Typing of patterns */ /* unification inside type_pat*/ @@ -410,7 +459,7 @@ let rec approx_type = (env, sty) => | PTyArrow(args, ret) => newty( TTyArrow( - List.map(x => newvar(), args), + List.map(x => (x.ptyp_arg_label, newvar()), args), approx_type(env, ret), TComOk, ), @@ -439,7 +488,11 @@ let rec type_approx = (env, sexp: Parsetree.expression) => | PExpWhile(_, e) => type_approx(env, e) | PExpLambda(args, e) => newty( - TTyArrow(List.map(x => newvar(), args), type_approx(env, e), TComOk), + TTyArrow( + List.map(x => (x.pla_label, newvar()), args), + type_approx(env, e), + TComOk, + ), ) | PExpBlock([_, ..._] as es) => type_approx(env, last(es)) | _ => newvar() @@ -932,6 +985,81 @@ and type_expect_ = }); | PExpLambda(args, body) => open Ast_helper; + let opt_counter = ref(0); + let gen_opt = () => { + incr(opt_counter); + "option_" ++ string_of_int(opt_counter^); + }; + let (args, labels, prelude) = + List.fold_right( + (arg, (args, labels, prelude)) => { + switch (arg.pla_default) { + | Some(default) => + let default_value_name = mknoloc(""); + let default_loc = default.pexp_loc; + let scases = [ + Mb.mk( + Pat.construct( + ~loc=default_loc, + mknoloc(Identifier.IdentName(mknoloc("Some"))), + PPatConstrTuple([ + Pat.var(~loc=default_loc, default_value_name), + ]), + ), + Exp.ident( + ~loc=default_loc, + mknoloc(Identifier.IdentName(default_value_name)), + ), + None, + ), + Mb.mk( + Pat.construct( + ~loc=default_loc, + mknoloc(Identifier.IdentName(mknoloc("None"))), + PPatConstrTuple([]), + ), + default, + None, + ), + ]; + let sloc = { + Location.loc_start: arg.pla_pattern.ppat_loc.Location.loc_start, + loc_end: default_loc.Location.loc_end, + loc_ghost: true, + }; + let opt_name = mknoloc(gen_opt()); + let smatch = + Exp.match( + ~loc=sloc, + Exp.ident( + ~loc=sloc, + mknoloc(Identifier.IdentName(opt_name)), + ), + scases, + ); + let pat = Pat.var(~loc=sloc, opt_name); + let prelude_expr = + Exp.let_( + ~loc=sloc, + Nonrecursive, + Immutable, + 
[Vb.mk(arg.pla_pattern, smatch)], + ); + ( + [pat, ...args], + [arg.pla_label, ...labels], + [prelude_expr, ...prelude], + ); + | None => ( + [arg.pla_pattern, ...args], + [arg.pla_label, ...labels], + prelude, + ) + } + }, + args, + ([], [], []), + ); let pat = switch (args) { | [] => @@ -942,13 +1070,18 @@ and type_expect_ = ) | args => Pat.tuple(args) }; + let body = + switch (prelude) { + | [] => body + | _ => Exp.block(~loc=body.pexp_loc, prelude @ [body]) + }; type_function( ~in_function?, loc, attributes, env, ty_expected_explained, - (), + labels, [Mb.mk(pat, body, None)], ); | PExpApp(func, args) => @@ -974,11 +1107,12 @@ and type_expect_ = end_def(); /*lower_args [] ty;*/ begin_def(); - let (args, ty_res) = type_application(~in_function?, env, funct, args); + let (label_order, args, ty_res) = + type_application(~in_function?, ~loc, env, funct, args); end_def(); unify_var(env, newvar(), funct.exp_type); rue({ - exp_desc: TExpApp(funct, args), + exp_desc: TExpApp(funct, label_order, args), exp_loc: loc, exp_extra: [], exp_attributes: attributes, @@ -1275,7 +1409,7 @@ and type_expect_ = end_def(); generalize_structure(ty); let (arg, ty') = ( - List.hd @@ type_arguments(env, [sarg], [ty], [instance(env, ty)]), + type_argument(env, sarg, ty, instance(env, ty)), instance(env, ty), ); rue({ @@ -1338,7 +1472,6 @@ and type_expect_ = and type_function = (~in_function=?, loc, attrs, env, ty_expected_explained, l, caselist) => { let {ty: ty_expected, explanation} = ty_expected_explained; - /*Format.eprintf "@[type_function: expected: %a@]@." Printtyp.raw_type_expr ty_expected;*/ let (loc_fun, ty_fun) = (loc, instance(env, ty_expected)); let separate = @@ -1346,59 +1479,37 @@ and type_function = if (separate) { begin_def(); }; - let rec arity = caselist => - switch (caselist) { - | [] => failwith("Impossible: type_function: empty lambda") - | [{pmb_pat: {ppat_desc: PPatConstraint(p, _)}, _} as mb] => - arity([{...mb, pmb_pat: p}]) - | [{pmb_pat: {ppat_desc: PPatTuple(args)}, _}] => List.length(args) - // TODO(#1507): Reduce the number of hard-coded cases - | [ - { - pmb_pat: { - ppat_desc: PPatConstruct({txt: ident, _}, PPatConstrTuple([])), - _, - }, - _, - }, - ] - when Identifier.equal(ident, Identifier.IdentName(mknoloc("()"))) => 0 - | _ => failwith("Impossible: type_function: impossible caselist") - }; - let arity = arity(caselist); let exp_inst = instance(env, ty_expected); - /*Format.eprintf "@[type_function: pre: %a@]@." Printtyp.raw_type_expr exp_inst;*/ - let (ty_arg, ty_res) = - try(filter_arrow(arity, env, exp_inst)) { - | Unify(_) => - raise( - Error( - loc_fun, - env, - Too_many_arguments(in_function != None, ty_fun, explanation), - ), - ) + let (ty_args, ty_res) = + try(filter_arrow(env, exp_inst, l)) { + | Filter_arrow_failed(err) => + let err = + switch (err) { + | Unification_error(unif_err) => Expr_type_clash(unif_err, None) + | Label_mismatch({got, expected, expected_type}) => + Abstract_wrong_label({got, expected, expected_type, explanation}) + | Not_a_function => + switch (in_function) { + | Some(_) => + Too_many_arguments(in_function != None, ty_fun, explanation) + | None => Not_a_function(ty_fun, explanation) + } + }; + raise(Error(loc_fun, env, err)); }; - - /*let rec fmt_args ppf = function - | [] -> Format.fprintf ppf ")" - | a::tl -> - Format.fprintf ppf "%a, %a" Printtyp.raw_type_expr a fmt_args tl in - Format.eprintf "@[type_function: %i@ (%a -> %a@]@." 
(get_current_level()) - fmt_args (ty_arg) Printtyp.raw_type_expr ty_res;*/ if (separate) { end_def(); - List.iter(generalize_structure, ty_arg); + List.iter(generalize_structure, ty_args); generalize_structure(ty_res); }; let normalized_arg_type = - switch (ty_arg) { + switch (ty_args) { | [] => Builtin_types.type_void - | _ => newty(TTyTuple(ty_arg)) + | _ => newty(TTyTuple(ty_args)) }; let (cases, partial) = type_cases( - ~in_function=(loc_fun, ty_arg, ty_res), + ~in_function=(loc_fun, ty_args, ty_res), env, normalized_arg_type, ty_res, @@ -1406,77 +1517,51 @@ and type_function = loc, caselist, ); - // TODO: Decide if this should be added to TExpLambda - /*let param = name_pattern "param" cases in*/ re({ exp_desc: TExpLambda(cases, partial), exp_loc: loc, exp_extra: [], exp_attributes: attrs, - exp_type: instance(env, newgenty(TTyArrow(ty_arg, ty_res, TComOk))), + exp_type: + instance( + env, + newgenty(TTyArrow(List.combine(l, ty_args), ty_res, TComOk)), + ), exp_env: env, }); } -and type_arguments = - (~in_function=?, ~recarg=?, env, sargs, tys_expected', tys_expected) => +and type_argument = + (~in_function=?, ~recarg=?, env, sarg, ty_expected', ty_expected) => { /* ty_expected' may be generic */ - /* Note (Philip): I think the heavy lifting of this function - was there to support optional arguments (which we currently don't). */ - List.map2( - (sarg, (targ', targ)) => { - let texp = - type_expect(~in_function?, ~recarg?, env, sarg, mk_expected(targ')); - unify_exp(env, texp, targ); - texp; - }, - sargs, - List.combine(tys_expected', tys_expected), - ) + let texp = + type_expect( + ~in_function?, + ~recarg?, + env, + sarg, + mk_expected(ty_expected'), + ); + unify_exp(env, texp, ty_expected); + texp; +} -and type_application = (~in_function=?, env, funct, args) => { +and type_application = (~in_function=?, ~loc, env, funct, sargs) => { /* funct.exp_type may be generic */ - /*** Arguments, return value */ let ty_fun = expand_head(env, funct.exp_type); - let (ty_args, ty_ret, ty_level) = + let (ty_args, ty_ret) = switch (ty_fun.desc) { | TTyVar(_) => - let t_args = List.map(x => newvar(), args) + let t_args = List.map(arg => (arg.paa_label, newvar()), sargs) and t_ret = newvar(); - /*let not_identity = function - | TExpIdent(_,_,{val_kind=TValPrim - {Primitive.prim_name="%identity"}}) -> - false - | _ -> true - in - List.iter2 (fun arg t_arg -> - if ty_fun.level >= t_arg.level && not_identity funct.exp_desc then - Location.prerr_warning arg.pexp_loc Warnings.Unused_argument - ) args t_args;*/ unify( env, ty_fun, newty(TTyArrow(t_args, t_ret, TComLink(ref(TComUnknown)))), ); - (t_args, t_ret, ty_fun.level); - | TTyArrow(t_args, t_ret, _) - when List.length(t_args) == List.length(args) => ( - t_args, - t_ret, - ty_fun.level, - ) - | TTyArrow(t_args, t_ret, _) => - raise( - Error( - funct.exp_loc, - env, - Arity_mismatch( - expand_head(env, funct.exp_type), - List.length(args), - ), - ), - ) - | td => + (t_args, t_ret); + | TTyArrow(t_args, t_ret, _) => (t_args, t_ret) + | _ => raise( Error( funct.exp_loc, @@ -1486,15 +1571,182 @@ and type_application = (~in_function=?, env, funct, args) => { ) }; - let typed_args = - type_arguments( - ~in_function?, - env, - args, + let ordered_labels = List.map(fst, ty_args); + + let (labeled_sargs, unlabeled_sargs) = + List.partition( + sarg => { + switch (sarg.paa_label) { + | Labeled(_) => true + | _ => false + } + }, + sargs, + ); + + let (used_labeled_tyargs, unused_tyargs) = + List.partition( + ((l, _)) => { + List.exists( + sarg => 
same_label_name(l, sarg.paa_label), + labeled_sargs, + ) + }, ty_args, - List.map(instance(env), ty_args), ); - (typed_args, instance(env, ty_ret)); + + let rec type_args = + ( + args, + remaining_sargs, + remaining_used_labeled_tyargs, + remaining_unused_tyargs, + ) => { + let rec extract_label = (l, tyargs) => { + switch (tyargs) { + | [] => (None, []) + | [(tyl, _) as tyarg, ...rest_tyargs] when same_label_name(tyl, l) => ( + Some(tyarg), + rest_tyargs, + ) + | [tyarg, ...rest_tyargs] => + let (res, rest_tyargs) = extract_label(l, rest_tyargs); + (res, [tyarg, ...rest_tyargs]); + }; + }; + let rec next_tyarg = tyargs => { + switch (tyargs) { + | [] => (None, []) + | [(tyl, _) as tyarg, ...rest_tyargs] when !is_optional(tyl) => ( + Some(tyarg), + rest_tyargs, + ) + | [tyarg, ...rest_tyargs] => + let (res, rest_tyargs) = next_tyarg(rest_tyargs); + (res, [tyarg, ...rest_tyargs]); + }; + }; + switch (remaining_sargs) { + | [] => (args, remaining_unused_tyargs) + | [sarg, ...remaining_sargs] => + let ( + corresponding_tyarg, + remaining_used_labeled_tyargs, + remaining_unused_tyargs, + ) = + switch (sarg.paa_label) { + | Optional(_) => + failwith("Impossible: optional argument in application") + | Labeled(_) => + let (corresponding_tyarg, remaining_used_labeled_tyargs) = + extract_label(sarg.paa_label, remaining_used_labeled_tyargs); + ( + corresponding_tyarg, + remaining_used_labeled_tyargs, + remaining_unused_tyargs, + ); + | Unlabeled => + let (corresponding_tyarg, remaining_unused_tyargs) = + next_tyarg(remaining_unused_tyargs); + ( + corresponding_tyarg, + remaining_used_labeled_tyargs, + remaining_unused_tyargs, + ); + }; + switch (corresponding_tyarg) { + | Some((l, ty)) => + let arg = + if (!is_optional(l)) { + ( + () => + type_argument( + ~in_function?, + env, + sarg.paa_expr, + ty, + instance(env, ty), + ) + ); + } else { + ( + () => + option_some( + env, + type_argument( + ~in_function?, + env, + sarg.paa_expr, + extract_option_type(env, ty), + extract_option_type(env, instance(env, ty)), + ), + ) + ); + }; + type_args( + [(l, arg), ...args], + remaining_sargs, + remaining_used_labeled_tyargs, + remaining_unused_tyargs, + ); + | None => + switch (sarg.paa_label) { + | Unlabeled => + raise( + Error( + loc, + env, + Apply_too_many_arguments(expand_head(env, funct.exp_type)), + ), + ) + | _ => + raise( + Error( + sarg.paa_loc, + env, + Apply_unknown_label( + label_name(sarg.paa_label), + List.filter_map( + l => { + switch (l) { + | Unlabeled => None + | _ => Some(label_name(l)) + } + }, + ordered_labels, + ), + ), + ), + ) + } + }; + }; + }; + + let (args, remaining_tyargs) = + type_args([], sargs, used_labeled_tyargs, unused_tyargs); + + let omitted_args = + List.map( + ((l, ty)) => { + switch (l) { + | Optional(_) => + // omitted optional argument + (l, option_none(env, instance(env, ty), Location.dummy_loc)) + | _ => + let missing_args = + List.filter(((l, _)) => !is_optional(l), remaining_tyargs); + raise(Error(loc, env, Apply_too_few_arguments(missing_args))); + } + }, + remaining_tyargs, + ); + + // Typecheck all arguments. + // Order here is important; rev_map would be incorrect. 
+ let typed_args = List.map(((l, argf)) => (l, argf()), List.rev(args)); + + (ordered_labels, omitted_args @ typed_args, instance(env, ty_ret)); } and type_construct = (env, loc, lid, sarg, ty_expected_explained, attrs) => { @@ -1614,7 +1866,13 @@ and type_construct = (env, loc, lid, sarg, ty_expected_explained, attrs) => { raise (Error(loc, env, Inlined_record_expected)) end*/ - let args = type_arguments(~recarg, env, sargs, ty_args, ty_args0); + let args = + List.map2( + (sarg, (ty_arg, ty_arg0)) => + type_argument(~recarg, env, sarg, ty_arg, ty_arg0), + sargs, + List.combine(ty_args, ty_args0), + ); let arg = if (is_record_cstr) { switch (args) { @@ -2230,9 +2488,7 @@ and type_label_exp = (create, env, loc, ty_expected, (lid, label, sarg)) => { } else { Some(Btype.snapshot()); }; - let arg = - List.hd @@ - type_arguments(env, [sarg], [ty_arg], [instance(env, ty_arg)]); + let arg = type_argument(env, sarg, ty_arg, instance(env, ty_arg)); end_def(); try( { @@ -2439,28 +2695,49 @@ let report_error = (env, ppf) => ) | Apply_non_function(typ) => { reset_and_mark_loops(typ); - switch (repr(typ).desc) { - | TTyArrow(_) => - fprintf( - ppf, - "@[@[<2>This function has type@ %a@]", - type_expr, - typ, - ); - fprintf( - ppf, - "@ @[It is applied to too many arguments;@ %s@]@]", - "maybe you forgot a `;'.", - ); - | _ => - fprintf( - ppf, - "@[@[<2>This expression has type@ %a@]@ %s@]", - type_expr, - typ, - "This is not a function; it cannot be applied.", - ) + fprintf( + ppf, + "@[@[<2>This expression has type@ %a@]@ %s@]", + type_expr, + typ, + "This is not a function; it cannot be applied.", + ); + } + | Apply_too_many_arguments(typ) => { + reset_and_mark_loops(typ); + fprintf(ppf, "@[@[<2>This function has type@ %a@]", type_expr, typ); + fprintf(ppf, "@ @[It is applied to too many arguments.@]@]"); + } + | Apply_too_few_arguments(args) => { + List.iter(((_, typ)) => reset_and_mark_loops(typ), args); + let rec print_args = (ppf, args) => { + let print_arg = ((l, arg)) => { + reset_and_mark_loops(arg); + switch (l) { + | Unlabeled => fprintf(ppf, "%a", type_expr, arg) + | _ => + fprintf(ppf, "%s: %a", qualified_label_name(l), type_expr, arg) + }; + }; + switch (args) { + | [] => () + | [arg] => print_arg(arg) + | [arg, ...rest] => + print_arg(arg); + fprintf(ppf, ",@ "); + print_args(ppf, rest); + }; }; + fprintf( + ppf, + "@[This function call is missing arguments: %a@]", + print_args, + args, + ); + } + | Apply_unknown_label(label, valid_labels) => { + fprintf(ppf, "This argument cannot be applied with label %s.", label); + spellcheck(ppf, label, valid_labels); } | Label_multiply_defined(s) => fprintf(ppf, "The record field label %s is defined several times", s) @@ -2615,6 +2892,21 @@ let report_error = (env, ppf) => ); }; } + | Not_a_function(ty, explanation) => { + reset_and_mark_loops(ty); + fprintf(ppf, "This expression is not a function,@ "); + fprintf( + ppf, + "the expected type is@ %a%t", + type_expr, + ty, + report_type_expected_explanation_opt(explanation), + ); + } + | Abstract_wrong_label({got, expected, expected_type, explanation}) => { + // FIXME + fprintf(ppf, "WRONG LABEL@ "); + } | Too_many_arguments(in_function, ty, explanation) => { reset_and_mark_loops(ty); if (in_function) { @@ -2800,5 +3092,3 @@ let () = let type_expect = (~in_function=?, env, e, ty) => type_expect(~in_function?, env, e, ty); let type_exp = (env, e) => type_exp(env, e); -let type_arguments = (env, es, t1s, t2s) => - type_arguments(env, es, t1s, t2s); diff --git a/compiler/src/typed/typecore.rei 
b/compiler/src/typed/typecore.rei index cff40accbd..79cb7ecd48 100644 --- a/compiler/src/typed/typecore.rei +++ b/compiler/src/typed/typecore.rei @@ -57,9 +57,6 @@ let type_expect: Typedtree.expression; let type_exp: (Env.t, Parsetree.expression) => Typedtree.expression; let type_approx: (Env.t, Parsetree.expression) => type_expr; -let type_arguments: - (Env.t, list(Parsetree.expression), list(type_expr), list(type_expr)) => - list(Typedtree.expression); let generalizable: (int, type_expr) => bool; @@ -79,6 +76,9 @@ type error = option(Checkertypes.type_forcing_context), ) | Apply_non_function(type_expr) + | Apply_too_many_arguments(type_expr) + | Apply_too_few_arguments(list((argument_label, type_expr))) + | Apply_unknown_label(string, list(string)) | Label_multiply_defined(string) | Label_missing(list(Ident.t)) | Label_not_mutable(Identifier.t) @@ -117,11 +117,18 @@ type error = list((type_expr, type_expr)), bool, ) + | Not_a_function(type_expr, option(Checkertypes.type_forcing_context)) | Too_many_arguments( bool, type_expr, option(Checkertypes.type_forcing_context), ) + | Abstract_wrong_label({ + got: argument_label, + expected: argument_label, + expected_type: type_expr, + explanation: option(Checkertypes.type_forcing_context), + }) | Scoping_let_module(string, type_expr) | Masked_instance_variable(Identifier.t) | Not_a_variant_type(Identifier.t) diff --git a/compiler/src/typed/typed_well_formedness.re b/compiler/src/typed/typed_well_formedness.re index 8ce403dfc1..65a66d36c4 100644 --- a/compiler/src/typed/typed_well_formedness.re +++ b/compiler/src/typed/typed_well_formedness.re @@ -95,10 +95,11 @@ module WellFormednessArg: TypedtreeIter.IteratorArgument = { _, ), }, + _, args, ) when func == "==" || func == "!=" => - if (List.exists(exp_is_wasm_unsafe, args)) { + if (List.exists(((_, arg)) => exp_is_wasm_unsafe(arg), args)) { let warning = Grain_utils.Warnings.FuncWasmUnsafe( Printf.sprintf("Pervasives.(%s)", func), @@ -121,15 +122,19 @@ module WellFormednessArg: TypedtreeIter.IteratorArgument = { _, ), }, + _, [ - { - exp_desc: - TExpConstant( - Const_number( - (Const_number_int(_) | Const_number_float(_)) as n, + ( + Unlabeled, + { + exp_desc: + TExpConstant( + Const_number( + (Const_number_int(_) | Const_number_float(_)) as n, + ), ), - ), - }, + }, + ), ], ) when diff --git a/compiler/src/typed/typedecl.re b/compiler/src/typed/typedecl.re index 076b1ab2c1..df34d5d0d0 100644 --- a/compiler/src/typed/typedecl.re +++ b/compiler/src/typed/typedecl.re @@ -54,7 +54,6 @@ type error = /*| Unbound_type_var_ext of type_expr * extension_constructor*/ | Varying_anonymous | Val_in_structure - | Multiple_native_repr_attributes | Cannot_unbox_or_untag_type(native_repr_kind) | Deep_unbox_or_untag_attribute(native_repr_kind) | Bad_immediate_attribute @@ -854,107 +853,6 @@ let transl_data_decl = (env, rec_flag, sdecl_list) => { (final_decls, final_env); }; -type native_repr_attribute = - | Native_repr_attr_absent - | Native_repr_attr_present(native_repr_kind); - -let get_native_repr_attribute = (attrs, ~global_repr) => - /*Attr_helper.get_no_payload_attribute ["unboxed"; "ocaml.unboxed"] attrs, - Attr_helper.get_no_payload_attribute ["untagged"; "ocaml.untagged"] attrs,*/ - switch (None, None, global_repr) { - | (None, None, None) => Native_repr_attr_absent - | (None, None, Some(repr)) => Native_repr_attr_present(repr) - | (Some(_), None, None) => Native_repr_attr_present(Unboxed) - | (None, Some(_), None) => Native_repr_attr_present(Untagged) - | (Some({Location.loc}), _, _) - | (_, 
Some({Location.loc}), _) => - raise(Error(loc, Multiple_native_repr_attributes)) - }; - -let native_repr_of_type = (env, kind, ty) => - switch (kind, Ctype.expand_head_opt(env, ty).desc) { - | (Untagged, TTyConstr(path, _, _)) - when Path.same(path, Builtin_types.path_number) => - Some(Untagged_int) - /*| Unboxed, TTyConstr (path, _, _) when Path.same path Predef.path_float -> - Some Unboxed_float - | Unboxed, TTyConstr (path, _, _) when Path.same path Predef.path_int32 -> - Some (Unboxed_integer Pint32) - | Unboxed, TTyConstr (path, _, _) when Path.same path Predef.path_int64 -> - Some (Unboxed_integer Pint64) - | Unboxed, TTyConstr (path, _, _) when Path.same path Predef.path_nativeint -> - Some (Unboxed_integer Pnativeint)*/ - | _ => None - }; - -/* Raises an error when [core_type] contains an [@unboxed] or [@untagged] - attribute in a strict sub-term. */ -let error_if_has_deep_native_repr_attributes = core_type => { - open Ast_iterator; - let this_iterator = { - ...default_iterator, - typ: (iterator, core_type) => { - switch ( - get_native_repr_attribute( - [] /*core_type.ptyp_attributes*/, - ~global_repr=None, - ) - ) { - | Native_repr_attr_present(kind) => - raise( - Error(core_type.ptyp_loc, Deep_unbox_or_untag_attribute(kind)), - ) - | Native_repr_attr_absent => () - }; - default_iterator.typ(iterator, core_type); - }, - }; - - List.iter(default_iterator.typ(this_iterator), core_type); -}; - -let make_native_repr = (env, core_type, ty, ~global_repr) => { - error_if_has_deep_native_repr_attributes(core_type); - switch ( - get_native_repr_attribute([] /*core_type.ptyp_attributes*/, ~global_repr) - ) { - | Native_repr_attr_absent => Same_as_ocaml_repr - | Native_repr_attr_present(kind) => - List.fold_left( - (_, ty) => - switch (native_repr_of_type(env, kind, ty)) { - | None => - raise(Error(Location.dummy_loc, Cannot_unbox_or_untag_type(kind))) - | Some(repr) => repr - }, - Same_as_ocaml_repr, - ty, - ) - }; -}; - -let rec parse_native_repr_attributes = (env, core_type, ty, ~global_repr) => - switch ( - core_type.ptyp_desc, - Ctype.repr(ty).desc, - get_native_repr_attribute( - [] /*core_type.ptyp_attributes*/, - ~global_repr=None, - ), - ) { - | (PTyArrow(_), TTyArrow(_), Native_repr_attr_present(kind)) => - raise(Error(core_type.ptyp_loc, Cannot_unbox_or_untag_type(kind))) - | (PTyArrow(ct1, ct2), TTyArrow(t1, t2, _), _) => - let repr_arg = make_native_repr(env, ct1, t1, ~global_repr); - let (repr_args, repr_res) = - parse_native_repr_attributes(env, ct2, t2, ~global_repr); - - ([repr_arg, ...repr_args], repr_res); - | (PTyArrow(_), _, _) - | (_, TTyArrow(_), _) => assert(false) - | _ => ([], make_native_repr(env, [core_type], [ty], ~global_repr)) - }; - /* Translate a value declaration */ let transl_value_decl = (env, loc, valdecl) => { let cty = Typetexp.transl_type_scheme(env, valdecl.pval_type); @@ -1494,8 +1392,6 @@ let report_error = ppf => ) | Val_in_structure => fprintf(ppf, "Value declarations are only allowed in signatures") - | Multiple_native_repr_attributes => - fprintf(ppf, "Too many [@@unboxed]/[@@untagged] attributes") | Cannot_unbox_or_untag_type(Unboxed) => fprintf( ppf, diff --git a/compiler/src/typed/typedtree.re b/compiler/src/typed/typedtree.re index 6ff51c23aa..81c4f48ce8 100644 --- a/compiler/src/typed/typedtree.re +++ b/compiler/src/typed/typedtree.re @@ -38,6 +38,9 @@ type partial = type rec_flag = Asttypes.rec_flag = | Nonrecursive | Recursive; type mut_flag = Asttypes.mut_flag = | Mutable | Immutable; +type argument_label = + Asttypes.argument_label = + | 
Unlabeled | Labeled(loc(string)) | Optional(loc(string)); type wasm_prim_type = Parsetree.wasm_prim_type = @@ -314,7 +317,7 @@ type core_type = { and core_type_desc = | TTyAny | TTyVar(string) - | TTyArrow(list(core_type), core_type) + | TTyArrow(list((argument_label, core_type)), core_type) | TTyTuple(list(core_type)) | TTyRecord(list((loc(Identifier.t), core_type))) | TTyConstr(Path.t, loc(Identifier.t), list(core_type)) @@ -477,7 +480,11 @@ and expression_desc = | TExpBreak | TExpReturn(option(expression)) | TExpLambda(list(match_branch), partial) - | TExpApp(expression, list(expression)) + | TExpApp( + expression, + list(argument_label), + list((argument_label, expression)), + ) | TExpConstruct( loc(Identifier.t), constructor_description, diff --git a/compiler/src/typed/typedtree.rei b/compiler/src/typed/typedtree.rei index 3cec156cf3..27715e8492 100644 --- a/compiler/src/typed/typedtree.rei +++ b/compiler/src/typed/typedtree.rei @@ -38,6 +38,9 @@ type partial = type rec_flag = Asttypes.rec_flag = | Nonrecursive | Recursive; type mut_flag = Asttypes.mut_flag = | Mutable | Immutable; +type argument_label = + Asttypes.argument_label = + | Unlabeled | Labeled(loc(string)) | Optional(loc(string)); type wasm_prim_type = Parsetree.wasm_prim_type = @@ -294,7 +297,7 @@ type core_type = { and core_type_desc = | TTyAny | TTyVar(string) - | TTyArrow(list(core_type), core_type) + | TTyArrow(list((argument_label, core_type)), core_type) | TTyTuple(list(core_type)) | TTyRecord(list((loc(Identifier.t), core_type))) | TTyConstr(Path.t, loc(Identifier.t), list(core_type)) @@ -445,7 +448,11 @@ and expression_desc = | TExpBreak | TExpReturn(option(expression)) | TExpLambda(list(match_branch), partial) - | TExpApp(expression, list(expression)) + | TExpApp( + expression, + list(argument_label), + list((argument_label, expression)), + ) | TExpConstruct( loc(Identifier.t), constructor_description, diff --git a/compiler/src/typed/typedtreeIter.re b/compiler/src/typed/typedtreeIter.re index 1e09628b6d..fa74a23884 100644 --- a/compiler/src/typed/typedtreeIter.re +++ b/compiler/src/typed/typedtreeIter.re @@ -65,7 +65,7 @@ module MakeIterator = | TTyAny | TTyVar(_) => () | TTyArrow(args, ret) => - List.iter(iter_core_type, args); + List.iter(((_, a)) => iter_core_type(a), args); iter_core_type(ret); | TTyConstr(_, _, args) | TTyTuple(args) => List.iter(iter_core_type, args) @@ -208,9 +208,9 @@ module MakeIterator = | TExpLet(recflag, mutflag, binds) => iter_bindings(recflag, mutflag, binds) | TExpLambda(branches, _) => iter_match_branches(branches) - | TExpApp(exp, args) => + | TExpApp(exp, _, args) => iter_expression(exp); - List.iter(iter_expression, args); + List.iter(((_, arg)) => iter_expression(arg), args); | TExpPrim0(_) => () | TExpPrim1(_, e) => iter_expression(e) | TExpPrim2(_, e1, e2) => diff --git a/compiler/src/typed/typedtreeMap.re b/compiler/src/typed/typedtreeMap.re index 49d452e0ff..0c6f357fb0 100644 --- a/compiler/src/typed/typedtreeMap.re +++ b/compiler/src/typed/typedtreeMap.re @@ -56,7 +56,7 @@ module MakeMap = | TTyAny | TTyVar(_) => ct.ctyp_desc | TTyArrow(args, ret) => - let args = List.map(map_core_type, args); + let args = List.map(((l, arg)) => (l, map_core_type(arg)), args); let ret = map_core_type(ret); TTyArrow(args, ret); | TTyConstr(a, b, args) => @@ -206,8 +206,12 @@ module MakeMap = TExpLet(recflag, mutflag, map_bindings(recflag, mutflag, binds)) | TExpLambda(branches, p) => TExpLambda(map_match_branches(branches), p) - | TExpApp(exp, args) => - TExpApp(map_expression(exp), 
List.map(map_expression, args)) + | TExpApp(exp, labels, args) => + TExpApp( + map_expression(exp), + labels, + List.map(((l, arg)) => (l, map_expression(arg)), args), + ) | TExpPrim0(o) => TExpPrim0(o) | TExpPrim1(o, e) => TExpPrim1(o, map_expression(e)) | TExpPrim2(o, e1, e2) => diff --git a/compiler/src/typed/typemod.re b/compiler/src/typed/typemod.re index 1e6c5330a3..fd2244948a 100644 --- a/compiler/src/typed/typemod.re +++ b/compiler/src/typed/typemod.re @@ -794,7 +794,7 @@ let type_module = (~toplevel=false, funct_body, anchor, env, sstr /*scope*/) => ...expr, desc: TTyArrow( - List.map(resolve_type_expr, args), + List.map(((l, arg)) => (l, resolve_type_expr(arg)), args), resolve_type_expr(result), c, ), diff --git a/compiler/src/typed/types.re b/compiler/src/typed/types.re index 93b95ae0f8..3ba5f66ac4 100644 --- a/compiler/src/typed/types.re +++ b/compiler/src/typed/types.re @@ -38,7 +38,7 @@ type type_expr = { and type_desc = | TTyVar(option(string)) // A type variable (None == "_") - | TTyArrow(list(type_expr), type_expr, commutable) // A function type. + | TTyArrow(list((argument_label, type_expr)), type_expr, commutable) // A function type. | TTyTuple(list(type_expr)) // A tuple type. | TTyRecord(list((string, type_expr))) // A record type. | TTyConstr(Path.t, list(type_expr), ref(abbrev_memo)) // A parameterized type. diff --git a/compiler/src/typed/typetexp.re b/compiler/src/typed/typetexp.re index 5c46c316b4..545984e62d 100644 --- a/compiler/src/typed/typetexp.re +++ b/compiler/src/typed/typetexp.re @@ -340,12 +340,37 @@ and transl_type_aux = (env, policy, styp) => { }; ctyp(TTyVar(name), ty); - | PTyArrow(st1, st2) => - let cty1 = List.map(transl_type(env, policy), st1); + | PTyArrow(stl, st2) => + let ctyl = + List.map( + st => { + let ty = transl_type(env, policy, st.ptyp_arg_type); + (st.ptyp_arg_label, ty); + }, + stl, + ); + let tyl = + List.map( + ((l, ty)) => { + let ty = + if (Btype.is_optional(l)) { + newty( + TTyConstr( + Builtin_types.path_option, + [ty.ctyp_type], + ref(TMemNil), + ), + ); + } else { + ty.ctyp_type; + }; + (l, ty); + }, + ctyl, + ); let cty2 = transl_type(env, policy, st2); - let ty1 = List.map(x => x.ctyp_type, cty1); - let ty = newty(TTyArrow(ty1, cty2.ctyp_type, TComOk)); - ctyp(TTyArrow(cty1, cty2), ty); + let ty = newty(TTyArrow(tyl, cty2.ctyp_type, TComOk)); + ctyp(TTyArrow(ctyl, cty2), ty); | PTyTuple(stl) => assert(List.length(stl) >= 1); let ctys = List.map(transl_type(env, policy), stl); @@ -603,62 +628,6 @@ let type_attributes = attrs => { ); }; -let rec type_expr_to_core_type = (env, expr) => { - let desc = - switch (expr.desc) { - | TTyVar(None) => TTyAny - | TTyVar(Some(name)) => TTyVar(name) - | TTyArrow(args, result, _) => - TTyArrow( - List.map(type_expr_to_core_type(env), args), - type_expr_to_core_type(env, result), - ) - | TTyTuple(args) => - TTyTuple(List.map(type_expr_to_core_type(env), args)) - | TTyRecord(fields) => - TTyRecord( - List.map( - ((field, ty)) => - ( - Location.mknoloc( - Identifier.IdentName(Location.mknoloc(field)), - ), - type_expr_to_core_type(env, ty), - ), - fields, - ), - ) - | TTyConstr(path, args, _) => - TTyConstr( - path, - Location.mknoloc( - Identifier.IdentName(Location.mknoloc(Path.name(path))), - ), - List.map(type_expr_to_core_type(env), args), - ) - | TTyUniVar(Some(name)) => TTyVar(name) - | TTyUniVar(None) => TTyAny - | TTyPoly(ty, args) => - TTyPoly( - List.map( - fun - | {desc: TTyVar(Some(name))} => name - | _ => failwith("TTyPoly invalid type vars"), - args, - ), - 
type_expr_to_core_type(env, ty), - ) - | TTyLink(ty) => type_expr_to_core_type(env, ty).ctyp_desc - | TTySubst(ty) => type_expr_to_core_type(env, ty).ctyp_desc - }; - { - ctyp_desc: desc, - ctyp_type: expr, - ctyp_env: env, - ctyp_loc: Location.dummy_loc, - }; -}; - let report_error = (env, ppf) => fun | Unbound_type_variable(name) => diff --git a/compiler/src/typed/typetexp.rei b/compiler/src/typed/typetexp.rei index 5d2c0eaa37..40fce2f9b8 100644 --- a/compiler/src/typed/typetexp.rei +++ b/compiler/src/typed/typetexp.rei @@ -114,5 +114,3 @@ let type_attributes: Asttypes.attributes => Typedtree.attributes; let unbound_label_error: (Env.t, Location.loc(Identifier.t)) => 'a; let unbound_constructor_error: (Env.t, Location.loc(Identifier.t)) => 'a; - -let type_expr_to_core_type: (Env.t, type_expr) => Typedtree.core_type; diff --git a/compiler/test/formatter_inputs/application.gr b/compiler/test/formatter_inputs/application.gr index dfa98796af..654bc99e45 100644 --- a/compiler/test/formatter_inputs/application.gr +++ b/compiler/test/formatter_inputs/application.gr @@ -4,7 +4,7 @@ import Map from "map" let data = [>] -let busNumbers = Array.map(((num, idx)) => { (Option.expect("Expected a number", None), idx) }, data) +let busNumbers = Array.map((item (num, idx)) => { (Option.expect("Expected a number", None), idx) }, data) let sillyFunction = (a,b,c,d,e,f,g,h,j,k) => 10 @@ -29,7 +29,7 @@ export let unique2 = array => { filteri((el, index) => findIndex(value => value == el, array) == Some(index), array) } -export let batchActionCreateAccount = ({index}) => { +export let batchActionCreateAccount = (index {index}) => { Native.promiseBatchActionCreateAccount(Conv.fromInt64(index)) } diff --git a/compiler/test/formatter_outputs/application.gr b/compiler/test/formatter_outputs/application.gr index 9e71cf0322..d0f28b01e2 100644 --- a/compiler/test/formatter_outputs/application.gr +++ b/compiler/test/formatter_outputs/application.gr @@ -4,7 +4,7 @@ import Map from "map" let data = [>] -let busNumbers = Array.map(((num, idx)) => { +let busNumbers = Array.map((item (num, idx)) => { (Option.expect("Expected a number", None), idx) }, data) @@ -43,7 +43,7 @@ export let unique2 = array => { findIndex(value => value == el, array) == Some(index), array) } -export let batchActionCreateAccount = ({ index }) => { +export let batchActionCreateAccount = (index { index }) => { Native.promiseBatchActionCreateAccount(Conv.fromInt64(index)) } diff --git a/compiler/test/stdlib/path.test.gr b/compiler/test/stdlib/path.test.gr index 9d8fc5bb4c..7f03dfa8bb 100644 --- a/compiler/test/stdlib/path.test.gr +++ b/compiler/test/stdlib/path.test.gr @@ -90,7 +90,7 @@ let parseFileTests = [ }, ] -List.forEach(({ pathStr, expParent, expStr, expName, expStem, expExt }) => { +List.forEach((data { pathStr, expParent, expStr, expName, expStem, expExt }) => { let path = fs(pathStr) assert Path.toString(path) == expStr assert fs(expParent) == Path.parent(path) @@ -130,7 +130,7 @@ let parseDirTests = [ { pathStr: "c:/.././..", expParent: "c:/", expStr: "c:/", expName: None }, ] -List.forEach(({ pathStr, expParent, expStr, expName }: ParseDirTestData) => { +List.forEach((data { pathStr, expParent, expStr, expName }: ParseDirTestData) => { let path = fs(pathStr) assert Path.toString(path) == expStr assert fs(expParent) == Path.parent(path) @@ -172,7 +172,7 @@ let pathTypeTests = [ { pathStr: "./file", isDir: false, isAbs: false }, ] -List.forEach(({ pathStr, isDir, isAbs }) => { +List.forEach((data { pathStr, isDir, isAbs }) => { let path = 
fs(pathStr) assert isDir == Path.isDirectory(path) assert isAbs == Path.isAbsolute(path) @@ -206,7 +206,7 @@ let appendTests = [ { base: "/usr/", toAppend: "../../file", final: "/file" }, ] -List.forEach(({ base, toAppend, final }) => { +List.forEach((data { base, toAppend, final }) => { let path = fs(base) let expPath = fs(final) let append = Path.append(path, fs(toAppend)) @@ -252,7 +252,7 @@ let relativeToTests = [ }, ] -List.forEach(({ source, dest, result }) => { +List.forEach((data { source, dest, result }) => { let source = fs(source) let dest = fs(dest) assert Path.relativeTo(source, dest) == result diff --git a/compiler/test/suites/functions.re b/compiler/test/suites/functions.re index 30f2deec2a..c6c98665cb 100644 --- a/compiler/test/suites/functions.re +++ b/compiler/test/suites/functions.re @@ -85,27 +85,27 @@ describe("functions", ({test, testSkip}) => { assertSnapshot("lam_destructure_2", "let foo = (_) => 5; foo(\"foo\")"); assertSnapshot( "lam_destructure_3", - "(((a, b, c)) => a + b + c)((1, 2, 3))", + "((_ (a, b, c)) => a + b + c)((1, 2, 3))", ); assertSnapshot( "lam_destructure_4", - "let foo = ((a, b, c)) => a + b + c; foo((1, 2, 3))", + "let foo = (_ (a, b, c)) => a + b + c; foo((1, 2, 3))", ); assertSnapshot( "lam_destructure_5", - "(((a, b, c), (x, y)) => a + b + c + x + y)((1, 2, 3), (4, 5))", + "((_ (a, b, c), _ (x, y)) => a + b + c + x + y)((1, 2, 3), (4, 5))", ); assertSnapshot( "lam_destructure_6", - "let foo = ((a, b, c), (x, y)) => a + b + c + x + y; foo((1, 2, 3), (4, 5))", + "let foo = (_ (a, b, c), _ (x, y)) => a + b + c + x + y; foo((1, 2, 3), (4, 5))", ); assertSnapshot( "lam_destructure_7", - "(((a, b, (c, d))) => a + b + c + d)((1, 2, (3, 4)))", + "((_ (a, b, (c, d))) => a + b + c + d)((1, 2, (3, 4)))", ); assertSnapshot( "lam_destructure_8", - "let foo = ((a, b, (c, d))) => a + b + c + d; foo((1, 2, (3, 4)))", + "let foo = (_ (a, b, (c, d))) => a + b + c + d; foo((1, 2, (3, 4)))", ); assertRun("lambda_1", "print((x) => {x})", "\n"); assertSnapshot("app_1", "((x) => {x})(1)"); diff --git a/compiler/test/suites/parsing.re b/compiler/test/suites/parsing.re index 3a174471ab..684de2e504 100644 --- a/compiler/test/suites/parsing.re +++ b/compiler/test/suites/parsing.re @@ -2,6 +2,7 @@ open Grain_tests.TestFramework; open Grain_tests.Runner; open Grain_parsing; open Grain_parsing.Ast_helper; +open Grain_parsing.Parsetree; describe("parsing", ({test, testSkip}) => { let test_or_skip = @@ -27,6 +28,11 @@ describe("parsing", ({test, testSkip}) => { Exp.ident( Location.mknoloc(Identifier.IdentName(Location.mknoloc("c"))), ); + let unlabled_expr = expr => { + paa_label: Unlabeled, + paa_expr: expr, + paa_loc: Location.dummy_loc, + }; let testOp = op => assertParse( op, @@ -40,7 +46,7 @@ describe("parsing", ({test, testSkip}) => { Identifier.IdentName(Location.mknoloc(op)), ), ), - [a, b], + [unlabled_expr(a), unlabled_expr(b)], ), ), ], @@ -109,14 +115,16 @@ describe("parsing", ({test, testSkip}) => { ), ), [ - a, - Exp.apply( - Exp.ident( - Location.mknoloc( - Identifier.IdentName(Location.mknoloc("***")), + unlabled_expr(a), + unlabled_expr( + Exp.apply( + Exp.ident( + Location.mknoloc( + Identifier.IdentName(Location.mknoloc("***")), + ), ), + [unlabled_expr(b), unlabled_expr(c)], ), - [b, c], ), ], ), @@ -139,14 +147,16 @@ describe("parsing", ({test, testSkip}) => { ), ), [ - a, - Exp.apply( - Exp.ident( - Location.mknoloc( - Identifier.IdentName(Location.mknoloc("&--")), + unlabled_expr(a), + unlabled_expr( + Exp.apply( + Exp.ident( + Location.mknoloc( + 
Identifier.IdentName(Location.mknoloc("&--")), + ), ), + [unlabled_expr(b), unlabled_expr(c)], ), - [b, c], ), ], ), @@ -169,14 +179,16 @@ describe("parsing", ({test, testSkip}) => { ), ), [ - a, - Exp.apply( - Exp.ident( - Location.mknoloc( - Identifier.IdentName(Location.mknoloc("|--")), + unlabled_expr(a), + unlabled_expr( + Exp.apply( + Exp.ident( + Location.mknoloc( + Identifier.IdentName(Location.mknoloc("|--")), + ), ), + [unlabled_expr(b), unlabled_expr(c)], ), - [b, c], ), ], ), @@ -199,15 +211,17 @@ describe("parsing", ({test, testSkip}) => { ), ), [ - Exp.apply( - Exp.ident( - Location.mknoloc( - Identifier.IdentName(Location.mknoloc("<<")), + unlabled_expr( + Exp.apply( + Exp.ident( + Location.mknoloc( + Identifier.IdentName(Location.mknoloc("<<")), + ), ), + [unlabled_expr(a), unlabled_expr(b)], ), - [a, b], ), - c, + unlabled_expr(c), ], ), ), diff --git a/stdlib/immutablemap.gr b/stdlib/immutablemap.gr index 9dcb6e72c6..1094038348 100644 --- a/stdlib/immutablemap.gr +++ b/stdlib/immutablemap.gr @@ -453,7 +453,7 @@ export let reject = (fn, map) => { * @since v0.5.4 */ export let fromList = list => { - List.reduce((map, (key, val)) => set(key, val, map), empty, list) + List.reduce((map, _ (key, val)) => set(key, val, map), empty, list) } /** @@ -477,7 +477,7 @@ export let toList = map => { * @since v0.5.4 */ export let fromArray = array => { - Array.reduce((map, (key, val)) => set(key, val, map), empty, array) + Array.reduce((map, _ (key, val)) => set(key, val, map), empty, array) } /** diff --git a/stdlib/list.gr b/stdlib/list.gr index 165838c27b..1d4ac6a80e 100644 --- a/stdlib/list.gr +++ b/stdlib/list.gr @@ -648,7 +648,7 @@ export let zipWith = (fn, list1, list2) => { * @since v0.5.3 */ export let unzip = list => { - reduceRight(((first, second), (firstUnzipped, secondUnzipped)) => { + reduceRight((itemPair (first, second), acc (firstUnzipped, secondUnzipped)) => { ([first, ...firstUnzipped], [second, ...secondUnzipped]) }, ([], []), list) } diff --git a/stdlib/path.gr b/stdlib/path.gr index 03b46d72b8..d184da69e2 100644 --- a/stdlib/path.gr +++ b/stdlib/path.gr @@ -541,7 +541,7 @@ let dirsUp = x => if (x == 0) Zero else Positive // helper function for relativizing paths; handles the correct number of // directories to "go up" from one path to another -let rec relativizeDepth = ((up1, s1), (up2, s2)) => +let rec relativizeDepth = (_ (up1, s1), _ (up2, s2)) => match ((dirsUp(up1), dirsUp(up2), s1, s2)) { (Zero, Zero, [hd1, ...tl1], [hd2, ...tl2]) when hd1 == hd2 => relativizeDepth((0, tl1), (0, tl2)), diff --git a/stdlib/regex.gr b/stdlib/regex.gr index 668bedaad4..8f44afa52c 100644 --- a/stdlib/regex.gr +++ b/stdlib/regex.gr @@ -1342,7 +1342,7 @@ parseInteger = (buf: RegExBuf, n) => { } }, parseMode = (buf: RegExBuf) => { - let processState = ((cs, ml)) => { + let processState = (state (cs, ml)) => { let withCs = match (cs) { None => buf.config, Some(true) => configWithCaseSensitive(buf.config, true), @@ -1354,7 +1354,7 @@ parseMode = (buf: RegExBuf) => { Some(_) => configWithMultiLine(withCs, false), } } - let rec help = ((cs, ml)) => { + let rec help = (state (cs, ml)) => { if (!more(buf)) { Ok(processState((cs, ml))) } else { @@ -2076,7 +2076,7 @@ let rec validate = (re: ParsedRegularExpression, numGroups) => { */ let rangeUtf8EncodingLengths = (rng: CharRange) => { - let (min, max, _) = List.reduce(((min1, max1, n), (segStart, segEnd)) => { + let (min, max, _) = List.reduce((_ (min1, max1, n), _ (segStart, segEnd)) => { if (rangeOverlaps(rng, segStart, segEnd)) { 
(min(min1, n), max(max1, n), n + 1) } else { @@ -2111,7 +2111,7 @@ let rec validate = (re: ParsedRegularExpression, numGroups) => { (min(min1, min2), max(max1, max2), max(maxL1, maxL2)) }, RESequence(elts, _) => { - List.reduce(((accMin, accMax, accMaxL), e) => { + List.reduce((_ (accMin, accMax, accMaxL), e) => { let (minE, maxE, maxLE) = loop(e) (accMin + minE, accMax + maxE, max(accMaxL, maxLE)) }, (0, 0, 0), elts) @@ -3011,7 +3011,7 @@ let makeReferenceMatcher = eq => (n, next_m) => } } -let referenceMatcher = makeReferenceMatcher(((a, b)) => a == b) +let referenceMatcher = makeReferenceMatcher((_ (a, b)) => a == b) let asciiCharToLower = c => { if (Char.code('A') <= Char.code(c) && Char.code(c) <= Char.code('Z')) { Char.chr(Char.code(c) + 0x20) } else { c } } -let referenceMatcherCaseInsensitive = makeReferenceMatcher(((a, b)) => +let referenceMatcherCaseInsensitive = makeReferenceMatcher((_ (a, b)) => asciiCharToLower(a) == asciiCharToLower(b)) // Lookahead, Lookbehind, Conditionals, and Cut
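
The default-argument handling added to typecore.re above (the PExpLambda case) desugars each defaulted parameter into an Option-typed parameter plus a generated prelude: a fresh name from gen_opt() becomes the real parameter, and a match on Some/None either unwraps the supplied value or falls back to the default expression. Below is a minimal Grain-level sketch of that rewrite; the `factor=2` surface syntax and the idea that plain parameters are labeled by their own names are assumptions about the eventual parser support, not something this diff shows.

// Hypothetical surface form: `factor=2` declares a default.
let scale = (x, factor=2) => x * factor

// Roughly the shape the PExpLambda case produces: the defaulted parameter
// becomes an Option-typed one (named like gen_opt()'s "option_1"), and a
// generated prelude `let` rebinds the original name via a match on Some/None.
let scaleDesugared = (x, option_1) => {
  let factor = match (option_1) {
    Some(value) => value,
    None => 2,
  }
  x * factor
}

assert scaleDesugared(5, None) == 10    // omitted argument: the default applies
assert scaleDesugared(5, Some(3)) == 15 // supplied argument wins over the default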
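
On the call side, the rewritten type_application matches labeled actual arguments to parameters by name, fills unlabeled actuals positionally against the remaining non-optional parameters, wraps values supplied for optional parameters in Some (option_some), and passes None (option_none) for optionals that were omitted. Unknown labels, surplus positional arguments, and missing required parameters raise the new Apply_unknown_label, Apply_too_many_arguments, and Apply_too_few_arguments errors. The sketch below assumes the eventual `name=value` call-site syntax and name-derived parameter labels, neither of which appears in this diff; the function is made up for illustration.

// Hypothetical function: `str` and `len` are required, `fill` has a default
// (so internally its type becomes Option<String>).
let pad = (str, len, fill=" ") => str ++ fill // trivial body, just to exercise the argument plumbing

assert pad("abc", 5) == "abc "           // `fill` omitted: type_application supplies None, the default applies
assert pad(len=5, "abc") == "abc "       // `len` matched by label; "abc" fills the next unlabeled slot (`str`)
assert pad("abc", 5, fill="*") == "abc*" // a supplied optional value is wrapped in Some via option_some
// pad("abc", 5, color="*")  would be rejected with Apply_unknown_label (no parameter named `color`)
// pad("abc")                would be rejected with Apply_too_few_arguments (`len` is missing)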