Fate compiler (#62)
* Update to changes in icode format
* Start on new intermediate code for FATE
* Compile `let` to FATE
* Fix and improve broken bytecode optimisations (see the peephole sketch after this list)
* Basic tuple patterns
* Compile shallow matching on tuples
* Liveness analysis for local variables
* Fix minor bug
* Use RETURNR when possible
* Nicer debug printing
* Refactor optimization rules
* Compile tuple construction
* Improve instruction analysis and generalize some optimizations
* Compile nested pattern matching to case trees (only tuple and variable patterns so far)
* Reannotate and repeat the optimization pass once it is done. We could try harder to keep annotations precise, but that would be more error prone.
* Get rid of unnecessary STORE instructions
* Keep better track of liveness annotations when swapping instructions
* Limit the number of iterations of the optimization loop. It should finish in one iteration, but we shouldn't loop forever if there are bugs or corner cases where it doesn't.
* Pattern matching on booleans
* WIP: rewrite the case tree compiler to handle catch-alls (still has debug printing, and can't compile the result yet)
* Add missing case in renaming
* Compile case trees all the way to FATE assembly
* Simplify variable bindings in the environment
* Shortcut `let x = y in ...`
* Compile list literals
* Fix various bugs in pattern match compilation
* Pretty printer for fcode
* Fix renaming bug
* Another renaming bug
* Handle switch_body in optimizations
* Remove optimization for if-then-else
* Tag instructions in annotated scode
* Remove 'if' from fcode
* Fix dialyzer complaints
* Remove unused argument
* Compile pattern matching on integer literals
* Compile list patterns
* Use op_view in more places
* Allow leaving out fields from record patterns
* Compile records (patterns and construction)
* Compile record update
* Use the SETELEMENT instruction
* Compile variants
* Remove incorrect push for tuple switches
* Optimize matching on single-constructor datatypes
* Use the fact that SWITCH and JUMPIF can take args and vars
* String literals and pattern matching on them
* Compile character literals
* Minor refactoring of op instruction handling
* Compile address literals
* Get rid of unit in the AST
* Unary operators
* Compile function calls (to fully saturated top-level functions only)
* Fix breakage after unary operators
* Variables are now lists of names in fcode
* Pretty printing for function calls
* Use STORE ?a instead of PUSH during optimizations
* No-op fcode optimization pass
* Some constant propagation optimizations
* Case-on-constructor optimization
* Fix minor bugs
* Compile all the operators
* Compile maps
* Simplify JUMPIF on true/false
* Fix left-over reference to STR_EQ
* Add compile-time evaluation for more operators
* Distinguish local vars and top-level names already in fcode
* Compile builtins
* Compile bytes(N) (to FATE strings for now)
* Improve inlining of PUSH
* Fix name resolution bug
* Change map_get/set to operators in fcode
* Compile lambdas and higher-order functions
* Optimize single-variable closure environments
* Handle unapplied builtins and top-level functions
* Add missing case in the fcode pretty printer
* Fix variable binding bug in the fcode compiler
* Compiler side of state updates (no support in FATE yet, though)
* Compile statements
* Compile events (no FATE support for events yet)
* Compile remote calls
* Clearer distinction between applied and unapplied top-level things (def/builtin) in fcode
* Tag literals in fcode to make the code cleaner
* We now have block hash at height in FATE
* Update the aebytecode commit
* Get rid of catch-all todos
* Jump through some hoops to please Dialyzer
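For orientation: several items above ("Fix and improve broken bytecode optimisations", "Refactor optimization rules", "Use STORE ?a instead of PUSH during optimizations") refer to peephole rewriting of adjacent stack-machine instructions. The sketch below is a minimal, self-contained illustration in the style of the simpl_top/2 rules of the removed aeso_icode_to_fate module shown further down; the module name peephole_sketch is hypothetical and the rule set is illustrative only, not the compiler's actual optimization pass.

%% peephole_sketch.erl -- illustrative only (not part of this commit).
-module(peephole_sketch).
-export([simplify/1]).

-define(i(X), {immediate, X}).
-define(a,    {stack, 0}).

%% Walk the instruction list, letting every rewrite see the already
%% simplified code that follows it.
simplify([])         -> [];
simplify([I | Code]) -> simpl_top(I, simplify(Code)).

%% Adding an immediate 0 is a no-op.
simpl_top({'ADD', _, ?i(0), _}, Code) -> Code;
%% PUSH n followed by an ADD on stack arguments becomes an ADD with an
%% immediate operand, saving the PUSH.
simpl_top({'PUSH', ?a, ?i(N)}, [{'ADD', ?a, ?a, ?a} | Code]) ->
    simpl_top({'ADD', ?a, ?i(N), ?a}, Code);
%% Two immediate ADDs in a row fold into one at compile time.
simpl_top({'ADD', ?a, ?i(N), ?a}, [{'ADD', ?a, ?i(M), ?a} | Code]) ->
    simpl_top({'ADD', ?a, ?i(N + M), ?a}, Code);
simpl_top(I, Code) -> [I | Code].

%% Example:
%% 1> peephole_sketch:simplify([{'PUSH', {stack,0}, {immediate,1}},
%%                              {'ADD',  {stack,0}, {stack,0},     {stack,0}},
%%                              {'ADD',  {stack,0}, {immediate,2}, {stack,0}}]).
%% [{'ADD',{stack,0},{immediate,3},{stack,0}}]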
parent 71b97cba62
commit 0aa1c89556
@@ -3,7 +3,7 @@
{erl_opts, [debug_info]}.

{deps, [ {aebytecode, {git, "https://github.com/aeternity/aebytecode.git",
-                      {ref, "1526ad3"}}}
+                      {ref, "2555868"}}}
       , {getopt, "1.0.1"}
       , {jsx, {git, "https://github.com/talentdeficit/jsx.git",
                {tag, "2.8.0"}}}
@@ -1,11 +1,11 @@
{"1.1.0",
[{<<"aebytecode">>,
  {git,"https://github.com/aeternity/aebytecode.git",
-      {ref,"1526ad3bf057e72a1714aea0430b001bd1d576c9"}},
+      {ref,"2555868990ac2a08876e86b1b798b4750273591f"}},
  0},
 {<<"aeserialization">>,
  {git,"https://github.com/aeternity/aeserialization.git",
-      {ref,"6dce265753af4e651f77746e77ea125145c85dd3"}},
+      {ref,"816bf994ffb5cee218c3f22dc5fea296c9e0882e"}},
  1},
 {<<"base58">>,
  {git,"https://github.com/aeternity/erl-base58.git",
@@ -103,6 +103,7 @@
        , typevars = unrestricted :: unrestricted | [name()]
        , fields = #{} :: #{ name() => [field_info()] } %% fields are global
        , namespace = [] :: qname()
+       , in_pattern = false :: boolean()
        }).

-type env() :: #env{}.
@@ -358,7 +359,7 @@ global_env() ->
    Fun1 = fun(S, T) -> Fun([S], T) end,
    TVar = fun(X) -> {tvar, Ann, "'" ++ X} end,
    SignId = {id, Ann, "signature"},
-   SignDef = {tuple, Ann, [{int, Ann, 0}, {int, Ann, 0}]},
+   SignDef = {bytes, Ann, <<0:64/unit:8>>},
    Signature = {named_arg_t, Ann, SignId, SignId, {typed, Ann, SignDef, SignId}},
    SignFun = fun(Ts, T) -> {type_sig, Ann, [Signature], Ts, T} end,
    TTL = {qid, Ann, ["Chain", "ttl"]},
@@ -378,7 +379,7 @@ global_env() ->
                 %% Abort
                 {"abort", Fun1(String, A)}])
    , types = MkDefs(
-               [{"int", 0}, {"bool", 0}, {"string", 0}, {"address", 0},
+               [{"int", 0}, {"bool", 0}, {"char", 0}, {"string", 0}, {"address", 0},
                {"hash", {[], {alias_t, Bytes(32)}}},
                {"signature", {[], {alias_t, Bytes(64)}}},
                {"bits", 0},
@@ -908,6 +909,8 @@ infer_expr(_Env, Body={bool, As, _}) ->
    {typed, As, Body, {id, As, "bool"}};
infer_expr(_Env, Body={int, As, _}) ->
    {typed, As, Body, {id, As, "int"}};
+infer_expr(_Env, Body={char, As, _}) ->
+    {typed, As, Body, {id, As, "char"}};
infer_expr(_Env, Body={string, As, _}) ->
    {typed, As, Body, {id, As, "string"}};
infer_expr(_Env, Body={bytes, As, Bin}) ->
@@ -935,8 +938,6 @@ infer_expr(Env, Id = {Tag, As, _}) when Tag == id; Tag == qid ->
infer_expr(Env, Id = {Tag, As, _}) when Tag == con; Tag == qcon ->
    {QName, Type} = lookup_name(Env, As, Id, [freshen]),
    {typed, As, QName, Type};
-infer_expr(Env, {unit, As}) ->
-    infer_expr(Env, {tuple, As, []});
infer_expr(Env, {tuple, As, Cpts}) ->
    NewCpts = [infer_expr(Env, C) || C <- Cpts],
    CptTypes = [T || {typed, _, _, T} <- NewCpts],
@@ -990,7 +991,7 @@ infer_expr(Env, {record, Attrs, Fields}) ->
    constrain([ #record_create_constraint{
                    record_t = RecordType1,
                    fields = [ FieldName || {field, _, [{proj, _, FieldName}], _} <- Fields ],
-                   context = Attrs } ] ++
+                   context = Attrs } || not Env#env.in_pattern ] ++
              [begin
                 [{proj, _, FieldName}] = LV,
                 #field_constraint{
@@ -1119,7 +1120,7 @@ infer_case(Env, Attrs, Pattern, ExprType, Branch, SwitchType) ->
        [] -> ok;
        Nonlinear -> type_error({non_linear_pattern, Pattern, lists:usort(Nonlinear)})
    end,
-   NewEnv = bind_vars([{Var, fresh_uvar(Ann)} || Var = {id, Ann, _} <- Vars], Env),
+   NewEnv = bind_vars([{Var, fresh_uvar(Ann)} || Var = {id, Ann, _} <- Vars], Env#env{ in_pattern = true }),
    NewPattern = {typed, _, _, PatType} = infer_expr(NewEnv, Pattern),
    NewBranch = check_expr(NewEnv, Branch, SwitchType),
    unify(Env, PatType, ExprType, {case_pat, Pattern, PatType, ExprType}),
@@ -1178,6 +1179,8 @@ infer_prefix({IntOp,As}) when IntOp =:= '-' ->

free_vars({int, _, _}) ->
    [];
+free_vars({char, _, _}) ->
+    [];
free_vars({string, _, _}) ->
    [];
free_vars({bool, _, _}) ->
src/aeso_ast_to_fcode.erl (new normal file, 1294 lines): file diff suppressed because it is too large.
src/aeso_fcode_to_fate.erl (new normal file, 1360 lines): file diff suppressed because it is too large.
@@ -1,299 +0,0 @@
%%%-------------------------------------------------------------------
%%% @author Ulf Norell
%%% @copyright (C) 2019, Aeternity Anstalt
%%% @doc
%%%     Fate backend for Sophia compiler
%%% @end
%%% Created : 11 Jan 2019
%%%
%%%-------------------------------------------------------------------
-module(aeso_icode_to_fate).

-include("aeso_icode.hrl").

-export([compile/2]).

%% -- Preamble ---------------------------------------------------------------

-define(TODO(What), error({todo, ?FILE, ?LINE, ?FUNCTION_NAME, What})).

-define(i(__X__), {immediate, __X__}).
-define(a, {stack, 0}).

-record(env, { args = [], stack = [], tailpos = true }).

%% -- Debugging --------------------------------------------------------------

%% debug(Options, Fmt) -> debug(Options, Fmt, []).
debug(Options, Fmt, Args) ->
    case proplists:get_value(debug, Options, true) of
        true -> io:format(Fmt, Args);
        false -> ok
    end.

%% -- Main -------------------------------------------------------------------

%% @doc Main entry point.
compile(ICode, Options) ->
    #{ contract_name := _ContractName,
       state_type := _StateType,
       functions := Functions } = ICode,
    SFuns = functions_to_scode(Functions, Options),
    SFuns1 = optimize_scode(SFuns, Options),
    to_basic_blocks(SFuns1, Options).

functions_to_scode(Functions, Options) ->
    maps:from_list(
        [ {list_to_binary(Name), function_to_scode(Name, Args, Body, Type, Options)}
        || {Name, _Ann, Args, Body, Type} <- Functions, Name /= "init" ]). %% TODO: skip init for now

function_to_scode(Name, Args, Body, Type, Options) ->
    debug(Options, "Compiling ~p ~p : ~p ->\n ~p\n", [Name, Args, Type, Body]),
    ArgTypes = [ icode_type_to_fate(T) || {_, T} <- Args ],
    ResType = icode_type_to_fate(Type),
    SCode = to_scode(init_env(Args), Body),
    debug(Options, " scode: ~p\n", [SCode]),
    {{ArgTypes, ResType}, SCode}.

%% -- Types ------------------------------------------------------------------

%% TODO: the Fate types don't seem to be specified anywhere...
icode_type_to_fate(word)   -> integer;
icode_type_to_fate(string) -> string;
icode_type_to_fate({tuple, Types}) ->
    {tuple, lists:map(fun icode_type_to_fate/1, Types)};
icode_type_to_fate({list, Type}) ->
    {list, icode_type_to_fate(Type)};
icode_type_to_fate(typerep) -> typerep;
icode_type_to_fate(Type) -> ?TODO(Type).

%% -- Phase I ----------------------------------------------------------------
%% Icode to structured assembly

%% -- Environment functions --

init_env(Args) ->
    #env{ args = Args, stack = [], tailpos = true }.

push_env(Type, Env) ->
    Env#env{ stack = [{"_", Type} | Env#env.stack] }.

notail(Env) -> Env#env{ tailpos = false }.

lookup_var(#env{ args = Args, stack = S }, X) ->
    case {keyfind_index(X, 1, S), keyfind_index(X, 1, Args)} of
        {false, false} -> false;
        {false, Arg}   -> {arg, Arg};
        {Local, _}     -> {stack, Local}
    end.

%% -- The compiler --

to_scode(_Env, #integer{ value = N }) ->
    [aeb_fate_code:push(?i(N))]; %% Doesn't exist (yet), translated by desugaring

to_scode(Env, #var_ref{name = X}) ->
    case lookup_var(Env, X) of
        false      -> error({unbound_variable, X, Env});
        {stack, N} -> [aeb_fate_code:dup(?i(N))];
        {arg, N}   -> [aeb_fate_code:push({arg, N})]
    end;

to_scode(Env, #binop{ op = Op, left = A, right = B }) ->
    [ to_scode(notail(Env), B)
    , to_scode(push_env(binop_type_r(Op), Env), A)
    , binop_to_scode(Op) ];

to_scode(Env, #ifte{decision = Dec, then = Then, else = Else}) ->
    [ to_scode(notail(Env), Dec)
    , {ifte, to_scode(Env, Then), to_scode(Env, Else)} ];

to_scode(_Env, Icode) -> ?TODO(Icode).

%% -- Operators --

binop_types('+')  -> {word, word};
binop_types('-')  -> {word, word};
binop_types('==') -> {word, word};
binop_types(Op)   -> ?TODO(Op).

%% binop_type_l(Op) -> element(1, binop_types(Op)).
binop_type_r(Op) -> element(2, binop_types(Op)).

binop_to_scode('+')  -> add_a_a_a(); %% Optimization introduces other variants
binop_to_scode('-')  -> sub_a_a_a();
binop_to_scode('==') -> eq_a_a_a().
% binop_to_scode(Op) -> ?TODO(Op).

add_a_a_a() -> aeb_fate_code:add(?a, ?a, ?a).
sub_a_a_a() -> aeb_fate_code:sub(?a, ?a, ?a).
eq_a_a_a()  -> aeb_fate_code:eq(?a, ?a, ?a).

%% -- Phase II ---------------------------------------------------------------
%% Optimize

optimize_scode(Funs, Options) ->
    maps:map(fun(Name, Def) -> optimize_fun(Funs, Name, Def, Options) end,
             Funs).

flatten(Code) -> lists:map(fun flatten_s/1, lists:flatten(Code)).

flatten_s({ifte, Then, Else}) -> {ifte, flatten(Then), flatten(Else)};
flatten_s(I) -> I.

optimize_fun(_Funs, Name, {{Args, Res}, Code}, Options) ->
    Code0 = flatten(Code),
    debug(Options, "Optimizing ~s\n", [Name]),
    debug(Options, " original : ~p\n", [Code0]),
    Code1 = simplify(Code0),
    debug(Options, " simplified: ~p\n", [Code1]),
    Code2 = desugar(Code1),
    debug(Options, " desugared : ~p\n", [Code2]),
    {{Args, Res}, Code2}.

simplify([]) -> [];
simplify([I | Code]) ->
    simpl_top(simpl_s(I), simplify(Code)).

simpl_s({ifte, Then, Else}) ->
    {ifte, simplify(Then), simplify(Else)};
simpl_s(I) -> I.

%% add_i 0 --> nop
simpl_top({'ADD', _, ?i(0), _}, Code) -> Code;
%% push n, add_a --> add_i n
simpl_top({'PUSH', ?a, ?i(N)},
          [{'ADD', ?a, ?a, ?a} | Code]) ->
    simpl_top(aeb_fate_code:add(?a, ?i(N), ?a), Code);
%% push n, add_i m --> add_i (n + m)
simpl_top({'PUSH', ?a, ?i(N)}, [{'ADD', ?a, ?i(M), ?a} | Code]) ->
    simpl_top(aeb_fate_code:push(?i(N + M)), Code);
%% add_i n, add_i m --> add_i (n + m)
simpl_top({'ADD', ?a, ?i(N), ?a}, [{'ADD', ?a, ?i(M), ?a} | Code]) ->
    simpl_top({'ADD', ?a, ?i(N + M), ?a}, Code);

simpl_top(I, Code) -> [I | Code].

%% Desugar and specialize
desugar({'ADD', ?a, ?i(1), ?a}) -> [aeb_fate_code:inc()];
desugar({ifte, Then, Else}) -> [{ifte, desugar(Then), desugar(Else)}];
desugar(Code) when is_list(Code) ->
    lists:flatmap(fun desugar/1, Code);
desugar(I) -> [I].

%% -- Phase III --------------------------------------------------------------
%% Constructing basic blocks

to_basic_blocks(Funs, Options) ->
    maps:from_list([ {Name, {{Args, Res},
                             bb(Name, Code ++ [aeb_fate_code:return()], Options)}}
                   || {Name, {{Args, Res}, Code}} <- maps:to_list(Funs) ]).

bb(Name, Code, Options) ->
    Blocks0 = blocks(Code),
    Blocks = optimize_blocks(Blocks0),
    Labels = maps:from_list([ {Ref, I} || {I, {Ref, _}} <- with_ixs(Blocks) ]),
    BBs = [ set_labels(Labels, B) || B <- Blocks ],
    debug(Options, "Final code for ~s:\n ~p\n", [Name, BBs]),
    maps:from_list(BBs).

%% -- Break up scode into basic blocks --

blocks(Code) ->
    Top = make_ref(),
    blocks([{Top, Code}], []).

blocks([], Acc) ->
    lists:reverse(Acc);
blocks([{Ref, Code} | Blocks], Acc) ->
    block(Ref, Code, [], Blocks, Acc).

block(Ref, [], CodeAcc, Blocks, BlockAcc) ->
    blocks(Blocks, [{Ref, lists:reverse(CodeAcc)} | BlockAcc]);
block(Ref, [{ifte, Then, Else} | Code], Acc, Blocks, BlockAcc) ->
    ThenLbl = make_ref(),
    RestLbl = make_ref(),
    block(Ref, Else ++ [{jump, RestLbl}],
          [{jumpif, ThenLbl} | Acc],
          [{ThenLbl, Then ++ [{jump, RestLbl}]},
           {RestLbl, Code} | Blocks],
          BlockAcc);
block(Ref, [I | Code], Acc, Blocks, BlockAcc) ->
    block(Ref, Code, [I | Acc], Blocks, BlockAcc).

%% -- Reorder, inline, and remove dead blocks --

optimize_blocks(Blocks) ->
    %% We need to look at the last instruction a lot, so reverse all blocks.
    Rev = fun(Bs) -> [ {Ref, lists:reverse(Code)} || {Ref, Code} <- Bs ] end,
    RBlocks = Rev(Blocks),
    RBlockMap = maps:from_list(RBlocks),
    RBlocks1 = reorder_blocks(RBlocks, []),
    RBlocks2 = [ {Ref, inline_block(RBlockMap, Ref, Code)} || {Ref, Code} <- RBlocks1 ],
    RBlocks3 = remove_dead_blocks(RBlocks2),
    Rev(RBlocks3).

%% Choose the next block based on the final jump.
reorder_blocks([], Acc) ->
    lists:reverse(Acc);
reorder_blocks([{Ref, Code} | Blocks], Acc) ->
    reorder_blocks(Ref, Code, Blocks, Acc).

reorder_blocks(Ref, Code, Blocks, Acc) ->
    Acc1 = [{Ref, Code} | Acc],
    case Code of
        ['RETURN'|_]       -> reorder_blocks(Blocks, Acc1);
        [{'RETURNR', _}|_] -> reorder_blocks(Blocks, Acc1);
        [{jump, L}|_]      ->
            NotL = fun({L1, _}) -> L1 /= L end,
            case lists:splitwith(NotL, Blocks) of
                {Blocks1, [{L, Code1} | Blocks2]} ->
                    reorder_blocks(L, Code1, Blocks1 ++ Blocks2, Acc1);
                {_, []} -> reorder_blocks(Blocks, Acc1)
            end
    end.

%% Inline short blocks (≤ 2 instructions)
inline_block(BlockMap, Ref, [{jump, L} | Code] = Code0) when L /= Ref ->
    case maps:get(L, BlockMap, nocode) of
        Dest when length(Dest) < 3 ->
            %% Remove Ref to avoid infinite loops
            inline_block(maps:remove(Ref, BlockMap), L, Dest) ++ Code;
        _ -> Code0
    end;
inline_block(_, _, Code) -> Code.

%% Remove unused blocks
remove_dead_blocks(Blocks = [{Top, _} | _]) ->
    BlockMap = maps:from_list(Blocks),
    LiveBlocks = chase_labels([Top], BlockMap, #{}),
    [ B || B = {L, _} <- Blocks, maps:is_key(L, LiveBlocks) ].

chase_labels([], _, Live) -> Live;
chase_labels([L | Ls], Map, Live) ->
    Code = maps:get(L, Map),
    Jump = fun({jump, A})   -> [A || not maps:is_key(A, Live)];
              ({jumpif, A}) -> [A || not maps:is_key(A, Live)];
              (_)           -> [] end,
    New = lists:flatmap(Jump, Code),
    chase_labels(New ++ Ls, Map, Live#{ L => true }).


%% -- Translate label refs to indices --

set_labels(Labels, {Ref, Code}) when is_reference(Ref) ->
    {maps:get(Ref, Labels), [ set_labels(Labels, I) || I <- Code ]};
set_labels(Labels, {jump, Ref})   -> aeb_fate_code:jump(maps:get(Ref, Labels));
set_labels(Labels, {jumpif, Ref}) -> aeb_fate_code:jumpif(?a, maps:get(Ref, Labels));
set_labels(_, I) -> I.

%% -- Helpers ----------------------------------------------------------------

with_ixs(Xs) ->
    lists:zip(lists:seq(0, length(Xs) - 1), Xs).

keyfind_index(X, J, Xs) ->
    case [ I || {I, E} <- with_ixs(Xs), X == element(J, E) ] of
        [I | _] -> I;
        []      -> false
    end.

@@ -441,7 +441,7 @@ build_if(Ann, Cond, Then, [{elif, Ann1, Cond1, Then1} | Elses]) ->
build_if(Ann, Cond, Then, [{else, _Ann, Else}]) ->
    {'if', Ann, Cond, Then, Else};
build_if(Ann, Cond, Then, []) ->
-   {'if', Ann, Cond, Then, {unit, [{origin, system}]}}.
+   {'if', Ann, Cond, Then, {tuple, [{origin, system}], []}}.

else_branches([Elif = {elif, _, _, _} | Stmts], Acc) ->
    else_branches(Stmts, [Elif | Acc]);
@@ -457,7 +457,6 @@ fun_t(Domains, Type) ->
    lists:foldr(fun({Dom, Ann}, T) -> {fun_t, Ann, [], Dom, T} end,
                Type, Domains).

-tuple_e(Ann, []) -> {unit, Ann};
tuple_e(_Ann, [Expr]) -> Expr; %% Not a tuple
tuple_e(Ann, Exprs) -> {tuple, Ann, Exprs}.

@@ -478,7 +477,6 @@ parse_pattern({record, Ann, Fs}) ->
    {record, Ann, lists:map(fun parse_field_pattern/1, Fs)};
parse_pattern(E = {con, _, _}) -> E;
parse_pattern(E = {id, _, _}) -> E;
-parse_pattern(E = {unit, _}) -> E;
parse_pattern(E = {int, _, _}) -> E;
parse_pattern(E = {bool, _, _}) -> E;
parse_pattern(E = {bytes, _, _}) -> E;
@@ -332,7 +332,6 @@ expr_p(_, {Type, _, Bin})
               Type == oracle_pubkey;
               Type == oracle_query_id ->
    text(binary_to_list(aeser_api_encoder:encode(Type, Bin)));
-expr_p(_, {unit, _}) -> text("()");
expr_p(_, {string, _, S}) -> term(binary_to_list(S));
expr_p(_, {char, _, C}) ->
    case C of
@@ -75,7 +75,6 @@
    | {contract_pubkey, binary()}
    | {oracle_pubkey, binary()}
    | {oracle_query_id, binary()}
-   | {unit, ann()}
    | {string, ann(), binary()}
    | {char, ann(), integer()}.
