Showing 12 changed files with 3,467 additions and 111 deletions.
@@ -0,0 +1,154 @@
import "codegen/tree"
import "datatypes"
import "codegen/instr"
import "codegen/instr_count"
import "codegen/symtab"
import "codegen/register"
import "codegen/preprocess"
import "codegen/optimizer"
import "codegen/postprocess"
-- import "bridge"

-- Frontend bridge entry
-- entry make_from_frontend [n]
--     (node_types: [n]front_node_type)
--     (node_res_types: [n]front_data_type)
--     (node_parents: [n]front_node_idx_type)
--     (node_depth: [n]front_depth_type)
--     (node_child_idx : [n]front_child_idx_type)
--     (node_data: [n]front_node_data_type)
--     (max_depth: front_depth_type) : Tree[n] =
--     backend_convert node_types node_res_types node_parents node_depth node_child_idx node_data max_depth

let make_variable (data_type: u8) (offset: u32) : Variable =
    {
        decl_type = i32.u8 data_type,
        offset = offset
    }

let make_node (node_type: u8) (data_type: u8, parent: i32, depth: i32, child_idx: i32, node_data: u32) : Node =
    {
        node_type = i32.u8 node_type,
        resulting_type = i32.u8 data_type,
        parent = parent,
        depth = depth,
        child_idx = child_idx,
        node_data = node_data
    }

let make_functab (id: u32) (start: u32) (size: u32) =
    {
        id = id,
        start = start,
        size = size
    }

-- Data structure rewrite functions
entry make_symtab [m] (data_types: [m]u8) (offsets: [m]u32) : Symtab[m] =
    {
        variables = map2 make_variable data_types offsets
    }
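
-- make_tree builds the backend Tree from parallel arrays: make_node takes the node
-- type curried and the remaining five fields as a single tuple, which is what lets it
-- be used as `map2 make_node node_types` over the zip5 result below.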

entry make_tree [n] (max_depth: i32) (node_types: [n]u8) (data_types: [n]u8) (parents: [n]i32)
                (depth: [n]i32) (child_idx: [n]i32) (node_data: [n]u32): Tree[n] =
    {
        nodes = zip5 data_types parents depth child_idx node_data |> map2 make_node node_types,
        max_depth = max_depth
    }

-- Stage 1: preprocessor
entry stage_preprocess [n] (tree: Tree[n]) : (Tree[n]) =
    tree |> preprocess_tree

-- Stage 2: instruction counting
entry stage_instr_count [n] (tree: Tree[n]) : [n]u32 =
    instr_count tree

-- Stage 2.2: function table creation
entry stage_instr_count_make_function_table [n] (tree: Tree[n]) (instr_offset: [n]u32) =
    get_function_table tree instr_offset

entry stage_compact_functab [n] (func_id: [n]u32) (func_start: [n]u32) (func_size: [n]u32) : [n]FuncInfo =
    map3 make_functab func_id func_start func_size

let split_instr (instr: Instr) =
    (instr.instr, instr.rd, instr.rs1, instr.rs2, instr.jt)
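
-- stage_instr_gen drives code generation proper: max_instrs comes from the last entry
-- of the per-node instruction offsets, func_ends is derived per function as
-- start + size, and everything is handed to compile_tree.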

-- Stage 3: instruction gen
entry stage_instr_gen [n] [m] [k] (tree: Tree[n]) (symtab: Symtab[m]) (instr_offset: [n]u32) (func_tab: [k]FuncInfo) : []Instr =
    let func_start = map (.start) func_tab
    let func_size = map (.size) func_tab
    let max_instrs = if n == 0 then 0 else i64.u32 instr_offset[n-1]
    let instr_offset_i64 = map i64.u32 instr_offset
    let func_ends = iota k |> map (\i -> func_start[i] + func_size[i])
    in
    compile_tree tree symtab instr_offset_i64 max_instrs func_start func_ends

let make_instr (instr: u32) (rd: i64) (rs1: i64) (rs2: i64) (jt: u32) =
    {
        instr = instr,
        rd = rd,
        rs1 = rs1,
        rs2 = rs2,
        jt = jt
    }

let split_functab (func_info: FuncInfo) =
    (func_info.id, func_info.start, func_info.size)
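
-- fix_func_tab remaps a FuncInfo from old instruction indices to the offsets produced
-- by a later pass (it is applied after register allocation and after jump finalisation).
-- Illustrative example with made-up values: for instr_offsets = [0, 3, 5, 9, 12] and a
-- FuncInfo with start = 2 and size = 2, the function now spans offsets 5 to 12, so the
-- result is start = 5, size = 7.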

let fix_func_tab [n] (instr_offsets: [n]i32) (func_info: FuncInfo) =
    let func_start = u32.i32 instr_offsets[i64.u32 func_info.start]
    let func_end_loc = i64.u32 (func_info.start + func_info.size)
    let func_end = if func_end_loc >= n then u32.i32 instr_offsets[n-1] + 1 else u32.i32 instr_offsets[func_end_loc]
    let func_size = func_end - func_start
    in
    {
        id = func_info.id,
        start = func_start,
        size = func_size
    }

-- Stage 4: optimizer
entry stage_optimize [n] [m] (instr_data: [n]Instr) (func_tab: [m]FuncInfo) : ([n]Instr, [m]FuncInfo, [n]bool) =
    -- let instr_data = map5 make_instr instrs rd rs1 rs2 jt
    -- let func_tab = map3 make_functab func_id func_start func_size
    let (instrs, functab, optimize_away) = optimize instr_data func_tab
    -- let (res_instr, res_rd, res_rs1, res_rs2, res_jt) = instrs |> map split_instr |> unzip5
    -- let (res_id, res_start, res_size) = functab |> map split_functab |> unzip3
    in
    (instrs, functab, optimize_away)
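
-- In stage_regalloc below, register_alloc also returns the remapped instruction
-- offsets; fix_func_tab uses them to keep the function table consistent, and
-- fill_stack_frames then fills in stack frames from the reported overflows and
-- lifetime mask.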

-- Stage 5-6: regalloc + instr-split
entry stage_regalloc [n] [m] (instrs: [n]Instr) (func_tab: [m]FuncInfo) (func_symbols: [m]u32) (optimize_away: [n]bool) : ([]Instr, [m]FuncInfo) =
    -- let instrs = map5 make_instr instrs rd rs1 rs2 jt
    -- let func_tab = map3 make_functab func_id func_start func_size
    let (instr_offset, lifetime_mask, registers, overflows, swapped, instrs) = (instrs, func_tab, optimize_away, func_symbols) |> register_alloc
    let func_tab = map (fix_func_tab instr_offset) func_tab
    let new_instrs = fill_stack_frames func_tab func_symbols overflows instrs lifetime_mask
    -- let (res_instr, res_rd, res_rs1, res_rs2, res_jt) = new_instrs |> map split_instr |> unzip5
    in
    (new_instrs, func_tab)

-- Stage 7: jump fix
entry stage_fix_jumps [n] [m] (instrs: [n]Instr) (func_tab: [m]FuncInfo) : ([]Instr, [m]u32, [m]u32, [m]u32) =
    let (instrs, instr_offset) = instrs |> finalize_jumps
    let func_tab = map (fix_func_tab instr_offset) func_tab
    let (res_id, res_start, res_size) = func_tab |> map split_functab |> unzip3
    in
    (instrs, res_id, res_start, res_size)

-- Stage 8: postprocess
entry stage_postprocess [n] (instrs: [n]Instr) =
    -- let instrs = map5 make_instr instrs rd rs1 rs2 jt
    let result = instrs |> finalize_instr
    let (res_instrs, _, _, _, _) = result |> map split_instr |> unzip5
    in
    res_instrs
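
-- Host-side pipeline order implied by the stage numbers above (sketch): make_symtab /
-- make_tree, stage_preprocess, stage_instr_count, stage_instr_count_make_function_table,
-- stage_compact_functab, stage_instr_gen, stage_optimize, stage_regalloc,
-- stage_fix_jumps, stage_postprocess.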

@@ -0,0 +1,88 @@
import "tree"
import "datatypes"

type front_node_type = i32
type front_data_type = i32
type front_node_idx_type = i32
type front_depth_type = i32
type front_child_idx_type = i32
type front_node_data_type = u32

let NODE_TYPE_LOOKUP : []NodeType = [
    -- TODO: fill in table to map node types
    0
]

let DATA_TYPE_LOOKUP : []DataType = [
    0, --Invalid
    1, --Void
    2, --Int
    3, --Float
    4, --Int_ref
    5  --Float_ref
]
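
-- These lookup tables translate the frontend's integer enum values into the backend's
-- NodeType and DataType values by position; NODE_TYPE_LOOKUP is still a placeholder
-- (see the TODO above).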

let convert_node_type (node_type: front_node_type) =
    NODE_TYPE_LOOKUP[i64.i32 node_type]

let convert_data_type (data_type: front_data_type) =
    DATA_TYPE_LOOKUP[i64.i32 data_type]

let convert_node_idx (idx: front_node_idx_type) =
    idx

let convert_depth (depth: front_depth_type) =
    depth

let convert_child_idx (child_idx: front_child_idx_type) =
    child_idx

let convert_node_data (node_data: front_node_data_type) =
    node_data

let backend_convert_node (
    node_type: front_node_type,
    data_type: front_data_type,
    parent: front_node_idx_type,
    depth: front_depth_type,
    child_idx: front_child_idx_type,
    data: front_node_data_type) : Node =
    {
        node_type = convert_node_type node_type,
        resulting_type = convert_data_type data_type,
        parent = convert_node_idx parent,
        depth = convert_depth depth,
        child_idx = convert_child_idx child_idx,
        node_data = convert_node_data data
    }
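
-- The Futhark prelude provides zips only up to zip5, so a zip6 is built here from zip5
-- and map2; e.g. zip6 [1] [2] [3] [4] [5] [6] yields [(1, 2, 3, 4, 5, 6)].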

let zip6 [n] 'a 'b 'c 'd 'e 'f
         (x0: [n]a)
         (x1: [n]b)
         (x2: [n]c)
         (x3: [n]d)
         (x4: [n]e)
         (x5: [n]f) =
    let c1 = zip5 x0 x1 x2 x3 x4
    in
    map2 (\(t0, t1, t2, t3, t4) t5 -> (t0, t1, t2, t3, t4, t5)) c1 x5

let backend_convert [n]
    (node_types: [n]front_node_type)
    (node_res_types: [n]front_data_type)
    (node_parents: [n]front_node_idx_type)
    (node_depth: [n]front_depth_type)
    (node_child_idx : [n]front_child_idx_type)
    (node_data: [n]front_node_data_type)
    (max_depth: front_depth_type) : Tree[n] =
    let input = zip6 node_types node_res_types node_parents node_depth node_child_idx node_data
    let nodes: [n]Node = input |> map backend_convert_node
    in {
        nodes = nodes,
        max_depth = convert_depth max_depth
    }
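
-- backend_convert mirrors the commented-out make_from_frontend entry in the main file:
-- it zips the six parallel frontend arrays, converts each node with
-- backend_convert_node, and wraps the result together with the converted max_depth in
-- a Tree.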