Encode/decode AST into metadata, re-instantiate inlined items

Niko Matsakis 2012-02-14 15:21:53 -08:00
parent be9914625b
commit f3ca50c9ca
33 changed files with 10751 additions and 1019 deletions


@ -6,7 +6,7 @@ import syntax::parse::{parser};
import syntax::{ast, codemap};
import front::attr;
import middle::{trans, resolve, freevars, kind, ty, typeck, fn_usage,
last_use, lint};
last_use, lint, inline};
import syntax::print::{pp, pprust};
import util::{ppaux, filesearch};
import back::link;
@ -157,7 +157,7 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
bind middle::check_alt::check_crate(ty_cx, crate));
time(time_passes, "typestate checking",
bind middle::tstate::ck::check_crate(ty_cx, crate));
let mut_map =
let mutbl_map =
time(time_passes, "mutability checking",
bind middle::mutbl::check_crate(ty_cx, crate));
let (copy_map, ref_map) =
@ -173,12 +173,19 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
if upto == cu_no_trans { ret {crate: crate, tcx: some(ty_cx)}; }
let outputs = option::get(outputs);
let maps = {mutbl_map: mutbl_map, copy_map: copy_map,
last_uses: last_uses, impl_map: impl_map,
method_map: method_map, dict_map: dict_map};
let inline_map =
time(time_passes, "inline",
bind inline::instantiate_inlines(ty_cx, maps, crate));
let (llmod, link_meta) =
time(time_passes, "translation",
bind trans::base::trans_crate(
sess, crate, ty_cx, outputs.obj_filename, exp_map, ast_map,
mut_map, copy_map, last_uses, impl_map, method_map,
dict_map));
sess, crate, ty_cx, outputs.obj_filename,
exp_map, maps, inline_map));
time(time_passes, "LLVM passes",
bind link::write::run_passes(sess, llmod, outputs.obj_filename));

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -76,6 +76,30 @@ const tag_path_len: uint = 0x41u;
const tag_path_elt_mod: uint = 0x42u;
const tag_path_elt_name: uint = 0x43u;
// used to encode crate_ctxt side tables
enum astencode_tag { // Reserves 0x50 -- 0x6f
tag_ast = 0x50,
tag_tree = 0x51,
tag_id_range = 0x52,
tag_table = 0x53,
tag_table_id = 0x54,
tag_table_val = 0x55,
tag_table_def = 0x56,
tag_table_node_type = 0x57,
tag_table_node_type_subst = 0x58,
tag_table_freevars = 0x59,
tag_table_tcache,
tag_table_param_bounds,
tag_table_inferred_modes,
tag_table_mutbl,
tag_table_copy,
tag_table_last_use,
tag_table_method_map,
tag_table_dict_map
}
// djb's cdb hashes.
fn hash_node_id(&&node_id: int) -> uint { ret 177573u ^ (node_id as uint); }


@ -5,6 +5,7 @@ import syntax::ast_util;
import middle::{ty, ast_map};
import option::{some, none};
import driver::session;
import middle::trans::common::maps;
export get_symbol;
export get_type_param_count;
@ -16,6 +17,7 @@ export get_iface_methods;
export get_type;
export get_impl_iface;
export get_item_path;
export maybe_get_item_ast;
fn get_symbol(cstore: cstore::cstore, def: ast::def_id) -> str {
let cdata = cstore::get_crate_data(cstore, def.crate).data;
@ -73,6 +75,16 @@ fn get_item_path(tcx: ty::ctxt, def: ast::def_id) -> ast_map::path {
[ast_map::path_mod(cdata.name)] + path
}
// Finds the AST for this item in the crate metadata, if any. If the item was
// not marked for inlining, then the AST will not be present and hence none
// will be returned.
fn maybe_get_item_ast(tcx: ty::ctxt, maps: maps, def: ast::def_id)
-> option<@ast::item> {
let cstore = tcx.sess.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::maybe_get_item_ast(cdata, tcx, maps, def.node)
}
fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) -> [ty::variant_info] {
let cstore = tcx.sess.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);


@ -11,6 +11,7 @@ import common::*;
import tydecode::{parse_ty_data, parse_def_id, parse_bounds_data};
import syntax::print::pprust;
import cmd=cstore::crate_metadata;
import middle::trans::common::maps;
export get_symbol;
export get_enum_variants;
@ -30,6 +31,10 @@ export get_impls_for_mod;
export get_iface_methods;
export get_crate_module_paths;
export get_item_path;
export maybe_get_item_ast;
// Used internally by astencode:
export translate_def_id;
// A function that takes a def_id relative to the crate being searched and
// returns a def_id relative to the compilation environment, i.e. if we hit a
@ -42,14 +47,14 @@ fn lookup_hash(d: ebml::doc, eq_fn: fn@([u8]) -> bool, hash: uint) ->
let index = ebml::get_doc(d, tag_index);
let table = ebml::get_doc(index, tag_index_table);
let hash_pos = table.start + hash % 256u * 4u;
let pos = ebml::be_u64_from_bytes(d.data, hash_pos, 4u) as uint;
let pos = io::u64_from_be_bytes(*d.data, hash_pos, 4u) as uint;
let {tag:_, doc:bucket} = ebml::doc_at(d.data, pos);
// Awkward logic because we can't ret from foreach yet
let result: [ebml::doc] = [];
let belt = tag_index_buckets_bucket_elt;
ebml::tagged_docs(bucket, belt) {|elt|
let pos = ebml::be_u64_from_bytes(elt.data, elt.start, 4u) as uint;
let pos = io::u64_from_be_bytes(*elt.data, elt.start, 4u) as uint;
if eq_fn(vec::slice::<u8>(*elt.data, elt.start + 4u, elt.end)) {
result += [ebml::doc_at(d.data, pos).doc];
}
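
For orientation, here is a hedged sketch in modern Rust of the hashed index being read above. The Index type and its methods are hypothetical stand-ins: the real metadata stores one 4-byte big-endian offset per bucket in the index table and, inside each bucket, tagged ebml docs whose first 4 bytes are the item position followed by the key bytes. The sketch keeps only the hashing and bucket-scan logic, in memory, and elides the ebml framing.

    // Hedged sketch: an in-memory reduction of the 256-bucket index scheme.
    fn hash_node_id(node_id: i64) -> usize {
        // djb-style cdb hash, as in hash_node_id above
        177573usize ^ (node_id as usize)
    }

    struct Index {
        // one bucket per hash slot; each entry is (item position, encoded key)
        buckets: Vec<Vec<(u32, Vec<u8>)>>,
    }

    impl Index {
        fn new() -> Index {
            Index { buckets: vec![Vec::new(); 256] }
        }

        fn insert(&mut self, node_id: i64, key: Vec<u8>, pos: u32) {
            let slot = hash_node_id(node_id) % 256;
            self.buckets[slot].push((pos, key));
        }

        // Mirrors lookup_hash + maybe_find_item: pick the bucket by hash, then
        // compare keys until a match is found and return the item's position.
        fn lookup(&self, node_id: i64, key: &[u8]) -> Option<u32> {
            let slot = hash_node_id(node_id) % 256;
            self.buckets[slot]
                .iter()
                .find(|(_, k)| k.as_slice() == key)
                .map(|(pos, _)| *pos)
        }
    }
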
@ -59,7 +64,7 @@ fn lookup_hash(d: ebml::doc, eq_fn: fn@([u8]) -> bool, hash: uint) ->
fn maybe_find_item(item_id: int, items: ebml::doc) -> option<ebml::doc> {
fn eq_item(bytes: [u8], item_id: int) -> bool {
ret ebml::be_u64_from_bytes(@bytes, 0u, 4u) as int == item_id;
ret io::u64_from_be_bytes(bytes, 0u, 4u) as int == item_id;
}
let eqer = bind eq_item(_, item_id);
let found = lookup_hash(items, eqer, hash_node_id(item_id));
@ -178,17 +183,17 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path {
let path_doc = ebml::get_doc(item_doc, tag_path);
let len_doc = ebml::get_doc(path_doc, tag_path_len);
let len = ebml::doc_as_vuint(len_doc);
let len = ebml::doc_as_u32(len_doc) as uint;
let result = [];
vec::reserve(result, len);
ebml::docs(path_doc) {|tag, elt_doc|
if tag == tag_path_elt_mod {
let str = ebml::doc_str(elt_doc);
let str = ebml::doc_as_str(elt_doc);
result += [ast_map::path_mod(str)];
} else if tag == tag_path_elt_name {
let str = ebml::doc_str(elt_doc);
let str = ebml::doc_as_str(elt_doc);
result += [ast_map::path_name(str)];
} else {
// ignore tag_path_len element
@ -258,6 +263,13 @@ fn get_item_path(cdata: cmd, id: ast::node_id) -> ast_map::path {
item_path(lookup_item(id, cdata.data))
}
fn maybe_get_item_ast(cdata: cmd, tcx: ty::ctxt, maps: maps,
id: ast::node_id) -> option<@ast::item> {
let item_doc = lookup_item(id, cdata.data);
let path = vec::init(item_path(item_doc));
astencode::decode_inlined_item(cdata, tcx, maps, path, item_doc)
}
fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
-> [ty::variant_info] {
let data = cdata.data;
@ -353,7 +365,7 @@ fn family_names_type(fam_ch: char) -> bool {
fn read_path(d: ebml::doc) -> {path: str, pos: uint} {
let desc = ebml::doc_data(d);
let pos = ebml::be_u64_from_bytes(@desc, 0u, 4u) as uint;
let pos = io::u64_from_be_bytes(desc, 0u, 4u) as uint;
let pathbytes = vec::slice::<u8>(desc, 4u, vec::len::<u8>(desc));
let path = str::from_bytes(pathbytes);
ret {path: path, pos: pos};


@ -2,8 +2,9 @@
import std::{io, ebml, map, list};
import io::writer_util;
import ebml::writer_util;
import ebml::writer;
import syntax::ast::*;
import syntax::print::pprust;
import syntax::ast_util;
import syntax::ast_util::local_def;
import common::*;
@ -13,25 +14,40 @@ import middle::ty::node_id_to_type;
import middle::ast_map;
import front::attr;
import driver::session::session;
import std::serialization::serializer;
export encode_metadata;
export encoded_ty;
// used by astencode:
export def_to_str;
export encode_ctxt;
export write_type;
export encode_def_id;
type abbrev_map = map::hashmap<ty::t, tyencode::ty_abbrev>;
type encode_ctxt = {ccx: crate_ctxt, type_abbrevs: abbrev_map};
fn should_inline(path: ast_map::path, item: @item) -> bool {
if item.ident == "iter" { // XXX
#debug["should_inline(%s::%s)? attrs=%s result=%b",
ast_map::path_to_str(path),
item.ident,
str::connect(vec::map(item.attrs, pprust::attr_to_str), ", "),
attr::attrs_contains_name(item.attrs, "inline")];
}
attr::attrs_contains_name(item.attrs, "inline")
}
// Path table encoding
fn encode_name(ebml_w: ebml::writer, name: str) {
ebml_w.wr_tag(tag_paths_data_name) {||
ebml_w.wr_str(name);
}
ebml_w.wr_tagged_str(tag_paths_data_name, name);
}
fn encode_def_id(ebml_w: ebml::writer, id: def_id) {
ebml_w.wr_tag(tag_def_id) {||
ebml_w.wr_str(def_to_str(id));
}
ebml_w.wr_tagged_str(tag_def_id, def_to_str(id));
}
fn encode_named_def_id(ebml_w: ebml::writer, name: str, id: def_id) {
@ -85,57 +101,57 @@ fn encode_module_item_paths(ebml_w: ebml::writer, module: _mod, path: [str],
}
item_mod(_mod) {
add_to_index(ebml_w, path, index, it.ident);
ebml::start_tag(ebml_w, tag_paths_data_mod);
ebml_w.start_tag(tag_paths_data_mod);
encode_name(ebml_w, it.ident);
encode_def_id(ebml_w, local_def(it.id));
encode_module_item_paths(ebml_w, _mod, path + [it.ident], index);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_native_mod(nmod) {
add_to_index(ebml_w, path, index, it.ident);
ebml::start_tag(ebml_w, tag_paths_data_mod);
ebml_w.start_tag(tag_paths_data_mod);
encode_name(ebml_w, it.ident);
encode_def_id(ebml_w, local_def(it.id));
encode_native_module_item_paths(ebml_w, nmod, path + [it.ident],
index);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_ty(_, tps) {
add_to_index(ebml_w, path, index, it.ident);
ebml::start_tag(ebml_w, tag_paths_data_item);
ebml_w.start_tag(tag_paths_data_item);
encode_name(ebml_w, it.ident);
encode_def_id(ebml_w, local_def(it.id));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_res(_, tps, _, _, ctor_id) {
add_to_index(ebml_w, path, index, it.ident);
ebml::start_tag(ebml_w, tag_paths_data_item);
ebml_w.start_tag(tag_paths_data_item);
encode_name(ebml_w, it.ident);
encode_def_id(ebml_w, local_def(ctor_id));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
add_to_index(ebml_w, path, index, it.ident);
ebml::start_tag(ebml_w, tag_paths_data_item);
ebml_w.start_tag(tag_paths_data_item);
encode_name(ebml_w, it.ident);
encode_def_id(ebml_w, local_def(it.id));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_class(_,_,_,_,_) {
fail "encode: implement item_class";
}
item_enum(variants, tps) {
add_to_index(ebml_w, path, index, it.ident);
ebml::start_tag(ebml_w, tag_paths_data_item);
ebml_w.start_tag(tag_paths_data_item);
encode_name(ebml_w, it.ident);
encode_def_id(ebml_w, local_def(it.id));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
encode_enum_variant_paths(ebml_w, variants, path, index);
}
item_iface(_, _) {
add_to_index(ebml_w, path, index, it.ident);
ebml::start_tag(ebml_w, tag_paths_data_item);
ebml_w.start_tag(tag_paths_data_item);
encode_name(ebml_w, it.ident);
encode_def_id(ebml_w, local_def(it.id));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_impl(_, _, _, _) {}
}
@ -146,10 +162,10 @@ fn encode_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, crate: @crate)
-> [entry<str>] {
let index: [entry<str>] = [];
let path: [str] = [];
ebml::start_tag(ebml_w, tag_paths);
ebml_w.start_tag(tag_paths);
encode_module_item_paths(ebml_w, crate.node.module, path, index);
encode_reexport_paths(ebml_w, ecx, index);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
ret index;
}
@ -158,10 +174,10 @@ fn encode_reexport_paths(ebml_w: ebml::writer,
ecx.ccx.exp_map.items {|path, defs|
for def in *defs {
index += [{val: path, pos: ebml_w.writer.tell()}];
ebml::start_tag(ebml_w, tag_paths_data_item);
ebml_w.start_tag(tag_paths_data_item);
encode_name(ebml_w, path);
encode_def_id(ebml_w, ast_util::def_id_of_def(def));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
}
}
@ -169,9 +185,9 @@ fn encode_reexport_paths(ebml_w: ebml::writer,
// Item info table encoding
fn encode_family(ebml_w: ebml::writer, c: char) {
ebml::start_tag(ebml_w, tag_items_data_item_family);
ebml_w.start_tag(tag_items_data_item_family);
ebml_w.writer.write([c as u8]);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn def_to_str(did: def_id) -> str { ret #fmt["%d:%d", did.crate, did.node]; }
@ -182,17 +198,17 @@ fn encode_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
tcx: ecx.ccx.tcx,
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
for param in params {
ebml::start_tag(ebml_w, tag_items_data_item_ty_param_bounds);
ebml_w.start_tag(tag_items_data_item_ty_param_bounds);
let bs = ecx.ccx.tcx.ty_param_bounds.get(param.id);
tyencode::enc_bounds(ebml_w.writer, ty_str_ctxt, bs);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
}
fn encode_variant_id(ebml_w: ebml::writer, vid: def_id) {
ebml::start_tag(ebml_w, tag_items_data_item_variant);
ebml_w.start_tag(tag_items_data_item_variant);
ebml_w.writer.write(str::bytes(def_to_str(vid)));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn write_type(ecx: @encode_ctxt, ebml_w: ebml::writer, typ: ty::t) {
@ -204,33 +220,33 @@ fn write_type(ecx: @encode_ctxt, ebml_w: ebml::writer, typ: ty::t) {
}
fn encode_type(ecx: @encode_ctxt, ebml_w: ebml::writer, typ: ty::t) {
ebml::start_tag(ebml_w, tag_items_data_item_type);
ebml_w.start_tag(tag_items_data_item_type);
write_type(ecx, ebml_w, typ);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_symbol(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id) {
ebml::start_tag(ebml_w, tag_items_data_item_symbol);
ebml_w.start_tag(tag_items_data_item_symbol);
ebml_w.writer.write(str::bytes(ecx.ccx.item_symbols.get(id)));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_discriminant(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id) {
ebml::start_tag(ebml_w, tag_items_data_item_symbol);
ebml_w.start_tag(tag_items_data_item_symbol);
ebml_w.writer.write(str::bytes(ecx.ccx.discrim_symbols.get(id)));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_disr_val(_ecx: @encode_ctxt, ebml_w: ebml::writer, disr_val: int) {
ebml::start_tag(ebml_w, tag_disr_val);
ebml_w.start_tag(tag_disr_val);
ebml_w.writer.write(str::bytes(int::to_str(disr_val,10u)));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_enum_id(ebml_w: ebml::writer, id: def_id) {
ebml::start_tag(ebml_w, tag_items_data_item_enum_id);
ebml_w.start_tag(tag_items_data_item_enum_id);
ebml_w.writer.write(str::bytes(def_to_str(id)));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
@ -242,7 +258,7 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
let vi = ty::enum_variants(ecx.ccx.tcx, {crate: local_crate, node: id});
for variant: variant in variants {
index += [{val: variant.node.id, pos: ebml_w.writer.tell()}];
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id));
encode_family(ebml_w, 'v');
encode_name(ebml_w, variant.node.name);
@ -259,7 +275,7 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
}
encode_type_param_bounds(ebml_w, ecx, ty_params);
encode_path(ebml_w, path, ast_map::path_name(variant.node.name));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
disr_val += 1;
i += 1;
}
@ -274,15 +290,11 @@ fn encode_path(ebml_w: ebml::writer,
ast_map::path_name(name) { (tag_path_elt_name, name) }
};
ebml_w.wr_tag(tag) {||
ebml_w.wr_str(name)
}
ebml_w.wr_tagged_str(tag, name);
}
ebml_w.wr_tag(tag_path) {||
ebml_w.wr_tag(tag_path_len) {||
ebml_w.wr_vuint(vec::len(path) + 1u);
}
ebml_w.wr_tagged_u32(tag_path_len, (vec::len(path) + 1u) as u32);
vec::iter(path) {|pe| encode_path_elt(ebml_w, pe); }
encode_path_elt(ebml_w, name);
}
@ -290,17 +302,15 @@ fn encode_path(ebml_w: ebml::writer,
fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
id: node_id, path: ast_map::path, name: ident) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, 'm');
encode_name(ebml_w, name);
alt ecx.ccx.impl_map.get(id) {
alt ecx.ccx.maps.impl_map.get(id) {
list::cons(impls, @list::nil) {
for i in *impls {
if ast_util::is_exported(i.ident, md) {
ebml::start_tag(ebml_w, tag_mod_impl);
ebml_w.writer.write(str::bytes(def_to_str(i.did)));
ebml::end_tag(ebml_w);
ebml_w.wr_tagged_str(tag_mod_impl, def_to_str(i.did));
}
}
}
@ -308,7 +318,7 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
undocumented invariant"); }
}
encode_path(ebml_w, path, ast_map::path_mod(name));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn purity_fn_family(p: purity) -> char {
@ -325,47 +335,50 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
let tcx = ecx.ccx.tcx;
alt item.node {
item_const(_, _) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'c');
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_symbol(ecx, ebml_w, item.id);
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_fn(decl, tps, _) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, purity_fn_family(decl.purity));
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_symbol(ecx, ebml_w, item.id);
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
if should_inline(path, item) {
astencode::encode_inlined_item(ecx, ebml_w, item);
}
ebml_w.end_tag();
}
item_mod(m) {
encode_info_for_mod(ecx, ebml_w, m, item.id, path, item.ident);
}
item_native_mod(_) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'n');
encode_name(ebml_w, item.ident);
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_ty(_, tps) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'y');
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident);
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_enum(variants, tps) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 't');
encode_type_param_bounds(ebml_w, ecx, tps);
@ -375,7 +388,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_variant_id(ebml_w, local_def(v.node.id));
}
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
encode_enum_variant_info(ecx, ebml_w, item.id, variants,
path, index, tps);
}
@ -385,7 +398,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
item_res(_, tps, _, _, ctor_id) {
let fn_ty = node_id_to_type(tcx, ctor_id);
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(ctor_id));
encode_family(ebml_w, 'y');
encode_type_param_bounds(ebml_w, ecx, tps);
@ -393,47 +406,47 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_name(ebml_w, item.ident);
encode_symbol(ecx, ebml_w, item.id);
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
index += [{val: ctor_id, pos: ebml_w.writer.tell()}];
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(ctor_id));
encode_family(ebml_w, 'f');
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, fn_ty);
encode_symbol(ecx, ebml_w, ctor_id);
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
item_impl(tps, ifce, _, methods) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'i');
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident);
for m in methods {
ebml::start_tag(ebml_w, tag_item_method);
ebml_w.start_tag(tag_item_method);
ebml_w.writer.write(str::bytes(def_to_str(local_def(m.id))));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
alt ifce {
some(_) {
encode_symbol(ecx, ebml_w, item.id);
let i_ty = ty::lookup_item_type(tcx, local_def(item.id)).ty;
ebml::start_tag(ebml_w, tag_impl_iface);
ebml_w.start_tag(tag_impl_iface);
write_type(ecx, ebml_w, i_ty);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
_ {}
}
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
let impl_path = path + [ast_map::path_name(item.ident)];
for m in methods {
index += [{val: m.id, pos: ebml_w.writer.tell()}];
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(m.id));
encode_family(ebml_w, purity_fn_family(m.decl.purity));
encode_type_param_bounds(ebml_w, ecx, tps + m.tps);
@ -441,11 +454,11 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_name(ebml_w, m.ident);
encode_symbol(ecx, ebml_w, m.id);
encode_path(ebml_w, impl_path, ast_map::path_name(m.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
}
item_iface(tps, ms) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'I');
encode_type_param_bounds(ebml_w, ecx, tps);
@ -453,23 +466,23 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_name(ebml_w, item.ident);
let i = 0u;
for mty in *ty::iface_methods(tcx, local_def(item.id)) {
ebml::start_tag(ebml_w, tag_item_method);
ebml_w.start_tag(tag_item_method);
encode_name(ebml_w, mty.ident);
encode_type_param_bounds(ebml_w, ecx, ms[i].tps);
encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty));
encode_family(ebml_w, purity_fn_family(mty.purity));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
i += 1u;
}
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
}
}
fn encode_info_for_native_item(ecx: @encode_ctxt, ebml_w: ebml::writer,
nitem: @native_item, path: ast_map::path) {
ebml::start_tag(ebml_w, tag_items_data_item);
ebml_w.start_tag(tag_items_data_item);
alt nitem.node {
native_item_fn(fn_decl, tps) {
encode_def_id(ebml_w, local_def(nitem.id));
@ -480,16 +493,16 @@ fn encode_info_for_native_item(ecx: @encode_ctxt, ebml_w: ebml::writer,
encode_path(ebml_w, path, ast_map::path_name(nitem.ident));
}
}
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer,
crate_mod: _mod) -> [entry<int>] {
let index: [entry<int>] = [];
ebml::start_tag(ebml_w, tag_items_data);
ebml_w.start_tag(tag_items_data);
index += [{val: crate_node_id, pos: ebml_w.writer.tell()}];
encode_info_for_mod(ecx, ebml_w, crate_mod, crate_node_id, [], "");
ecx.ccx.ast_map.items {|key, val|
ecx.ccx.tcx.items.items {|key, val|
alt val {
middle::ast_map::node_item(i, path) {
index += [{val: key, pos: ebml_w.writer.tell()}];
@ -502,7 +515,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer,
_ { }
}
};
ebml::end_tag(ebml_w);
ebml_w.end_tag();
ret index;
}
@ -528,25 +541,25 @@ fn create_index<T: copy>(index: [entry<T>], hash_fn: fn@(T) -> uint) ->
fn encode_index<T>(ebml_w: ebml::writer, buckets: [@[entry<T>]],
write_fn: fn(io::writer, T)) {
let writer = ebml_w.writer;
ebml::start_tag(ebml_w, tag_index);
ebml_w.start_tag(tag_index);
let bucket_locs: [uint] = [];
ebml::start_tag(ebml_w, tag_index_buckets);
ebml_w.start_tag(tag_index_buckets);
for bucket: @[entry<T>] in buckets {
bucket_locs += [ebml_w.writer.tell()];
ebml::start_tag(ebml_w, tag_index_buckets_bucket);
ebml_w.start_tag(tag_index_buckets_bucket);
for elt: entry<T> in *bucket {
ebml::start_tag(ebml_w, tag_index_buckets_bucket_elt);
ebml_w.start_tag(tag_index_buckets_bucket_elt);
writer.write_be_uint(elt.pos, 4u);
write_fn(writer, elt.val);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
ebml::end_tag(ebml_w);
ebml::start_tag(ebml_w, tag_index_table);
ebml_w.end_tag();
ebml_w.start_tag(tag_index_table);
for pos: uint in bucket_locs { writer.write_be_uint(pos, 4u); }
ebml::end_tag(ebml_w);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
ebml_w.end_tag();
}
fn write_str(writer: io::writer, &&s: str) { writer.write_str(s); }
@ -558,48 +571,48 @@ fn write_int(writer: io::writer, &&n: int) {
fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) {
alt mi.node {
meta_word(name) {
ebml::start_tag(ebml_w, tag_meta_item_word);
ebml::start_tag(ebml_w, tag_meta_item_name);
ebml_w.start_tag(tag_meta_item_word);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::bytes(name));
ebml::end_tag(ebml_w);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
ebml_w.end_tag();
}
meta_name_value(name, value) {
alt value.node {
lit_str(value) {
ebml::start_tag(ebml_w, tag_meta_item_name_value);
ebml::start_tag(ebml_w, tag_meta_item_name);
ebml_w.start_tag(tag_meta_item_name_value);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::bytes(name));
ebml::end_tag(ebml_w);
ebml::start_tag(ebml_w, tag_meta_item_value);
ebml_w.end_tag();
ebml_w.start_tag(tag_meta_item_value);
ebml_w.writer.write(str::bytes(value));
ebml::end_tag(ebml_w);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
ebml_w.end_tag();
}
_ {/* FIXME (#611) */ }
}
}
meta_list(name, items) {
ebml::start_tag(ebml_w, tag_meta_item_list);
ebml::start_tag(ebml_w, tag_meta_item_name);
ebml_w.start_tag(tag_meta_item_list);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::bytes(name));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
for inner_item: @meta_item in items {
encode_meta_item(ebml_w, *inner_item);
}
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
}
}
fn encode_attributes(ebml_w: ebml::writer, attrs: [attribute]) {
ebml::start_tag(ebml_w, tag_attributes);
ebml_w.start_tag(tag_attributes);
for attr: attribute in attrs {
ebml::start_tag(ebml_w, tag_attribute);
ebml_w.start_tag(tag_attribute);
encode_meta_item(ebml_w, attr.node.value);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
// So there's a special crate attribute called 'link' which defines the
@ -687,19 +700,19 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {
// that they are numbered 1 to n.
// FIXME: This is not nearly enough to support correct versioning
// but is enough to get transitive crate dependencies working.
ebml::start_tag(ebml_w, tag_crate_deps);
ebml_w.start_tag(tag_crate_deps);
for cname: str in get_ordered_names(cstore) {
ebml::start_tag(ebml_w, tag_crate_dep);
ebml_w.start_tag(tag_crate_dep);
ebml_w.writer.write(str::bytes(cname));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_hash(ebml_w: ebml::writer, hash: str) {
ebml::start_tag(ebml_w, tag_crate_hash);
ebml_w.start_tag(tag_crate_hash);
ebml_w.writer.write(str::bytes(hash));
ebml::end_tag(ebml_w);
ebml_w.end_tag();
}
fn encode_metadata(cx: crate_ctxt, crate: @crate) -> [u8] {
@ -709,7 +722,7 @@ fn encode_metadata(cx: crate_ctxt, crate: @crate) -> [u8] {
let buf = io::mk_mem_buffer();
let buf_w = io::mem_buffer_writer(buf);
let ebml_w = ebml::create_writer(buf_w);
let ebml_w = ebml::mk_writer(buf_w);
encode_hash(ebml_w, cx.link_meta.extras_hash);
@ -719,18 +732,18 @@ fn encode_metadata(cx: crate_ctxt, crate: @crate) -> [u8] {
encode_crate_deps(ebml_w, cx.sess.cstore);
// Encode and index the paths.
ebml::start_tag(ebml_w, tag_paths);
ebml_w.start_tag(tag_paths);
let paths_index = encode_item_paths(ebml_w, ecx, crate);
let paths_buckets = create_index(paths_index, hash_path);
encode_index(ebml_w, paths_buckets, write_str);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
// Encode and index the items.
ebml::start_tag(ebml_w, tag_items);
ebml_w.start_tag(tag_items);
let items_index = encode_info_for_items(ecx, ebml_w, crate.node.module);
let items_buckets = create_index(items_index, hash_node_id);
encode_index(ebml_w, items_buckets, write_int);
ebml::end_tag(ebml_w);
ebml_w.end_tag();
// Pad this, since something (LLVM, presumably) is cutting off the
// remaining % 4 bytes.


@ -14,6 +14,7 @@ export ac_no_abbrevs;
export ac_use_abbrevs;
export enc_ty;
export enc_bounds;
export enc_mode;
type ctxt =
// Def -> str Callback:
@ -211,16 +212,20 @@ fn enc_proto(w: io::writer, proto: proto) {
}
}
fn enc_mode(w: io::writer, cx: @ctxt, m: mode) {
alt ty::resolved_mode(cx.tcx, m) {
by_mutbl_ref { w.write_char('&'); }
by_move { w.write_char('-'); }
by_copy { w.write_char('+'); }
by_ref { w.write_char('='); }
by_val { w.write_char('#'); }
}
}
fn enc_ty_fn(w: io::writer, cx: @ctxt, ft: ty::fn_ty) {
w.write_char('[');
for arg: ty::arg in ft.inputs {
alt ty::resolved_mode(cx.tcx, arg.mode) {
by_mutbl_ref { w.write_char('&'); }
by_move { w.write_char('-'); }
by_copy { w.write_char('+'); }
by_ref { w.write_char('='); }
by_val { w.write_char('#'); }
}
enc_mode(w, cx, arg.mode);
enc_ty(w, cx, arg.ty);
}
w.write_char(']');


@ -37,11 +37,8 @@ type map = std::map::map<node_id, ast_node>;
type ctx = {map: map, mutable path: path, mutable local_id: uint};
type vt = visit::vt<ctx>;
fn map_crate(c: crate) -> map {
let cx = {map: std::map::new_int_hash(),
mutable path: [],
mutable local_id: 0u};
visit::visit_crate(c, cx, visit::mk_vt(@{
fn mk_ast_map_visitor() -> vt {
ret visit::mk_vt(@{
visit_item: map_item,
visit_native_item: map_native_item,
visit_expr: map_expr,
@ -49,10 +46,33 @@ fn map_crate(c: crate) -> map {
visit_local: map_local,
visit_arm: map_arm
with *visit::default_visitor()
}));
});
}
fn map_crate(c: crate) -> map {
let cx = {map: std::map::new_int_hash(),
mutable path: [],
mutable local_id: 0u};
visit::visit_crate(c, cx, mk_ast_map_visitor());
ret cx.map;
}
// Used for items loaded from an external crate that are being inlined into this
// crate:
fn map_decoded_item(map: map, path: path, i: @item) {
// I believe it is ok for the local IDs of inlined items from other crates
// to overlap with the local ids from this crate, so just generate the ids
// starting from 0. (In particular, I think these ids are only used in
// alias analysis, which we will not be running on the inlined items, and
// even if we did I think it only needs an ordering between local
// variables that are simultaneously in scope).
let cx = {map: map,
mutable path: path,
mutable local_id: 0u};
let v = mk_ast_map_visitor();
v.visit_item(i, cx, v);
}
fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
sp: codemap::span, id: node_id, cx: ctx, v: vt) {
for a in decl.inputs {


@ -11,6 +11,7 @@ import syntax::codemap::span;
export annotate_freevars;
export freevar_map;
export freevar_info;
export freevar_entry;
export get_freevars;
export has_freevars;

src/comp/middle/inline.rs (new file, 96 lines)

@ -0,0 +1,96 @@
import std::map::hashmap;
import syntax::ast;
import syntax::ast_util;
import syntax::visit;
import middle::typeck::method_map;
import middle::trans::common::maps;
import metadata::csearch;
export inline_map;
export instantiate_inlines;
type inline_map = hashmap<ast::def_id, @ast::item>;
enum ctxt = {
tcx: ty::ctxt,
maps: maps,
inline_map: inline_map,
mutable to_process: [@ast::item]
};
fn instantiate_inlines(tcx: ty::ctxt,
maps: maps,
crate: @ast::crate) -> inline_map {
let vt = visit::mk_vt(@{
visit_expr: fn@(e: @ast::expr, cx: ctxt, vt: visit::vt<ctxt>) {
visit::visit_expr(e, cx, vt);
cx.visit_expr(e);
}
with *visit::default_visitor::<ctxt>()
});
let inline_map = ast_util::new_def_id_hash();
let cx = ctxt({tcx: tcx, maps: maps,
inline_map: inline_map, mutable to_process: []});
visit::visit_crate(*crate, cx, vt);
while !vec::is_empty(cx.to_process) {
let to_process = [];
to_process <-> cx.to_process;
#debug["Recursively looking at inlined items"];
vec::iter(to_process, {|i| visit::visit_item(i, cx, vt)});
}
ret inline_map;
}
impl methods for ctxt {
fn visit_expr(e: @ast::expr) {
// Look for fn items or methods that are referenced which
// ought to be inlined.
alt e.node {
ast::expr_path(_) {
alt self.tcx.def_map.get(e.id) {
ast::def_fn(did, _) {
self.maybe_enqueue_fn(did);
}
_ { /* not a fn item, fallthrough */ }
}
}
ast::expr_field(_, _, _) {
alt self.maps.method_map.find(e.id) {
some(origin) {
self.maybe_enqueue_impl_method(origin);
}
_ { /* not an impl method, fallthrough */ }
}
}
_ { /* fallthrough */ }
}
}
fn maybe_enqueue_fn(did: ast::def_id) {
if did.crate == ast::local_crate { ret; }
if self.inline_map.contains_key(did) { ret; }
alt csearch::maybe_get_item_ast(self.tcx, self.maps, did) {
none {
/* no AST attached, do not inline */
#debug["No AST attached to def %s",
ty::item_path_str(self.tcx, did)];
}
some(item) { /* Found an AST, add to table: */
#debug["Inlining def %s", ty::item_path_str(self.tcx, did)];
self.to_process += [item];
self.inline_map.insert(did, item);
}
}
}
fn maybe_enqueue_impl_method(_origin: typeck::method_origin) {
// alt method_origin {
// method_static(did) { self.maybe_enqueue_fn(did); }
// method_param(_, _, _, _) | method_iface(_, _) {
// /* fallthrough */
// }
// }
}
}
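
As a rough illustration, here is a hedged sketch in modern Rust of the worklist shape used by instantiate_inlines above. The DefId, Item, and maybe_get_item_ast names below are simplified stand-ins, not the real compiler types, and the batch swap of to_process in the real code is replaced by a plain stack. The idea is the same: walking the crate enqueues any cross-crate def whose AST is present in the metadata, and each newly inlined item is then walked in turn so that the functions it references can be inlined too, until the queue drains.

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct DefId { krate: u32, node: u32 }

    // Stand-in for @ast::item: just the item's id and the defs it references.
    #[derive(Clone)]
    struct Item { id: DefId, callees: Vec<DefId> }

    const LOCAL_CRATE: u32 = 0;

    // Stand-in for csearch::maybe_get_item_ast: the AST is only present in the
    // metadata if the item was marked #[inline] when its crate was compiled.
    fn maybe_get_item_ast(metadata: &HashMap<DefId, Item>, did: DefId) -> Option<Item> {
        metadata.get(&did).cloned()
    }

    fn maybe_enqueue(did: DefId,
                     metadata: &HashMap<DefId, Item>,
                     inline_map: &mut HashMap<DefId, Item>,
                     to_process: &mut Vec<Item>) {
        if did.krate == LOCAL_CRATE || inline_map.contains_key(&did) { return; }
        if let Some(item) = maybe_get_item_ast(metadata, did) {
            to_process.push(item.clone());
            inline_map.insert(did, item);
        }
    }

    // Collect the transitive closure of inlinable items referenced from the
    // local crate, mirroring the visit-then-reprocess loop above.
    fn instantiate_inlines(root_refs: &[DefId],
                           metadata: &HashMap<DefId, Item>) -> HashMap<DefId, Item> {
        let mut inline_map = HashMap::new();
        let mut to_process = Vec::new();
        for &did in root_refs {
            maybe_enqueue(did, metadata, &mut inline_map, &mut to_process);
        }
        while let Some(item) = to_process.pop() {
            for &did in &item.callees {
                maybe_enqueue(did, metadata, &mut inline_map, &mut to_process);
            }
        }
        inline_map
    }
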


@ -585,7 +585,7 @@ fn make_phi_bindings(bcx: block, map: [exit_node],
if success {
// Copy references that the alias analysis considered unsafe
ids.values {|node_id|
if bcx.ccx().copy_map.contains_key(node_id) {
if bcx.ccx().maps.copy_map.contains_key(node_id) {
let local = alt bcx.fcx.lllocals.find(node_id) {
some(local_mem(x)) { x }
_ { bcx.tcx().sess.bug("Someone \
@ -675,7 +675,7 @@ fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef,
alt pat.node {
ast::pat_ident(_,inner) {
if pat_is_variant(bcx.tcx().def_map, pat) { ret bcx; }
if make_copy || ccx.copy_map.contains_key(pat.id) {
if make_copy || ccx.maps.copy_map.contains_key(pat.id) {
let ty = node_id_type(bcx, pat.id);
let llty = type_of::type_of(ccx, ty);
let alloc = alloca(bcx, llty);


@ -21,6 +21,7 @@ import driver::session;
import session::session;
import front::attr;
import middle::freevars::*;
import middle::inline::inline_map;
import back::{link, abi, upcall};
import syntax::{ast, ast_util, codemap};
import ast_util::local_def;
@ -59,6 +60,14 @@ enum dest {
ignore,
}
fn dest_str(ccx: crate_ctxt, d: dest) -> str {
alt d {
by_val(v) { #fmt["by_val(%s)", val_str(ccx.tn, *v)] }
save_in(v) { #fmt["save_in(%s)", val_str(ccx.tn, v)] }
ignore { "ignore" }
}
}
fn empty_dest_cell() -> @mutable ValueRef {
ret @mutable llvm::LLVMGetUndef(T_nil());
}
@ -1561,7 +1570,7 @@ fn trans_lit(cx: block, lit: ast::lit, dest: dest) -> block {
fn trans_unary(bcx: block, op: ast::unop, e: @ast::expr,
un_expr: @ast::expr, dest: dest) -> block {
// Check for user-defined method call
alt bcx.ccx().method_map.find(un_expr.id) {
alt bcx.ccx().maps.method_map.find(un_expr.id) {
some(origin) {
let callee_id = ast_util::op_expr_callee_id(un_expr);
let fty = node_id_type(bcx, callee_id);
@ -1741,7 +1750,7 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop,
assert (lhs_res.kind == owned);
// A user-defined operator method
alt bcx.ccx().method_map.find(ex.id) {
alt bcx.ccx().maps.method_map.find(ex.id) {
some(origin) {
let callee_id = ast_util::op_expr_callee_id(ex);
let fty = node_id_type(bcx, callee_id);
@ -1852,7 +1861,7 @@ fn trans_lazy_binop(bcx: block, op: lazy_binop_ty, a: @ast::expr,
fn trans_binary(bcx: block, op: ast::binop, lhs: @ast::expr,
rhs: @ast::expr, dest: dest, ex: @ast::expr) -> block {
// User-defined operators
alt bcx.ccx().method_map.find(ex.id) {
alt bcx.ccx().maps.method_map.find(ex.id) {
some(origin) {
let callee_id = ast_util::op_expr_callee_id(ex);
let fty = node_id_type(bcx, callee_id);
@ -2110,8 +2119,29 @@ fn lval_static_fn(bcx: block, fn_id: ast::def_id, id: ast::node_id,
substs: option<([ty::t], typeck::dict_res)>)
-> lval_maybe_callee {
let ccx = bcx.ccx();
let tcx = ccx.tcx;
let tys = node_id_type_params(bcx, id);
let tpt = ty::lookup_item_type(ccx.tcx, fn_id);
let tpt = ty::lookup_item_type(tcx, fn_id);
// Check whether this fn has an inlined copy and, if so, redirect fn_id to
// the local id of the inlined copy.
let fn_id = {
if fn_id.crate == ast::local_crate {
fn_id
} else {
alt ccx.inline_map.find(fn_id) {
none { fn_id }
some(item) {
#debug["Found inlined version of %s with id %d",
ty::item_path_str(tcx, fn_id),
item.id];
{crate: ast::local_crate,
node: item.id}
}
}
}
};
// The awkwardness below mostly stems from the fact that we're mixing
// monomorphized and non-monomorphized functions at the moment. If
// monomorphizing becomes the only approach, this'll be much simpler.
@ -2126,7 +2156,7 @@ fn lval_static_fn(bcx: block, fn_id: ast::def_id, id: ast::node_id,
} else { none }
}
none {
alt ccx.dict_map.find(id) {
alt ccx.maps.dict_map.find(id) {
some(dicts) {
alt impl::resolve_dicts_in_fn_ctxt(bcx.fcx, dicts) {
some(dicts) { monomorphic_fn(ccx, fn_id, tys, some(dicts)) }
@ -2146,6 +2176,7 @@ fn lval_static_fn(bcx: block, fn_id: ast::def_id, id: ast::node_id,
none {}
}
}
let val = if fn_id.crate == ast::local_crate {
// Internal reference.
assert (ccx.item_ids.contains_key(fn_id.node));
@ -2181,7 +2212,7 @@ fn lval_static_fn(bcx: block, fn_id: ast::def_id, id: ast::node_id,
static_tis: tis,
tydescs: tydescs,
param_bounds: tpt.bounds,
origins: ccx.dict_map.find(id)});
origins: ccx.maps.dict_map.find(id)});
}
ret {bcx: bcx, val: val, kind: owned, env: null_env, generic: gen};
}
@ -2347,7 +2378,7 @@ fn trans_index(cx: block, ex: @ast::expr, base: @ast::expr,
fn expr_is_lval(bcx: block, e: @ast::expr) -> bool {
let ccx = bcx.ccx();
ty::expr_is_lval(ccx.method_map, e)
ty::expr_is_lval(ccx.maps.method_map, e)
}
fn trans_callee(bcx: block, e: @ast::expr) -> lval_maybe_callee {
@ -2356,7 +2387,7 @@ fn trans_callee(bcx: block, e: @ast::expr) -> lval_maybe_callee {
ast::expr_field(base, ident, _) {
// Lval means this is a record field, so not a method
if !expr_is_lval(bcx, e) {
alt bcx.ccx().method_map.find(e.id) {
alt bcx.ccx().maps.method_map.find(e.id) {
some(origin) { // An impl method
ret impl::trans_method_callee(bcx, e.id, base, origin);
}
@ -2553,7 +2584,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef,
val = do_spill_noroot(bcx, val);
copied = true;
}
if ccx.copy_map.contains_key(e.id) && lv.kind != temporary {
if ccx.maps.copy_map.contains_key(e.id) && lv.kind != temporary {
if !copied {
let alloc = alloc_ty(bcx, e_ty);
bcx = copy_val(alloc.bcx, INIT, alloc.val,
@ -2568,7 +2599,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef,
} else if arg_mode == ast::by_copy || arg_mode == ast::by_move {
let {bcx: cx, val: alloc} = alloc_ty(bcx, e_ty);
let move_out = arg_mode == ast::by_move ||
ccx.last_uses.contains_key(e.id);
ccx.maps.last_uses.contains_key(e.id);
bcx = cx;
if lv.kind == temporary { revoke_clean(bcx, val); }
if lv.kind == owned || !ty::type_is_immediate(e_ty) {
@ -2983,7 +3014,11 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
let tcx = bcx.tcx();
debuginfo::update_source_pos(bcx, e.span);
#debug["trans_expr(%s,%?)", expr_to_str(e), dest];
#debug["trans_expr(e=%s,e.id=%d,dest=%s,ty=%s)",
expr_to_str(e),
e.id,
dest_str(bcx.ccx(), dest),
ty_to_str(tcx, expr_ty(bcx, e))];
if expr_is_lval(bcx, e) {
ret lval_to_dps(bcx, e, dest);
@ -3056,7 +3091,7 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
}
ast::expr_index(base, idx) {
// If it is here, it's not an lval, so this is a user-defined index op
let origin = bcx.ccx().method_map.get(e.id);
let origin = bcx.ccx().maps.method_map.get(e.id);
let callee_id = ast_util::op_expr_callee_id(e);
let fty = node_id_type(bcx, callee_id);
ret trans_call_inner(bcx, fty, {|bcx|
@ -3128,7 +3163,7 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
assert kind == owned;
ret store_temp_expr(bcx, DROP_EXISTING, addr, src_r,
expr_ty(bcx, src),
bcx.ccx().last_uses.contains_key(src.id));
bcx.ccx().maps.last_uses.contains_key(src.id));
}
ast::expr_move(dst, src) {
// FIXME: calculate copy init-ness in typestate.
@ -3164,7 +3199,7 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
fn lval_to_dps(bcx: block, e: @ast::expr, dest: dest) -> block {
let lv = trans_lval(bcx, e), ccx = bcx.ccx();
let {bcx, val, kind} = lv;
let last_use = kind == owned && ccx.last_uses.contains_key(e.id);
let last_use = kind == owned && ccx.maps.last_uses.contains_key(e.id);
let ty = expr_ty(bcx, e);
alt dest {
by_val(cell) {
@ -3717,8 +3752,8 @@ fn alloc_local(cx: block, local: @ast::local) -> block {
// Do not allocate space for locals that can be kept immediate.
let ccx = cx.ccx();
if option::is_some(simple_name) &&
!ccx.mutbl_map.contains_key(local.node.pat.id) &&
!ccx.last_uses.contains_key(local.node.pat.id) &&
!ccx.maps.mutbl_map.contains_key(local.node.pat.id) &&
!ccx.maps.last_uses.contains_key(local.node.pat.id) &&
ty::type_is_immediate(t) {
alt local.node.init {
some({op: ast::init_assign, _}) { ret cx; }
@ -4258,6 +4293,12 @@ fn trans_mod(ccx: crate_ctxt, m: ast::_mod) {
for item in m.items { trans_item(ccx, *item); }
}
fn trans_inlined_items(ccx: crate_ctxt, inline_map: inline_map) {
inline_map.values {|item|
trans_item(ccx, *item)
}
}
fn get_pair_fn_ty(llpairty: TypeRef) -> TypeRef {
// Bit of a kludge: pick the fn typeref out of the pair.
ret struct_elt(llpairty, 0u);
@ -4290,6 +4331,9 @@ fn register_fn_fuller(ccx: crate_ctxt, sp: span, path: path, _flav: str,
ccx.item_ids.insert(node_id, llfn);
ccx.item_symbols.insert(node_id, ps);
#debug["register_fn_fuller created fn %s for item %d with path %s",
val_str(ccx.tn, llfn), node_id, ast_map::path_to_str(path)];
let is_main = is_main_name(path) && !ccx.sess.building_library;
if is_main { create_main_wrapper(ccx, sp, llfn, node_type); }
}
@ -4519,6 +4563,13 @@ fn collect_items(ccx: crate_ctxt, crate: @ast::crate) {
}));
}
fn collect_inlined_items(ccx: crate_ctxt, inline_map: inline::inline_map) {
let abi = @mutable none::<ast::native_abi>;
inline_map.values {|item|
collect_item(ccx, abi, item);
}
}
// The constant translation pass.
fn trans_constant(ccx: crate_ctxt, it: @ast::item) {
alt it.node {
@ -4718,10 +4769,8 @@ fn write_abi_version(ccx: crate_ctxt) {
}
fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt,
output: str, emap: resolve::exp_map, amap: ast_map::map,
mutbl_map: mutbl::mutbl_map, copy_map: alias::copy_map,
last_uses: last_use::last_uses, impl_map: resolve::impl_map,
method_map: typeck::method_map, dict_map: typeck::dict_map)
output: str, emap: resolve::exp_map, maps: maps,
inline_map: inline::inline_map)
-> (ModuleRef, link::link_meta) {
let sha = std::sha1::mk_sha1();
let link_meta = link::build_link_meta(sess, *crate, output, sha);
@ -4769,6 +4818,7 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt,
} else {
option::none
};
let ccx =
@{sess: sess,
llmod: llmod,
@ -4777,7 +4827,6 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt,
externs: new_str_hash::<ValueRef>(),
intrinsics: intrinsics,
item_ids: new_int_hash::<ValueRef>(),
ast_map: amap,
exp_map: emap,
item_symbols: new_int_hash::<str>(),
mutable main_fn: none::<ValueRef>,
@ -4796,12 +4845,8 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt,
type_sha1s: ty::new_ty_hash(),
type_short_names: ty::new_ty_hash(),
tcx: tcx,
mutbl_map: mutbl_map,
copy_map: copy_map,
last_uses: last_uses,
impl_map: impl_map,
method_map: method_map,
dict_map: dict_map,
maps: maps,
inline_map: inline_map,
stats:
{mutable n_static_tydescs: 0u,
mutable n_derived_tydescs: 0u,
@ -4823,8 +4868,10 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt,
dbg_cx: dbg_cx,
mutable do_not_commit_warning_issued: false};
collect_items(ccx, crate);
collect_inlined_items(ccx, inline_map);
trans_constants(ccx, crate);
trans_mod(ccx, crate.node.module);
trans_inlined_items(ccx, inline_map);
fill_crate_map(ccx, crate_map);
emit_tydescs(ccx);
gen_shape_tables(ccx);


@ -322,6 +322,9 @@ fn Load(cx: block, PointerVal: ValueRef) -> ValueRef {
fn Store(cx: block, Val: ValueRef, Ptr: ValueRef) {
if cx.unreachable { ret; }
#debug["Store %s -> %s",
val_str(cx.ccx().tn, Val),
val_str(cx.ccx().tn, Ptr)];
llvm::LLVMBuildStore(B(cx), Val, Ptr);
}


@ -277,6 +277,7 @@ fn store_environment(
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let cboxptr_ty = ty::mk_ptr(tcx, {ty:cbox_ty, mutbl:ast::m_imm});
let llbox = cast_if_we_can(bcx, llbox, cboxptr_ty);
#debug["tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty)];
// If necessary, copy tydescs describing type parameters into the
// appropriate slot in the closure.
@ -298,8 +299,9 @@ fn store_environment(
}
// Copy expr values into boxed bindings.
// Silly check
vec::iteri(bound_values) { |i, bv|
#debug["Copy %s into closure", ev_to_str(ccx, bv)];
if (!ccx.sess.opts.no_asm_comments) {
add_comment(bcx, #fmt("Copy %s into closure",
ev_to_str(ccx, bv)));


@ -19,6 +19,7 @@ import lib::llvm::{ModuleRef, ValueRef, TypeRef, BasicBlockRef, BuilderRef};
import lib::llvm::{True, False, Bool};
import metadata::csearch;
import ast_map::path;
import middle::inline::inline_map;
type namegen = fn@(str) -> str;
fn new_namegen() -> namegen {
@ -63,6 +64,16 @@ type stats =
resource BuilderRef_res(B: BuilderRef) { llvm::LLVMDisposeBuilder(B); }
// Misc. auxiliary maps used in the crate_ctxt
type maps = {
mutbl_map: middle::mutbl::mutbl_map,
copy_map: middle::alias::copy_map,
last_uses: middle::last_use::last_uses,
impl_map: middle::resolve::impl_map,
method_map: middle::typeck::method_map,
dict_map: middle::typeck::dict_map
};
// Crate context. Every crate we compile has one of these.
type crate_ctxt = @{
sess: session::session,
@ -72,7 +83,6 @@ type crate_ctxt = @{
externs: hashmap<str, ValueRef>,
intrinsics: hashmap<str, ValueRef>,
item_ids: hashmap<ast::node_id, ValueRef>,
ast_map: ast_map::map,
exp_map: resolve::exp_map,
item_symbols: hashmap<ast::node_id, str>,
mutable main_fn: option<ValueRef>,
@ -91,12 +101,8 @@ type crate_ctxt = @{
type_sha1s: hashmap<ty::t, str>,
type_short_names: hashmap<ty::t, str>,
tcx: ty::ctxt,
mutbl_map: mutbl::mutbl_map,
copy_map: alias::copy_map,
last_uses: last_use::last_uses,
impl_map: resolve::impl_map,
method_map: typeck::method_map,
dict_map: typeck::dict_map,
maps: maps,
inline_map: inline_map,
stats: stats,
upcalls: @upcall::upcalls,
tydesc_type: TypeRef,


@ -780,7 +780,7 @@ fn create_function(fcx: fn_ctxt) -> @metadata<subprogram_md> {
let sp = option::get(fcx.span);
log(debug, codemap::span_to_str(sp, cx.sess.codemap));
let (ident, ret_ty, id) = alt cx.ast_map.get(fcx.id) {
let (ident, ret_ty, id) = alt cx.tcx.items.get(fcx.id) {
ast_map::node_item(item, _) {
alt item.node {
ast::item_fn(decl, _, _) | ast::item_res(decl, _, _, _, _) {


@ -134,7 +134,7 @@ fn trans_vtable_callee(bcx: block, env: callee_env, dict: ValueRef,
static_tis: tis,
tydescs: tydescs,
param_bounds: method.tps,
origins: ccx.dict_map.find(callee_id)});
origins: ccx.maps.dict_map.find(callee_id)});
}
{bcx: bcx, val: mptr, kind: owned,
env: env,
@ -531,7 +531,7 @@ fn trans_cast(bcx: block, val: @ast::expr, id: ast::node_id, dest: dest)
let result = get_dest_addr(dest);
Store(bcx, box, PointerCast(bcx, GEPi(bcx, result, [0, 1]),
T_ptr(val_ty(box))));
let {bcx, val: dict} = get_dict(bcx, ccx.dict_map.get(id)[0]);
let {bcx, val: dict} = get_dict(bcx, ccx.maps.dict_map.get(id)[0]);
Store(bcx, dict, PointerCast(bcx, GEPi(bcx, result, [0, 0]),
T_ptr(val_ty(dict))));
bcx


@ -129,6 +129,7 @@ export param_bound, param_bounds, bound_copy, bound_send, bound_iface;
export param_bounds_to_kind;
export default_arg_mode_for_ty;
export item_path;
export item_path_str;
// Data types
@ -2174,6 +2175,10 @@ fn substd_enum_variants(cx: ctxt, id: ast::def_id, tps: [ty::t])
}
}
fn item_path_str(cx: ctxt, id: ast::def_id) -> str {
ast_map::path_to_str(item_path(cx, id))
}
fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path {
if id.crate != ast::local_crate {
csearch::get_item_path(cx, id)


@ -444,8 +444,18 @@ fn ty_of_item(tcx: ty::ctxt, mode: mode, it: @ast::item)
// call to resolve any named types.
let tpt = {
let t0 = ast_ty_to_ty(tcx, mode, t);
{bounds: ty_param_bounds(tcx, mode, tps),
ty: ty::mk_with_id(tcx, t0, def_id)}
let t1 = {
// Do not associate a def id with a named, parameterized type
// like "foo<X>". This is because otherwise ty_to_str will
// print the name as merely "foo", as it has no way to
// reconstruct the value of X.
if vec::is_empty(tps) {
ty::mk_with_id(tcx, t0, def_id)
} else {
t0
}
};
{bounds: ty_param_bounds(tcx, mode, tps), ty: t1}
};
tcx.tcache.insert(local_def(it.id), tpt);
ret tpt;


@ -28,6 +28,7 @@ mod middle {
mod shape;
mod debuginfo;
}
mod inline;
mod ty;
mod ast_map;
mod resolve;
@ -121,6 +122,8 @@ mod metadata {
mod common;
mod tyencode;
mod tydecode;
mod astencode;
mod astencode_gen;
mod encoder;
mod decoder;
mod creader;


@ -19,6 +19,14 @@ fn path_name_i(idents: [ident]) -> str { str::connect(idents, "::") }
fn local_def(id: node_id) -> def_id { ret {crate: local_crate, node: id}; }
fn stmt_id(s: stmt) -> node_id {
alt s.node {
stmt_decl(_, id) { id }
stmt_expr(_, id) { id }
stmt_semi(_, id) { id }
}
}
fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
alt d { def_variant(enum_id, var_id) {
ret {enm: enum_id, var: var_id}; }


@ -125,9 +125,10 @@ fn fold_attribute_(at: attribute, fmi: fn@(&&@meta_item) -> @meta_item) ->
}
//used in noop_fold_native_item and noop_fold_fn_decl
fn fold_arg_(a: arg, fld: ast_fold) -> arg {
ret {ty: fld.fold_ty(a.ty),
ident: fld.fold_ident(a.ident)
with a};
ret {mode: a.mode,
ty: fld.fold_ty(a.ty),
ident: fld.fold_ident(a.ident),
id: fld.new_id(a.id)};
}
//used in noop_fold_expr, and possibly elsewhere in the future
fn fold_mac_(m: mac, fld: ast_fold) -> mac {
@ -156,6 +157,23 @@ fn fold_fn_decl(decl: ast::fn_decl, fld: ast_fold) -> ast::fn_decl {
constraints: vec::map(decl.constraints, fld.fold_constr)}
}
fn fold_ty_param_bound(tpb: ty_param_bound, fld: ast_fold) -> ty_param_bound {
alt tpb {
bound_copy | bound_send { tpb }
bound_iface(ty) { bound_iface(fld.fold_ty(ty)) }
}
}
fn fold_ty_param(tp: ty_param, fld: ast_fold) -> ty_param {
{ident: tp.ident,
id: fld.new_id(tp.id),
bounds: @vec::map(*tp.bounds, fold_ty_param_bound(_, fld))}
}
fn fold_ty_params(tps: [ty_param], fld: ast_fold) -> [ty_param] {
vec::map(tps, fold_ty_param(_, fld))
}
fn noop_fold_crate(c: crate_, fld: ast_fold) -> crate_ {
let fold_meta_item = bind fold_meta_item_(_, fld);
let fold_attribute = bind fold_attribute_(_, fold_meta_item);
@ -202,11 +220,12 @@ fn noop_fold_native_item(&&ni: @native_item, fld: ast_fold) -> @native_item {
cf: fdec.cf,
constraints:
vec::map(fdec.constraints,
fld.fold_constr)}, typms)
fld.fold_constr)},
fold_ty_params(typms, fld))
}
},
id: ni.id,
span: ni.span};
id: fld.new_id(ni.id),
span: fld.new_span(ni.span)};
}
fn noop_fold_item(&&i: @item, fld: ast_fold) -> @item {
@ -215,9 +234,9 @@ fn noop_fold_item(&&i: @item, fld: ast_fold) -> @item {
ret @{ident: fld.fold_ident(i.ident),
attrs: vec::map(i.attrs, fold_attribute),
id: i.id,
id: fld.new_id(i.id),
node: fld.fold_item_underscore(i.node),
span: i.span};
span: fld.new_span(i.span)};
}
fn noop_fold_class_item(&&ci: @class_item, fld: ast_fold)
@ -238,17 +257,20 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
ret alt i {
item_const(t, e) { item_const(fld.fold_ty(t), fld.fold_expr(e)) }
item_fn(decl, typms, body) {
let body = fld.fold_block(body);
item_fn(fold_fn_decl(decl, fld), typms, body)
item_fn(fold_fn_decl(decl, fld),
fold_ty_params(typms, fld),
fld.fold_block(body))
}
item_mod(m) { item_mod(fld.fold_mod(m)) }
item_native_mod(nm) { item_native_mod(fld.fold_native_mod(nm)) }
item_ty(t, typms) { item_ty(fld.fold_ty(t), typms) }
item_ty(t, typms) { item_ty(fld.fold_ty(t),
fold_ty_params(typms, fld)) }
item_enum(variants, typms) {
item_enum(vec::map(variants, fld.fold_variant), typms)
item_enum(vec::map(variants, fld.fold_variant),
fold_ty_params(typms, fld))
}
item_class(typms, items, id, ctor_decl, ctor_body) {
item_class(typms,
item_class(fold_ty_params(typms, fld),
vec::map(items, fld.fold_class_item),
id,
fold_fn_decl(ctor_decl, fld),
@ -260,16 +282,23 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
}
item_iface(tps, methods) { item_iface(tps, methods) }
item_res(decl, typms, body, did, cid) {
item_res(fold_fn_decl(decl, fld), typms, fld.fold_block(body),
did, cid)
item_res(fold_fn_decl(decl, fld),
fold_ty_params(typms, fld),
fld.fold_block(body),
did,
cid)
}
};
}
fn noop_fold_method(&&m: @method, fld: ast_fold) -> @method {
ret @{ident: fld.fold_ident(m.ident),
attrs: m.attrs,
tps: fold_ty_params(m.tps, fld),
decl: fold_fn_decl(m.decl, fld),
body: fld.fold_block(m.body) with *m};
body: fld.fold_block(m.body),
id: fld.new_id(m.id),
span: fld.new_span(m.span)};
}
@ -277,15 +306,15 @@ fn noop_fold_block(b: blk_, fld: ast_fold) -> blk_ {
ret {view_items: vec::map(b.view_items, fld.fold_view_item),
stmts: vec::map(b.stmts, fld.fold_stmt),
expr: option::map(b.expr, fld.fold_expr),
id: b.id,
id: fld.new_id(b.id),
rules: b.rules};
}
fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ {
ret alt s {
stmt_decl(d, nid) { stmt_decl(fld.fold_decl(d), nid) }
stmt_expr(e, nid) { stmt_expr(fld.fold_expr(e), nid) }
stmt_semi(e, nid) { stmt_semi(fld.fold_expr(e), nid) }
stmt_decl(d, nid) { stmt_decl(fld.fold_decl(d), fld.new_id(nid)) }
stmt_expr(e, nid) { stmt_expr(fld.fold_expr(e), fld.new_id(nid)) }
stmt_semi(e, nid) { stmt_semi(fld.fold_expr(e), fld.new_id(nid)) }
};
}
@ -459,7 +488,7 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ {
}
fn noop_fold_constr(c: constr_, fld: ast_fold) -> constr_ {
{path: fld.fold_path(c.path), args: c.args, id: c.id}
{path: fld.fold_path(c.path), args: c.args, id: fld.new_id(c.id)}
}
// ...nor do modules
@ -475,7 +504,7 @@ fn noop_fold_native_mod(nm: native_mod, fld: ast_fold) -> native_mod {
fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
fn fold_variant_arg_(va: variant_arg, fld: ast_fold) -> variant_arg {
ret {ty: fld.fold_ty(va.ty), id: va.id};
ret {ty: fld.fold_ty(va.ty), id: fld.new_id(va.id)};
}
let fold_variant_arg = bind fold_variant_arg_(_, fld);
let args = vec::map(v.args, fold_variant_arg);
@ -490,7 +519,7 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
};
ret {name: v.name,
attrs: attrs,
args: args, id: v.id,
args: args, id: fld.new_id(v.id),
disr_expr: de};
}
@ -513,7 +542,7 @@ fn noop_fold_local(l: local_, fld: ast_fold) -> local_ {
expr: fld.fold_expr(init.expr)})
}
},
id: l.id};
id: fld.new_id(l.id)};
}
/* temporarily eta-expand because of a compiler bug with using `fn<T>` as a
@ -621,7 +650,7 @@ fn make_fold(afp: ast_fold_precursor) -> ast_fold {
class_method(i) {
class_method(afp.fold_item(i, f))
}
}}, span: ci.span}
}}, span: afp.new_span(ci.span)}
}
fn f_item_underscore(afp: ast_fold_precursor, f: ast_fold, i: item_) ->
item_ {


@ -2280,9 +2280,6 @@ fn fn_expr_lookahead(tok: token::token) -> bool {
fn parse_item(p: parser, attrs: [ast::attribute]) -> option<@ast::item> {
if eat_word(p, "const") {
ret some(parse_item_const(p, attrs));
} else if eat_word(p, "inline") {
expect_word(p, "fn");
ret some(parse_item_fn(p, ast::impure_fn, attrs));
} else if is_word(p, "fn") && !fn_expr_lookahead(p.look_ahead(1u)) {
p.bump();
ret some(parse_item_fn(p, ast::impure_fn, attrs));


@ -89,6 +89,8 @@ fn stmt_to_str(s: ast::stmt) -> str { be to_str(s, print_stmt); }
fn item_to_str(i: @ast::item) -> str { be to_str(i, print_item); }
fn attr_to_str(i: ast::attribute) -> str { be to_str(i, print_attribute); }
fn typarams_to_str(tps: [ast::ty_param]) -> str {
be to_str(tps, print_type_params)
}

src/etc/gen-astencode (new executable file, 57 lines)

@ -0,0 +1,57 @@
#!/bin/sh
M=src/comp/metadata
GEN_TYPES="syntax::ast::item syntax::ast::def middle::typeck::method_origin \
middle::freevars::freevar_entry syntax::ast::def_id"
# Find serializer tool:
for S in build/*/stage2/bin/serializer; do
# Find rustc:
D=$(dirname "$S")
R="${D}/rustc"
if [ ! -x "$R" ]; then
echo "rustc not found or not executable at path '$R'"
exit 1
fi
echo "Generating src/comp/metadata/astencode_gen.rs"
# First, generate dummy fns so that the compiler can type
# everything.
echo "// TEMPORARY DEFINITIONS: re-run gen-astencode" \
> $M/astencode_gen.rs
for T in $GEN_TYPES; do
echo "fn serialize_${T//::/_}<S>(_s: S, _v: $T) {}" \
>> $M/astencode_gen.rs
echo "fn deserialize_${T//::/_}<S>(_s: S) -> $T { fail; }" \
>> $M/astencode_gen.rs
done
# Generate the real code into a temporary file.
if ! "$S" src/comp/rustc.rc $GEN_TYPES > tmp.$$.rs
then
echo ""
echo ""
echo "****************************************"
echo "* Compilation errors encountered *"
echo "* *"
echo "* Dummy versions of the AST encoder *"
echo "* have been left in astencode_gen.rs. *"
echo "* Fix the compilation errors and rerun *"
echo "* this script to generate the real *"
echo "* versions. *"
echo "****************************************"
rm tmp.$$.rs
exit 1
fi
# Copy over into the final destination and clean up.
"$R" --pretty normal tmp.$$.rs > $M/astencode_gen.rs
# rm -f tmp.$$.rs
exit 0
done
# If we made it this far, must not have found any
# serializer:
echo "serializer tool not found."

View File

@ -18,8 +18,11 @@ def report_err(s):
print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s))
err=1
file_names = [s for s in sys.argv[1:] if not s.endswith("_gen.rs")]
try:
for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")):
for line in fileinput.input(file_names,
openhook=fileinput.hook_encoded("utf-8")):
if (line.find('\t') != -1 and
fileinput.filename().find("Makefile") == -1):
report_err("tab character")

View File

@ -407,7 +407,6 @@ fn pop<T>(&v: [const T]) -> T unsafe {
val
}
#[inline]
/*
Function: push

View File

@ -6,11 +6,31 @@
import core::option;
import option::{some, none};
export doc;
export new_doc;
export doc_at;
export maybe_get_doc;
export get_doc;
export docs;
export tagged_docs;
export doc_data;
export doc_as_str;
export doc_as_u8;
export doc_as_u16;
export doc_as_u32;
export doc_as_u64;
export doc_as_i8;
export doc_as_i16;
export doc_as_i32;
export doc_as_i64;
export writer;
export mk_writer;
type ebml_tag = {id: uint, size: uint};
type ebml_state = {ebml_tag: ebml_tag, tag_pos: uint, data_pos: uint};
// TODO: When we have module renaming, make "reader" and "writer" separate
// modules within this file.
@ -19,35 +39,29 @@ type doc = {data: @[u8], start: uint, end: uint};
type tagged_doc = {tag: uint, doc: doc};
fn vu64_at(data: [u8], start: uint) -> {val: u64, next: uint} {
fn vuint_at(data: [u8], start: uint) -> {val: uint, next: uint} {
let a = data[start];
if a & 0x80u8 != 0u8 {
ret {val: (a & 0x7fu8) as u64, next: start + 1u};
ret {val: (a & 0x7fu8) as uint, next: start + 1u};
}
if a & 0x40u8 != 0u8 {
ret {val: ((a & 0x3fu8) as u64) << 8u64 |
(data[start + 1u] as u64),
ret {val: ((a & 0x3fu8) as uint) << 8u |
(data[start + 1u] as uint),
next: start + 2u};
} else if a & 0x20u8 != 0u8 {
ret {val: ((a & 0x1fu8) as u64) << 16u64 |
(data[start + 1u] as u64) << 8u64 |
(data[start + 2u] as u64),
ret {val: ((a & 0x1fu8) as uint) << 16u |
(data[start + 1u] as uint) << 8u |
(data[start + 2u] as uint),
next: start + 3u};
} else if a & 0x10u8 != 0u8 {
ret {val: ((a & 0x0fu8) as u64) << 24u64 |
(data[start + 1u] as u64) << 16u64 |
(data[start + 2u] as u64) << 8u64 |
(data[start + 3u] as u64),
ret {val: ((a & 0x0fu8) as uint) << 24u |
(data[start + 1u] as uint) << 16u |
(data[start + 2u] as uint) << 8u |
(data[start + 3u] as uint),
next: start + 4u};
} else { #error("vint too big"); fail; }
}
fn vuint_at(data: [u8], start: uint) -> {val: uint, next: uint} {
let {val, next} = vu64_at(data, start);
ret {val: val as uint, next: next};
}
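// A worked example of the variable-length format read by vuint_at()
// above: the high bits of the leading byte select the width (0x80 means
// one byte, 0x40 two, 0x20 three, 0x10 four), so 5u is stored as the
// single byte 0x85 (0x80 | 5), 300u is stored as [0x41u8, 0x2cu8]
// (0x40 | (300 >> 8), then 300 & 0xff), and vuint_at([0x41u8, 0x2cu8], 0u)
// returns {val: 300u, next: 2u}.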
fn new_doc(data: @[u8]) -> doc {
ret {data: data, start: 0u, end: vec::len::<u8>(*data)};
}
@ -77,7 +91,7 @@ fn get_doc(d: doc, tg: uint) -> doc {
alt maybe_get_doc(d, tg) {
some(d) { ret d; }
none {
#error("failed to find block with enum %u", tg);
#error("failed to find block with tag %u", tg);
fail;
}
}
@ -107,111 +121,144 @@ fn tagged_docs(d: doc, tg: uint, it: fn(doc)) {
fn doc_data(d: doc) -> [u8] { ret vec::slice::<u8>(*d.data, d.start, d.end); }
fn doc_str(d: doc) -> str { ret str::from_bytes(doc_data(d)); }
fn be_u64_from_bytes(data: @[u8], start: uint, size: uint) -> u64 {
let sz = size;
assert (sz <= 4u);
let val = 0_u64;
let pos = start;
while sz > 0u {
sz -= 1u;
val += (data[pos] as u64) << ((sz * 8u) as u64);
pos += 1u;
}
ret val;
}
fn doc_as_str(d: doc) -> str { ret str::from_bytes(doc_data(d)); }
fn doc_as_u8(d: doc) -> u8 {
assert d.end == d.start + 1u;
ret (*d.data)[d.start];
}
fn doc_as_vu64(d: doc) -> u64 {
ret vu64_at(*d.data, d.start).val;
fn doc_as_u16(d: doc) -> u16 {
assert d.end == d.start + 2u;
ret io::u64_from_be_bytes(*d.data, d.start, 2u) as u16;
}
fn doc_as_vuint(d: doc) -> uint {
ret vuint_at(*d.data, d.start).val;
fn doc_as_u32(d: doc) -> u32 {
assert d.end == d.start + 4u;
ret io::u64_from_be_bytes(*d.data, d.start, 4u) as u32;
}
fn doc_as_u64(d: doc) -> u64 {
assert d.end == d.start + 8u;
ret io::u64_from_be_bytes(*d.data, d.start, 8u);
}
fn doc_as_i8(d: doc) -> i8 { doc_as_u8(d) as i8 }
fn doc_as_i16(d: doc) -> i16 { doc_as_u16(d) as i16 }
fn doc_as_i32(d: doc) -> i32 { doc_as_u32(d) as i32 }
fn doc_as_i64(d: doc) -> i64 { doc_as_u64(d) as i64 }
// ebml writing
type writer = {writer: io::writer, mutable size_positions: [uint]};
fn write_sized_vu64(w: io::writer, n: u64, size: uint) {
fn write_sized_vuint(w: io::writer, n: uint, size: uint) {
let buf: [u8];
alt size {
1u { buf = [0x80u8 | (n as u8)]; }
2u { buf = [0x40u8 | ((n >> 8_u64) as u8), n as u8]; }
2u { buf = [0x40u8 | ((n >> 8_u) as u8), n as u8]; }
3u {
buf = [0x20u8 | ((n >> 16_u64) as u8), (n >> 8_u64) as u8,
buf = [0x20u8 | ((n >> 16_u) as u8), (n >> 8_u) as u8,
n as u8];
}
4u {
buf = [0x10u8 | ((n >> 24_u64) as u8), (n >> 16_u64) as u8,
(n >> 8_u64) as u8, n as u8];
buf = [0x10u8 | ((n >> 24_u) as u8), (n >> 16_u) as u8,
(n >> 8_u) as u8, n as u8];
}
_ { #error("vint to write too big"); fail; }
_ { fail #fmt("vint to write too big: %?", n); }
}
w.write(buf);
}
fn write_vu64(w: io::writer, n: u64) {
if n < 0x7f_u64 { write_sized_vu64(w, n, 1u); ret; }
if n < 0x4000_u64 { write_sized_vu64(w, n, 2u); ret; }
if n < 0x200000_u64 { write_sized_vu64(w, n, 3u); ret; }
if n < 0x10000000_u64 { write_sized_vu64(w, n, 4u); ret; }
#error("vint to write too big");
fail;
fn write_vuint(w: io::writer, n: uint) {
if n < 0x7f_u { write_sized_vuint(w, n, 1u); ret; }
if n < 0x4000_u { write_sized_vuint(w, n, 2u); ret; }
if n < 0x200000_u { write_sized_vuint(w, n, 3u); ret; }
if n < 0x10000000_u { write_sized_vuint(w, n, 4u); ret; }
fail #fmt("vint to write too big: %?", n);
}
fn create_writer(w: io::writer) -> writer {
fn mk_writer(w: io::writer) -> writer {
let size_positions: [uint] = [];
ret {writer: w, mutable size_positions: size_positions};
}
// TODO: Provide a function to write the standard ebml header.
fn start_tag(w: writer, tag_id: uint) {
#debug["Start tag %u", tag_id];
impl writer for writer {
fn start_tag(tag_id: uint) {
#debug["Start tag %u", tag_id];
// Write the enum ID:
write_vu64(w.writer, tag_id as u64);
// Write the enum ID:
write_vuint(self.writer, tag_id);
// Write a placeholder four-byte size.
w.size_positions += [w.writer.tell()];
let zeroes: [u8] = [0u8, 0u8, 0u8, 0u8];
w.writer.write(zeroes);
}
// Write a placeholder four-byte size.
self.size_positions += [self.writer.tell()];
let zeroes: [u8] = [0u8, 0u8, 0u8, 0u8];
self.writer.write(zeroes);
}
fn end_tag(w: writer) {
let last_size_pos = vec::pop::<uint>(w.size_positions);
let cur_pos = w.writer.tell();
w.writer.seek(last_size_pos as int, io::seek_set);
let size = (cur_pos - last_size_pos - 4u);
write_sized_vu64(w.writer, size as u64, 4u);
w.writer.seek(cur_pos as int, io::seek_set);
fn end_tag() {
let last_size_pos = vec::pop::<uint>(self.size_positions);
let cur_pos = self.writer.tell();
self.writer.seek(last_size_pos as int, io::seek_set);
let size = (cur_pos - last_size_pos - 4u);
write_sized_vuint(self.writer, size, 4u);
self.writer.seek(cur_pos as int, io::seek_set);
#debug["End tag (size = %u)", size];
}
#debug["End tag (size = %u)", size];
}
impl writer_util for writer {
fn wr_tag(tag_id: uint, blk: fn()) {
start_tag(self, tag_id);
self.start_tag(tag_id);
blk();
end_tag(self);
self.end_tag();
}
fn wr_vu64(id: u64) {
#debug["Write u64 0x%02x%02x",
(id >> 32u64) as uint,
(id & 0xFFFFFFFFu64) as uint];
write_vu64(self.writer, id);
fn wr_tagged_bytes(tag_id: uint, b: [u8]) {
write_vuint(self.writer, tag_id);
write_vuint(self.writer, vec::len(b));
self.writer.write(b);
}
fn wr_vuint(id: uint) {
#debug["Write uint: %u", id];
write_vu64(self.writer, id as u64);
fn wr_tagged_u64(tag_id: uint, v: u64) {
self.wr_tagged_bytes(tag_id, io::u64_to_be_bytes(v, 8u));
}
fn wr_tagged_u32(tag_id: uint, v: u32) {
self.wr_tagged_bytes(tag_id, io::u64_to_be_bytes(v as u64, 4u));
}
fn wr_tagged_u16(tag_id: uint, v: u16) {
self.wr_tagged_bytes(tag_id, io::u64_to_be_bytes(v as u64, 2u));
}
fn wr_tagged_u8(tag_id: uint, v: u8) {
self.wr_tagged_bytes(tag_id, [v]);
}
fn wr_tagged_i64(tag_id: uint, v: i64) {
self.wr_tagged_bytes(tag_id, io::u64_to_be_bytes(v as u64, 8u));
}
fn wr_tagged_i32(tag_id: uint, v: i32) {
self.wr_tagged_bytes(tag_id, io::u64_to_be_bytes(v as u64, 4u));
}
fn wr_tagged_i16(tag_id: uint, v: i16) {
self.wr_tagged_bytes(tag_id, io::u64_to_be_bytes(v as u64, 2u));
}
fn wr_tagged_i8(tag_id: uint, v: i8) {
self.wr_tagged_bytes(tag_id, [v as u8]);
}
fn wr_tagged_str(tag_id: uint, v: str) {
// Lame: can't use str::as_bytes() here because the resulting
// vector is NULL-terminated. Annoyingly, the underlying
// writer interface doesn't permit us to write a slice of a
// vector. We need first-class slices, I think.
// str::as_bytes(v) {|b| self.wr_tagged_bytes(tag_id, b); }
self.wr_tagged_bytes(tag_id, str::bytes(v));
}
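    // The wr_tagged_* methods above all share one framing: a vuint tag,
    // then a vuint byte length, then the payload bytes. For example,
    // wr_tagged_u16(3u, 258u16) writes the four bytes
    // 0x83 0x82 0x01 0x02 (tag 3, length 2, then 258 in big-endian order).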
fn wr_bytes(b: [u8]) {

View File

@ -366,19 +366,41 @@ fn mk_file_writer(path: str, flags: [fileflag])
}
}
fn uint_to_le_bytes(n: uint, size: uint) -> [u8] {
fn u64_to_le_bytes(n: u64, size: uint) -> [u8] {
let bytes: [u8] = [], i = size, n = n;
while i > 0u { bytes += [(n & 255u) as u8]; n >>= 8u; i -= 1u; }
while i > 0u {
bytes += [(n & 255_u64) as u8];
n >>= 8_u64;
i -= 1u;
}
ret bytes;
}
fn uint_to_be_bytes(n: uint, size: uint) -> [u8] {
fn u64_to_be_bytes(n: u64, size: uint) -> [u8] {
assert size <= 8u;
let bytes: [u8] = [];
let i = (size - 1u) as int;
while i >= 0 { bytes += [(n >> ((i * 8) as uint) & 255u) as u8]; i -= 1; }
let i = size;
while i > 0u {
let shift = ((i - 1u) * 8u) as u64;
bytes += [(n >> shift) as u8];
i -= 1u;
}
ret bytes;
}
fn u64_from_be_bytes(data: [u8], start: uint, size: uint) -> u64 {
let sz = size;
assert (sz <= 8u);
let val = 0_u64;
let pos = start;
while sz > 0u {
sz -= 1u;
val += (data[pos] as u64) << ((sz * 8u) as u64);
pos += 1u;
}
ret val;
}
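// For example, u64_to_be_bytes(258u64, 2u) yields [1u8, 2u8] and
// u64_from_be_bytes([1u8, 2u8], 0u, 2u) yields 258u64 again; for any
// value that fits in `size` bytes the two functions round-trip.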
impl writer_util for writer {
fn write_char(ch: char) {
if ch as uint < 128u {
@ -393,14 +415,36 @@ impl writer_util for writer {
fn write_uint(n: uint) { self.write(str::bytes(uint::to_str(n, 10u))); }
fn write_le_uint(n: uint, size: uint) {
self.write(uint_to_le_bytes(n, size));
self.write(u64_to_le_bytes(n as u64, size));
}
fn write_le_int(n: int, size: uint) {
self.write(uint_to_le_bytes(n as uint, size));
self.write(u64_to_le_bytes(n as u64, size));
}
fn write_be_uint(n: uint, size: uint) {
self.write(uint_to_be_bytes(n, size));
self.write(u64_to_be_bytes(n as u64, size));
}
fn write_be_int(n: int, size: uint) {
self.write(u64_to_be_bytes(n as u64, size));
}
fn write_be_u64(n: u64) { self.write(u64_to_be_bytes(n, 8u)); }
fn write_be_u32(n: u32) { self.write(u64_to_be_bytes(n as u64, 4u)); }
fn write_be_u16(n: u16) { self.write(u64_to_be_bytes(n as u64, 2u)); }
fn write_be_i64(n: i64) { self.write(u64_to_be_bytes(n as u64, 8u)); }
fn write_be_i32(n: i32) { self.write(u64_to_be_bytes(n as u64, 4u)); }
fn write_be_i16(n: i16) { self.write(u64_to_be_bytes(n as u64, 2u)); }
fn write_le_u64(n: u64) { self.write(u64_to_le_bytes(n, 8u)); }
fn write_le_u32(n: u32) { self.write(u64_to_le_bytes(n as u64, 4u)); }
fn write_le_u16(n: u16) { self.write(u64_to_le_bytes(n as u64, 2u)); }
fn write_le_i64(n: i64) { self.write(u64_to_le_bytes(n as u64, 8u)); }
fn write_le_i32(n: i32) { self.write(u64_to_le_bytes(n as u64, 4u)); }
fn write_le_i16(n: i16) { self.write(u64_to_le_bytes(n as u64, 2u)); }
fn write_u8(n: u8) { self.write([n]) }
}
fn file_writer(path: str, flags: [fileflag]) -> result::t<writer, str> {

View File

@ -5,15 +5,25 @@ Support code for serialization.
*/
import list::list;
import ebml::writer_util;
import ebml::writer;
iface serializer {
// Primitive types:
fn emit_nil();
fn emit_uint(v: uint);
fn emit_u64(v: u64);
fn emit_u32(v: u32);
fn emit_u16(v: u16);
fn emit_u8(v: u8);
fn emit_int(v: int);
fn emit_i64(v: i64);
fn emit_i32(v: i32);
fn emit_i16(v: i16);
fn emit_i8(v: i8);
fn emit_bool(v: bool);
fn emit_float(v: float);
fn emit_f64(v: f64);
fn emit_f32(v: f32);
fn emit_str(v: str);
// Compound types:
@ -33,12 +43,28 @@ iface serializer {
iface deserializer {
// Primitive types:
fn read_nil() -> ();
fn read_uint() -> uint;
fn read_u64() -> u64;
fn read_u32() -> u32;
fn read_u16() -> u16;
fn read_u8() -> u8;
fn read_int() -> int;
fn read_i64() -> i64;
fn read_i32() -> i32;
fn read_i16() -> i16;
fn read_i8() -> i8;
fn read_bool() -> bool;
fn read_f64() -> f64;
fn read_str() -> str;
fn read_f64() -> f64;
fn read_f32() -> f32;
fn read_float() -> float;
// Compound types:
fn read_enum<T:copy>(name: str, f: fn() -> T) -> T;
fn read_enum_variant<T:copy>(f: fn(uint) -> T) -> T;
@ -54,8 +80,11 @@ iface deserializer {
}
enum ebml_serializer_tag {
es_u64, es_i64, es_bool,
es_uint, es_u64, es_u32, es_u16, es_u8,
es_int, es_i64, es_i32, es_i16, es_i8,
es_bool,
es_str,
es_f64, es_f32, es_float,
es_enum, es_enum_vid, es_enum_body,
es_vec, es_vec_len, es_vec_elt
}
@ -63,38 +92,54 @@ enum ebml_serializer_tag {
impl of serializer for ebml::writer {
fn emit_nil() {}
fn emit_num(tag: ebml_serializer_tag, v: u64) {
self.wr_tag(tag as uint) {|| self.wr_vu64(v) }
// used internally to emit things like the vector length and so on
fn _emit_tagged_uint(t: ebml_serializer_tag, v: uint) {
assert v <= 0xFFFF_FFFF_u;
self.wr_tagged_u32(t as uint, v as u32);
}
fn emit_u64(v: u64) { self.emit_num(es_u64, v) }
fn emit_i64(v: i64) { self.emit_num(es_i64, v as u64) }
fn emit_bool(v: bool) { self.emit_num(es_bool, v as u64) }
fn emit_uint(v: uint) { self.wr_tagged_u64(es_uint as uint, v as u64); }
fn emit_u64(v: u64) { self.wr_tagged_u64(es_u64 as uint, v); }
fn emit_u32(v: u32) { self.wr_tagged_u32(es_u32 as uint, v); }
fn emit_u16(v: u16) { self.wr_tagged_u16(es_u16 as uint, v); }
fn emit_u8(v: u8) { self.wr_tagged_u8 (es_u8 as uint, v); }
fn emit_int(v: int) { self.wr_tagged_i64(es_int as uint, v as i64); }
fn emit_i64(v: i64) { self.wr_tagged_i64(es_i64 as uint, v); }
fn emit_i32(v: i32) { self.wr_tagged_i32(es_i32 as uint, v); }
fn emit_i16(v: i16) { self.wr_tagged_i16(es_i16 as uint, v); }
fn emit_i8(v: i8) { self.wr_tagged_i8 (es_i8 as uint, v); }
fn emit_bool(v: bool) { self.wr_tagged_u8(es_bool as uint, v as u8) }
fn emit_f64(_v: f64) { fail "TODO"; }
fn emit_str(v: str) { self.wr_tag(es_str as uint) {|| self.wr_str(v) } }
fn emit_f32(_v: f32) { fail "TODO"; }
fn emit_float(_v: float) { fail "TODO"; }
fn emit_str(v: str) { self.wr_tagged_str(es_str as uint, v) }
fn emit_enum(_name: str, f: fn()) {
self.wr_tag(es_enum as uint) {|| f() }
self.wr_tag(es_enum as uint, f)
}
fn emit_enum_variant(_v_name: str, v_id: uint, _cnt: uint, f: fn()) {
self.emit_num(es_enum_vid, v_id as u64);
self.wr_tag(es_enum_body as uint) {|| f() }
self._emit_tagged_uint(es_enum_vid, v_id);
self.wr_tag(es_enum_body as uint, f)
}
fn emit_enum_variant_arg(_idx: uint, f: fn()) { f() }
fn emit_vec(len: uint, f: fn()) {
self.wr_tag(es_vec as uint) {||
self.emit_num(es_vec_len, len as u64);
self._emit_tagged_uint(es_vec_len, len);
f()
}
}
fn emit_vec_elt(_idx: uint, f: fn()) {
self.wr_tag(es_vec_elt as uint) {|| f() }
self.wr_tag(es_vec_elt as uint, f)
}
fn emit_box(f: fn()) { f() }
@ -145,18 +190,45 @@ impl of deserializer for ebml_deserializer {
ret r;
}
fn next_u64(exp_tag: ebml_serializer_tag) -> u64 {
let r = ebml::doc_as_vu64(self.next_doc(exp_tag));
#debug["next_u64 exp_tag=%? result=%?", exp_tag, r];
ret r;
fn _next_uint(exp_tag: ebml_serializer_tag) -> uint {
let r = ebml::doc_as_u32(self.next_doc(exp_tag));
#debug["_next_uint exp_tag=%? result=%?", exp_tag, r];
ret r as uint;
}
fn read_nil() -> () { () }
fn read_u64() -> u64 { self.next_u64(es_u64) }
fn read_i64() -> i64 { self.next_u64(es_i64) as i64 }
fn read_bool() -> bool { self.next_u64(es_bool) as bool }
fn read_f64() -> f64 { fail "Float"; }
fn read_str() -> str { ebml::doc_str(self.next_doc(es_str)) }
fn read_u64() -> u64 { ebml::doc_as_u64(self.next_doc(es_u64)) }
fn read_u32() -> u32 { ebml::doc_as_u32(self.next_doc(es_u32)) }
fn read_u16() -> u16 { ebml::doc_as_u16(self.next_doc(es_u16)) }
fn read_u8 () -> u8 { ebml::doc_as_u8 (self.next_doc(es_u8 )) }
fn read_uint() -> uint {
let v = ebml::doc_as_u64(self.next_doc(es_uint));
if v > (uint::max_value as u64) {
fail #fmt["uint %? too large for this architecture", v];
}
ret v as uint;
}
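    // The check in read_uint() above matters on 32-bit builds: a stored
    // value of 0x100000000u64 exceeds uint::max_value there and fails
    // loudly instead of being silently truncated to 0u.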
fn read_i64() -> i64 { ebml::doc_as_u64(self.next_doc(es_i64)) as i64 }
fn read_i32() -> i32 { ebml::doc_as_u32(self.next_doc(es_i32)) as i32 }
fn read_i16() -> i16 { ebml::doc_as_u16(self.next_doc(es_i16)) as i16 }
fn read_i8 () -> i8 { ebml::doc_as_u8 (self.next_doc(es_i8 )) as i8 }
fn read_int() -> int {
let v = ebml::doc_as_u64(self.next_doc(es_int)) as i64;
if v > (int::max_value as i64) || v < (int::min_value as i64) {
fail #fmt["int %? out of range for this architecture", v];
}
ret v as int;
}
fn read_bool() -> bool { ebml::doc_as_u8(self.next_doc(es_bool)) as bool }
fn read_f64() -> f64 { fail "read_f64()"; }
fn read_f32() -> f32 { fail "read_f32()"; }
fn read_float() -> float { fail "read_float()"; }
fn read_str() -> str { ebml::doc_as_str(self.next_doc(es_str)) }
// Compound types:
fn read_enum<T:copy>(_name: str, f: fn() -> T) -> T {
@ -164,7 +236,7 @@ impl of deserializer for ebml_deserializer {
}
fn read_enum_variant<T:copy>(f: fn(uint) -> T) -> T {
let idx = self.next_u64(es_enum_vid) as uint;
let idx = self._next_uint(es_enum_vid);
self.push_doc(self.next_doc(es_enum_body)) {||
f(idx)
}
@ -176,7 +248,7 @@ impl of deserializer for ebml_deserializer {
fn read_vec<T:copy>(f: fn(uint) -> T) -> T {
self.push_doc(self.next_doc(es_vec)) {||
let len = self.next_u64(es_vec_len) as uint;
let len = self._next_uint(es_vec_len);
f(len)
}
}
@ -210,6 +282,36 @@ impl of deserializer for ebml_deserializer {
}
}
// ___________________________________________________________________________
// Helper routines
//
// These should eventually be coded as traits.
impl serializer_helpers<S: serializer> for S {
fn emit_from_vec<T>(v: [T], f: fn(T)) {
self.emit_vec(vec::len(v)) {||
vec::iteri(v) {|i,e|
self.emit_vec_elt(i) {||
f(e)
}
}
}
}
}
impl deserializer_helpers<D: deserializer> for D {
fn read_to_vec<T>(f: fn() -> T) -> [T] {
self.read_vec {|len|
let v = [];
vec::reserve(v, len);
uint::range(0u, len) {|i|
self.read_vec_elt(i) {|| v += [f()] }
}
v
}
}
}
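// For example, s.emit_from_vec([5u, 6u]) {|e| s.emit_uint(e) } expands to
//   s.emit_vec(2u) {||
//       s.emit_vec_elt(0u) {|| s.emit_uint(5u) }
//       s.emit_vec_elt(1u) {|| s.emit_uint(6u) }
//   }
// and d.read_to_vec {|| d.read_uint() } walks the same nesting on the
// way back in to rebuild [5u, 6u].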
// ___________________________________________________________________________
// Testing
@ -257,7 +359,7 @@ fn test_option_int() {
fn test_v(v: option<int>) {
#debug["v == %?", v];
let mbuf = io::mk_mem_buffer();
let ebml_w = ebml::create_writer(io::mem_buffer_writer(mbuf));
let ebml_w = ebml::mk_writer(io::mem_buffer_writer(mbuf));
serialize_0(ebml_w, v);
let ebml_doc = ebml::new_doc(@io::mem_buffer_buf(mbuf));
let deser = mk_ebml_deserializer(ebml_doc);

View File

@ -12,7 +12,7 @@ export c_vec, four, tri, util;
export bitv, deque, fun_treemap, list, map, smallintmap, sort, treemap, ufind;
export rope;
export ebml, dbg, getopts, json, rand, sha1, term, time;
export test, tempfile;
export test, tempfile, serialization;
// FIXME: generic_os and os_fs shouldn't be exported
export generic_os, os, os_fs;
@ -111,5 +111,4 @@ mod os_fs;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C .. 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:

View File

@ -56,7 +56,8 @@ type serialize_ctx = {
crate: @ast::crate,
tcx: ty::ctxt,
tyfns: hashmap<ty::t, str>,
serialize_tyfns: hashmap<ty::t, str>,
deserialize_tyfns: hashmap<ty::t, str>,
mutable item_fns: [ast_item],
mutable constants: [str]
@ -84,19 +85,10 @@ fn lookup(_mod: ast::_mod, idx: uint, names: [str]) -> @ast::item {
}
impl serialize_ctx for serialize_ctx {
// fn session() -> parser::parse_sess { self.psess }
fn add_item(item: ast_item) {
self.item_fns += [item];
}
fn mk_serialize_named_item_fn(name: str) -> str {
let names = str::split_str(name, "::");
let item = lookup(self.crate.node.module, 0u, names);
let def_id = {crate: ast::local_crate, node: item.id};
self.mk_serialize_item_fn(def_id, [])
}
fn tp_map(ty_params: [ast::ty_param], tps: [ty::t]) -> tp_map {
assert vec::len(tps) == vec::len(ty_params);
let tps_map = new_int_hash();
@ -123,10 +115,51 @@ impl serialize_ctx for serialize_ctx {
};
}
fn mk_serialize_item_fn(id: ast::def_id,
tps: [ty::t]) -> str {
let item_ty = self.instantiate(id, tps);
self.mk_serialize_ty_fn(item_ty)
fn memoize(map: hashmap<ty::t, str>, base_name: str,
ty0: ty::t, mk_fn: fn(str)) -> str {
// check for existing function
alt map.find(ty0) {
some(name) { ret name; }
none { /* fallthrough */ }
}
// define the name and insert into the hashtable
// in case of recursive calls:
let id = map.size();
let name = #fmt["%s_%u", base_name, id];
map.insert(ty0, name);
mk_fn(name);
ret name;
}
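    // Registering the name in the map before calling mk_fn is what makes
    // recursive types terminate: if ty0 mentions itself (for instance
    // through a recursive enum), the nested serialize_ty()/
    // deserialize_ty() call finds the name already present and simply
    // emits a call to it instead of recursing forever.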
fn exec_named_item_fn(name: str, f: fn(ty::t) -> str) -> str {
let names = str::split_str(name, "::");
let item = lookup(self.crate.node.module, 0u, names);
let def_id = {crate: ast::local_crate, node: item.id};
let item_ty = self.instantiate(def_id, []);
f(item_ty)
}
}
impl serialize_methods for serialize_ctx {
// fn session() -> parser::parse_sess { self.psess }
fn mk_serialize_named_item_fn(name: str) -> str {
self.exec_named_item_fn(name) {|item_ty|
let fname = self.mk_serialize_ty_fn(item_ty);
let ty_str = ppaux::ty_to_str(self.tcx, item_ty);
check str::is_not_empty("::");
let namep = str::replace(name, "::", "_");
let item = #fmt["fn serialize_%s\
<S:std::serialization::serializer>\n\
(s: S, v: %s) {\n\
%s(s, v);\n\
}", namep, ty_str, fname];
self.add_item(item);
fname
}
}
fn blk(stmts: [ast_stmt]) -> ast_blk {
@ -145,59 +178,75 @@ impl serialize_ctx for serialize_ctx {
// Returns an AST fragment that names this function.
fn serialize_ty(ty0: ty::t, v: ast_expr) -> ast_expr {
let fname = self.mk_serialize_ty_fn(ty0);
#fmt["%s(cx, %s)", fname, v]
let ty0_str = ppaux::ty_to_str(self.tcx, ty0);
#fmt["/*%s*/ %s(s, %s)", ty0_str, fname, v]
}
fn mk_serialize_ty_fn(ty0: ty::t) -> str {
// check for existing function
alt self.tyfns.find(ty0) {
some(name) { ret name; }
none { /* fallthrough */ }
self.memoize(self.serialize_tyfns, "serialize", ty0) {|name|
self.mk_serialize_ty_fn0(ty0, name)
}
}
// define the name and insert into the hashtable
// in case of recursive calls:
let id = self.tyfns.size();
fn mk_serialize_ty_fn0(ty0: ty::t, name: str) {
let ty0_str = ppaux::ty_to_str(self.tcx, ty0);
#debug["ty0_str = %s / ty0 = %?", ty0_str, ty0];
let name = #fmt["serialize_%u /*%s*/", id, ty0_str];
self.tyfns.insert(ty0, name);
let v = "v";
let body_node = alt ty::get(ty0).struct {
ty::ty_nil | ty::ty_bot { "()" }
ty::ty_int(_) { #fmt["s.emit_i64(%s as i64)", v] }
ty::ty_uint(_) { #fmt["s.emit_u64(%s as u64)", v] }
ty::ty_float(_) { #fmt["s.emit_f64(%s as f64)", v] }
ty::ty_bool { #fmt["s.emit_bool(%s)", v] }
ty::ty_str { #fmt["s.emit_str(%s)", v] }
ty::ty_int(ast::ty_i) { #fmt["\ns.emit_int(%s)\n", v] }
ty::ty_int(ast::ty_i64) { #fmt["\ns.emit_i64(%s)\n", v] }
ty::ty_int(ast::ty_i32) { #fmt["\ns.emit_i32(%s)\n", v] }
ty::ty_int(ast::ty_i16) { #fmt["\ns.emit_i16(%s)\n", v] }
ty::ty_int(ast::ty_i8) { #fmt["\ns.emit_i8(%s)\n", v] }
ty::ty_int(ast::ty_char) { #fmt["\ns.emit_i8(%s as i8)\n", v] }
ty::ty_uint(ast::ty_u) { #fmt["\ns.emit_uint(%s)\n", v] }
ty::ty_uint(ast::ty_u64) { #fmt["\ns.emit_u64(%s)\n", v] }
ty::ty_uint(ast::ty_u32) { #fmt["\ns.emit_u32(%s)\n", v] }
ty::ty_uint(ast::ty_u16) { #fmt["\ns.emit_u16(%s)\n", v] }
ty::ty_uint(ast::ty_u8) { #fmt["\ns.emit_u8(%s)\n", v] }
ty::ty_float(ast::ty_f64) { #fmt["\ns.emit_f64(%s)\n", v] }
ty::ty_float(ast::ty_f32) { #fmt["\ns.emit_f32(%s)\n", v] }
ty::ty_float(ast::ty_f) { #fmt["\ns.emit_float(%s)\n", v] }
ty::ty_bool { #fmt["\ns.emit_bool(%s)\n", v] }
ty::ty_str { #fmt["\ns.emit_str(%s)\n", v] }
ty::ty_enum(def_id, tps) { self.serialize_enum(v, def_id, tps) }
ty::ty_box(mt) {
let s = self.serialize_ty(mt.ty, #fmt["*%s", v]);
#fmt["s.emit_box({||%s})", s]
let s = self.serialize_ty(mt.ty, #fmt["\n*%s\n", v]);
#fmt["\ns.emit_box({||%s})\n", s]
}
ty::ty_uniq(mt) {
let s = self.serialize_ty(mt.ty, #fmt["*%s", v]);
#fmt["s.emit_uniq({||%s})", s]
let s = self.serialize_ty(mt.ty, #fmt["\n*%s\n", v]);
#fmt["\ns.emit_uniq({||%s})\n", s]
}
ty::ty_vec(mt) {
let selem = self.serialize_ty(mt.ty, "i");
#fmt["s.emit_vec(vec::len(v), {|| \
uint::range(0, vec::len(v), {|i| \
s.emit_vec_elt(i, {||\
%s;\
})})})", selem]
let selem = self.serialize_ty(mt.ty, "e");
#fmt["\ns.emit_vec(vec::len(v), {||\n\
vec::iteri(v, {|i, e|\n\
s.emit_vec_elt(i, {||\n\
%s\n\
})})})\n", selem]
}
ty::ty_class(_, _) {
fail "TODO--implement class";
}
ty::ty_rec(fields) {
let stmts = vec::map(fields) {|field|
let stmts = vec::init_fn(vec::len(fields)) {|i|
let field = fields[i];
let f_name = field.ident;
let f_ty = field.mt.ty;
self.serialize_ty(f_ty, #fmt["%s.%s", v, f_name])
let efld = self.serialize_ty(f_ty, #fmt["\n%s.%s\n", v, f_name]);
#fmt["\ns.emit_rec_field(\"%s\", %uu, {||%s})\n",
f_name, i, efld]
};
#fmt["s.emit_rec({||%s})", self.blk_expr(stmts)]
#fmt["\ns.emit_rec({||%s})\n", self.blk_expr(stmts)]
}
ty::ty_tup(tys) {
let (pat, stmts) = self.serialize_arm("", "emit_tup_elt", tys);
@ -219,12 +268,13 @@ impl serialize_ctx for serialize_ctx {
}
};
let item = #fmt["fn %s<S:std::serialization::serializer>\
(s: S, v: %s) {\
%s;\
}", name, ty0_str, body_node];
let item = #fmt["/*%s*/ fn %s\n\
<S:std::serialization::serializer>\n\
(s: S,\n\
v: %s) {\n\
%s;\n\
}", ty0_str, name, ty0_str, body_node];
self.add_item(item);
ret name;
}
fn serialize_enum(v: ast_expr,
@ -249,19 +299,19 @@ impl serialize_ctx for serialize_ctx {
let v_id = idx;
idx += 1u;
#fmt["%s { \
s.emit_enum_variant(\"%s\", %uu, %uu) {||\
%s \
} \
#fmt["%s {\n\
s.emit_enum_variant(\"%s\", %uu, %uu, {||\n\
%s\n\
})\n\
}", v_pat, v_path, v_id, n_args, self.blk(stmts)]
};
let enum_name = ast_map::path_to_str(ty::item_path(self.tcx, id));
#fmt["s.emit_enum(\"%s\") {||\
alt %s { \
%s \
}\
}", enum_name, v, str::connect(arms, "\n")]
#fmt["\ns.emit_enum(\"%s\", {||\n\
alt %s {\n\
%s\n\
}\n\
})\n", enum_name, v, str::connect(arms, "\n")]
}
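    // Roughly, serialize_enum above turns each variant into one alt arm.
    // For a hypothetical variant `circle(int)` at index 1u the arm reads
    // (paths abbreviated, blk braces elided):
    //   circle(v0) {
    //     s.emit_enum_variant("circle", 1u, 1u, {||
    //       s.emit_enum_variant_arg(0u, {|| serialize_3(s, v0) })
    //     })
    //   }
    // where serialize_3 stands in for whichever memoized function the
    // int argument was assigned, and all arms end up wrapped in
    // s.emit_enum("shape", {|| alt v { ... } }).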
fn serialize_arm(v_path: str, emit_fn: str, args: [ty::t])
@ -269,17 +319,177 @@ impl serialize_ctx for serialize_ctx {
let n_args = vec::len(args);
let arg_nms = vec::init_fn(n_args) {|i| #fmt["v%u", i] };
let v_pat =
#fmt["%s(%s)", v_path, str::connect(arg_nms, ", ")];
#fmt["\n%s(%s)\n", v_path, str::connect(arg_nms, ",")];
let stmts = vec::init_fn(n_args) {|i|
let arg_ty = args[i];
let serialize_expr =
self.serialize_ty(arg_ty, arg_nms[i]);
#fmt["s.%s(%uu, {|| %s })", emit_fn, i, serialize_expr]
#fmt["\ns.%s(%uu, {||\n%s\n})\n", emit_fn, i, serialize_expr]
};
(v_pat, stmts)
}
}
impl deserialize_methods for serialize_ctx {
fn mk_deserialize_named_item_fn(name: str) -> str {
self.exec_named_item_fn(name) {|item_ty|
let fname = self.mk_deserialize_ty_fn(item_ty);
let ty_str = ppaux::ty_to_str(self.tcx, item_ty);
check str::is_not_empty("::");
let namep = str::replace(name, "::", "_");
let item = #fmt["fn deserialize_%s\
<S:std::serialization::deserializer>\n\
(s: S) -> %s {\n\
%s(s)\
}", namep, ty_str, fname];
self.add_item(item);
fname
}
}
// Generates a function to deserialize the given type.
// Returns an AST fragment that names this function.
fn deserialize_ty(ty0: ty::t) -> ast_expr {
let fname = self.mk_deserialize_ty_fn(ty0);
let ty0_str = ppaux::ty_to_str(self.tcx, ty0);
#fmt["\n/*%s*/ %s(s)\n", ty0_str, fname]
}
fn mk_deserialize_ty_fn(ty0: ty::t) -> str {
self.memoize(self.deserialize_tyfns, "deserialize", ty0) {|name|
self.mk_deserialize_ty_fn0(ty0, name)
}
}
fn mk_deserialize_ty_fn0(ty0: ty::t, name: str) {
let ty0_str = ppaux::ty_to_str(self.tcx, ty0);
let body_node = alt ty::get(ty0).struct {
ty::ty_nil | ty::ty_bot { "()" }
ty::ty_int(ast::ty_i) { #fmt["s.read_int()"] }
ty::ty_int(ast::ty_i64) { #fmt["s.read_i64()"] }
ty::ty_int(ast::ty_i32) { #fmt["s.read_i32()"] }
ty::ty_int(ast::ty_i16) { #fmt["s.read_i16()"] }
ty::ty_int(ast::ty_i8) { #fmt["s.read_i8()"] }
ty::ty_int(ast::ty_char) { #fmt["s.read_char()"] }
ty::ty_uint(ast::ty_u) { #fmt["s.read_uint()"] }
ty::ty_uint(ast::ty_u64) { #fmt["s.read_u64()"] }
ty::ty_uint(ast::ty_u32) { #fmt["s.read_u32()"] }
ty::ty_uint(ast::ty_u16) { #fmt["s.read_u16()"] }
ty::ty_uint(ast::ty_u8) { #fmt["s.read_u8()"] }
ty::ty_float(ast::ty_f64) { #fmt["s.read_f64()"] }
ty::ty_float(ast::ty_f32) { #fmt["s.read_f32()"] }
ty::ty_float(ast::ty_f) { #fmt["s.read_float()"] }
ty::ty_bool { #fmt["s.read_bool()"] }
ty::ty_str { #fmt["s.read_str()"] }
ty::ty_enum(def_id, tps) { self.deserialize_enum(def_id, tps) }
ty::ty_box(mt) {
let s = self.deserialize_ty(mt.ty);
#fmt["\ns.read_box({||@%s})\n", s]
}
ty::ty_uniq(mt) {
let s = self.deserialize_ty(mt.ty);
#fmt["\ns.read_uniq({||~%s})\n", s]
}
ty::ty_vec(mt) {
let selem = self.deserialize_ty(mt.ty);
#fmt["s.read_vec({|len|\n\
vec::init_fn(len, {|i|\n\
s.read_vec_elt(i, {||\n\
%s\n\
})})})", selem]
}
ty::ty_class(_, _) {
fail "TODO--implement class";
}
ty::ty_rec(fields) {
let i = 0u;
let flds = vec::map(fields) {|field|
let f_name = field.ident;
let f_ty = field.mt.ty;
let rfld = self.deserialize_ty(f_ty);
let idx = i;
i += 1u;
#fmt["\n%s: s.read_rec_field(\"%s\", %uu, {||\n%s\n})\n",
f_name, f_name, idx, rfld]
};
#fmt["\ns.read_rec({||{\n%s\n}})\n", str::connect(flds, ",")]
}
ty::ty_tup(tys) {
let rexpr = self.deserialize_arm("", "read_tup_elt", tys);
#fmt["\ns.read_tup(%uu, {||\n%s\n})\n", vec::len(tys), rexpr]
}
ty::ty_constr(t, _) {
self.deserialize_ty(t)
}
ty::ty_ptr(_) |
ty::ty_fn(_) |
ty::ty_iface(_, _) |
ty::ty_res(_, _, _) |
ty::ty_var(_) | ty::ty_param(_, _) |
ty::ty_self(_) | ty::ty_type | ty::ty_send_type |
ty::ty_opaque_closure_ptr(_) | ty::ty_opaque_box {
fail #fmt["Unhandled type %s", ty0_str]
}
};
let item = #fmt["/*%s*/\n\
fn %s\n\
<S:std::serialization::deserializer>(s: S)\n\
-> %s {\n\
%s\n\
}", ty0_str, name, ty0_str, body_node];
self.add_item(item);
}
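    // As a sketch, mk_deserialize_ty_fn0 above, fed a hypothetical record
    // type {x: int, y: uint}, emits roughly:
    //   fn deserialize_0
    //   <S:std::serialization::deserializer>(s: S)
    //   -> {x: int, y: uint} {
    //       s.read_rec({||{
    //           x: s.read_rec_field("x", 0u, {|| deserialize_1(s) }),
    //           y: s.read_rec_field("y", 1u, {|| deserialize_2(s) })
    //       }})
    //   }
    // with deserialize_1 and deserialize_2 being the memoized helpers
    // whose bodies are s.read_int() and s.read_uint() respectively.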
fn deserialize_enum(id: ast::def_id,
tps: [ty::t]) -> ast_expr {
let variants = ty::substd_enum_variants(self.tcx, id, tps);
let arms = vec::init_fn(vec::len(variants)) {|v_id|
let variant = variants[v_id];
let item_path = ty::item_path(self.tcx, variant.id);
let v_path = ast_map::path_to_str(item_path);
let n_args = vec::len(variant.args);
let rexpr = {
if n_args == 0u {
#fmt["\n%s\n", v_path]
} else {
self.deserialize_arm(v_path, "read_enum_variant_arg",
variant.args)
}
};
#fmt["\n%uu { %s }\n", v_id, rexpr]
};
let enum_name = ast_map::path_to_str(ty::item_path(self.tcx, id));
#fmt["s.read_enum(\"%s\", {||\n\
s.read_enum_variant({|v_id|\n\
alt check v_id {\n\
%s\n\
}\n\
})})", enum_name, str::connect(arms, "\n")]
}
fn deserialize_arm(v_path: str, read_fn: str, args: [ty::t])
-> ast_expr {
let exprs = vec::init_fn(vec::len(args)) {|i|
let rexpr = self.deserialize_ty(args[i]);
#fmt["\ns.%s(%uu, {||%s})\n", read_fn, i, rexpr]
};
#fmt["\n%s(%s)\n", v_path, str::connect(exprs, ",")]
}
}
fn main(argv: [str]) {
let {crate, tcx, roots} = parse(argv);
let sctx: serialize_ctx = {
@ -294,13 +504,15 @@ fn main(argv: [str]) {
// };
{crate: crate,
tcx: tcx,
tyfns: ty::new_ty_hash::<str>(),
serialize_tyfns: ty::new_ty_hash::<str>(),
deserialize_tyfns: ty::new_ty_hash::<str>(),
mutable item_fns: [],
mutable constants: []}
};
vec::iter(roots) {|root|
sctx.mk_serialize_named_item_fn(root);
sctx.mk_deserialize_named_item_fn(root);
}
let stdout = io::stdout();