Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-23 07:14:28 +00:00
librustc: Change most uses of &fn() to ||.
parent 492677ec1e
commit 7e3f20133a
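
The hunks below apply one mechanical rewrite across librustc: the borrowed ("stack") closure type, previously written with the &fn sigil, is now written with the bar notation, so &fn(A) -> B becomes |A| -> B and a bare &fn() becomes ||. As a minimal sketch of the pattern, here is one signature taken from the metadata decoder changes below, in the pre-1.0 Rust syntax this commit targets (it will not compile on modern Rust):

    // Old spelling: the callback is a borrowed closure, written with the &fn sigil.
    pub fn each_lang_item(cdata: Cmd, f: &fn(ast::NodeId, uint) -> bool) -> bool

    // New spelling: the same closure type written with the || notation.
    pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool

The change is almost entirely to closure type syntax in signatures; function bodies are left as they were apart from reformatting.
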
@@ -386,8 +386,7 @@ pub fn basic_options() -> @options {
}

// Seems out of place, but it uses session, so I'm putting it here
pub fn expect<T:Clone>(sess: Session, opt: Option<T>, msg: &fn() -> ~str)
-> T {
pub fn expect<T:Clone>(sess: Session, opt: Option<T>, msg: || -> ~str) -> T {
diagnostic::expect(sess.diagnostic(), opt, msg)
}

@@ -42,7 +42,7 @@ impl<'self> fold::ast_fold for Context<'self> {
}

pub fn strip_items(crate: ast::Crate,
in_cfg: &fn(attrs: &[ast::Attribute]) -> bool)
in_cfg: |attrs: &[ast::Attribute]| -> bool)
-> ast::Crate {
let ctxt = Context {
in_cfg: in_cfg,

@@ -44,7 +44,8 @@ pub fn get_type_param_count(cstore: @mut cstore::CStore, def: ast::DefId)
/// Iterates over all the language items in the given crate.
pub fn each_lang_item(cstore: @mut cstore::CStore,
cnum: ast::CrateNum,
f: &fn(ast::NodeId, uint) -> bool) -> bool {
f: |ast::NodeId, uint| -> bool)
-> bool {
let crate_data = cstore::get_crate_data(cstore, cnum);
decoder::each_lang_item(crate_data, f)
}

@@ -52,8 +53,9 @@ pub fn each_lang_item(cstore: @mut cstore::CStore,
/// Iterates over each child of the given item.
pub fn each_child_of_item(cstore: @mut cstore::CStore,
def_id: ast::DefId,
callback: &fn(decoder::DefLike, ast::Ident,
ast::visibility)) {
callback: |decoder::DefLike,
ast::Ident,
ast::visibility|) {
let crate_data = cstore::get_crate_data(cstore, def_id.crate);
let get_crate_data: decoder::GetCrateDataCb = |cnum| {
cstore::get_crate_data(cstore, cnum)

@@ -68,9 +70,9 @@ pub fn each_child_of_item(cstore: @mut cstore::CStore,
/// Iterates over each top-level crate item.
pub fn each_top_level_item_of_crate(cstore: @mut cstore::CStore,
cnum: ast::CrateNum,
callback: &fn(decoder::DefLike,
ast::Ident,
ast::visibility)) {
callback: |decoder::DefLike,
ast::Ident,
ast::visibility|) {
let crate_data = cstore::get_crate_data(cstore, cnum);
let get_crate_data: decoder::GetCrateDataCb = |cnum| {
cstore::get_crate_data(cstore, cnum)

@@ -178,7 +180,7 @@ pub fn get_static_methods_if_impl(cstore: @mut cstore::CStore,

pub fn get_item_attrs(cstore: @mut cstore::CStore,
def_id: ast::DefId,
f: &fn(~[@ast::MetaItem])) {
f: |~[@ast::MetaItem]|) {
let cdata = cstore::get_crate_data(cstore, def_id.crate);
decoder::get_item_attrs(cdata, def_id.node, f)
}

@@ -262,21 +264,21 @@ pub fn get_item_visibility(cstore: @mut cstore::CStore,

pub fn each_impl(cstore: @mut cstore::CStore,
crate_num: ast::CrateNum,
callback: &fn(ast::DefId)) {
callback: |ast::DefId|) {
let cdata = cstore::get_crate_data(cstore, crate_num);
decoder::each_impl(cdata, callback)
}

pub fn each_implementation_for_type(cstore: @mut cstore::CStore,
def_id: ast::DefId,
callback: &fn(ast::DefId)) {
callback: |ast::DefId|) {
let cdata = cstore::get_crate_data(cstore, def_id.crate);
decoder::each_implementation_for_type(cdata, def_id.node, callback)
}

pub fn each_implementation_for_trait(cstore: @mut cstore::CStore,
def_id: ast::DefId,
callback: &fn(ast::DefId)) {
callback: |ast::DefId|) {
let cdata = cstore::get_crate_data(cstore, def_id.crate);
decoder::each_implementation_for_trait(cdata, def_id.node, callback)
}

@@ -82,8 +82,7 @@ pub fn have_crate_data(cstore: &CStore, cnum: ast::CrateNum) -> bool {
cstore.metas.contains_key(&cnum)
}

pub fn iter_crate_data(cstore: &CStore,
i: &fn(ast::CrateNum, @crate_metadata)) {
pub fn iter_crate_data(cstore: &CStore, i: |ast::CrateNum, @crate_metadata|) {
for (&k, &v) in cstore.metas.iter() {
i(k, v);
}

@@ -51,7 +51,7 @@ type Cmd = @crate_metadata;
// what crate that's in and give us a def_id that makes sense for the current
// build.

fn lookup_hash(d: ebml::Doc, eq_fn: &fn(x:&[u8]) -> bool, hash: u64) ->
fn lookup_hash(d: ebml::Doc, eq_fn: |&[u8]| -> bool, hash: u64) ->
Option<ebml::Doc> {
let index = reader::get_doc(d, tag_index);
let table = reader::get_doc(index, tag_index_table);

@@ -205,7 +205,7 @@ fn get_provided_source(d: ebml::Doc, cdata: Cmd) -> Option<ast::DefId> {
}
}

fn each_reexport(d: ebml::Doc, f: &fn(ebml::Doc) -> bool) -> bool {
fn each_reexport(d: ebml::Doc, f: |ebml::Doc| -> bool) -> bool {
reader::tagged_docs(d, tag_items_data_item_reexport, f)
}

@@ -509,7 +509,7 @@ pub fn def_like_to_def(def_like: DefLike) -> ast::Def {
}

/// Iterates over the language items in the given crate.
pub fn each_lang_item(cdata: Cmd, f: &fn(ast::NodeId, uint) -> bool) -> bool {
pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool {
let root = reader::Doc(cdata.data);
let lang_items = reader::get_doc(root, tag_lang_items);
do reader::tagged_docs(lang_items, tag_lang_items_item) |item_doc| {

@@ -733,8 +733,9 @@ fn each_child_of_item_or_crate(intr: @ident_interner,
cdata: Cmd,
item_doc: ebml::Doc,
get_crate_data: GetCrateDataCb,
callback: &fn(DefLike, ast::Ident,
ast::visibility)) {
callback: |DefLike,
ast::Ident,
ast::visibility|) {
// Iterate over all children.
let _ = do reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
let child_def_id = reader::with_doc_data(child_info_doc,

@@ -861,7 +862,7 @@ pub fn each_child_of_item(intr: @ident_interner,
cdata: Cmd,
id: ast::NodeId,
get_crate_data: GetCrateDataCb,
callback: &fn(DefLike, ast::Ident, ast::visibility)) {
callback: |DefLike, ast::Ident, ast::visibility|) {
// Find the item.
let root_doc = reader::Doc(cdata.data);
let items = reader::get_doc(root_doc, tag_items);

@@ -881,8 +882,9 @@ pub fn each_child_of_item(intr: @ident_interner,
pub fn each_top_level_item_of_crate(intr: @ident_interner,
cdata: Cmd,
get_crate_data: GetCrateDataCb,
callback: &fn(DefLike, ast::Ident,
ast::visibility)) {
callback: |DefLike,
ast::Ident,
ast::visibility|) {
let root_doc = reader::Doc(cdata.data);
let misc_info_doc = reader::get_doc(root_doc, tag_misc_info);
let crate_items_doc = reader::get_doc(misc_info_doc,

@@ -1201,8 +1203,7 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,

pub fn get_item_attrs(cdata: Cmd,
node_id: ast::NodeId,
f: &fn(~[@ast::MetaItem])) {

f: |~[@ast::MetaItem]|) {
let item = lookup_item(node_id, cdata.data);
do reader::tagged_docs(item, tag_attributes) |attributes| {
do reader::tagged_docs(attributes, tag_attribute) |attribute| {

@@ -1474,7 +1475,7 @@ pub fn translate_def_id(cdata: Cmd, did: ast::DefId) -> ast::DefId {
}
}

pub fn each_impl(cdata: Cmd, callback: &fn(ast::DefId)) {
pub fn each_impl(cdata: Cmd, callback: |ast::DefId|) {
let impls_doc = reader::get_doc(reader::Doc(cdata.data), tag_impls);
let _ = do reader::tagged_docs(impls_doc, tag_impls_impl) |impl_doc| {
callback(item_def_id(impl_doc, cdata));

@@ -1484,7 +1485,7 @@ pub fn each_impl(cdata: Cmd, callback: &fn(ast::DefId)) {

pub fn each_implementation_for_type(cdata: Cmd,
id: ast::NodeId,
callback: &fn(ast::DefId)) {
callback: |ast::DefId|) {
let item_doc = lookup_item(id, cdata.data);
do reader::tagged_docs(item_doc, tag_items_data_item_inherent_impl)
|impl_doc| {

@@ -1496,7 +1497,7 @@ pub fn each_implementation_for_type(cdata: Cmd,

pub fn each_implementation_for_trait(cdata: Cmd,
id: ast::NodeId,
callback: &fn(ast::DefId)) {
callback: |ast::DefId|) {
let item_doc = lookup_item(id, cdata.data);

let _ = do reader::tagged_docs(item_doc,

@@ -535,8 +535,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
/// * For enums, iterates through the node IDs of the variants.
///
/// * For newtype structs, iterates through the node ID of the constructor.
fn each_auxiliary_node_id(item: @item, callback: &fn(NodeId) -> bool)
-> bool {
fn each_auxiliary_node_id(item: @item, callback: |NodeId| -> bool) -> bool {
let mut continue_ = true;
match item.node {
item_enum(ref enum_def, _) => {

@@ -912,7 +911,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
index: @mut ~[entry<i64>]) {
index.push(entry { val: item.id as i64, pos: ebml_w.writer.tell() });
}
let add_to_index: &fn() = || add_to_index(item, ebml_w, index);
let add_to_index: || = || add_to_index(item, ebml_w, index);

debug!("encoding info for item at {}",
ecx.tcx.sess.codemap.span_to_str(item.span));

@@ -1412,7 +1411,7 @@ fn create_index<T:Clone + Hash + IterBytes + 'static>(
fn encode_index<T:'static>(
ebml_w: &mut writer::Encoder,
buckets: ~[@~[entry<T>]],
write_fn: &fn(@mut MemWriter, &T)) {
write_fn: |@mut MemWriter, &T|) {
ebml_w.start_tag(tag_index);
let mut bucket_locs = ~[];
ebml_w.start_tag(tag_index_buckets);

@@ -35,7 +35,7 @@ pub fn pick_file(file: Path, path: &Path) -> Option<Path> {

pub trait FileSearch {
fn sysroot(&self) -> @Path;
fn for_each_lib_search_path(&self, f: &fn(&Path) -> FileMatch);
fn for_each_lib_search_path(&self, f: |&Path| -> FileMatch);
fn get_target_lib_path(&self) -> Path;
fn get_target_lib_file_path(&self, file: &Path) -> Path;
}

@@ -51,7 +51,8 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
}
impl FileSearch for FileSearchImpl {
fn sysroot(&self) -> @Path { self.sysroot }
fn for_each_lib_search_path(&self, f: &fn(&Path) -> FileMatch) {

fn for_each_lib_search_path(&self, f: |&Path| -> FileMatch) {
let mut visited_dirs = HashSet::new();
let mut found = false;

@@ -79,9 +79,7 @@ fn next_byte(st: &mut PState) -> u8 {
return b;
}

fn scan<R>(st: &mut PState, is_last: &fn(char) -> bool,
op: &fn(&[u8]) -> R) -> R
{
fn scan<R>(st: &mut PState, is_last: |char| -> bool, op: |&[u8]| -> R) -> R {
let start_pos = st.pos;
debug!("scan: '{}' (start)", st.data[st.pos] as char);
while !is_last(st.data[st.pos] as char) {

@@ -98,7 +96,7 @@ pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident {
return parse_ident_(st, |a| is_last(last, a) );
}

fn parse_ident_(st: &mut PState, is_last: &fn(char) -> bool) -> ast::Ident {
fn parse_ident_(st: &mut PState, is_last: |char| -> bool) -> ast::Ident {
let rslt = scan(st, is_last, str::from_utf8);
return st.tcx.sess.ident_of(rslt);
}

@@ -292,7 +290,7 @@ fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region {
}
}

fn parse_opt<T>(st: &mut PState, f: &fn(&mut PState) -> T) -> Option<T> {
fn parse_opt<T>(st: &mut PState, f: |&mut PState| -> T) -> Option<T> {
match next(st) {
'n' => None,
's' => Some(f(st)),

@@ -120,7 +120,7 @@ fn enc_mt(w: @mut MemWriter, cx: @ctxt, mt: ty::mt) {
enc_ty(w, cx, mt.ty);
}

fn enc_opt<T>(w: @mut MemWriter, t: Option<T>, enc_f: &fn(T)) {
fn enc_opt<T>(w: @mut MemWriter, t: Option<T>, enc_f: |T|) {
match t {
None => mywrite!(w, "n"),
Some(v) => {

@@ -845,14 +845,12 @@ impl ebml_writer_helpers for writer::Encoder {
}

trait write_tag_and_id {
fn tag(&mut self, tag_id: c::astencode_tag, f: &fn(&mut Self));
fn tag(&mut self, tag_id: c::astencode_tag, f: |&mut Self|);
fn id(&mut self, id: ast::NodeId);
}

impl write_tag_and_id for writer::Encoder {
fn tag(&mut self,
tag_id: c::astencode_tag,
f: &fn(&mut writer::Encoder)) {
fn tag(&mut self, tag_id: c::astencode_tag, f: |&mut writer::Encoder|) {
self.start_tag(tag_id as uint);
f(self);
self.end_tag();

@@ -124,9 +124,7 @@ impl<'self> CheckLoanCtxt<'self> {

pub fn tcx(&self) -> ty::ctxt { self.bccx.tcx }

pub fn each_issued_loan(&self,
scope_id: ast::NodeId,
op: &fn(&Loan) -> bool)
pub fn each_issued_loan(&self, scope_id: ast::NodeId, op: |&Loan| -> bool)
-> bool {
//! Iterates over each loan that has been issued
//! on entrance to `scope_id`, regardless of whether it is

@@ -142,7 +140,7 @@ impl<'self> CheckLoanCtxt<'self> {

pub fn each_in_scope_loan(&self,
scope_id: ast::NodeId,
op: &fn(&Loan) -> bool)
op: |&Loan| -> bool)
-> bool {
//! Like `each_issued_loan()`, but only considers loans that are
//! currently in scope.

@@ -160,7 +158,7 @@ impl<'self> CheckLoanCtxt<'self> {
pub fn each_in_scope_restriction(&self,
scope_id: ast::NodeId,
loan_path: @LoanPath,
op: &fn(&Loan, &Restriction) -> bool)
op: |&Loan, &Restriction| -> bool)
-> bool {
//! Iterates through all the in-scope restrictions for the
//! given `loan_path`

@@ -533,7 +533,7 @@ impl BorrowckCtxt {
pub fn cat_pattern(&self,
cmt: mc::cmt,
pat: @ast::Pat,
op: &fn(mc::cmt, @ast::Pat)) {
op: |mc::cmt, @ast::Pat|) {
let mc = self.mc_ctxt();
mc.cat_pattern(cmt, pat, op);
}

@@ -412,9 +412,7 @@ impl MoveData {
}
}

fn each_base_path(&self,
index: MovePathIndex,
f: &fn(MovePathIndex) -> bool)
fn each_base_path(&self, index: MovePathIndex, f: |MovePathIndex| -> bool)
-> bool {
let mut p = index;
while p != InvalidMovePathIndex {

@@ -428,7 +426,8 @@ impl MoveData {

fn each_extending_path(&self,
index: MovePathIndex,
f: &fn(MovePathIndex) -> bool) -> bool {
f: |MovePathIndex| -> bool)
-> bool {
if !f(index) {
return false;
}

@@ -446,7 +445,8 @@ impl MoveData {

fn each_applicable_move(&self,
index0: MovePathIndex,
f: &fn(MoveIndex) -> bool) -> bool {
f: |MoveIndex| -> bool)
-> bool {
let mut ret = true;
do self.each_extending_path(index0) |index| {
let mut p = self.path(index).first_move;

@@ -505,7 +505,7 @@ impl FlowedMoveData {

pub fn each_path_moved_by(&self,
id: ast::NodeId,
f: &fn(&Move, @LoanPath) -> bool)
f: |&Move, @LoanPath| -> bool)
-> bool {
/*!
* Iterates through each path moved by `id`

@@ -521,7 +521,7 @@ impl FlowedMoveData {
pub fn each_move_of(&self,
id: ast::NodeId,
loan_path: @LoanPath,
f: &fn(&Move, @LoanPath) -> bool)
f: |&Move, @LoanPath| -> bool)
-> bool {
/*!
* Iterates through each move of `loan_path` (or some base path

@@ -587,7 +587,7 @@ impl FlowedMoveData {
pub fn each_assignment_of(&self,
id: ast::NodeId,
loan_path: @LoanPath,
f: &fn(&Assignment) -> bool)
f: |&Assignment| -> bool)
-> bool {
/*!
* Iterates through every assignment to `loan_path` that

@@ -121,7 +121,7 @@ fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) {
for pat in arm.pats.iter() {

// Check that we do not match against a static NaN (#6804)
let pat_matches_nan: &fn(&Pat) -> bool = |p| {
let pat_matches_nan: |&Pat| -> bool = |p| {
match cx.tcx.def_map.find(&p.id) {
Some(&DefStatic(did, false)) => {
let const_expr = lookup_const_by_id(cx.tcx, did).unwrap();

@@ -900,7 +900,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
}
}

let check_move: &fn(&Pat, Option<@Pat>) = |p, sub| {
let check_move: |&Pat, Option<@Pat>| = |p, sub| {
// check legality of moving out of the enum

// x @ Foo(*) is legal, but x @ Foo(y) isn't.

@@ -234,7 +234,8 @@ impl<O:DataFlowOperator> DataFlowContext<O> {

pub fn each_bit_on_entry_frozen(&self,
id: ast::NodeId,
f: &fn(uint) -> bool) -> bool {
f: |uint| -> bool)
-> bool {
//! Iterates through each bit that is set on entry to `id`.
//! Only useful after `propagate()` has been called.
if !self.nodeid_to_bitset.contains_key(&id) {

@@ -249,7 +250,8 @@ impl<O:DataFlowOperator> DataFlowContext<O> {

pub fn each_bit_on_entry(&mut self,
id: ast::NodeId,
f: &fn(uint) -> bool) -> bool {
f: |uint| -> bool)
-> bool {
//! Iterates through each bit that is set on entry to `id`.
//! Only useful after `propagate()` has been called.

@@ -260,9 +262,8 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
self.each_bit(on_entry, f)
}

pub fn each_gen_bit(&mut self,
id: ast::NodeId,
f: &fn(uint) -> bool) -> bool {
pub fn each_gen_bit(&mut self, id: ast::NodeId, f: |uint| -> bool)
-> bool {
//! Iterates through each bit in the gen set for `id`.

let (start, end) = self.compute_id_range(id);

@@ -272,9 +273,8 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
self.each_bit(gens, f)
}

pub fn each_gen_bit_frozen(&self,
id: ast::NodeId,
f: &fn(uint) -> bool) -> bool {
pub fn each_gen_bit_frozen(&self, id: ast::NodeId, f: |uint| -> bool)
-> bool {
//! Iterates through each bit in the gen set for `id`.
if !self.nodeid_to_bitset.contains_key(&id) {
return true;

@@ -286,9 +286,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
self.each_bit(gens, f)
}

fn each_bit(&self,
words: &[uint],
f: &fn(uint) -> bool) -> bool {
fn each_bit(&self, words: &[uint], f: |uint| -> bool) -> bool {
//! Helper for iterating over the bits in a bit set.

for (word_index, &word) in words.iter().enumerate() {

@@ -978,9 +976,8 @@ fn join_bits<O:DataFlowOperator>(oper: &O,
}

#[inline]
fn bitwise(out_vec: &mut [uint],
in_vec: &[uint],
op: &fn(uint, uint) -> uint) -> bool {
fn bitwise(out_vec: &mut [uint], in_vec: &[uint], op: |uint, uint| -> uint)
-> bool {
assert_eq!(out_vec.len(), in_vec.len());
let mut changed = false;
for (out_elt, in_elt) in out_vec.mut_iter().zip(in_vec.iter()) {

@@ -185,19 +185,20 @@ impl<N,E> Graph<N,E> {
///////////////////////////////////////////////////////////////////////////
// Iterating over nodes, edges

pub fn each_node(&self, f: &fn(NodeIndex, &Node<N>) -> bool) -> bool {
pub fn each_node(&self, f: |NodeIndex, &Node<N>| -> bool) -> bool {
//! Iterates over all edges defined in the graph.
self.nodes.iter().enumerate().advance(|(i, node)| f(NodeIndex(i), node))
}

pub fn each_edge(&self, f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
pub fn each_edge(&self, f: |EdgeIndex, &Edge<E>| -> bool) -> bool {
//! Iterates over all edges defined in the graph
self.edges.iter().enumerate().advance(|(i, edge)| f(EdgeIndex(i), edge))
}

pub fn each_outgoing_edge(&self,
source: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
f: |EdgeIndex, &Edge<E>| -> bool)
-> bool {
//! Iterates over all outgoing edges from the node `from`

self.each_adjacent_edge(source, Outgoing, f)

@@ -205,7 +206,8 @@ impl<N,E> Graph<N,E> {

pub fn each_incoming_edge(&self,
target: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
f: |EdgeIndex, &Edge<E>| -> bool)
-> bool {
//! Iterates over all incoming edges to the node `target`

self.each_adjacent_edge(target, Incoming, f)

@@ -214,7 +216,8 @@ impl<N,E> Graph<N,E> {
pub fn each_adjacent_edge(&self,
node: NodeIndex,
dir: Direction,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
f: |EdgeIndex, &Edge<E>| -> bool)
-> bool {
//! Iterates over all edges adjacent to the node `node`
//! in the direction `dir` (either `Outgoing` or `Incoming)

@@ -239,9 +242,10 @@ impl<N,E> Graph<N,E> {
// computation.

pub fn iterate_until_fixed_point(&self,
op: &fn(iter_index: uint,
edge_index: EdgeIndex,
edge: &Edge<E>) -> bool) {
op: |iter_index: uint,
edge_index: EdgeIndex,
edge: &Edge<E>|
-> bool) {
let mut iteration = 0;
let mut changed = true;
while changed {

@@ -254,7 +258,7 @@ impl<N,E> Graph<N,E> {
}
}

pub fn each_edge_index(max_edge_index: EdgeIndex, f: &fn(EdgeIndex) -> bool) {
pub fn each_edge_index(max_edge_index: EdgeIndex, f: |EdgeIndex| -> bool) {
let mut i = 0;
let n = *max_edge_index;
while i < n {

@@ -167,8 +167,9 @@ fn check_item(cx: &mut Context, item: @item) {
// Yields the appropriate function to check the kind of closed over
// variables. `id` is the NodeId for some expression that creates the
// closure.
fn with_appropriate_checker(cx: &Context, id: NodeId,
b: &fn(checker: &fn(&Context, @freevar_entry))) {
fn with_appropriate_checker(cx: &Context,
id: NodeId,
b: |checker: |&Context, @freevar_entry||) {
fn check_for_uniq(cx: &Context, fv: &freevar_entry, bounds: ty::BuiltinBounds) {
// all captured data must be owned, regardless of whether it is
// moved in or copied in.

@@ -351,9 +352,10 @@ fn check_ty(cx: &mut Context, aty: &Ty) {
}

// Calls "any_missing" if any bounds were missing.
pub fn check_builtin_bounds(cx: &Context, ty: ty::t, bounds: ty::BuiltinBounds,
any_missing: &fn(ty::BuiltinBounds))
{
pub fn check_builtin_bounds(cx: &Context,
ty: ty::t,
bounds: ty::BuiltinBounds,
any_missing: |ty::BuiltinBounds|) {
let kind = ty::type_contents(cx.tcx, ty);
let mut missing = ty::EmptyBuiltinBounds();
for bound in bounds.iter() {

@@ -417,8 +417,9 @@ impl<'self> Context<'self> {
* current lint context, call the provided function, then reset the
* lints in effect to their previous state.
*/
fn with_lint_attrs(&mut self, attrs: &[ast::Attribute],
f: &fn(&mut Context)) {
fn with_lint_attrs(&mut self,
attrs: &[ast::Attribute],
f: |&mut Context|) {
// Parse all of the lint attributes, and then add them all to the
// current dictionary of lint information. Along the way, keep a history
// of what we changed so we can roll everything back after invoking the

@@ -468,7 +469,7 @@ impl<'self> Context<'self> {
}
}

fn visit_ids(&self, f: &fn(&mut ast_util::IdVisitor<Context>)) {
fn visit_ids(&self, f: |&mut ast_util::IdVisitor<Context>|) {
let mut v = ast_util::IdVisitor {
operation: self,
pass_through_items: false,

@@ -480,7 +481,8 @@ impl<'self> Context<'self> {

pub fn each_lint(sess: session::Session,
attrs: &[ast::Attribute],
f: &fn(@ast::MetaItem, level, @str) -> bool) -> bool {
f: |@ast::MetaItem, level, @str| -> bool)
-> bool {
let xs = [allow, warn, deny, forbid];
for &level in xs.iter() {
let level_name = level_to_str(level);

@@ -657,7 +657,7 @@ impl Liveness {

pub fn pat_bindings(&self,
pat: @Pat,
f: &fn(LiveNode, Variable, Span, NodeId)) {
f: |LiveNode, Variable, Span, NodeId|) {
let def_map = self.tcx.def_map;
do pat_util::pat_bindings(def_map, pat) |_bm, p_id, sp, _n| {
let ln = self.live_node(p_id, sp);

@@ -668,7 +668,7 @@ impl Liveness {

pub fn arm_pats_bindings(&self,
pats: &[@Pat],
f: &fn(LiveNode, Variable, Span, NodeId)) {
f: |LiveNode, Variable, Span, NodeId|) {
// only consider the first pattern; any later patterns must have
// the same bindings, and we also consider the first pattern to be
// the "authoratative" set of ids

@@ -729,7 +729,7 @@ impl Liveness {
self.assigned_on_entry(self.successors[*ln], var)
}

pub fn indices(&self, ln: LiveNode, op: &fn(uint)) {
pub fn indices(&self, ln: LiveNode, op: |uint|) {
let node_base_idx = self.idx(ln, Variable(0));
for var_idx in range(0u, self.ir.num_vars) {
op(node_base_idx + var_idx)

@@ -739,7 +739,7 @@ impl Liveness {
pub fn indices2(&self,
ln: LiveNode,
succ_ln: LiveNode,
op: &fn(uint, uint)) {
op: |uint, uint|) {
let node_base_idx = self.idx(ln, Variable(0u));
let succ_base_idx = self.idx(succ_ln, Variable(0u));
for var_idx in range(0u, self.ir.num_vars) {

@@ -750,7 +750,7 @@ impl Liveness {
pub fn write_vars(&self,
wr: &mut io::Writer,
ln: LiveNode,
test: &fn(uint) -> LiveNode) {
test: |uint| -> LiveNode) {
let node_base_idx = self.idx(ln, Variable(0));
for var_idx in range(0u, self.ir.num_vars) {
let idx = node_base_idx + var_idx;

@@ -1406,12 +1406,13 @@ impl Liveness {
cond_ln
}

pub fn with_loop_nodes<R>(&self,
loop_node_id: NodeId,
break_ln: LiveNode,
cont_ln: LiveNode,
f: &fn() -> R)
-> R {
pub fn with_loop_nodes<R>(
&self,
loop_node_id: NodeId,
break_ln: LiveNode,
cont_ln: LiveNode,
f: || -> R)
-> R {
debug!("with_loop_nodes: {} {}", loop_node_id, *break_ln);
self.loop_scope.push(loop_node_id);
self.break_ln.insert(loop_node_id, break_ln);

@@ -63,7 +63,7 @@ pub enum categorization {
cat_rvalue(ast::NodeId), // temporary val, argument is its scope
cat_static_item,
cat_copied_upvar(CopiedUpvar), // upvar copied into @fn or ~fn env
cat_stack_upvar(cmt), // by ref upvar from &fn
cat_stack_upvar(cmt), // by ref upvar from ||
cat_local(ast::NodeId), // local variable
cat_arg(ast::NodeId), // formal argument
cat_deref(cmt, uint, PointerKind), // deref of a ptr

@@ -822,7 +822,7 @@ impl mem_categorization_ctxt {
pub fn cat_pattern(&self,
cmt: cmt,
pat: @ast::Pat,
op: &fn(cmt, @ast::Pat)) {
op: |cmt, @ast::Pat|) {
// Here, `cmt` is the categorization for the value being
// matched and pat is the pattern it is being matched against.
//

@@ -148,7 +148,7 @@ use syntax::codemap::Span;

pub enum CaptureMode {
CapCopy, // Copy the value into the closure.
CapMove, // Move the value into the closure.
CapRef, // Reference directly from parent stack frame (used by `&fn()`).
CapRef, // Reference directly from parent stack frame (used by `||`).
}

#[deriving(Encodable, Decodable)]

@@ -686,7 +686,7 @@ impl VisitContext {
let sigil = ty::ty_closure_sigil(fn_ty);
let freevars = freevars::get_freevars(self.tcx, fn_expr_id);
if sigil == BorrowedSigil {
// &fn() captures everything by ref
// || captures everything by ref
at_vec::from_fn(freevars.len(), |i| {
let fvar = &freevars[i];
CaptureVar {def: fvar.def, span: fvar.span, mode: CapRef}

@@ -70,8 +70,9 @@ pub fn pat_is_binding_or_wild(dm: resolve::DefMap, pat: @Pat) -> bool {
}
}

pub fn pat_bindings(dm: resolve::DefMap, pat: @Pat,
it: &fn(BindingMode, NodeId, Span, &Path)) {
pub fn pat_bindings(dm: resolve::DefMap,
pat: @Pat,
it: |BindingMode, NodeId, Span, &Path|) {
do walk_pat(pat) |p| {
match p.node {
PatIdent(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {

@@ -1538,10 +1538,10 @@ impl Resolver {

/// Constructs the reduced graph for one foreign item.
fn build_reduced_graph_for_foreign_item(&mut self,
foreign_item: @foreign_item,
parent: ReducedGraphParent,
f: &fn(&mut Resolver,
ReducedGraphParent)) {
foreign_item: @foreign_item,
parent: ReducedGraphParent,
f: |&mut Resolver,
ReducedGraphParent|) {
let name = foreign_item.ident;
let is_public = foreign_item.vis == ast::public;
let (name_bindings, new_parent) =

@@ -3331,7 +3331,7 @@ impl Resolver {
// generate a fake "implementation scope" containing all the
// implementations thus found, for compatibility with old resolve pass.

fn with_scope(&mut self, name: Option<Ident>, f: &fn(&mut Resolver)) {
fn with_scope(&mut self, name: Option<Ident>, f: |&mut Resolver|) {
let orig_module = self.current_module;

// Move down in the graph.

@@ -3692,8 +3692,8 @@ impl Resolver {
}

fn with_type_parameter_rib(&mut self,
type_parameters: TypeParameters,
f: &fn(&mut Resolver)) {
type_parameters: TypeParameters,
f: |&mut Resolver|) {
match type_parameters {
HasTypeParameters(generics, node_id, initial_index,
rib_kind) => {

@@ -3735,13 +3735,13 @@ impl Resolver {
}
}

fn with_label_rib(&mut self, f: &fn(&mut Resolver)) {
fn with_label_rib(&mut self, f: |&mut Resolver|) {
self.label_ribs.push(@Rib::new(NormalRibKind));
f(self);
self.label_ribs.pop();
}

fn with_constant_rib(&mut self, f: &fn(&mut Resolver)) {
fn with_constant_rib(&mut self, f: |&mut Resolver|) {
self.value_ribs.push(@Rib::new(ConstantItemRibKind));
self.type_ribs.push(@Rib::new(ConstantItemRibKind));
f(self);

@@ -4888,7 +4888,7 @@ impl Resolver {
}
}

fn with_no_errors<T>(&mut self, f: &fn(&mut Resolver) -> T) -> T {
fn with_no_errors<T>(&mut self, f: |&mut Resolver| -> T) -> T {
self.emit_errors = false;
let rs = f(self);
self.emit_errors = true;

@@ -4901,10 +4901,8 @@ impl Resolver {
}
}

fn find_best_match_for_name(&mut self,
name: &str,
max_distance: uint)
-> Option<@str> {
fn find_best_match_for_name(&mut self, name: &str, max_distance: uint)
-> Option<@str> {
let this = &mut *self;

let mut maybes: ~[@str] = ~[];

@@ -2066,7 +2066,8 @@ fn mk_binding_alloca(mut bcx: @mut Block,
p_id: ast::NodeId,
path: &ast::Path,
binding_mode: IrrefutablePatternBindingMode,
populate: &fn(@mut Block, ty::t, ValueRef) -> @mut Block) -> @mut Block {
populate: |@mut Block, ty::t, ValueRef| -> @mut Block)
-> @mut Block {
let var_ty = node_id_type(bcx, p_id);
let ident = ast_util::path_to_ident(path);
let llval = alloc_ty(bcx, var_ty, bcx.ident(ident));

@@ -93,7 +93,7 @@ pub use middle::trans::context::task_llcx;

local_data_key!(task_local_insn_key: ~[&'static str])

pub fn with_insn_ctxt(blk: &fn(&[&'static str])) {
pub fn with_insn_ctxt(blk: |&[&'static str]|) {
do local_data::get(task_local_insn_key) |c| {
match c {
Some(ctx) => blk(*ctx),

@@ -787,10 +787,11 @@ pub fn cast_shift_const_rhs(op: ast::BinOp,
}

pub fn cast_shift_rhs(op: ast::BinOp,
lhs: ValueRef, rhs: ValueRef,
trunc: &fn(ValueRef, Type) -> ValueRef,
zext: &fn(ValueRef, Type) -> ValueRef)
-> ValueRef {
lhs: ValueRef,
rhs: ValueRef,
trunc: |ValueRef, Type| -> ValueRef,
zext: |ValueRef, Type| -> ValueRef)
-> ValueRef {
// Shifts may have any size int on the rhs
unsafe {
if ast_util::is_shift_binop(op) {

@@ -966,7 +967,7 @@ pub fn have_cached_lpad(bcx: @mut Block) -> bool {
return res;
}

pub fn in_lpad_scope_cx(bcx: @mut Block, f: &fn(si: &mut ScopeInfo)) {
pub fn in_lpad_scope_cx(bcx: @mut Block, f: |si: &mut ScopeInfo|) {
let mut bcx = bcx;
let mut cur_scope = bcx.scope;
loop {

@@ -1430,7 +1431,8 @@ pub fn leave_block(bcx: @mut Block, out_of: @mut Block) -> @mut Block {
pub fn with_scope(bcx: @mut Block,
opt_node_info: Option<NodeInfo>,
name: &str,
f: &fn(@mut Block) -> @mut Block) -> @mut Block {
f: |@mut Block| -> @mut Block)
-> @mut Block {
let _icx = push_ctxt("with_scope");

debug!("with_scope(bcx={}, opt_node_info={:?}, name={})",

@@ -1448,7 +1450,8 @@ pub fn with_scope(bcx: @mut Block,
pub fn with_scope_result(bcx: @mut Block,
opt_node_info: Option<NodeInfo>,
_name: &str,
f: &fn(@mut Block) -> Result) -> Result {
f: |@mut Block| -> Result)
-> Result {
let _icx = push_ctxt("with_scope_result");

let scope = simple_block_scope(bcx.scope, opt_node_info);

@@ -1462,9 +1465,11 @@ pub fn with_scope_result(bcx: @mut Block,
rslt(out_bcx, val)
}

pub fn with_scope_datumblock(bcx: @mut Block, opt_node_info: Option<NodeInfo>,
name: &str, f: &fn(@mut Block) -> datum::DatumBlock)
-> datum::DatumBlock {
pub fn with_scope_datumblock(bcx: @mut Block,
opt_node_info: Option<NodeInfo>,
name: &str,
f: |@mut Block| -> datum::DatumBlock)
-> datum::DatumBlock {
use middle::trans::datum::DatumBlock;

let _icx = push_ctxt("with_scope_result");

@@ -1474,7 +1479,7 @@ pub fn with_scope_datumblock(bcx: @mut Block, opt_node_info: Option<NodeInfo>,
DatumBlock {bcx: leave_block(bcx, scope_cx), datum: datum}
}

pub fn block_locals(b: &ast::Block, it: &fn(@ast::Local)) {
pub fn block_locals(b: &ast::Block, it: |@ast::Local|) {
for s in b.stmts.iter() {
match s.node {
ast::StmtDecl(d, _) => {

@@ -1488,7 +1493,10 @@ pub fn block_locals(b: &ast::Block, it: &fn(@ast::Local)) {
}
}

pub fn with_cond(bcx: @mut Block, val: ValueRef, f: &fn(@mut Block) -> @mut Block) -> @mut Block {
pub fn with_cond(bcx: @mut Block,
val: ValueRef,
f: |@mut Block| -> @mut Block)
-> @mut Block {
let _icx = push_ctxt("with_cond");
let next_cx = base::sub_block(bcx, "next");
let cond_cx = base::sub_block(bcx, "cond");

@@ -1885,7 +1893,7 @@ pub fn trans_closure(ccx: @mut CrateContext,
id: ast::NodeId,
_attributes: &[ast::Attribute],
output_type: ty::t,
maybe_load_env: &fn(@mut FunctionContext)) {
maybe_load_env: |@mut FunctionContext|) {
ccx.stats.n_closures += 1;
let _icx = push_ctxt("trans_closure");
set_uwtable(llfndecl);

@@ -337,9 +337,9 @@ pub fn compute_abi_info(_ccx: &mut CrateContext,
rty: Type,
ret_def: bool) -> FnType {
fn x86_64_ty(ty: Type,
is_mem_cls: &fn(cls: &[RegClass]) -> bool,
attr: Attribute) -> ArgType {

is_mem_cls: |cls: &[RegClass]| -> bool,
attr: Attribute)
-> ArgType {
if !ty.is_reg_ty() {
let cls = classify_ty(ty);
if is_mem_cls(cls) {

@@ -600,7 +600,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
call_info: Option<NodeInfo>,
callee_ty: ty::t,
ret_ty: ty::t,
get_callee: &fn(@mut Block) -> Callee,
get_callee: |@mut Block| -> Callee,
args: CallArgs,
dest: Option<expr::Dest>,
autoref_arg: AutorefArg)

@@ -51,7 +51,7 @@ use syntax::parse::token::special_idents;
// };
//
// Note that the closure is itself a rust_opaque_box. This is true
// even for ~fn and &fn, because we wish to keep binary compatibility
// even for ~fn and ||, because we wish to keep binary compatibility
// between all kinds of closures. The allocation strategy for this
// closure depends on the closure type. For a sendfn, the closure
// (and the referenced type descriptors) will be allocated in the

@@ -422,11 +422,12 @@ pub fn trans_expr_fn(bcx: @mut Block,
return bcx;
}

pub fn make_closure_glue(
cx: @mut Block,
v: ValueRef,
t: ty::t,
glue_fn: &fn(@mut Block, v: ValueRef, t: ty::t) -> @mut Block) -> @mut Block {
pub fn make_closure_glue(cx: @mut Block,
v: ValueRef,
t: ty::t,
glue_fn: |@mut Block, v: ValueRef, t: ty::t|
-> @mut Block)
-> @mut Block {
let _icx = push_ctxt("closure::make_closure_glue");
let bcx = cx;
let tcx = cx.tcx();

@@ -769,7 +769,9 @@ pub fn val_ty(v: ValueRef) -> Type {
}
}

pub fn in_scope_cx(cx: @mut Block, scope_id: Option<ast::NodeId>, f: &fn(si: &mut ScopeInfo)) {
pub fn in_scope_cx(cx: @mut Block,
scope_id: Option<ast::NodeId>,
f: |si: &mut ScopeInfo|) {
let mut cur = cx;
let mut cur_scope = cur.scope;
loop {

@@ -524,7 +524,7 @@ impl Datum {
bcx: @mut Block,
ty: ty::t,
source: DatumCleanup,
gep: &fn(ValueRef) -> ValueRef)
gep: |ValueRef| -> ValueRef)
-> Datum {
let base_val = self.to_ref_llval(bcx);
Datum {

@@ -2274,9 +2274,9 @@ fn populate_scope_map(cx: &mut CrateContext,
scope_span: Span,
scope_stack: &mut ~[ScopeStackEntry],
scope_map: &mut HashMap<ast::NodeId, DIScope>,
inner_walk: &fn(&mut CrateContext,
&mut ~[ScopeStackEntry],
&mut HashMap<ast::NodeId, DIScope>)) {
inner_walk: |&mut CrateContext,
&mut ~[ScopeStackEntry],
&mut HashMap<ast::NodeId, DIScope>|) {
// Create a new lexical scope and push it onto the stack
let loc = cx.sess.codemap.lookup_char_pos(scope_span.lo);
let file_metadata = file_metadata(cx, loc.file.name);

@@ -1151,10 +1151,12 @@ pub fn trans_local_var(bcx: @mut Block, def: ast::Def) -> Datum {
// The optional node ID here is the node ID of the path identifying the enum
// variant in use. If none, this cannot possibly an enum variant (so, if it
// is and `node_id_opt` is none, this function fails).
pub fn with_field_tys<R>(tcx: ty::ctxt,
ty: ty::t,
node_id_opt: Option<ast::NodeId>,
op: &fn(ty::Disr, (&[ty::field])) -> R) -> R {
pub fn with_field_tys<R>(
tcx: ty::ctxt,
ty: ty::t,
node_id_opt: Option<ast::NodeId>,
op: |ty::Disr, (&[ty::field])| -> R)
-> R {
match ty::get(ty).sty {
ty::ty_struct(did, ref substs) => {
op(0, struct_fields(tcx, did, substs))

@@ -510,11 +510,13 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
// Array for the arguments we will pass to the rust function.
let mut llrust_args = ~[];
let mut next_foreign_arg_counter: c_uint = 0;
let next_foreign_arg: &fn(pad: bool) -> c_uint = {
|pad: bool| {
next_foreign_arg_counter += if pad { 2 } else { 1 };
next_foreign_arg_counter - 1
}
let next_foreign_arg: |pad: bool| -> c_uint = |pad: bool| {
next_foreign_arg_counter += if pad {
2
} else {
1
};
next_foreign_arg_counter - 1
};

// If there is an out pointer on the foreign function

@@ -121,7 +121,7 @@ impl Reflector {
pub fn bracketed(&mut self,
bracket_name: &str,
extra: &[ValueRef],
inner: &fn(&mut Reflector)) {
inner: |&mut Reflector|) {
self.visit("enter_" + bracket_name, extra);
inner(self);
self.visit("leave_" + bracket_name, extra);

@@ -247,7 +247,7 @@ pub enum AutoRef {
/// Convert from @[]/~[]/&[] to &&[] (or str)
AutoBorrowVecRef(Region, ast::Mutability),

/// Convert from @fn()/~fn()/&fn() to &fn()
/// Convert from @fn()/~fn()/|| to ||
AutoBorrowFn(Region),

/// Convert from T to *T

@@ -651,7 +651,7 @@ pub enum sty {
// "Fake" types, used for trans purposes
ty_type, // type_desc*
ty_opaque_box, // used by monomorphizer to represent any @ box
ty_opaque_closure_ptr(Sigil), // ptr to env for &fn, @fn, ~fn
ty_opaque_closure_ptr(Sigil), // ptr to env for ||, @fn, ~fn
ty_unboxed_vec(mt),
}

@@ -1348,11 +1348,11 @@ pub fn mk_opaque_closure_ptr(cx: ctxt, sigil: ast::Sigil) -> t {

pub fn mk_opaque_box(cx: ctxt) -> t { mk_t(cx, ty_opaque_box) }

pub fn walk_ty(ty: t, f: &fn(t)) {
pub fn walk_ty(ty: t, f: |t|) {
maybe_walk_ty(ty, |t| { f(t); true });
}

pub fn maybe_walk_ty(ty: t, f: &fn(t) -> bool) {
pub fn maybe_walk_ty(ty: t, f: |t| -> bool) {
if !f(ty) {
return;
}

@@ -1382,25 +1382,19 @@ pub fn maybe_walk_ty(ty: t, f: &fn(t) -> bool) {
}

// Folds types from the bottom up.
pub fn fold_ty(cx: ctxt, t0: t, fldop: &fn(t) -> t) -> t {
pub fn fold_ty(cx: ctxt, t0: t, fldop: |t| -> t) -> t {
let mut f = ty_fold::BottomUpFolder {tcx: cx, fldop: fldop};
f.fold_ty(t0)
}

pub fn walk_regions_and_ty(cx: ctxt,
ty: t,
fldr: &fn(r: Region),
fldt: &fn(t: t))
pub fn walk_regions_and_ty(cx: ctxt, ty: t, fldr: |r: Region|, fldt: |t: t|)
-> t {
ty_fold::RegionFolder::general(cx,
|r| { fldr(r); r },
|t| { fldt(t); t }).fold_ty(ty)
}

pub fn fold_regions(cx: ctxt,
ty: t,
fldr: &fn(r: Region) -> Region)
-> t {
pub fn fold_regions(cx: ctxt, ty: t, fldr: |r: Region| -> Region) -> t {
ty_fold::RegionFolder::regions(cx, fldr).fold_ty(ty)
}

@@ -1886,7 +1880,7 @@ impl TypeContents {
*self & TC::ReachesAll)
}

pub fn union<T>(v: &[T], f: &fn(&T) -> TypeContents) -> TypeContents {
pub fn union<T>(v: &[T], f: |&T| -> TypeContents) -> TypeContents {
v.iter().fold(TC::None, |tc, t| tc | f(t))
}

@@ -2223,7 +2217,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
fn each_inherited_builtin_bound(cx: ctxt,
bounds: BuiltinBounds,
traits: &[@TraitRef],
f: &fn(BuiltinBound)) {
f: |BuiltinBound|) {
for bound in bounds.iter() {
f(bound);
}

@@ -2351,10 +2345,8 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
!subtypes_require(cx, &mut seen, r_ty, r_ty)
}

pub fn type_structurally_contains(cx: ctxt,
ty: t,
test: &fn(x: &sty) -> bool)
-> bool {
pub fn type_structurally_contains(cx: ctxt, ty: t, test: |x: &sty| -> bool)
-> bool {
let sty = &get(ty).sty;
debug!("type_structurally_contains: {}",
::util::ppaux::ty_to_str(cx, ty));

@@ -2969,7 +2961,7 @@ pub fn adjust_ty(cx: ctxt,
}

impl AutoRef {
pub fn map_region(&self, f: &fn(Region) -> Region) -> AutoRef {
pub fn map_region(&self, f: |Region| -> Region) -> AutoRef {
match *self {
ty::AutoPtr(r, m) => ty::AutoPtr(f(r), m),
ty::AutoBorrowVec(r, m) => ty::AutoBorrowVec(f(r), m),

@@ -3525,11 +3517,10 @@ pub fn trait_ref_supertraits(cx: ctxt, trait_ref: &ty::TraitRef) -> ~[@TraitRef]
}

fn lookup_locally_or_in_crate_store<V:Clone>(
descr: &str,
def_id: ast::DefId,
map: &mut HashMap<ast::DefId, V>,
load_external: &fn() -> V) -> V
{
descr: &str,
def_id: ast::DefId,
map: &mut HashMap<ast::DefId, V>,
load_external: || -> V) -> V {
/*!
* Helper for looking things up in the various maps
* that are populated during typeck::collect (e.g.,

@@ -3961,7 +3952,7 @@ pub fn lookup_trait_def(cx: ctxt, did: ast::DefId) -> @ty::TraitDef {
/// Iterate over meta_items of a definition.
// (This should really be an iterator, but that would require csearch and
// decoder to use iterators instead of higher-order functions.)
pub fn each_attr(tcx: ctxt, did: DefId, f: &fn(@MetaItem) -> bool) -> bool {
pub fn each_attr(tcx: ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool {
if is_local(did) {
match tcx.items.find(&did.node) {
Some(&ast_map::node_item(@ast::item {attrs: ref attrs, _}, _)) =>

@@ -4341,7 +4332,8 @@ pub fn determine_inherited_purity(parent: (ast::purity, ast::NodeId),
// list.
pub fn each_bound_trait_and_supertraits(tcx: ctxt,
bounds: &[@TraitRef],
f: &fn(@TraitRef) -> bool) -> bool {
f: |@TraitRef| -> bool)
-> bool {
for &bound_trait_ref in bounds.iter() {
let mut supertrait_set = HashMap::new();
let mut trait_refs = ~[];

@@ -33,7 +33,7 @@
* scopes and (b) the default region may change. To understand case (a),
* consider something like:
*
* type foo = { x: &a.int, y: &fn(&a.int) }
* type foo = { x: &a.int, y: |&a.int| }
*
* The type of `x` is an error because there is no region `a` in scope.
* In the type of `y`, however, region `a` is considered a bound region

@@ -290,13 +290,14 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
// Handle @, ~, and & being able to mean estrs and evecs.
// If a_seq_ty is a str or a vec, make it an estr/evec.
// Also handle first-class trait types.
fn mk_pointer<AC:AstConv,RS:RegionScope>(
this: &AC,
rscope: &RS,
a_seq_ty: &ast::mt,
vst: ty::vstore,
constr: &fn(ty::mt) -> ty::t) -> ty::t
{
fn mk_pointer<AC:AstConv,
RS:RegionScope>(
this: &AC,
rscope: &RS,
a_seq_ty: &ast::mt,
vst: ty::vstore,
constr: |ty::mt| -> ty::t)
-> ty::t {
let tcx = this.tcx();
debug!("mk_pointer(vst={:?})", vst);

@@ -715,7 +716,7 @@ pub fn ty_of_closure<AC:AstConv,RS:RegionScope>(
ty::ReStatic
}
ast::BorrowedSigil => {
// &fn() defaults as normal for an omitted lifetime:
// || defaults as normal for an omitted lifetime:
opt_ast_region_to_region(this, rscope, span, opt_lifetime)
}
}

@@ -655,7 +655,7 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
span: Span,
expected: ty::t) {
let fcx = pcx.fcx;
let check_inner: &fn(ty::mt) = |e_inner| {
let check_inner: |ty::mt| = |e_inner| {
check_pat(pcx, inner, e_inner.ty);
fcx.write_ty(pat_id, expected);
};

@@ -31,9 +31,11 @@ pub fn subtype(fcx: @mut FnCtxt, sp: Span, expected: ty::t, actual: ty::t) {
}

pub fn suptype_with_fn(fcx: @mut FnCtxt,
sp: Span, b_is_expected: bool,
ty_a: ty::t, ty_b: ty::t,
handle_err: &fn(Span, ty::t, ty::t, &ty::type_err)) {
sp: Span,
b_is_expected: bool,
ty_a: ty::t,
ty_b: ty::t,
handle_err: |Span, ty::t, ty::t, &ty::type_err|) {
// n.b.: order of actual, expected is reversed
match infer::mk_subty(fcx.infcx(), b_is_expected, infer::Misc(sp),
ty_b, ty_a) {

@@ -483,12 +483,13 @@ impl<'self> LookupContext<'self> {

// Do a search through a list of bounds, using a callback to actually
// create the candidates.
fn push_inherent_candidates_from_bounds_inner(
&self,
bounds: &[@TraitRef],
mk_cand: &fn(trait_ref: @TraitRef, m: @ty::Method, method_num: uint,
bound_num: uint) -> Candidate) {

fn push_inherent_candidates_from_bounds_inner(&self,
bounds: &[@TraitRef],
mk_cand: |tr: @TraitRef,
m: @ty::Method,
method_num: uint,
bound_num: uint|
-> Candidate) {
let tcx = self.tcx();
let mut next_bound_idx = 0; // count only trait bounds

@@ -783,12 +784,12 @@ impl<'self> LookupContext<'self> {
}

fn search_for_some_kind_of_autorefd_method(
&self,
kind: &fn(Region, ast::Mutability) -> ty::AutoRef,
autoderefs: uint,
mutbls: &[ast::Mutability],
mk_autoref_ty: &fn(ast::Mutability, ty::Region) -> ty::t)
-> Option<method_map_entry> {
&self,
kind: |Region, ast::Mutability| -> ty::AutoRef,
autoderefs: uint,
mutbls: &[ast::Mutability],
mk_autoref_ty: |ast::Mutability, ty::Region| -> ty::t)
-> Option<method_map_entry> {
// This is hokey. We should have mutability inference as a
// variable. But for now, try &const, then &, then &mut:
let region =

@@ -1194,7 +1194,7 @@ impl FnCtxt {

pub fn opt_node_ty_substs(&self,
id: ast::NodeId,
f: &fn(&ty::substs) -> bool)
f: |&ty::substs| -> bool)
-> bool {
match self.inh.node_type_substs.find(&id) {
Some(s) => f(s),

@@ -1257,8 +1257,7 @@ impl FnCtxt {
infer::mk_subr(self.infcx(), a_is_expected, origin, sub, sup)
}

pub fn with_region_lb<R>(@mut self, lb: ast::NodeId, f: &fn() -> R)
-> R {
pub fn with_region_lb<R>(@mut self, lb: ast::NodeId, f: || -> R) -> R {
let old_region_lb = self.region_lb;
self.region_lb = lb;
let v = f();

@@ -1268,7 +1267,7 @@ impl FnCtxt {

pub fn type_error_message(&self,
sp: Span,
mk_msg: &fn(~str) -> ~str,
mk_msg: |~str| -> ~str,
actual_ty: ty::t,
err: Option<&ty::type_err>) {
self.infcx().type_error_message(sp, mk_msg, actual_ty, err);

@@ -1629,7 +1628,7 @@ fn check_type_parameter_positions_in_path(function_context: @mut FnCtxt,
pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
expr: @ast::Expr,
expected: Option<ty::t>,
unifier: &fn()) {
unifier: ||) {
debug!(">> typechecking");

fn check_method_argument_types(

@@ -2014,7 +2013,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
args: ~[@ast::Expr],
deref_args: DerefArgs,
autoderef_receiver: AutoderefReceiverFlag,
unbound_method: &fn(),
unbound_method: ||,
_expected_result: Option<ty::t>
)
-> ty::t {

@@ -2198,10 +2197,11 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// through the `unpack` function. It there is no expected type or
// resolution is not possible (e.g., no constraints yet present), just
// returns `none`.
fn unpack_expected<O>(fcx: @mut FnCtxt,
expected: Option<ty::t>,
unpack: &fn(&ty::sty) -> Option<O>)
-> Option<O> {
fn unpack_expected<O>(
fcx: @mut FnCtxt,
expected: Option<ty::t>,
unpack: |&ty::sty| -> Option<O>)
-> Option<O> {
match expected {
Some(t) => {
match resolve_type(fcx.infcx(), t, force_tvar) {

@@ -20,12 +20,11 @@ use util::ppaux;
// Helper functions related to manipulating region types.

pub fn replace_bound_regions_in_fn_sig(
tcx: ty::ctxt,
opt_self_ty: Option<ty::t>,
fn_sig: &ty::FnSig,
mapf: &fn(ty::BoundRegion) -> ty::Region)
-> (HashMap<ty::BoundRegion,ty::Region>, Option<ty::t>, ty::FnSig)
{
tcx: ty::ctxt,
opt_self_ty: Option<ty::t>,
fn_sig: &ty::FnSig,
mapf: |ty::BoundRegion| -> ty::Region)
-> (HashMap<ty::BoundRegion,ty::Region>, Option<ty::t>, ty::FnSig) {
debug!("replace_bound_regions_in_fn_sig(self_ty={}, fn_sig={})",
opt_self_ty.repr(tcx),
fn_sig.repr(tcx));

@@ -47,12 +46,10 @@ pub fn replace_bound_regions_in_fn_sig(
(map, opt_self_ty, fn_sig)
}

pub fn relate_nested_regions(
tcx: ty::ctxt,
opt_region: Option<ty::Region>,
ty: ty::t,
relate_op: &fn(ty::Region, ty::Region))
{
pub fn relate_nested_regions(tcx: ty::ctxt,
opt_region: Option<ty::Region>,
ty: ty::t,
relate_op: |ty::Region, ty::Region|) {
/*!
* This rather specialized function walks each region `r` that appear
* in `ty` and invokes `relate_op(r_encl, r)` for each one. `r_encl`

@@ -451,7 +451,7 @@ impl CoherenceChecker {
}
}

pub fn iter_impls_of_trait(&self, trait_def_id: DefId, f: &fn(@Impl)) {
pub fn iter_impls_of_trait(&self, trait_def_id: DefId, f: |@Impl|) {
match self.crate_context.tcx.trait_impls.find(&trait_def_id) {
Some(impls) => {
for &im in impls.iter() {

@@ -161,9 +161,7 @@ impl Coerce {
}
}

pub fn unpack_actual_value(&self,
a: ty::t,
f: &fn(&ty::sty) -> CoerceResult)
pub fn unpack_actual_value(&self, a: ty::t, f: |&ty::sty| -> CoerceResult)
-> CoerceResult {
match resolve_type(self.infcx, a, try_resolve_tvar_shallow) {
Ok(t) => {

@@ -196,7 +196,7 @@ impl Combine for Glb {
// NB---I do not believe this algorithm computes
// (necessarily) the GLB. As written it can
// spuriously fail. In particular, if there is a case
// like: &fn(fn(&a)) and fn(fn(&b)), where a and b are
// like: |fn(&a)| and fn(fn(&b)), where a and b are
// free, it will return fn(&c) where c = GLB(a,b). If
// however this GLB is not defined, then the result is
// an error, even though something like

@ -442,12 +442,12 @@ pub fn resolve_region(cx: @mut InferCtxt, r: ty::Region, modes: uint)
|
||||
}
|
||||
|
||||
trait then {
|
||||
fn then<T:Clone>(&self, f: &fn() -> Result<T,ty::type_err>)
|
||||
fn then<T:Clone>(&self, f: || -> Result<T,ty::type_err>)
|
||||
-> Result<T,ty::type_err>;
|
||||
}
|
||||
|
||||
impl then for ures {
|
||||
fn then<T:Clone>(&self, f: &fn() -> Result<T,ty::type_err>)
|
||||
fn then<T:Clone>(&self, f: || -> Result<T,ty::type_err>)
|
||||
-> Result<T,ty::type_err> {
|
||||
self.and_then(|_i| f())
|
||||
}
|
||||
@ -467,11 +467,11 @@ impl<T> ToUres for cres<T> {
|
||||
}
|
||||
|
||||
trait CresCompare<T> {
|
||||
fn compare(&self, t: T, f: &fn() -> ty::type_err) -> cres<T>;
|
||||
fn compare(&self, t: T, f: || -> ty::type_err) -> cres<T>;
|
||||
}
|
||||
|
||||
impl<T:Clone + Eq> CresCompare<T> for cres<T> {
|
||||
fn compare(&self, t: T, f: &fn() -> ty::type_err) -> cres<T> {
|
||||
fn compare(&self, t: T, f: || -> ty::type_err) -> cres<T> {
|
||||
do (*self).clone().and_then |s| {
|
||||
if s == t {
|
||||
(*self).clone()
|
||||
@ -549,7 +549,7 @@ impl InferCtxt {
|
||||
}
|
||||
|
||||
/// Execute `f` and commit the bindings if successful
|
||||
pub fn commit<T,E>(@mut self, f: &fn() -> Result<T,E>) -> Result<T,E> {
|
||||
pub fn commit<T,E>(@mut self, f: || -> Result<T,E>) -> Result<T,E> {
|
||||
assert!(!self.in_snapshot());
|
||||
|
||||
debug!("commit()");
|
||||
@ -564,7 +564,7 @@ impl InferCtxt {
|
||||
}
|
||||
|
||||
/// Execute `f`, unroll bindings on failure
|
||||
pub fn try<T,E>(@mut self, f: &fn() -> Result<T,E>) -> Result<T,E> {
|
||||
pub fn try<T,E>(@mut self, f: || -> Result<T,E>) -> Result<T,E> {
|
||||
debug!("try()");
|
||||
let snapshot = self.start_snapshot();
|
||||
let r = f();
|
||||
@ -579,7 +579,7 @@ impl InferCtxt {
|
||||
}
|
||||
|
||||
/// Execute `f` then unroll any bindings it creates
|
||||
pub fn probe<T,E>(@mut self, f: &fn() -> Result<T,E>) -> Result<T,E> {
|
||||
pub fn probe<T,E>(@mut self, f: || -> Result<T,E>) -> Result<T,E> {
|
||||
debug!("probe()");
|
||||
do indent {
|
||||
let snapshot = self.start_snapshot();
|
||||
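The three doc comments above describe a snapshot discipline: `commit` keeps new bindings only when `f` succeeds, `try` rolls back on failure, and `probe` always rolls back. Below is a minimal modern-Rust sketch of the `try`/`probe` shape, using a toy `Bindings` type; the type and its methods are hypothetical stand-ins for the compiler's real inference tables, and `try_run` is renamed because `try` is now a reserved word.

    struct Bindings {
        values: Vec<i32>,
    }

    impl Bindings {
        fn start_snapshot(&self) -> usize {
            self.values.len()
        }

        fn rollback_to(&mut self, snapshot: usize) {
            self.values.truncate(snapshot);
        }

        /// Execute `f` and keep its bindings only if it returns `Ok` (the `try` shape).
        fn try_run<T, E>(&mut self, f: impl FnOnce(&mut Self) -> Result<T, E>) -> Result<T, E> {
            let snapshot = self.start_snapshot();
            let r = f(self);
            if r.is_err() {
                self.rollback_to(snapshot);
            }
            r
        }

        /// Execute `f`, then unconditionally unroll any bindings it created (the `probe` shape).
        fn probe<T, E>(&mut self, f: impl FnOnce(&mut Self) -> Result<T, E>) -> Result<T, E> {
            let snapshot = self.start_snapshot();
            let r = f(self);
            self.rollback_to(snapshot);
            r
        }
    }

    fn main() {
        let mut b = Bindings { values: vec![] };

        let r: Result<(), ()> = b.try_run(|b| {
            b.values.push(1); // a tentative binding
            Err(())           // the operation fails...
        });
        assert!(r.is_err());
        assert!(b.values.is_empty()); // ...so the binding was unrolled

        let peeked: Result<usize, ()> = b.probe(|b| {
            b.values.push(7);
            Ok(b.values.len()) // observe state inside the probe...
        });
        assert_eq!(peeked, Ok(1));
        assert!(b.values.is_empty()); // ...but nothing leaks out of it
    }

The real `InferCtxt` methods additionally assert `!self.in_snapshot()` and emit `debug!` logging, as the hunks above show, but the rollback discipline around the `||` callback is the same shape.
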
@ -721,7 +721,7 @@ impl InferCtxt {
// errors.
pub fn type_error_message_str(@mut self,
sp: Span,
mk_msg: &fn(Option<~str>, ~str) -> ~str,
mk_msg: |Option<~str>, ~str| -> ~str,
actual_ty: ~str,
err: Option<&ty::type_err>) {
self.type_error_message_str_with_expected(sp, mk_msg, None, actual_ty, err)

@ -729,9 +729,9 @@ impl InferCtxt {

pub fn type_error_message_str_with_expected(@mut self,
sp: Span,
mk_msg:
&fn(Option<~str>, ~str) ->
~str,
mk_msg: |Option<~str>,
~str|
-> ~str,
expected_ty: Option<ty::t>,
actual_ty: ~str,
err: Option<&ty::type_err>) {

@ -760,7 +760,7 @@ impl InferCtxt {

pub fn type_error_message(@mut self,
sp: Span,
mk_msg: &fn(~str) -> ~str,
mk_msg: |~str| -> ~str,
actual_ty: ty::t,
err: Option<&ty::type_err>) {
let actual_ty = self.resolve_type_vars_if_possible(actual_ty);

@ -813,11 +813,10 @@ impl InferCtxt {
}
}

pub fn fold_regions_in_sig(
tcx: ty::ctxt,
fn_sig: &ty::FnSig,
fldr: &fn(r: ty::Region) -> ty::Region) -> ty::FnSig
{
pub fn fold_regions_in_sig(tcx: ty::ctxt,
fn_sig: &ty::FnSig,
fldr: |r: ty::Region| -> ty::Region)
-> ty::FnSig {
ty_fold::RegionFolder::regions(tcx, fldr).fold_sig(fn_sig)
}

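For readers coming from modern Rust: neither the old `&fn(A) -> B` spelling nor the `|A| -> B` spelling this commit introduces survives today; callback parameters are now written with generic `Fn`/`FnMut` bounds. A rough modern analogue of `fold_regions_in_sig` above, with toy `Region` and `FnSig` types standing in for the compiler's (the names are kept only to mirror the diff):

    #[derive(Clone, Copy, Debug)]
    struct Region(u32);

    #[derive(Debug)]
    struct FnSig {
        inputs: Vec<Region>,
        output: Region,
    }

    // Apply `fldr` to every region appearing in the signature.
    fn fold_regions_in_sig(sig: &FnSig, mut fldr: impl FnMut(Region) -> Region) -> FnSig {
        FnSig {
            inputs: sig.inputs.iter().map(|&r| fldr(r)).collect(),
            output: fldr(sig.output),
        }
    }

    fn main() {
        let sig = FnSig { inputs: vec![Region(0), Region(1)], output: Region(2) };
        // Shift every region id by 10, just to show the callback being applied.
        let folded = fold_regions_in_sig(&sig, |Region(n)| Region(n + 10));
        println!("{:?}", folded);
    }

Call sites look the same under either syntax; only the parameter's declared type changes, which is why this commit is almost entirely mechanical.
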
@ -389,7 +389,7 @@ The problem we are addressing is that there is a kind of subtyping
between functions with bound region parameters. Consider, for
example, whether the following relation holds:

fn(&'a int) <: &fn(&'b int)? (Yes, a => b)
fn(&'a int) <: |&'b int|? (Yes, a => b)

The answer is that of course it does. These two types are basically
the same, except that in one we used the name `a` and one we used

@ -406,7 +406,7 @@ Now let's consider two more function types. Here, we assume that the
`self` lifetime is defined somewhere outside and hence is not a
lifetime parameter bound by the function type (it "appears free"):

fn<a>(&'a int) <: &fn(&'self int)? (Yes, a => self)
fn<a>(&'a int) <: |&'self int|? (Yes, a => self)

This subtyping relation does in fact hold. To see why, you have to
consider what subtyping means. One way to look at `T1 <: T2` is to

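The excerpts above predate today's `for<'a>` notation, but the same subtyping relation can still be observed in current Rust: a function pointer that is generic over the lifetime of its argument coerces to one fixed to a specific lifetime, while the reverse direction is rejected. A small illustration in modern syntax (not the 2013 types used in this diff):

    // Accepts a reference with *any* lifetime: its value type is
    // `for<'a> fn(&'a i32) -> &'a i32`.
    fn takes_any<'a>(x: &'a i32) -> &'a i32 {
        x
    }

    // Demands a function usable at one *specific* lifetime 'b chosen by the caller.
    fn call_with_specific<'b>(f: fn(&'b i32) -> &'b i32, v: &'b i32) -> &'b i32 {
        f(v)
    }

    fn main() {
        let v = 10;
        // "fn over any lifetime <: fn over this particular lifetime" is accepted.
        let r = call_with_specific(takes_any, &v);
        println!("{}", r);
        // The reverse direction, passing a function fixed to one lifetime where a
        // `for<'a> fn(&'a i32)` is required, would not compile.
    }
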
@ -423,7 +423,7 @@ to the same thing: a function that accepts pointers with any lifetime

So, what if we reverse the order of the two function types, like this:

fn(&'self int) <: &fn<a>(&'a int)? (No)
fn(&'self int) <: <a>|&'a int|? (No)

Does the subtyping relationship still hold? The answer of course is
no. In this case, the function accepts *only the lifetime `&self`*,

@ -432,8 +432,8 @@ accepted any lifetime.

What about these two examples:

fn<a,b>(&'a int, &'b int) <: &fn<a>(&'a int, &'a int)? (Yes)
fn<a>(&'a int, &'a int) <: &fn<a,b>(&'a int, &'b int)? (No)
fn<a,b>(&'a int, &'b int) <: <a>|&'a int, &'a int|? (Yes)
fn<a>(&'a int, &'a int) <: <a,b>|&'a int, &'b int|? (No)

Here, it is true that functions which take two pointers with any two
lifetimes can be treated as if they only accepted two pointers with

@ -457,12 +457,12 @@ Let's walk through some examples and see how this algorithm plays out.

We'll start with the first example, which was:

1. fn<a>(&'a T) <: &fn<b>(&'b T)? Yes: a -> b
1. fn<a>(&'a T) <: <b>|&'b T|? Yes: a -> b

After steps 1 and 2 of the algorithm we will have replaced the types
like so:

1. fn(&'A T) <: &fn(&'x T)?
1. fn(&'A T) <: |&'x T|?

Here the upper case `&A` indicates a *region variable*, that is, a
region whose value is being inferred by the system. I also replaced

@ -491,12 +491,12 @@ So far we have encountered no error, so the subtype check succeeds.

Now let's look first at the third example, which was:

3. fn(&'self T) <: &fn<b>(&'b T)? No!
3. fn(&'self T) <: <b>|&'b T|? No!

After steps 1 and 2 of the algorithm we will have replaced the types
like so:

3. fn(&'self T) <: &fn(&'x T)?
3. fn(&'self T) <: |&'x T|?

This looks pretty much the same as before, except that on the LHS
`&self` was not bound, and hence was left as-is and not replaced with

@ -511,7 +511,7 @@ You may be wondering about that mysterious last step in the algorithm.
So far it has not been relevant. The purpose of that last step is to
catch something like *this*:

fn<a>() -> fn(&'a T) <: &fn() -> fn<b>(&'b T)? No.
fn<a>() -> fn(&'a T) <: || -> fn<b>(&'b T)? No.

Here the function types are the same but for where the binding occurs.
The subtype returns a function that expects a value in precisely one

@ -525,15 +525,15 @@ So let's step through what happens when we perform this subtype check.
We first replace the bound regions in the subtype (the supertype has
no bound regions). This gives us:

fn() -> fn(&'A T) <: &fn() -> fn<b>(&'b T)?
fn() -> fn(&'A T) <: || -> fn<b>(&'b T)?

Now we compare the return types, which are covariant, and hence we have:

fn(&'A T) <: &fn<b>(&'b T)?
fn(&'A T) <: <b>|&'b T|?

Here we skolemize the bound region in the supertype to yield:

fn(&'A T) <: &fn(&'x T)?
fn(&'A T) <: |&'x T|?

And then proceed to compare the argument types:

@ -550,7 +550,7 @@ The difference between this example and the first one is that the variable
`A` already existed at the point where the skolemization occurred. In
the first example, you had two functions:

fn<a>(&'a T) <: &fn<b>(&'b T)
fn<a>(&'a T) <: <b>|&'b T|

and hence `&A` and `&x` were created "together". In general, the
intention of the skolemized names is that they are supposed to be

@ -359,9 +359,9 @@ impl RegionVarBindings {
a: Region,
b: Region,
origin: SubregionOrigin,
relate: &fn(this: &mut RegionVarBindings,
old_r: Region,
new_r: Region))
relate: |this: &mut RegionVarBindings,
old_r: Region,
new_r: Region|)
-> Region {
let vars = TwoRegions { a: a, b: b };
match self.combine_map(t).find(&vars) {

@ -1254,7 +1254,7 @@ impl RegionVarBindings {

fn iterate_until_fixed_point(&self,
tag: &str,
body: &fn(constraint: &Constraint) -> bool) {
body: |constraint: &Constraint| -> bool) {
let mut iteration = 0;
let mut changed = true;
while changed {

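`iterate_until_fixed_point` repeatedly applies `body` to every constraint until a pass makes no further progress; the visible tail of the hunk shows the `iteration` and `changed` bookkeeping. A minimal sketch of that loop shape in modern Rust, assuming (as the `changed` flag suggests) that the callback reports whether it changed anything; `Constraint` here is a placeholder struct, not the compiler's real constraint type:

    struct Constraint {
        sub: u32,
        sup: u32,
    }

    fn iterate_until_fixed_point(constraints: &[Constraint],
                                 mut body: impl FnMut(&Constraint) -> bool) {
        let mut iteration = 0;
        let mut changed = true;
        while changed {
            changed = false;
            iteration += 1;
            for c in constraints {
                // `body` reports whether processing this constraint changed any state.
                if body(c) {
                    changed = true;
                }
            }
        }
        println!("fixed point reached after {} iteration(s)", iteration);
    }

    fn main() {
        let constraints = [Constraint { sub: 0, sup: 1 }, Constraint { sub: 1, sup: 2 }];
        // Toy state: grow `values` until it covers every index a constraint mentions.
        let mut values = vec![0u32];
        iterate_until_fixed_point(&constraints, |c| {
            let needed = c.sub.max(c.sup) as usize;
            if values.len() <= needed {
                values.resize(needed + 1, 0);
                true  // something changed; another pass is needed
            } else {
                false // already satisfied
            }
        });
        assert_eq!(values.len(), 3);
    }
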
@ -282,15 +282,14 @@ pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
}
}

pub fn require_same_types(
tcx: ty::ctxt,
maybe_infcx: Option<@mut infer::InferCtxt>,
t1_is_expected: bool,
span: Span,
t1: ty::t,
t2: ty::t,
msg: &fn() -> ~str) -> bool {

pub fn require_same_types(tcx: ty::ctxt,
maybe_infcx: Option<@mut infer::InferCtxt>,
t1_is_expected: bool,
span: Span,
t1: ty::t,
t2: ty::t,
msg: || -> ~str)
-> bool {
let l_tcx;
let l_infcx;
match maybe_infcx {

@ -150,8 +150,8 @@ constraints will be satisfied.
As a simple example, consider:

enum Option<A> { Some(A), None }
enum OptionalFn<B> { Some(&fn(B)), None }
enum OptionalMap<C> { Some(&fn(C) -> C), None }
enum OptionalFn<B> { Some(|B|), None }
enum OptionalMap<C> { Some(|C| -> C), None }

Here, we will generate the constraints:

@ -17,7 +17,7 @@ use syntax::visit::Visitor;
use std::hashmap::HashSet;
use extra;

pub fn time<T, U>(do_it: bool, what: &str, u: U, f: &fn(U) -> T) -> T {
pub fn time<T, U>(do_it: bool, what: &str, u: U, f: |U| -> T) -> T {
if !do_it { return f(u); }
let start = extra::time::precise_time_s();
let rv = f(u);

@ -26,7 +26,7 @@ pub fn time<T, U>(do_it: bool, what: &str, u: U, f: &fn(U) -> T) -> T {
rv
}

pub fn indent<R>(op: &fn() -> R) -> R {
pub fn indent<R>(op: || -> R) -> R {
// Use in conjunction with the log post-processor like `src/etc/indenter`
// to make debug output more readable.
debug!(">>");

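`time` above wraps an arbitrary computation `f(u)` and reports how long it took (the second hunk shows the tail of the same function), while `indent` brackets a computation with log markers, whose opening `>>` is visible above. A modern-Rust sketch of the same timing wrapper, using `std::time::Instant` in place of the old `extra::time::precise_time_s`:

    use std::time::Instant;

    // Run `f(u)`, printing how long it took when `do_it` is set.
    fn time<T, U>(do_it: bool, what: &str, u: U, f: impl FnOnce(U) -> T) -> T {
        if !do_it {
            return f(u);
        }
        let start = Instant::now();
        let rv = f(u);
        println!("time: {:?} -- {}", start.elapsed(), what);
        rv
    }

    fn main() {
        let sum = time(true, "summing", 1_000_000u64, |n| (0..n).sum::<u64>());
        println!("sum = {}", sum);
    }
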
@ -79,7 +79,7 @@ impl<'self> Visitor<()> for LoopQueryVisitor<'self> {

// Takes a predicate p, returns true iff p is true for any subexpressions
// of b -- skipping any inner loops (loop, while, loop_body)
pub fn loop_query(b: &ast::Block, p: &fn(&ast::Expr_) -> bool) -> bool {
pub fn loop_query(b: &ast::Block, p: |&ast::Expr_| -> bool) -> bool {
let mut v = LoopQueryVisitor {
p: p,
flag: false,

@ -102,7 +102,7 @@ impl<'self> Visitor<()> for BlockQueryVisitor<'self> {

// Takes a predicate p, returns true iff p is true for any subexpressions
// of b -- skipping any inner loops (loop, while, loop_body)
pub fn block_query(b: &ast::Block, p: &fn(@ast::Expr) -> bool) -> bool {
pub fn block_query(b: &ast::Block, p: |@ast::Expr| -> bool) -> bool {
let mut v = BlockQueryVisitor {
p: p,
flag: false,

@ -286,7 +286,7 @@ pub fn vstore_ty_to_str(cx: ctxt, mt: &mt, vs: ty::vstore) -> ~str {
}
}

pub fn vec_map_to_str<T>(ts: &[T], f: &fn(t: &T) -> ~str) -> ~str {
pub fn vec_map_to_str<T>(ts: &[T], f: |t: &T| -> ~str) -> ~str {
let tstrs = ts.map(f);
format!("[{}]", tstrs.connect(", "))
}

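`vec_map_to_str` formats a slice by running each element through the supplied closure and joining the pieces. A modern-Rust analogue, with `String` in place of `~str` and the function name kept only to mirror the code above:

    fn vec_map_to_str<T>(ts: &[T], f: impl Fn(&T) -> String) -> String {
        let tstrs: Vec<String> = ts.iter().map(|t| f(t)).collect();
        format!("[{}]", tstrs.join(", "))
    }

    fn main() {
        let s = vec_map_to_str(&[1, 2, 3], |t| t.to_string());
        assert_eq!(s, "[1, 2, 3]");
        println!("{}", s);
    }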