Mirror of https://github.com/rust-lang/rust.git (synced 2025-04-28 02:57:37 +00:00)
Rollup merge of #22592 - nikomatsakis:deprecate-bracket-bracket, r=aturon
r? @aturon
Commit: 3e794defda
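The diff below applies one mechanical change throughout the tree: the deprecated full-range indexing sugar `expr[]` is removed in favour of a plain reference (letting deref coercion do the work) or an explicit `expr[..]` slice where a slice value is genuinely needed. A minimal sketch of the migration pattern this commit applies; the names here are illustrative and not taken from the diff:

```rust
fn print_str(s: &str) {
    println!("{}", s);
}

fn main() {
    let name = String::from("rustc");

    // Before this change, code used the now-deprecated full-range index sugar:
    //     print_str(&name[]);

    // After the change, a plain reference usually suffices (deref coercion
    // turns &String into &str at the call site):
    print_str(&name);

    // ...or an explicit full-range slice where a slice value is required and
    // coercion alone does not apply, e.g. when matching on the contents:
    match &name[..] {
        "rustc" => println!("it is rustc"),
        _ => println!("something else"),
    }
}
```

This is also why a handful of lines below keep an explicit `[..]` (for example `&self.sess.opts.target_triple[..]` and `&attr.name()[..]`) while most call sites simply drop the brackets.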
@@ -784,7 +784,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
 
 // FIXME: #5516 should be graphemes not codepoints
 // wrapped description
-row.push_str(&desc_rows.connect(&desc_sep[..])[]);
+row.push_str(&desc_rows.connect(&desc_sep[..]));
 
 row
 });
@@ -588,7 +588,7 @@ impl LintPass for RawPointerDerive {
 }
 
 fn check_item(&mut self, cx: &Context, item: &ast::Item) {
-if !attr::contains_name(&item.attrs[], "automatically_derived") {
+if !attr::contains_name(&item.attrs, "automatically_derived") {
 return
 }
 let did = match item.node {
@@ -652,7 +652,7 @@ impl LintPass for UnusedAttributes {
 
 if !attr::is_used(attr) {
 cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute");
-if KNOWN_ATTRIBUTES.contains(&(&attr.name()[], AttributeType::CrateLevel)) {
+if KNOWN_ATTRIBUTES.contains(&(&attr.name(), AttributeType::CrateLevel)) {
 let msg = match attr.node.style {
 ast::AttrOuter => "crate-level attribute should be an inner \
 attribute: add an exclamation mark: #![foo]",
@@ -732,7 +732,7 @@ impl LintPass for UnusedResults {
 ty::ty_enum(did, _) => {
 if ast_util::is_local(did) {
 if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) {
-warned |= check_must_use(cx, &it.attrs[], s.span);
+warned |= check_must_use(cx, &it.attrs, s.span);
 }
 } else {
 let attrs = csearch::get_item_attrs(&cx.sess().cstore, did);
@@ -1093,7 +1093,7 @@ impl UnusedParens {
 if !necessary {
 cx.span_lint(UNUSED_PARENS, value.span,
 &format!("unnecessary parentheses around {}",
-msg)[])
+msg))
 }
 }
 
@@ -1235,7 +1235,7 @@ impl LintPass for NonShorthandFieldPatterns {
 if ident.node.as_str() == fieldpat.node.ident.as_str() {
 cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span,
 &format!("the `{}:` in this pattern is redundant and can \
-be removed", ident.node.as_str())[])
+be removed", ident.node.as_str()))
 }
 }
 }
@@ -1383,7 +1383,7 @@ impl LintPass for UnusedMut {
 fn check_expr(&mut self, cx: &Context, e: &ast::Expr) {
 if let ast::ExprMatch(_, ref arms, _) = e.node {
 for a in arms {
-self.check_unused_mut_pat(cx, &a.pats[])
+self.check_unused_mut_pat(cx, &a.pats)
 }
 }
 }
@@ -1504,7 +1504,7 @@ impl MissingDoc {
 });
 if !has_doc {
 cx.span_lint(MISSING_DOCS, sp,
-&format!("missing documentation for {}", desc)[]);
+&format!("missing documentation for {}", desc));
 }
 }
 }
@@ -1540,7 +1540,7 @@ impl LintPass for MissingDoc {
 }
 
 fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) {
-self.check_missing_docs_attrs(cx, None, &krate.attrs[],
+self.check_missing_docs_attrs(cx, None, &krate.attrs,
 krate.span, "crate");
 }
 
@@ -1554,7 +1554,7 @@ impl LintPass for MissingDoc {
 ast::ItemTy(..) => "a type alias",
 _ => return
 };
-self.check_missing_docs_attrs(cx, Some(it.id), &it.attrs[],
+self.check_missing_docs_attrs(cx, Some(it.id), &it.attrs,
 it.span, desc);
 }
 
@@ -1567,13 +1567,13 @@ impl LintPass for MissingDoc {
 
 // Otherwise, doc according to privacy. This will also check
 // doc for default methods defined on traits.
-self.check_missing_docs_attrs(cx, Some(m.id), &m.attrs[],
+self.check_missing_docs_attrs(cx, Some(m.id), &m.attrs,
 m.span, "a method");
 }
 }
 
 fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) {
-self.check_missing_docs_attrs(cx, Some(tm.id), &tm.attrs[],
+self.check_missing_docs_attrs(cx, Some(tm.id), &tm.attrs,
 tm.span, "a type method");
 }
 
@@ -1583,14 +1583,14 @@ impl LintPass for MissingDoc {
 let cur_struct_def = *self.struct_def_stack.last()
 .expect("empty struct_def_stack");
 self.check_missing_docs_attrs(cx, Some(cur_struct_def),
-&sf.node.attrs[], sf.span,
+&sf.node.attrs, sf.span,
 "a struct field")
 }
 }
 }
 
 fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) {
-self.check_missing_docs_attrs(cx, Some(v.node.id), &v.node.attrs[],
+self.check_missing_docs_attrs(cx, Some(v.node.id), &v.node.attrs,
 v.span, "a variant");
 assert!(!self.in_variant);
 self.in_variant = true;
@@ -105,7 +105,7 @@ impl LintStore {
 }
 
 pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] {
-&self.lints[]
+&self.lints
 }
 
 pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec<LintId>, bool)> {
@@ -276,7 +276,7 @@ impl LintStore {
 .collect::<Vec<()>>();
 }
 None => sess.err(&format!("unknown {} flag: {}",
-level.as_str(), lint_name)[]),
+level.as_str(), lint_name)),
 }
 }
 }
@@ -527,7 +527,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
 self.tcx.sess.span_err(span,
 &format!("{}({}) overruled by outer forbid({})",
 level.as_str(), lint_name,
-lint_name)[]);
+lint_name));
 } else if now != level {
 let src = self.lints.get_level_source(lint_id).1;
 self.level_stack.push((lint_id, (now, src)));
@@ -562,7 +562,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
 
 impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
 fn visit_item(&mut self, it: &ast::Item) {
-self.with_lint_attrs(&it.attrs[], |cx| {
+self.with_lint_attrs(&it.attrs, |cx| {
 run_lints!(cx, check_item, it);
 cx.visit_ids(|v| v.visit_item(it));
 visit::walk_item(cx, it);
@@ -570,7 +570,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
 }
 
 fn visit_foreign_item(&mut self, it: &ast::ForeignItem) {
-self.with_lint_attrs(&it.attrs[], |cx| {
+self.with_lint_attrs(&it.attrs, |cx| {
 run_lints!(cx, check_foreign_item, it);
 visit::walk_foreign_item(cx, it);
 })
@@ -595,7 +595,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
 body: &'v ast::Block, span: Span, id: ast::NodeId) {
 match fk {
 visit::FkMethod(_, _, m) => {
-self.with_lint_attrs(&m.attrs[], |cx| {
+self.with_lint_attrs(&m.attrs, |cx| {
 run_lints!(cx, check_fn, fk, decl, body, span, id);
 cx.visit_ids(|v| {
 v.visit_fn(fk, decl, body, span, id);
@@ -611,7 +611,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
 }
 
 fn visit_ty_method(&mut self, t: &ast::TypeMethod) {
-self.with_lint_attrs(&t.attrs[], |cx| {
+self.with_lint_attrs(&t.attrs, |cx| {
 run_lints!(cx, check_ty_method, t);
 visit::walk_ty_method(cx, t);
 })
@@ -628,14 +628,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
 }
 
 fn visit_struct_field(&mut self, s: &ast::StructField) {
-self.with_lint_attrs(&s.node.attrs[], |cx| {
+self.with_lint_attrs(&s.node.attrs, |cx| {
 run_lints!(cx, check_struct_field, s);
 visit::walk_struct_field(cx, s);
 })
 }
 
 fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) {
-self.with_lint_attrs(&v.node.attrs[], |cx| {
+self.with_lint_attrs(&v.node.attrs, |cx| {
 run_lints!(cx, check_variant, v, g);
 visit::walk_variant(cx, v, g);
 run_lints!(cx, check_variant_post, v, g);
@@ -779,7 +779,7 @@ pub fn check_crate(tcx: &ty::ctxt,
 let mut cx = Context::new(tcx, krate, exported_items);
 
 // Visit the whole crate.
-cx.with_lint_attrs(&krate.attrs[], |cx| {
+cx.with_lint_attrs(&krate.attrs, |cx| {
 cx.visit_id(ast::CRATE_NODE_ID);
 cx.visit_ids(|v| {
 v.visited_outermost = true;
@ -61,7 +61,7 @@ fn dump_crates(cstore: &CStore) {
|
||||
}
|
||||
|
||||
fn should_link(i: &ast::Item) -> bool {
|
||||
!attr::contains_name(&i.attrs[], "no_link")
|
||||
!attr::contains_name(&i.attrs, "no_link")
|
||||
}
|
||||
|
||||
struct CrateInfo {
|
||||
@ -85,7 +85,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
|
||||
for c in s.chars() {
|
||||
if c.is_alphanumeric() { continue }
|
||||
if c == '_' || c == '-' { continue }
|
||||
err(&format!("invalid character `{}` in crate name: `{}`", c, s)[]);
|
||||
err(&format!("invalid character `{}` in crate name: `{}`", c, s));
|
||||
}
|
||||
match sess {
|
||||
Some(sess) => sess.abort_if_errors(),
|
||||
@ -210,8 +210,8 @@ impl<'a> CrateReader<'a> {
|
||||
match self.extract_crate_info(i) {
|
||||
Some(info) => {
|
||||
let (cnum, _, _) = self.resolve_crate(&None,
|
||||
&info.ident[],
|
||||
&info.name[],
|
||||
&info.ident,
|
||||
&info.name,
|
||||
None,
|
||||
i.span,
|
||||
PathKind::Crate);
|
||||
@ -268,7 +268,7 @@ impl<'a> CrateReader<'a> {
|
||||
} else {
|
||||
self.sess.span_err(m.span,
|
||||
&format!("unknown kind: `{}`",
|
||||
k)[]);
|
||||
k));
|
||||
cstore::NativeUnknown
|
||||
}
|
||||
}
|
||||
@ -413,7 +413,7 @@ impl<'a> CrateReader<'a> {
|
||||
hash: hash.map(|a| &*a),
|
||||
filesearch: self.sess.target_filesearch(kind),
|
||||
target: &self.sess.target.target,
|
||||
triple: &self.sess.opts.target_triple[],
|
||||
triple: &self.sess.opts.target_triple,
|
||||
root: root,
|
||||
rejected_via_hash: vec!(),
|
||||
rejected_via_triple: vec!(),
|
||||
@ -440,8 +440,8 @@ impl<'a> CrateReader<'a> {
|
||||
decoder::get_crate_deps(cdata).iter().map(|dep| {
|
||||
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
|
||||
let (local_cnum, _, _) = self.resolve_crate(root,
|
||||
&dep.name[],
|
||||
&dep.name[],
|
||||
&dep.name,
|
||||
&dep.name,
|
||||
Some(&dep.hash),
|
||||
span,
|
||||
PathKind::Dependency);
|
||||
@ -450,7 +450,7 @@ impl<'a> CrateReader<'a> {
|
||||
}
|
||||
|
||||
fn read_extension_crate(&mut self, span: Span, info: &CrateInfo) -> ExtensionCrate {
|
||||
let target_triple = &self.sess.opts.target_triple[];
|
||||
let target_triple = &self.sess.opts.target_triple[..];
|
||||
let is_cross = target_triple != config::host_triple();
|
||||
let mut should_link = info.should_link && !is_cross;
|
||||
let mut target_only = false;
|
||||
@ -493,8 +493,8 @@ impl<'a> CrateReader<'a> {
|
||||
PathKind::Crate).is_none();
|
||||
let metadata = if register {
|
||||
// Register crate now to avoid double-reading metadata
|
||||
let (_, cmd, _) = self.register_crate(&None, &info.ident[],
|
||||
&info.name[], span, library);
|
||||
let (_, cmd, _) = self.register_crate(&None, &info.ident,
|
||||
&info.name, span, library);
|
||||
PMDSource::Registered(cmd)
|
||||
} else {
|
||||
// Not registering the crate; just hold on to the metadata
|
||||
|
@ -92,7 +92,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem>
|
||||
|
||||
// FIXME #1920: This path is not always correct if the crate is not linked
|
||||
// into the root namespace.
|
||||
let mut r = vec![ast_map::PathMod(token::intern(&cdata.name[]))];
|
||||
let mut r = vec![ast_map::PathMod(token::intern(&cdata.name))];
|
||||
r.push_all(&path);
|
||||
r
|
||||
}
|
||||
@ -391,7 +391,7 @@ pub fn is_staged_api(cstore: &cstore::CStore, def: ast::DefId) -> bool {
|
||||
let cdata = cstore.get_crate_data(def.krate);
|
||||
let attrs = decoder::get_crate_attributes(cdata.data());
|
||||
for attr in &attrs {
|
||||
if &attr.name()[] == "staged_api" {
|
||||
if &attr.name()[..] == "staged_api" {
|
||||
match attr.node.value.node { ast::MetaWord(_) => return true, _ => (/*pass*/) }
|
||||
}
|
||||
}
|
||||
|
@ -1225,7 +1225,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
|
||||
}
|
||||
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
|
||||
let name = docstr(depdoc, tag_crate_dep_crate_name);
|
||||
let hash = Svh::new(&docstr(depdoc, tag_crate_dep_hash)[]);
|
||||
let hash = Svh::new(&docstr(depdoc, tag_crate_dep_hash));
|
||||
deps.push(CrateDep {
|
||||
cnum: crate_num,
|
||||
name: name,
|
||||
|
@ -94,7 +94,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) {
|
||||
}
|
||||
|
||||
pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
|
||||
rbml_w.wr_tagged_str(tag_def_id, &def_to_string(id)[]);
|
||||
rbml_w.wr_tagged_str(tag_def_id, &def_to_string(id));
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -273,7 +273,7 @@ fn encode_symbol(ecx: &EncodeContext,
|
||||
}
|
||||
None => {
|
||||
ecx.diag.handler().bug(
|
||||
&format!("encode_symbol: id not found {}", id)[]);
|
||||
&format!("encode_symbol: id not found {}", id));
|
||||
}
|
||||
}
|
||||
rbml_w.end_tag();
|
||||
@ -341,8 +341,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
|
||||
encode_name(rbml_w, variant.node.name.name);
|
||||
encode_parent_item(rbml_w, local_def(id));
|
||||
encode_visibility(rbml_w, variant.node.vis);
|
||||
encode_attributes(rbml_w, &variant.node.attrs[]);
|
||||
encode_repr_attrs(rbml_w, ecx, &variant.node.attrs[]);
|
||||
encode_attributes(rbml_w, &variant.node.attrs);
|
||||
encode_repr_attrs(rbml_w, ecx, &variant.node.attrs);
|
||||
|
||||
let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id));
|
||||
encode_stability(rbml_w, stab);
|
||||
@ -394,12 +394,12 @@ fn encode_reexported_static_method(rbml_w: &mut Encoder,
|
||||
exp.name, token::get_name(method_name));
|
||||
rbml_w.start_tag(tag_items_data_item_reexport);
|
||||
rbml_w.start_tag(tag_items_data_item_reexport_def_id);
|
||||
rbml_w.wr_str(&def_to_string(method_def_id)[]);
|
||||
rbml_w.wr_str(&def_to_string(method_def_id));
|
||||
rbml_w.end_tag();
|
||||
rbml_w.start_tag(tag_items_data_item_reexport_name);
|
||||
rbml_w.wr_str(&format!("{}::{}",
|
||||
exp.name,
|
||||
token::get_name(method_name))[]);
|
||||
token::get_name(method_name)));
|
||||
rbml_w.end_tag();
|
||||
rbml_w.end_tag();
|
||||
}
|
||||
@ -537,7 +537,7 @@ fn encode_reexports(ecx: &EncodeContext,
|
||||
id);
|
||||
rbml_w.start_tag(tag_items_data_item_reexport);
|
||||
rbml_w.start_tag(tag_items_data_item_reexport_def_id);
|
||||
rbml_w.wr_str(&def_to_string(exp.def_id)[]);
|
||||
rbml_w.wr_str(&def_to_string(exp.def_id));
|
||||
rbml_w.end_tag();
|
||||
rbml_w.start_tag(tag_items_data_item_reexport_name);
|
||||
rbml_w.wr_str(exp.name.as_str());
|
||||
@ -570,13 +570,13 @@ fn encode_info_for_mod(ecx: &EncodeContext,
|
||||
// Encode info about all the module children.
|
||||
for item in &md.items {
|
||||
rbml_w.start_tag(tag_mod_child);
|
||||
rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
|
||||
rbml_w.wr_str(&def_to_string(local_def(item.id)));
|
||||
rbml_w.end_tag();
|
||||
|
||||
each_auxiliary_node_id(&**item, |auxiliary_node_id| {
|
||||
rbml_w.start_tag(tag_mod_child);
|
||||
rbml_w.wr_str(&def_to_string(local_def(
|
||||
auxiliary_node_id))[]);
|
||||
auxiliary_node_id)));
|
||||
rbml_w.end_tag();
|
||||
true
|
||||
});
|
||||
@ -588,7 +588,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
|
||||
did, ecx.tcx.map.node_to_string(did));
|
||||
|
||||
rbml_w.start_tag(tag_mod_impl);
|
||||
rbml_w.wr_str(&def_to_string(local_def(did))[]);
|
||||
rbml_w.wr_str(&def_to_string(local_def(did)));
|
||||
rbml_w.end_tag();
|
||||
}
|
||||
}
|
||||
@ -623,7 +623,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: ast::Visibility) {
|
||||
ast::Public => 'y',
|
||||
ast::Inherited => 'i',
|
||||
};
|
||||
rbml_w.wr_str(&ch.to_string()[]);
|
||||
rbml_w.wr_str(&ch.to_string());
|
||||
rbml_w.end_tag();
|
||||
}
|
||||
|
||||
@ -783,7 +783,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder,
|
||||
rbml_w.end_tag();
|
||||
|
||||
rbml_w.wr_tagged_str(tag_region_param_def_def_id,
|
||||
&def_to_string(param.def_id)[]);
|
||||
&def_to_string(param.def_id));
|
||||
|
||||
rbml_w.wr_tagged_u64(tag_region_param_def_space,
|
||||
param.space.to_uint() as u64);
|
||||
@ -858,10 +858,10 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
|
||||
encode_path(rbml_w, impl_path.chain(Some(elem).into_iter()));
|
||||
match ast_item_opt {
|
||||
Some(&ast::MethodImplItem(ref ast_method)) => {
|
||||
encode_attributes(rbml_w, &ast_method.attrs[]);
|
||||
encode_attributes(rbml_w, &ast_method.attrs);
|
||||
let scheme = ty::lookup_item_type(ecx.tcx, m.def_id);
|
||||
let any_types = !scheme.generics.types.is_empty();
|
||||
if any_types || is_default_impl || should_inline(&ast_method.attrs[]) {
|
||||
if any_types || is_default_impl || should_inline(&ast_method.attrs) {
|
||||
encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id),
|
||||
ast_item_opt.unwrap()));
|
||||
}
|
||||
@ -906,7 +906,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext,
|
||||
match typedef_opt {
|
||||
None => {}
|
||||
Some(typedef) => {
|
||||
encode_attributes(rbml_w, &typedef.attrs[]);
|
||||
encode_attributes(rbml_w, &typedef.attrs);
|
||||
encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx,
|
||||
typedef.id));
|
||||
}
|
||||
@ -1040,7 +1040,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
encode_path(rbml_w, path);
|
||||
encode_visibility(rbml_w, vis);
|
||||
encode_stability(rbml_w, stab);
|
||||
encode_attributes(rbml_w, &item.attrs[]);
|
||||
encode_attributes(rbml_w, &item.attrs);
|
||||
rbml_w.end_tag();
|
||||
}
|
||||
ast::ItemConst(_, _) => {
|
||||
@ -1066,8 +1066,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
encode_bounds_and_type_for_item(rbml_w, ecx, item.id);
|
||||
encode_name(rbml_w, item.ident.name);
|
||||
encode_path(rbml_w, path);
|
||||
encode_attributes(rbml_w, &item.attrs[]);
|
||||
if tps_len > 0 || should_inline(&item.attrs[]) {
|
||||
encode_attributes(rbml_w, &item.attrs);
|
||||
if tps_len > 0 || should_inline(&item.attrs) {
|
||||
encode_inlined_item(ecx, rbml_w, IIItemRef(item));
|
||||
}
|
||||
if tps_len == 0 {
|
||||
@ -1083,7 +1083,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
encode_info_for_mod(ecx,
|
||||
rbml_w,
|
||||
m,
|
||||
&item.attrs[],
|
||||
&item.attrs,
|
||||
item.id,
|
||||
path,
|
||||
item.ident,
|
||||
@ -1100,7 +1100,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
// Encode all the items in this module.
|
||||
for foreign_item in &fm.items {
|
||||
rbml_w.start_tag(tag_mod_child);
|
||||
rbml_w.wr_str(&def_to_string(local_def(foreign_item.id))[]);
|
||||
rbml_w.wr_str(&def_to_string(local_def(foreign_item.id)));
|
||||
rbml_w.end_tag();
|
||||
}
|
||||
encode_visibility(rbml_w, vis);
|
||||
@ -1128,8 +1128,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
encode_item_variances(rbml_w, ecx, item.id);
|
||||
encode_bounds_and_type_for_item(rbml_w, ecx, item.id);
|
||||
encode_name(rbml_w, item.ident.name);
|
||||
encode_attributes(rbml_w, &item.attrs[]);
|
||||
encode_repr_attrs(rbml_w, ecx, &item.attrs[]);
|
||||
encode_attributes(rbml_w, &item.attrs);
|
||||
encode_repr_attrs(rbml_w, ecx, &item.attrs);
|
||||
for v in &enum_definition.variants {
|
||||
encode_variant_id(rbml_w, local_def(v.node.id));
|
||||
}
|
||||
@ -1146,7 +1146,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
encode_enum_variant_info(ecx,
|
||||
rbml_w,
|
||||
item.id,
|
||||
&(*enum_definition).variants[],
|
||||
&(*enum_definition).variants,
|
||||
index);
|
||||
}
|
||||
ast::ItemStruct(ref struct_def, _) => {
|
||||
@ -1172,11 +1172,11 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
|
||||
encode_item_variances(rbml_w, ecx, item.id);
|
||||
encode_name(rbml_w, item.ident.name);
|
||||
encode_attributes(rbml_w, &item.attrs[]);
|
||||
encode_attributes(rbml_w, &item.attrs);
|
||||
encode_path(rbml_w, path.clone());
|
||||
encode_stability(rbml_w, stab);
|
||||
encode_visibility(rbml_w, vis);
|
||||
encode_repr_attrs(rbml_w, ecx, &item.attrs[]);
|
||||
encode_repr_attrs(rbml_w, ecx, &item.attrs);
|
||||
|
||||
/* Encode def_ids for each field and method
|
||||
for methods, write all the stuff get_trait_method
|
||||
@ -1213,7 +1213,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
encode_family(rbml_w, 'i');
|
||||
encode_bounds_and_type_for_item(rbml_w, ecx, item.id);
|
||||
encode_name(rbml_w, item.ident.name);
|
||||
encode_attributes(rbml_w, &item.attrs[]);
|
||||
encode_attributes(rbml_w, &item.attrs);
|
||||
encode_unsafety(rbml_w, unsafety);
|
||||
encode_polarity(rbml_w, polarity);
|
||||
match ty.node {
|
||||
@ -1319,7 +1319,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
encode_generics(rbml_w, ecx, &trait_def.generics, &trait_predicates, tag_item_generics);
|
||||
encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref);
|
||||
encode_name(rbml_w, item.ident.name);
|
||||
encode_attributes(rbml_w, &item.attrs[]);
|
||||
encode_attributes(rbml_w, &item.attrs);
|
||||
encode_visibility(rbml_w, vis);
|
||||
encode_stability(rbml_w, stab);
|
||||
for &method_def_id in &*ty::trait_item_def_ids(tcx, def_id) {
|
||||
@ -1337,7 +1337,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
rbml_w.end_tag();
|
||||
|
||||
rbml_w.start_tag(tag_mod_child);
|
||||
rbml_w.wr_str(&def_to_string(method_def_id.def_id())[]);
|
||||
rbml_w.wr_str(&def_to_string(method_def_id.def_id()));
|
||||
rbml_w.end_tag();
|
||||
}
|
||||
encode_path(rbml_w, path.clone());
|
||||
@ -1426,14 +1426,14 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
};
|
||||
match trait_item {
|
||||
&ast::RequiredMethod(ref m) => {
|
||||
encode_attributes(rbml_w, &m.attrs[]);
|
||||
encode_attributes(rbml_w, &m.attrs);
|
||||
encode_trait_item(rbml_w);
|
||||
encode_item_sort(rbml_w, 'r');
|
||||
encode_method_argument_names(rbml_w, &*m.decl);
|
||||
}
|
||||
|
||||
&ast::ProvidedMethod(ref m) => {
|
||||
encode_attributes(rbml_w, &m.attrs[]);
|
||||
encode_attributes(rbml_w, &m.attrs);
|
||||
encode_trait_item(rbml_w);
|
||||
encode_item_sort(rbml_w, 'p');
|
||||
encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item));
|
||||
@ -1442,7 +1442,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
|
||||
&ast::TypeTraitItem(ref associated_type) => {
|
||||
encode_attributes(rbml_w,
|
||||
&associated_type.attrs[]);
|
||||
&associated_type.attrs);
|
||||
encode_item_sort(rbml_w, 't');
|
||||
}
|
||||
}
|
||||
@ -1867,10 +1867,10 @@ fn encode_macro_defs(rbml_w: &mut Encoder,
|
||||
rbml_w.start_tag(tag_macro_def);
|
||||
|
||||
encode_name(rbml_w, def.ident.name);
|
||||
encode_attributes(rbml_w, &def.attrs[]);
|
||||
encode_attributes(rbml_w, &def.attrs);
|
||||
|
||||
rbml_w.start_tag(tag_macro_def_body);
|
||||
rbml_w.wr_str(&pprust::tts_to_string(&def.body[])[]);
|
||||
rbml_w.wr_str(&pprust::tts_to_string(&def.body));
|
||||
rbml_w.end_tag();
|
||||
|
||||
rbml_w.end_tag();
|
||||
@ -1887,7 +1887,7 @@ fn encode_struct_field_attrs(rbml_w: &mut Encoder, krate: &ast::Crate) {
|
||||
fn visit_struct_field(&mut self, field: &ast::StructField) {
|
||||
self.rbml_w.start_tag(tag_struct_field);
|
||||
self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id);
|
||||
encode_attributes(self.rbml_w, &field.node.attrs[]);
|
||||
encode_attributes(self.rbml_w, &field.node.attrs);
|
||||
self.rbml_w.end_tag();
|
||||
}
|
||||
}
|
||||
@ -1959,13 +1959,13 @@ fn encode_misc_info(ecx: &EncodeContext,
|
||||
rbml_w.start_tag(tag_misc_info_crate_items);
|
||||
for item in &krate.module.items {
|
||||
rbml_w.start_tag(tag_mod_child);
|
||||
rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
|
||||
rbml_w.wr_str(&def_to_string(local_def(item.id)));
|
||||
rbml_w.end_tag();
|
||||
|
||||
each_auxiliary_node_id(&**item, |auxiliary_node_id| {
|
||||
rbml_w.start_tag(tag_mod_child);
|
||||
rbml_w.wr_str(&def_to_string(local_def(
|
||||
auxiliary_node_id))[]);
|
||||
auxiliary_node_id)));
|
||||
rbml_w.end_tag();
|
||||
true
|
||||
});
|
||||
@ -2132,17 +2132,17 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter,
|
||||
|
||||
let mut rbml_w = writer::Encoder::new(wr);
|
||||
|
||||
encode_crate_name(&mut rbml_w, &ecx.link_meta.crate_name[]);
|
||||
encode_crate_name(&mut rbml_w, &ecx.link_meta.crate_name);
|
||||
encode_crate_triple(&mut rbml_w,
|
||||
&tcx.sess
|
||||
.opts
|
||||
.target_triple
|
||||
[]);
|
||||
);
|
||||
encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash);
|
||||
encode_dylib_dependency_formats(&mut rbml_w, &ecx);
|
||||
|
||||
let mut i = rbml_w.writer.tell().unwrap();
|
||||
encode_attributes(&mut rbml_w, &krate.attrs[]);
|
||||
encode_attributes(&mut rbml_w, &krate.attrs);
|
||||
stats.attr_bytes = rbml_w.writer.tell().unwrap() - i;
|
||||
|
||||
i = rbml_w.writer.tell().unwrap();
|
||||
|
@ -329,7 +329,7 @@ impl<'a> Context<'a> {
|
||||
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
|
||||
self.sess.fileline_note(self.span,
|
||||
&format!("crate `{}`, path #{}, triple {}: {}",
|
||||
self.ident, i+1, got, path.display())[]);
|
||||
self.ident, i+1, got, path.display()));
|
||||
}
|
||||
}
|
||||
if self.rejected_via_hash.len() > 0 {
|
||||
@ -339,7 +339,7 @@ impl<'a> Context<'a> {
|
||||
for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() {
|
||||
self.sess.fileline_note(self.span,
|
||||
&format!("crate `{}` path #{}: {}",
|
||||
self.ident, i+1, path.display())[]);
|
||||
self.ident, i+1, path.display()));
|
||||
}
|
||||
match self.root {
|
||||
&None => {}
|
||||
@ -347,7 +347,7 @@ impl<'a> Context<'a> {
|
||||
for (i, path) in r.paths().iter().enumerate() {
|
||||
self.sess.fileline_note(self.span,
|
||||
&format!("crate `{}` path #{}: {}",
|
||||
r.ident, i+1, path.display())[]);
|
||||
r.ident, i+1, path.display()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -359,7 +359,7 @@ impl<'a> Context<'a> {
|
||||
for (i, &CrateMismatch { ref path, .. }) in mismatches.enumerate() {
|
||||
self.sess.fileline_note(self.span,
|
||||
&format!("crate `{}` path #{}: {}",
|
||||
self.ident, i+1, path.display())[]);
|
||||
self.ident, i+1, path.display()));
|
||||
}
|
||||
}
|
||||
self.sess.abort_if_errors();
|
||||
@ -472,26 +472,26 @@ impl<'a> Context<'a> {
|
||||
_ => {
|
||||
self.sess.span_err(self.span,
|
||||
&format!("multiple matching crates for `{}`",
|
||||
self.crate_name)[]);
|
||||
self.crate_name));
|
||||
self.sess.note("candidates:");
|
||||
for lib in &libraries {
|
||||
match lib.dylib {
|
||||
Some((ref p, _)) => {
|
||||
self.sess.note(&format!("path: {}",
|
||||
p.display())[]);
|
||||
p.display()));
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
match lib.rlib {
|
||||
Some((ref p, _)) => {
|
||||
self.sess.note(&format!("path: {}",
|
||||
p.display())[]);
|
||||
p.display()));
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
let data = lib.metadata.as_slice();
|
||||
let name = decoder::get_crate_name(data);
|
||||
note_crate_name(self.sess.diagnostic(), &name[]);
|
||||
note_crate_name(self.sess.diagnostic(), &name);
|
||||
}
|
||||
None
|
||||
}
|
||||
@ -545,11 +545,11 @@ impl<'a> Context<'a> {
|
||||
&format!("multiple {} candidates for `{}` \
|
||||
found",
|
||||
flavor,
|
||||
self.crate_name)[]);
|
||||
self.crate_name));
|
||||
self.sess.span_note(self.span,
|
||||
&format!(r"candidate #1: {}",
|
||||
ret.as_ref().unwrap().0
|
||||
.display())[]);
|
||||
.display()));
|
||||
error = 1;
|
||||
ret = None;
|
||||
}
|
||||
@ -557,7 +557,7 @@ impl<'a> Context<'a> {
|
||||
error += 1;
|
||||
self.sess.span_note(self.span,
|
||||
&format!(r"candidate #{}: {}", error,
|
||||
lib.display())[]);
|
||||
lib.display()));
|
||||
continue
|
||||
}
|
||||
*slot = Some(metadata);
|
||||
@ -630,14 +630,14 @@ impl<'a> Context<'a> {
|
||||
let locs = locs.iter().map(|l| Path::new(&l[..])).filter(|loc| {
|
||||
if !loc.exists() {
|
||||
sess.err(&format!("extern location for {} does not exist: {}",
|
||||
self.crate_name, loc.display())[]);
|
||||
self.crate_name, loc.display()));
|
||||
return false;
|
||||
}
|
||||
let file = match loc.filename_str() {
|
||||
Some(file) => file,
|
||||
None => {
|
||||
sess.err(&format!("extern location for {} is not a file: {}",
|
||||
self.crate_name, loc.display())[]);
|
||||
self.crate_name, loc.display()));
|
||||
return false;
|
||||
}
|
||||
};
|
||||
@ -651,7 +651,7 @@ impl<'a> Context<'a> {
|
||||
}
|
||||
}
|
||||
sess.err(&format!("extern location for {} is of an unknown type: {}",
|
||||
self.crate_name, loc.display())[]);
|
||||
self.crate_name, loc.display()));
|
||||
false
|
||||
});
|
||||
|
||||
@ -686,7 +686,7 @@ impl<'a> Context<'a> {
|
||||
}
|
||||
|
||||
pub fn note_crate_name(diag: &SpanHandler, name: &str) {
|
||||
diag.handler().note(&format!("crate name: {}", name)[]);
|
||||
diag.handler().note(&format!("crate name: {}", name));
|
||||
}
|
||||
|
||||
impl ArchiveMetadata {
|
||||
|
@ -78,7 +78,7 @@ impl<'a, 'v> Visitor<'v> for MacroLoader<'a> {
|
||||
|
||||
for attr in &item.attrs {
|
||||
let mut used = true;
|
||||
match &attr.name()[] {
|
||||
match &attr.name()[..] {
|
||||
"phase" => {
|
||||
self.sess.span_err(attr.span, "#[phase] is deprecated");
|
||||
}
|
||||
@ -86,7 +86,7 @@ impl<'a, 'v> Visitor<'v> for MacroLoader<'a> {
|
||||
self.sess.span_err(attr.span, "#[plugin] on `extern crate` is deprecated");
|
||||
self.sess.span_help(attr.span, &format!("use a crate attribute instead, \
|
||||
i.e. #![plugin({})]",
|
||||
item.ident.as_str())[]);
|
||||
item.ident.as_str()));
|
||||
}
|
||||
"macro_use" => {
|
||||
let names = attr.meta_item_list();
|
||||
|
@ -305,7 +305,7 @@ fn parse_bound_region_<F>(st: &mut PState, conv: &mut F) -> ty::BoundRegion wher
|
||||
}
|
||||
'[' => {
|
||||
let def = parse_def_(st, RegionParameter, conv);
|
||||
let ident = token::str_to_ident(&parse_str(st, ']')[]);
|
||||
let ident = token::str_to_ident(&parse_str(st, ']'));
|
||||
ty::BrNamed(def, ident.name)
|
||||
}
|
||||
'f' => {
|
||||
@ -344,7 +344,7 @@ fn parse_region_<F>(st: &mut PState, conv: &mut F) -> ty::Region where
|
||||
assert_eq!(next(st), '|');
|
||||
let index = parse_u32(st);
|
||||
assert_eq!(next(st), '|');
|
||||
let nm = token::str_to_ident(&parse_str(st, ']')[]);
|
||||
let nm = token::str_to_ident(&parse_str(st, ']'));
|
||||
ty::ReEarlyBound(node_id, space, index, nm.name)
|
||||
}
|
||||
'f' => {
|
||||
@ -485,7 +485,7 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w
|
||||
assert_eq!(next(st), '|');
|
||||
let space = parse_param_space(st);
|
||||
assert_eq!(next(st), '|');
|
||||
let name = token::intern(&parse_str(st, ']')[]);
|
||||
let name = token::intern(&parse_str(st, ']'));
|
||||
return ty::mk_param(tcx, space, index, name);
|
||||
}
|
||||
'~' => return ty::mk_uniq(tcx, parse_ty_(st, conv)),
|
||||
|
@ -48,7 +48,7 @@ pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty)
|
||||
None => {
|
||||
tcx.sess.span_bug(ast_ty.span,
|
||||
&format!("unbound path {}",
|
||||
path.repr(tcx))[])
|
||||
path.repr(tcx)))
|
||||
}
|
||||
Some(&d) => d
|
||||
};
|
||||
|
@ -1852,7 +1852,7 @@ fn decode_side_tables(dcx: &DecodeContext,
|
||||
None => {
|
||||
dcx.tcx.sess.bug(
|
||||
&format!("unknown tag found in side tables: {:x}",
|
||||
tag)[]);
|
||||
tag));
|
||||
}
|
||||
Some(value) => {
|
||||
let val_doc = entry_doc.get(c::tag_table_val as uint);
|
||||
@ -1937,7 +1937,7 @@ fn decode_side_tables(dcx: &DecodeContext,
|
||||
_ => {
|
||||
dcx.tcx.sess.bug(
|
||||
&format!("unknown tag found in side tables: {:x}",
|
||||
tag)[]);
|
||||
tag));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -327,7 +327,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
let mut cond_exit = discr_exit;
|
||||
for arm in arms {
|
||||
cond_exit = self.add_dummy_node(&[cond_exit]); // 2
|
||||
let pats_exit = self.pats_any(&arm.pats[],
|
||||
let pats_exit = self.pats_any(&arm.pats,
|
||||
cond_exit); // 3
|
||||
let guard_exit = self.opt_expr(&arm.guard,
|
||||
pats_exit); // 4
|
||||
@ -582,14 +582,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
self.tcx.sess.span_bug(
|
||||
expr.span,
|
||||
&format!("no loop scope for id {}",
|
||||
loop_id)[]);
|
||||
loop_id));
|
||||
}
|
||||
|
||||
r => {
|
||||
self.tcx.sess.span_bug(
|
||||
expr.span,
|
||||
&format!("bad entry `{:?}` in def_map for label",
|
||||
r)[]);
|
||||
r));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -54,7 +54,7 @@ fn replace_newline_with_backslash_l(s: String) -> String {
|
||||
}
|
||||
|
||||
impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
|
||||
fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[]).ok().unwrap() }
|
||||
fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[..]).ok().unwrap() }
|
||||
|
||||
fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> {
|
||||
dot::Id::new(format!("N{}", i.node_id())).ok().unwrap()
|
||||
@ -92,7 +92,7 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
|
||||
let s = replace_newline_with_backslash_l(s);
|
||||
label.push_str(&format!("exiting scope_{} {}",
|
||||
i,
|
||||
&s[..])[]);
|
||||
&s[..]));
|
||||
}
|
||||
dot::LabelText::EscStr(label.into_cow())
|
||||
}
|
||||
|
@ -176,7 +176,7 @@ impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
|
||||
};
|
||||
|
||||
self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
|
||||
to have {}", suffix)[]);
|
||||
to have {}", suffix));
|
||||
}
|
||||
|
||||
fn check_static_type(&self, e: &ast::Expr) {
|
||||
@ -382,7 +382,7 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>,
|
||||
if v.mode != Mode::Var {
|
||||
v.tcx.sess.span_err(e.span,
|
||||
&format!("{}s are not allowed to have destructors",
|
||||
v.msg())[]);
|
||||
v.msg()));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
|
@ -163,7 +163,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
|
||||
// First, check legality of move bindings.
|
||||
check_legality_of_move_bindings(cx,
|
||||
arm.guard.is_some(),
|
||||
&arm.pats[]);
|
||||
&arm.pats);
|
||||
|
||||
// Second, if there is a guard on each arm, make sure it isn't
|
||||
// assigning or borrowing anything mutably.
|
||||
@ -1101,7 +1101,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
|
||||
&format!("binding pattern {} is not an \
|
||||
identifier: {:?}",
|
||||
p.id,
|
||||
p.node)[]);
|
||||
p.node));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -554,7 +554,7 @@ fn bits_to_string(words: &[uint]) -> String {
|
||||
let mut v = word;
|
||||
for _ in 0..usize::BYTES {
|
||||
result.push(sep);
|
||||
result.push_str(&format!("{:02x}", v & 0xFF)[]);
|
||||
result.push_str(&format!("{:02x}", v & 0xFF));
|
||||
v >>= 8;
|
||||
sep = '-';
|
||||
}
|
||||
|
@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session,
|
||||
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
|
||||
if src.rlib.is_some() { return }
|
||||
sess.err(&format!("dependency `{}` not found in rlib format",
|
||||
data.name)[]);
|
||||
data.name));
|
||||
});
|
||||
return Vec::new();
|
||||
}
|
||||
@ -197,7 +197,7 @@ fn calculate_type(sess: &session::Session,
|
||||
match kind {
|
||||
cstore::RequireStatic => "rlib",
|
||||
cstore::RequireDynamic => "dylib",
|
||||
})[]);
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -222,7 +222,7 @@ fn add_library(sess: &session::Session,
|
||||
let data = sess.cstore.get_crate_data(cnum);
|
||||
sess.err(&format!("cannot satisfy dependencies so `{}` only \
|
||||
shows up once",
|
||||
data.name)[]);
|
||||
data.name));
|
||||
sess.help("having upstream crates all available in one format \
|
||||
will likely make this go away");
|
||||
}
|
||||
|
@ -841,7 +841,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
|
||||
ty::ty_rptr(r, ref m) => (m.mutbl, r),
|
||||
_ => self.tcx().sess.span_bug(expr.span,
|
||||
&format!("bad overloaded deref type {}",
|
||||
method_ty.repr(self.tcx()))[])
|
||||
method_ty.repr(self.tcx())))
|
||||
};
|
||||
let bk = ty::BorrowKind::from_mutbl(m);
|
||||
self.delegate.borrow(expr.id, expr.span, cmt,
|
||||
|
@ -433,7 +433,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C,
|
||||
&format!("{}: bot and var types should have been handled ({},{})",
|
||||
this.tag(),
|
||||
a.repr(this.infcx().tcx),
|
||||
b.repr(this.infcx().tcx))[]);
|
||||
b.repr(this.infcx().tcx)));
|
||||
}
|
||||
|
||||
(&ty::ty_err, _) | (_, &ty::ty_err) => {
|
||||
@ -818,7 +818,7 @@ impl<'cx, 'tcx> ty_fold::TypeFolder<'tcx> for Generalizer<'cx, 'tcx> {
|
||||
self.tcx().sess.span_bug(
|
||||
self.span,
|
||||
&format!("Encountered early bound region when generalizing: {}",
|
||||
r.repr(self.tcx()))[]);
|
||||
r.repr(self.tcx())));
|
||||
}
|
||||
|
||||
// Always make a fresh region variable for skolemized regions;
|
||||
|
@ -449,7 +449,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
&format!(
|
||||
"consider adding an explicit lifetime bound `{}: {}`...",
|
||||
bound_kind.user_string(self.tcx),
|
||||
sub.user_string(self.tcx))[]);
|
||||
sub.user_string(self.tcx)));
|
||||
}
|
||||
|
||||
ty::ReStatic => {
|
||||
@ -460,7 +460,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
origin.span(),
|
||||
&format!(
|
||||
"consider adding an explicit lifetime bound `{}: 'static`...",
|
||||
bound_kind.user_string(self.tcx))[]);
|
||||
bound_kind.user_string(self.tcx)));
|
||||
}
|
||||
|
||||
_ => {
|
||||
@ -472,10 +472,10 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
origin.span(),
|
||||
&format!(
|
||||
"consider adding an explicit lifetime bound for `{}`",
|
||||
bound_kind.user_string(self.tcx))[]);
|
||||
bound_kind.user_string(self.tcx)));
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
&format!("{} must be valid for ", labeled_user_string)[],
|
||||
&format!("{} must be valid for ", labeled_user_string),
|
||||
sub,
|
||||
"...");
|
||||
}
|
||||
@ -525,7 +525,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
&format!("...but `{}` is only valid for ",
|
||||
ty::local_var_name_str(self.tcx,
|
||||
upvar_id.var_id)
|
||||
.to_string())[],
|
||||
.to_string()),
|
||||
sup,
|
||||
"");
|
||||
}
|
||||
@ -568,7 +568,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
&format!("captured variable `{}` does not \
|
||||
outlive the enclosing closure",
|
||||
ty::local_var_name_str(self.tcx,
|
||||
id).to_string())[]);
|
||||
id).to_string()));
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
"captured variable is valid for ",
|
||||
@ -610,7 +610,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
span,
|
||||
&format!("the type `{}` does not fulfill the \
|
||||
required lifetime",
|
||||
self.ty_to_string(ty))[]);
|
||||
self.ty_to_string(ty)));
|
||||
note_and_explain_region(self.tcx,
|
||||
"type must outlive ",
|
||||
sub,
|
||||
@ -636,7 +636,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
span,
|
||||
&format!("the type `{}` (provided as the value of \
|
||||
a type parameter) is not valid at this point",
|
||||
self.ty_to_string(ty))[]);
|
||||
self.ty_to_string(ty)));
|
||||
note_and_explain_region(self.tcx,
|
||||
"type must outlive ",
|
||||
sub,
|
||||
@ -713,7 +713,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
span,
|
||||
&format!("type of expression contains references \
|
||||
that are not valid during the expression: `{}`",
|
||||
self.ty_to_string(t))[]);
|
||||
self.ty_to_string(t)));
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
"type is only valid for ",
|
||||
@ -752,7 +752,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
span,
|
||||
&format!("in type `{}`, reference has a longer lifetime \
|
||||
than the data it references",
|
||||
self.ty_to_string(ty))[]);
|
||||
self.ty_to_string(ty)));
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
"the pointer is valid for ",
|
||||
@ -988,7 +988,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
|
||||
names.push(lt_name);
|
||||
}
|
||||
names.sort();
|
||||
let name = token::str_to_ident(&names[0][]).name;
|
||||
let name = token::str_to_ident(&names[0]).name;
|
||||
return (name_to_dummy_lifetime(name), Kept);
|
||||
}
|
||||
return (self.life_giver.give_lifetime(), Fresh);
|
||||
@ -1240,7 +1240,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
|
||||
.sess
|
||||
.fatal(&format!(
|
||||
"unbound path {}",
|
||||
pprust::path_to_string(path))[])
|
||||
pprust::path_to_string(path)))
|
||||
}
|
||||
Some(&d) => d
|
||||
};
|
||||
@ -1479,7 +1479,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
var_origin.span(),
|
||||
&format!("cannot infer an appropriate lifetime{} \
|
||||
due to conflicting requirements",
|
||||
var_description)[]);
|
||||
var_description));
|
||||
}
|
||||
|
||||
fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) {
|
||||
@ -1527,7 +1527,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
self.tcx.sess.span_note(
|
||||
trace.origin.span(),
|
||||
&format!("...so that {} ({})",
|
||||
desc, values_str)[]);
|
||||
desc, values_str));
|
||||
}
|
||||
None => {
|
||||
// Really should avoid printing this error at
|
||||
@ -1536,7 +1536,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
// doing right now. - nmatsakis
|
||||
self.tcx.sess.span_note(
|
||||
trace.origin.span(),
|
||||
&format!("...so that {}", desc)[]);
|
||||
&format!("...so that {}", desc));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1552,7 +1552,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
&format!(
|
||||
"...so that closure can access `{}`",
|
||||
ty::local_var_name_str(self.tcx, upvar_id.var_id)
|
||||
.to_string())[])
|
||||
.to_string()))
|
||||
}
|
||||
infer::InfStackClosure(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1577,7 +1577,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
does not outlive the enclosing closure",
|
||||
ty::local_var_name_str(
|
||||
self.tcx,
|
||||
id).to_string())[]);
|
||||
id).to_string()));
|
||||
}
|
||||
infer::IndexSlice(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1626,7 +1626,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
span,
|
||||
&format!("...so type `{}` of expression is valid during the \
|
||||
expression",
|
||||
self.ty_to_string(t))[]);
|
||||
self.ty_to_string(t)));
|
||||
}
|
||||
infer::BindingTypeIsNotValidAtDecl(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1638,14 +1638,14 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
span,
|
||||
&format!("...so that the reference type `{}` \
|
||||
does not outlive the data it points at",
|
||||
self.ty_to_string(ty))[]);
|
||||
self.ty_to_string(ty)));
|
||||
}
|
||||
infer::RelateParamBound(span, t) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
&format!("...so that the type `{}` \
|
||||
will meet its required lifetime bounds",
|
||||
self.ty_to_string(t))[]);
|
||||
self.ty_to_string(t)));
|
||||
}
|
||||
infer::RelateDefaultParamBound(span, t) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1653,13 +1653,13 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
&format!("...so that type parameter \
|
||||
instantiated with `{}`, \
|
||||
will meet its declared lifetime bounds",
|
||||
self.ty_to_string(t))[]);
|
||||
self.ty_to_string(t)));
|
||||
}
|
||||
infer::RelateRegionParamBound(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
&format!("...so that the declared lifetime parameter bounds \
|
||||
are satisfied")[]);
|
||||
are satisfied"));
|
||||
}
|
||||
infer::SafeDestructor(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1717,7 +1717,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
|
||||
Some(node) => match node {
|
||||
ast_map::NodeItem(item) => match item.node {
|
||||
ast::ItemFn(_, _, _, ref gen, _) => {
|
||||
taken.push_all(&gen.lifetimes[]);
|
||||
taken.push_all(&gen.lifetimes);
|
||||
None
|
||||
},
|
||||
_ => None
|
||||
@ -1725,7 +1725,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
|
||||
ast_map::NodeImplItem(ii) => {
|
||||
match *ii {
|
||||
ast::MethodImplItem(ref m) => {
|
||||
taken.push_all(&m.pe_generics().lifetimes[]);
|
||||
taken.push_all(&m.pe_generics().lifetimes);
|
||||
Some(m.id)
|
||||
}
|
||||
ast::TypeImplItem(_) => None,
|
||||
@ -1784,7 +1784,7 @@ impl LifeGiver {
|
||||
let mut lifetime;
|
||||
loop {
|
||||
let mut s = String::from_str("'");
|
||||
s.push_str(&num_to_string(self.counter.get())[]);
|
||||
s.push_str(&num_to_string(self.counter.get()));
|
||||
if !self.taken.contains(&s) {
|
||||
lifetime = name_to_dummy_lifetime(
|
||||
token::str_to_ident(&s[..]).name);
|
||||
|
@ -189,7 +189,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C
|
||||
span,
|
||||
&format!("region {:?} is not associated with \
|
||||
any bound region from A!",
|
||||
r0)[])
|
||||
r0))
|
||||
}
|
||||
}
|
||||
|
||||
@ -322,7 +322,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C
|
||||
}
|
||||
infcx.tcx.sess.span_bug(
|
||||
span,
|
||||
&format!("could not find original bound region for {:?}", r)[]);
|
||||
&format!("could not find original bound region for {:?}", r));
|
||||
}
|
||||
|
||||
fn fresh_bound_variable(infcx: &InferCtxt, debruijn: ty::DebruijnIndex) -> ty::Region {
|
||||
@ -339,7 +339,7 @@ fn var_ids<'tcx, T: Combine<'tcx>>(combiner: &T,
|
||||
r => {
|
||||
combiner.infcx().tcx.sess.span_bug(
|
||||
combiner.trace().origin.span(),
|
||||
&format!("found non-region-vid: {:?}", r)[]);
|
||||
&format!("found non-region-vid: {:?}", r));
|
||||
}
|
||||
}).collect()
|
||||
}
|
||||
|
@ -999,7 +999,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
||||
|
||||
self.tcx.sess.span_err(sp, &format!("{}{}",
|
||||
mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty),
|
||||
error_str)[]);
|
||||
error_str));
|
||||
|
||||
if let Some(err) = err {
|
||||
ty::note_and_explain_type_err(self.tcx, err)
|
||||
|
@ -473,7 +473,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
origin.span(),
|
||||
&format!("cannot relate bound region: {} <= {}",
|
||||
sub.repr(self.tcx),
|
||||
sup.repr(self.tcx))[]);
|
||||
sup.repr(self.tcx)));
|
||||
}
|
||||
(_, ReStatic) => {
|
||||
// all regions are subregions of static, so we can ignore this
|
||||
@ -733,7 +733,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
self.tcx.sess.bug(
|
||||
&format!("cannot relate bound region: LUB({}, {})",
|
||||
a.repr(self.tcx),
|
||||
b.repr(self.tcx))[]);
|
||||
b.repr(self.tcx)));
|
||||
}
|
||||
|
||||
(ReStatic, _) | (_, ReStatic) => {
|
||||
@ -750,7 +750,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
&format!("lub_concrete_regions invoked with \
|
||||
non-concrete regions: {:?}, {:?}",
|
||||
a,
|
||||
b)[]);
|
||||
b));
|
||||
}
|
||||
|
||||
(ReFree(ref fr), ReScope(s_id)) |
|
||||
@ -834,7 +834,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
self.tcx.sess.bug(
|
||||
&format!("cannot relate bound region: GLB({}, {})",
|
||||
a.repr(self.tcx),
|
||||
b.repr(self.tcx))[]);
|
||||
b.repr(self.tcx)));
|
||||
}
|
||||
|
||||
(ReStatic, r) | (r, ReStatic) => {
|
||||
@ -854,7 +854,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
&format!("glb_concrete_regions invoked with \
|
||||
non-concrete regions: {:?}, {:?}",
|
||||
a,
|
||||
b)[]);
|
||||
b));
|
||||
}
|
||||
|
||||
(ReFree(ref fr), ReScope(s_id)) |
|
||||
@ -1417,7 +1417,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
for var {:?}, lower_bounds={}, upper_bounds={}",
|
||||
node_idx,
|
||||
lower_bounds.repr(self.tcx),
|
||||
upper_bounds.repr(self.tcx))[]);
|
||||
upper_bounds.repr(self.tcx)));
|
||||
}
|
||||
|
||||
fn collect_error_for_contracting_node(
|
||||
@ -1461,7 +1461,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
&format!("collect_error_for_contracting_node() could not find error \
|
||||
for var {:?}, upper_bounds={}",
|
||||
node_idx,
|
||||
upper_bounds.repr(self.tcx))[]);
|
||||
upper_bounds.repr(self.tcx)));
|
||||
}
|
||||
|
||||
fn collect_concrete_regions(&self,
|
||||
|
@ -96,7 +96,7 @@ impl<'a, 'tcx> ty_fold::TypeFolder<'tcx> for FullTypeResolver<'a, 'tcx> {
|
||||
ty::ty_infer(_) => {
|
||||
self.infcx.tcx.sess.bug(
|
||||
&format!("Unexpected type in full type resolver: {}",
|
||||
t.repr(self.infcx.tcx))[]);
|
||||
t.repr(self.infcx.tcx)));
|
||||
}
|
||||
_ => {
|
||||
ty_fold::super_fold_ty(self, t)
|
||||
|
@ -325,7 +325,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> {
|
||||
self.tcx
|
||||
.sess
|
||||
.span_bug(span, &format!("no variable registered for id {}",
|
||||
node_id)[]);
|
||||
node_id));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -585,7 +585,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
self.ir.tcx.sess.span_bug(
|
||||
span,
|
||||
&format!("no live node registered for node {}",
|
||||
node_id)[]);
|
||||
node_id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -624,7 +624,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
|
||||
span,
|
||||
&format!("Upvar of non-closure {} - {}",
|
||||
fn_node_id,
|
||||
ty.repr(self.tcx()))[]);
|
||||
ty.repr(self.tcx())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -50,7 +50,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool {
|
||||
// monomorphized or it was marked with `#[inline]`. This will only return
|
||||
// true for functions.
|
||||
fn item_might_be_inlined(item: &ast::Item) -> bool {
|
||||
if attributes_specify_inlining(&item.attrs[]) {
|
||||
if attributes_specify_inlining(&item.attrs) {
|
||||
return true
|
||||
}
|
||||
|
||||
@ -65,7 +65,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool {
|
||||
|
||||
fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method,
|
||||
impl_src: ast::DefId) -> bool {
|
||||
if attributes_specify_inlining(&method.attrs[]) ||
|
||||
if attributes_specify_inlining(&method.attrs) ||
|
||||
generics_require_inlining(method.pe_generics()) {
|
||||
return true
|
||||
}
|
||||
@ -202,7 +202,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
|
||||
ast::MethodImplItem(ref method) => {
|
||||
if generics_require_inlining(method.pe_generics()) ||
|
||||
attributes_specify_inlining(
|
||||
&method.attrs[]) {
|
||||
&method.attrs) {
|
||||
true
|
||||
} else {
|
||||
let impl_did = self.tcx
|
||||
@ -249,7 +249,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
|
||||
None => {
|
||||
self.tcx.sess.bug(&format!("found unmapped ID in worklist: \
|
||||
{}",
|
||||
search_item)[])
|
||||
search_item))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -342,7 +342,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
|
||||
.bug(&format!("found unexpected thingy in worklist: {}",
|
||||
self.tcx
|
||||
.map
|
||||
.node_to_string(search_item))[])
|
||||
.node_to_string(search_item)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -181,7 +181,7 @@ impl Index {
|
||||
pub fn new(krate: &Crate) -> Index {
|
||||
let mut staged_api = false;
|
||||
for attr in &krate.attrs {
|
||||
if &attr.name()[] == "staged_api" {
|
||||
if &attr.name()[..] == "staged_api" {
|
||||
match attr.node.value.node {
|
||||
ast::MetaWord(_) => {
|
||||
attr::mark_used(attr);
|
||||
|
@ -639,7 +639,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
|
||||
(space={:?}, index={})",
|
||||
region_name.as_str(),
|
||||
self.root_ty.repr(self.tcx()),
|
||||
space, i)[]);
|
||||
space, i));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -696,7 +696,7 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> {
|
||||
p.space,
|
||||
p.idx,
|
||||
self.root_ty.repr(self.tcx()),
|
||||
self.substs.repr(self.tcx()))[]);
|
||||
self.substs.repr(self.tcx())));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -198,7 +198,7 @@ fn ty_is_local_constructor<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
|
||||
ty::ty_err => {
|
||||
tcx.sess.bug(
|
||||
&format!("ty_is_local invoked on unexpected type: {}",
|
||||
ty.repr(tcx))[])
|
||||
ty.repr(tcx)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -422,5 +422,5 @@ pub fn suggest_new_overflow_limit(tcx: &ty::ctxt, span: Span) {
|
||||
span,
|
||||
&format!(
|
||||
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
|
||||
suggested_limit)[]);
|
||||
suggested_limit));
|
||||
}
|
||||
|
@ -227,7 +227,7 @@ impl<'tcx> FulfillmentContext<'tcx> {
|
||||
}
|
||||
|
||||
pub fn pending_obligations(&self) -> &[PredicateObligation<'tcx>] {
|
||||
&self.predicates[]
|
||||
&self.predicates
|
||||
}
|
||||
|
||||
/// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it
|
||||
|
@ -1575,7 +1575,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
self.tcx().sess.bug(
|
||||
&format!(
|
||||
"asked to assemble builtin bounds of unexpected type: {}",
|
||||
self_ty.repr(self.tcx()))[]);
|
||||
self_ty.repr(self.tcx())));
|
||||
}
|
||||
};
|
||||
|
||||
@ -1727,7 +1727,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
self.tcx().sess.span_bug(
|
||||
obligation.cause.span,
|
||||
&format!("builtin bound for {} was ambig",
|
||||
obligation.repr(self.tcx()))[]);
|
||||
obligation.repr(self.tcx())));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1995,7 +1995,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
self.tcx().sess.bug(
&format!("Impl {} was matchable against {} but now is not",
impl_def_id.repr(self.tcx()),
obligation.repr(self.tcx()))[]);
obligation.repr(self.tcx())));
}
}
}

@ -2295,7 +2295,7 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> {
_ => {
cx.sess.bug(&format!("ParameterEnvironment::from_item(): \
`{}` is not an item",
cx.map.node_to_string(id))[])
cx.map.node_to_string(id)))
}
}
}

@ -2737,7 +2737,7 @@ impl FlagComputation {
fn add_fn_sig(&mut self, fn_sig: &PolyFnSig) {
let mut computation = FlagComputation::new();

computation.add_tys(&fn_sig.0.inputs[]);
computation.add_tys(&fn_sig.0.inputs);

if let ty::FnConverging(output) = fn_sig.0.output {
computation.add_ty(output);

@ -3177,7 +3177,7 @@ pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
ty_str => mk_mach_uint(cx, ast::TyU8),
ty_open(ty) => sequence_element_type(cx, ty),
_ => cx.sess.bug(&format!("sequence_element_type called on non-sequence value: {}",
ty_to_string(cx, ty))[]),
ty_to_string(cx, ty))),
}
}

@ -3538,7 +3538,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
let variants = substd_enum_variants(cx, did, substs);
let mut res =
TypeContents::union(&variants[..], |variant| {
TypeContents::union(&variant.args[],
TypeContents::union(&variant.args,
|arg_ty| {
tc_ty(cx, *arg_ty, cache)
})

@ -4121,7 +4121,7 @@ pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
match ty.sty {
ty_open(ty) => mk_rptr(cx, cx.mk_region(ReStatic), mt {ty: ty, mutbl:ast::MutImmutable}),
_ => cx.sess.bug(&format!("Trying to close a non-open type {}",
ty_to_string(cx, ty))[])
ty_to_string(cx, ty)))
}
}

@ -4222,7 +4222,7 @@ pub fn node_id_to_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId)
Some(ty) => ty.clone(),
None => cx.sess.bug(
&format!("node_id_to_trait_ref: no trait ref for node `{}`",
cx.map.node_to_string(id))[])
cx.map.node_to_string(id)))
}
}

@ -4231,7 +4231,7 @@ pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> {
Some(ty) => ty,
None => cx.sess.bug(
&format!("node_id_to_type: no type for node `{}`",
cx.map.node_to_string(id))[])
cx.map.node_to_string(id)))
}
}

@ -4305,7 +4305,7 @@ pub fn ty_region(tcx: &ctxt,
tcx.sess.span_bug(
span,
&format!("ty_region() invoked on an inappropriate ty: {:?}",
s)[]);
s));
}
}
}

@ -4370,11 +4370,11 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span {
Some(f) => {
cx.sess.bug(&format!("Node id {} is not an expr: {:?}",
id,
f)[]);
f));
}
None => {
cx.sess.bug(&format!("Node id {} is not present \
in the node map", id)[]);
in the node map", id));
}
}
}

@ -4390,14 +4390,14 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString {
cx.sess.bug(
&format!("Variable id {} maps to {:?}, not local",
id,
pat)[]);
pat));
}
}
}
r => {
cx.sess.bug(&format!("Variable id {} maps to {:?}, not local",
id,
r)[]);
r));
}
}
}

@ -4428,7 +4428,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
cx.sess.bug(
&format!("AdjustReifyFnPointer adjustment on non-fn-item: \
{:?}",
b)[]);
b));
}
}
}

@ -4459,7 +4459,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
{}",
i,
ty_to_string(cx, adjusted_ty))
[]);
);
}
}
}

@ -4522,7 +4522,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>,
}
_ => cx.sess.span_bug(span,
&format!("UnsizeLength with bad sty: {:?}",
ty_to_string(cx, ty))[])
ty_to_string(cx, ty)))
},
&UnsizeStruct(box ref k, tp_index) => match ty.sty {
ty_struct(did, substs) => {

@ -4534,7 +4534,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>,
}
_ => cx.sess.span_bug(span,
&format!("UnsizeStruct with bad sty: {:?}",
ty_to_string(cx, ty))[])
ty_to_string(cx, ty)))
},
&UnsizeVtable(TyTrait { ref principal, ref bounds }, _) => {
mk_trait(cx, principal.clone(), bounds.clone())

@ -4547,7 +4547,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def {
Some(&def) => def,
None => {
tcx.sess.span_bug(expr.span, &format!(
"no def-map entry for expr {}", expr.id)[]);
"no def-map entry for expr {}", expr.id));
}
}
}

@ -4639,7 +4639,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
expr.span,
&format!("uncategorized def for expr {}: {:?}",
expr.id,
def)[]);
def));
}
}
}

@ -4767,7 +4767,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
token::get_name(name),
fields.iter()
.map(|f| token::get_name(f.name).to_string())
.collect::<Vec<String>>())[]);
.collect::<Vec<String>>()));
}

pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem])

@ -5019,14 +5019,14 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
_ => {
cx.sess.bug(&format!("provided_trait_methods: `{:?}` is \
not a trait",
id)[])
id))
}
}
}
_ => {
cx.sess.bug(&format!("provided_trait_methods: `{:?}` is not a \
trait",
id)[])
id))
}
}
} else {

@ -5262,7 +5262,7 @@ impl<'tcx> VariantInfo<'tcx> {
};
},
ast::StructVariantKind(ref struct_def) => {
let fields: &[StructField] = &struct_def.fields[];
let fields: &[StructField] = &struct_def.fields;

assert!(fields.len() > 0);

@ -5624,7 +5624,7 @@ pub fn get_attrs<'tcx>(tcx: &'tcx ctxt, did: DefId)
-> CowVec<'tcx, ast::Attribute> {
if is_local(did) {
let item = tcx.map.expect_item(did.node);
Cow::Borrowed(&item.attrs[])
Cow::Borrowed(&item.attrs)
} else {
Cow::Owned(csearch::get_item_attrs(&tcx.sess.cstore, did))
}

@ -5686,7 +5686,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> {
_ => {
cx.sess.bug(
&format!("ID not mapped to struct fields: {}",
cx.map.node_to_string(did.node))[]);
cx.map.node_to_string(did.node)));
}
}
} else {

@ -5719,7 +5719,7 @@ pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tc
pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec<field<'tcx>> {
v.iter().enumerate().map(|(i, &f)| {
field {
name: token::intern(&i.to_string()[]),
name: token::intern(&i.to_string()),
mt: mt {
ty: f,
mutbl: MutImmutable
@ -311,19 +311,19 @@ macro_rules! options {
match (value, opt_type_desc) {
(Some(..), None) => {
early_error(&format!("{} option `{}` takes no \
value", $outputname, key)[])
value", $outputname, key))
}
(None, Some(type_desc)) => {
early_error(&format!("{0} option `{1}` requires \
{2} ({3} {1}=<value>)",
$outputname, key,
type_desc, $prefix)[])
type_desc, $prefix))
}
(Some(value), Some(type_desc)) => {
early_error(&format!("incorrect value `{}` for {} \
option `{}` - {} was expected",
value, $outputname,
key, type_desc)[])
key, type_desc))
}
(None, None) => unreachable!()
}

@ -333,7 +333,7 @@ macro_rules! options {
}
if !found {
early_error(&format!("unknown {} option: `{}`",
$outputname, key)[]);
$outputname, key));
}
}
return op;

@ -590,10 +590,10 @@ pub fn default_lib_output() -> CrateType {
pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
use syntax::parse::token::intern_and_get_ident as intern;

let end = &sess.target.target.target_endian[];
let arch = &sess.target.target.arch[];
let wordsz = &sess.target.target.target_pointer_width[];
let os = &sess.target.target.target_os[];
let end = &sess.target.target.target_endian;
let arch = &sess.target.target.arch;
let wordsz = &sess.target.target.target_pointer_width;
let os = &sess.target.target.target_os;

let fam = match sess.target.target.options.is_like_windows {
true => InternedString::new("windows"),

@ -634,18 +634,18 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig {
}

pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config {
let target = match Target::search(&opts.target_triple[]) {
let target = match Target::search(&opts.target_triple) {
Ok(t) => t,
Err(e) => {
sp.handler().fatal(&format!("Error loading target specification: {}", e));
}
};

let (int_type, uint_type) = match &target.target_pointer_width[] {
let (int_type, uint_type) = match &target.target_pointer_width[..] {
"32" => (ast::TyI32, ast::TyU32),
"64" => (ast::TyI64, ast::TyU64),
w => sp.handler().fatal(&format!("target specification was invalid: unrecognized \
target-pointer-width {}", w)[])
target-pointer-width {}", w))
};

Config {

@ -863,7 +863,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
"dep-info" => OutputTypeDepInfo,
_ => {
early_error(&format!("unknown emission type: `{}`",
part)[])
part))
}
};
output_types.push(output_type)

@ -955,7 +955,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
(_, s) => {
early_error(&format!("unknown library kind `{}`, expected \
one of dylib, framework, or static",
s)[]);
s));
}
};
(name.to_string(), kind)

@ -991,7 +991,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
Some(arg) => {
early_error(&format!("argument for --color must be auto, always \
or never (instead was `{}`)",
arg)[])
arg))
}
};

@ -1111,7 +1111,7 @@ mod test {
#[test]
fn test_switch_implies_cfg_test() {
let matches =
&match getopts(&["--test".to_string()], &optgroups()[]) {
&match getopts(&["--test".to_string()], &optgroups()) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test: {}", f)
};

@ -1128,7 +1128,7 @@ mod test {
fn test_switch_implies_cfg_test_unless_cfg_test() {
let matches =
&match getopts(&["--test".to_string(), "--cfg=test".to_string()],
&optgroups()[]) {
&optgroups()) {
Ok(m) => m,
Err(f) => {
panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f)

@ -1148,7 +1148,7 @@ mod test {
{
let matches = getopts(&[
"-Awarnings".to_string()
], &optgroups()[]).unwrap();
], &optgroups()).unwrap();
let registry = diagnostics::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, None, registry);

@ -1159,7 +1159,7 @@ mod test {
let matches = getopts(&[
"-Awarnings".to_string(),
"-Dwarnings".to_string()
], &optgroups()[]).unwrap();
], &optgroups()).unwrap();
let registry = diagnostics::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, None, registry);

@ -1169,7 +1169,7 @@ mod test {
{
let matches = getopts(&[
"-Adead_code".to_string()
], &optgroups()[]).unwrap();
], &optgroups()).unwrap();
let registry = diagnostics::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, None, registry);
@ -186,7 +186,7 @@ impl Session {
// cases later on
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp,
&format!("impossible case reached: {}", msg)[]);
&format!("impossible case reached: {}", msg));
}
pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose }
pub fn time_passes(&self) -> bool { self.opts.debugging_opts.time_passes }

@ -228,7 +228,7 @@ impl Session {
}
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(self.sysroot(),
&self.opts.target_triple[],
&self.opts.target_triple,
&self.opts.search_paths,
kind)
}

@ -58,12 +58,12 @@ pub fn note_and_explain_region(cx: &ctxt,
(ref str, Some(span)) => {
cx.sess.span_note(
span,
&format!("{}{}{}", prefix, *str, suffix)[]);
&format!("{}{}{}", prefix, *str, suffix));
Some(span)
}
(ref str, None) => {
cx.sess.note(
&format!("{}{}{}", prefix, *str, suffix)[]);
&format!("{}{}{}", prefix, *str, suffix));
None
}
}

@ -274,7 +274,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
};

if abi != abi::Rust {
s.push_str(&format!("extern {} ", abi.to_string())[]);
s.push_str(&format!("extern {} ", abi.to_string()));
};

s.push_str("fn");

@ -330,7 +330,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
ty::FnConverging(t) => {
if !ty::type_is_nil(t) {
s.push_str(" -> ");
s.push_str(&ty_to_string(cx, t)[]);
s.push_str(&ty_to_string(cx, t));
}
}
ty::FnDiverging => {

@ -367,7 +367,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
}
ty_rptr(r, ref tm) => {
let mut buf = region_ptr_to_string(cx, *r);
buf.push_str(&mt_to_string(cx, tm)[]);
buf.push_str(&mt_to_string(cx, tm));
buf
}
ty_open(typ) =>

@ -561,7 +561,7 @@ pub fn parameterized<'tcx,GG>(cx: &ctxt<'tcx>,
} else if strs[0].starts_with("(") && strs[0].ends_with(")") {
&strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')'
} else {
&strs[0][]
&strs[0][..]
},
tail)
} else if strs.len() > 0 {
@ -73,19 +73,19 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
Ok(prog) => {
let o = prog.wait_with_output().unwrap();
if !o.status.success() {
handler.err(&format!("{:?} failed with: {}", cmd, o.status)[]);
handler.err(&format!("{:?} failed with: {}", cmd, o.status));
handler.note(&format!("stdout ---\n{}",
str::from_utf8(&o.output[]).unwrap())[]);
str::from_utf8(&o.output).unwrap()));
handler.note(&format!("stderr ---\n{}",
str::from_utf8(&o.error[]).unwrap())
[]);
str::from_utf8(&o.error).unwrap())
);
handler.abort_if_errors();
}
o
},
Err(e) => {
handler.err(&format!("could not exec `{}`: {}", &ar[..],
e)[]);
e));
handler.abort_if_errors();
panic!("rustc::back::archive::run_ar() should not reach this point");
}

@ -110,7 +110,7 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str,
}
handler.fatal(&format!("could not find native static library `{}`, \
perhaps an -L flag is missing?",
name)[]);
name));
}

impl<'a> Archive<'a> {

@ -142,7 +142,7 @@ impl<'a> Archive<'a> {
/// Lists all files in an archive
pub fn files(&self) -> Vec<String> {
let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]);
let output = str::from_utf8(&output.output[]).unwrap();
let output = str::from_utf8(&output.output).unwrap();
// use lines_any because windows delimits output with `\r\n` instead of
// just `\n`
output.lines_any().map(|s| s.to_string()).collect()

@ -174,9 +174,9 @@ impl<'a> ArchiveBuilder<'a> {
/// search in the relevant locations for a library named `name`.
pub fn add_native_library(&mut self, name: &str) -> old_io::IoResult<()> {
let location = find_library(name,
&self.archive.slib_prefix[],
&self.archive.slib_suffix[],
&self.archive.lib_search_paths[],
&self.archive.slib_prefix,
&self.archive.slib_suffix,
&self.archive.lib_search_paths,
self.archive.handler);
self.add_archive(&location, name, |_| false)
}

@ -49,7 +49,7 @@ pub fn get_rpath_flags<F, G>(config: RPathConfig<F, G>) -> Vec<String> where
fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
let mut ret = Vec::new();
for rpath in rpaths {
ret.push(format!("-Wl,-rpath,{}", &(*rpath)[]));
ret.push(format!("-Wl,-rpath,{}", &(*rpath)));
}
return ret;
}

@ -64,7 +64,7 @@ impl Svh {
}

pub fn as_str<'a>(&'a self) -> &'a str {
&self.hash[]
&self.hash
}

pub fn calculate(metadata: &Vec<String>, krate: &ast::Crate) -> Svh {

@ -329,7 +329,7 @@ mod svh_visitor {
// macro invocations, namely macro_rules definitions,
// *can* appear as items, even in the expanded crate AST.

if &macro_name(mac)[] == "macro_rules" {
if &macro_name(mac)[..] == "macro_rules" {
// Pretty-printing definition to a string strips out
// surface artifacts (currently), such as the span
// information, yielding a content-based hash.

@ -356,7 +356,7 @@ mod svh_visitor {
fn macro_name(mac: &Mac) -> token::InternedString {
match &mac.node {
&MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => {
let s = &path.segments[];
let s = &path.segments;
assert_eq!(s.len(), 1);
content(s[0].identifier)
}

@ -239,7 +239,7 @@ impl Target {
.and_then(|os| os.map(|s| s.to_string())) {
Some(val) => val,
None =>
handler.fatal(&format!("Field {} in target specification is required", name)[])
handler.fatal(&format!("Field {} in target specification is required", name))
}
};
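The hunks above all apply the same mechanical rewrite: the deprecated full-range index sugar `&expr[]` becomes either an explicit full-range slice `&expr[..]` or a plain borrow `&expr` that relies on deref coercion. A minimal, self-contained Rust sketch of the two replacement forms (hypothetical illustration, not part of this commit):

// Both callees want borrowed slices; the owners below are a String and a Vec.
fn takes_str(_: &str) {}
fn takes_slice(_: &[u32]) {}

fn main() {
    let s = String::from("hello");
    let v = vec![1u32, 2, 3];
    // Formerly written `takes_str(&s[])` and `takes_slice(&v[])`.
    takes_str(&s[..]);   // explicit full-range slice
    takes_str(&s);       // deref coercion (String -> str) does the same
    takes_slice(&v[..]);
    takes_slice(&v);     // deref coercion (Vec<u32> -> [u32])
}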
|
||||
|
@ -466,7 +466,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
new_loan.span,
|
||||
&format!("cannot borrow `{}`{} as mutable \
|
||||
more than once at a time",
|
||||
nl, new_loan_msg)[])
|
||||
nl, new_loan_msg))
|
||||
}
|
||||
|
||||
(ty::UniqueImmBorrow, _) => {
|
||||
@ -474,7 +474,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
new_loan.span,
|
||||
&format!("closure requires unique access to `{}` \
|
||||
but {} is already borrowed{}",
|
||||
nl, ol_pronoun, old_loan_msg)[]);
|
||||
nl, ol_pronoun, old_loan_msg));
|
||||
}
|
||||
|
||||
(_, ty::UniqueImmBorrow) => {
|
||||
@ -482,7 +482,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
new_loan.span,
|
||||
&format!("cannot borrow `{}`{} as {} because \
|
||||
previous closure requires unique access",
|
||||
nl, new_loan_msg, new_loan.kind.to_user_str())[]);
|
||||
nl, new_loan_msg, new_loan.kind.to_user_str()));
|
||||
}
|
||||
|
||||
(_, _) => {
|
||||
@ -495,7 +495,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
new_loan.kind.to_user_str(),
|
||||
ol_pronoun,
|
||||
old_loan.kind.to_user_str(),
|
||||
old_loan_msg)[]);
|
||||
old_loan_msg));
|
||||
}
|
||||
}
|
||||
|
||||
@ -504,7 +504,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
self.bccx.span_note(
|
||||
span,
|
||||
&format!("borrow occurs due to use of `{}` in closure",
|
||||
nl)[]);
|
||||
nl));
|
||||
}
|
||||
_ => { }
|
||||
}
|
||||
@ -553,7 +553,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
|
||||
self.bccx.span_note(
|
||||
old_loan.span,
|
||||
&format!("{}; {}", borrow_summary, rule_summary)[]);
|
||||
&format!("{}; {}", borrow_summary, rule_summary));
|
||||
|
||||
let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id());
|
||||
self.bccx.span_end_note(old_loan_span,
|
||||
@ -623,13 +623,13 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
self.bccx.span_err(
|
||||
span,
|
||||
&format!("cannot use `{}` because it was mutably borrowed",
|
||||
&self.bccx.loan_path_to_string(copy_path)[])
|
||||
[]);
|
||||
&self.bccx.loan_path_to_string(copy_path))
|
||||
);
|
||||
self.bccx.span_note(
|
||||
loan_span,
|
||||
&format!("borrow of `{}` occurs here",
|
||||
&self.bccx.loan_path_to_string(&*loan_path)[])
|
||||
[]);
|
||||
&self.bccx.loan_path_to_string(&*loan_path))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -648,20 +648,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
let err_message = match move_kind {
|
||||
move_data::Captured =>
|
||||
format!("cannot move `{}` into closure because it is borrowed",
|
||||
&self.bccx.loan_path_to_string(move_path)[]),
|
||||
&self.bccx.loan_path_to_string(move_path)),
|
||||
move_data::Declared |
|
||||
move_data::MoveExpr |
|
||||
move_data::MovePat =>
|
||||
format!("cannot move out of `{}` because it is borrowed",
|
||||
&self.bccx.loan_path_to_string(move_path)[])
|
||||
&self.bccx.loan_path_to_string(move_path))
|
||||
};
|
||||
|
||||
self.bccx.span_err(span, &err_message[..]);
|
||||
self.bccx.span_note(
|
||||
loan_span,
|
||||
&format!("borrow of `{}` occurs here",
|
||||
&self.bccx.loan_path_to_string(&*loan_path)[])
|
||||
[]);
|
||||
&self.bccx.loan_path_to_string(&*loan_path))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -842,7 +842,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
self.bccx.span_err(
|
||||
assignment_span,
|
||||
&format!("cannot assign to {}",
|
||||
self.bccx.cmt_to_string(&*assignee_cmt))[]);
|
||||
self.bccx.cmt_to_string(&*assignee_cmt)));
|
||||
self.bccx.span_help(
|
||||
self.tcx().map.span(upvar_id.closure_expr_id),
|
||||
"consider changing this closure to take self by mutable reference");
|
||||
@ -851,7 +851,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
assignment_span,
|
||||
&format!("cannot assign to {} {}",
|
||||
assignee_cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_string(&*assignee_cmt))[]);
|
||||
self.bccx.cmt_to_string(&*assignee_cmt)));
|
||||
}
|
||||
}
|
||||
_ => match opt_loan_path(&assignee_cmt) {
|
||||
@ -861,14 +861,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
&format!("cannot assign to {} {} `{}`",
|
||||
assignee_cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_string(&*assignee_cmt),
|
||||
self.bccx.loan_path_to_string(&*lp))[]);
|
||||
self.bccx.loan_path_to_string(&*lp)));
|
||||
}
|
||||
None => {
|
||||
self.bccx.span_err(
|
||||
assignment_span,
|
||||
&format!("cannot assign to {} {}",
|
||||
assignee_cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_string(&*assignee_cmt))[]);
|
||||
self.bccx.cmt_to_string(&*assignee_cmt)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -988,10 +988,10 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
self.bccx.span_err(
|
||||
span,
|
||||
&format!("cannot assign to `{}` because it is borrowed",
|
||||
self.bccx.loan_path_to_string(loan_path))[]);
|
||||
self.bccx.loan_path_to_string(loan_path)));
|
||||
self.bccx.span_note(
|
||||
loan.span,
|
||||
&format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_string(loan_path))[]);
|
||||
self.bccx.loan_path_to_string(loan_path)));
|
||||
}
|
||||
}
|
||||
|
@ -123,12 +123,12 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
|
||||
let attrs : &[ast::Attribute];
|
||||
attrs = match tcx.map.find(id) {
|
||||
Some(ast_map::NodeItem(ref item)) =>
|
||||
&item.attrs[],
|
||||
&item.attrs,
|
||||
Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) =>
|
||||
&m.attrs[],
|
||||
&m.attrs,
|
||||
Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) =>
|
||||
&m.attrs[],
|
||||
_ => &[][],
|
||||
&m.attrs,
|
||||
_ => &[],
|
||||
};
|
||||
|
||||
let span_err =
|
||||
@ -144,7 +144,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
|
||||
for (i, mpi) in vec_rc.iter().enumerate() {
|
||||
let render = || this.path_loan_path(*mpi).user_string(tcx);
|
||||
if span_err {
|
||||
tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]);
|
||||
tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render()));
|
||||
}
|
||||
if print {
|
||||
println!("id:{} {}[{}] `{}`", id, kind, i, render());
|
||||
@ -156,7 +156,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
|
||||
for (i, f) in vec_rc.iter().enumerate() {
|
||||
let render = || f.loan_path_user_string(this, tcx);
|
||||
if span_err {
|
||||
tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]);
|
||||
tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render()));
|
||||
}
|
||||
if print {
|
||||
println!("id:{} {}[{}] `{}`", id, kind, i, render());
|
||||
|
@ -307,7 +307,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> {
|
||||
self.tcx().sess.span_bug(
|
||||
cmt.span,
|
||||
&format!("invalid borrow lifetime: {:?}",
|
||||
loan_region)[]);
|
||||
loan_region));
|
||||
}
|
||||
};
|
||||
debug!("loan_scope = {:?}", loan_scope);
|
||||
|
@ -121,7 +121,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
mc::cat_static_item => {
|
||||
bccx.span_err(move_from.span,
|
||||
&format!("cannot move out of {}",
|
||||
move_from.descriptive_string(bccx.tcx))[]);
|
||||
move_from.descriptive_string(bccx.tcx)));
|
||||
}
|
||||
|
||||
mc::cat_interior(ref b, mc::InteriorElement(Kind::Index, _)) => {
|
||||
@ -130,7 +130,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
bccx.span_err(move_from.span,
|
||||
&format!("cannot move out of type `{}`, \
|
||||
a non-copy fixed-size array",
|
||||
b.ty.user_string(bccx.tcx))[]);
|
||||
b.ty.user_string(bccx.tcx)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -143,7 +143,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
move_from.span,
|
||||
&format!("cannot move out of type `{}`, \
|
||||
which defines the `Drop` trait",
|
||||
b.ty.user_string(bccx.tcx))[]);
|
||||
b.ty.user_string(bccx.tcx)));
|
||||
},
|
||||
_ => {
|
||||
bccx.span_bug(move_from.span, "this path should not cause illegal move")
|
||||
@ -170,10 +170,10 @@ fn note_move_destination(bccx: &BorrowckCtxt,
|
||||
&format!("to prevent the move, \
|
||||
use `ref {0}` or `ref mut {0}` to capture value by \
|
||||
reference",
|
||||
pat_name)[]);
|
||||
pat_name));
|
||||
} else {
|
||||
bccx.span_note(move_to_span,
|
||||
&format!("and here (use `ref {0}` or `ref mut {0}`)",
|
||||
pat_name)[]);
|
||||
pat_name));
|
||||
}
|
||||
}
|
||||
|
@ -524,7 +524,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
pub fn report(&self, err: BckError<'tcx>) {
|
||||
self.span_err(
|
||||
err.span,
|
||||
&self.bckerr_to_string(&err)[]);
|
||||
&self.bckerr_to_string(&err));
|
||||
self.note_and_explain_bckerr(err);
|
||||
}
|
||||
|
||||
@ -546,7 +546,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
use_span,
|
||||
&format!("{} of possibly uninitialized variable: `{}`",
|
||||
verb,
|
||||
self.loan_path_to_string(lp))[]);
|
||||
self.loan_path_to_string(lp)));
|
||||
(self.loan_path_to_string(moved_lp),
|
||||
String::new())
|
||||
}
|
||||
@ -588,7 +588,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
&format!("{} of {}moved value: `{}`",
|
||||
verb,
|
||||
msg,
|
||||
nl)[]);
|
||||
nl));
|
||||
(ol, moved_lp_msg)
|
||||
}
|
||||
};
|
||||
@ -607,7 +607,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
self.tcx.sess.bug(&format!("MoveExpr({}) maps to \
|
||||
{:?}, not Expr",
|
||||
the_move.id,
|
||||
r)[])
|
||||
r))
|
||||
}
|
||||
};
|
||||
let (suggestion, _) =
|
||||
@ -618,7 +618,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
ol,
|
||||
moved_lp_msg,
|
||||
expr_ty.user_string(self.tcx),
|
||||
suggestion)[]);
|
||||
suggestion));
|
||||
}
|
||||
|
||||
move_data::MovePat => {
|
||||
@ -629,7 +629,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
which is moved by default",
|
||||
ol,
|
||||
moved_lp_msg,
|
||||
pat_ty.user_string(self.tcx))[]);
|
||||
pat_ty.user_string(self.tcx)));
|
||||
self.tcx.sess.span_help(span,
|
||||
"use `ref` to override");
|
||||
}
|
||||
@ -645,7 +645,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
self.tcx.sess.bug(&format!("Captured({}) maps to \
|
||||
{:?}, not Expr",
|
||||
the_move.id,
|
||||
r)[])
|
||||
r))
|
||||
}
|
||||
};
|
||||
let (suggestion, help) =
|
||||
@ -661,7 +661,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
ol,
|
||||
moved_lp_msg,
|
||||
expr_ty.user_string(self.tcx),
|
||||
suggestion)[]);
|
||||
suggestion));
|
||||
self.tcx.sess.span_help(expr_span, help);
|
||||
}
|
||||
}
|
||||
@ -704,7 +704,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
&format!("re-assignment of immutable variable `{}`",
|
||||
self.loan_path_to_string(lp))[]);
|
||||
self.loan_path_to_string(lp)));
|
||||
self.tcx.sess.span_note(assign.span, "prior assignment occurs here");
|
||||
}
|
||||
|
||||
@ -825,7 +825,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
&format!("{} in an aliasable location",
|
||||
prefix)[]);
|
||||
prefix));
|
||||
}
|
||||
mc::AliasableClosure(id) => {
|
||||
self.tcx.sess.span_err(span,
|
||||
@ -847,12 +847,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
mc::AliasableStaticMut(..) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
&format!("{} in a static location", prefix)[]);
|
||||
&format!("{} in a static location", prefix));
|
||||
}
|
||||
mc::AliasableBorrowed => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
&format!("{} in a `&` reference", prefix)[]);
|
||||
&format!("{} in a `&` reference", prefix));
|
||||
}
|
||||
}
|
||||
|
||||
@ -920,12 +920,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
&format!("{} would have to be valid for ",
|
||||
descr)[],
|
||||
descr),
|
||||
loan_scope,
|
||||
"...");
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
&format!("...but {} is only valid for ", descr)[],
|
||||
&format!("...but {} is only valid for ", descr),
|
||||
ptr_scope,
|
||||
"");
|
||||
}
|
||||
@ -945,7 +945,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
out.push('(');
|
||||
self.append_loan_path_to_string(&**lp_base, out);
|
||||
out.push_str(DOWNCAST_PRINTED_OPERATOR);
|
||||
out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]);
|
||||
out.push_str(&ty::item_path_str(self.tcx, variant_def_id));
|
||||
out.push(')');
|
||||
}
|
||||
|
||||
@ -959,7 +959,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
}
|
||||
mc::PositionalField(idx) => {
|
||||
out.push('.');
|
||||
out.push_str(&idx.to_string()[]);
|
||||
out.push_str(&idx.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -991,7 +991,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
out.push('(');
|
||||
self.append_autoderefd_loan_path_to_string(&**lp_base, out);
|
||||
out.push(':');
|
||||
out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]);
|
||||
out.push_str(&ty::item_path_str(self.tcx, variant_def_id));
|
||||
out.push(')');
|
||||
}
|
||||
|
||||
|
@ -60,7 +60,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
|
||||
if seen_one { sets.push_str(" "); } else { seen_one = true; }
|
||||
sets.push_str(variant.short_name());
|
||||
sets.push_str(": ");
|
||||
sets.push_str(&self.dataflow_for_variant(e, n, variant)[]);
|
||||
sets.push_str(&self.dataflow_for_variant(e, n, variant));
|
||||
}
|
||||
sets
|
||||
}
|
||||
|
@ -77,10 +77,10 @@ pub fn compile_input(sess: Session,
|
||||
let outputs = build_output_filenames(input,
|
||||
outdir,
|
||||
output,
|
||||
&krate.attrs[],
|
||||
&krate.attrs,
|
||||
&sess);
|
||||
let id = link::find_crate_name(Some(&sess),
|
||||
&krate.attrs[],
|
||||
&krate.attrs,
|
||||
input);
|
||||
let expanded_crate
|
||||
= match phase_2_configure_and_expand(&sess,
|
||||
@ -378,9 +378,9 @@ pub fn phase_2_configure_and_expand(sess: &Session,
|
||||
let time_passes = sess.time_passes();
|
||||
|
||||
*sess.crate_types.borrow_mut() =
|
||||
collect_crate_types(sess, &krate.attrs[]);
|
||||
collect_crate_types(sess, &krate.attrs);
|
||||
*sess.crate_metadata.borrow_mut() =
|
||||
collect_crate_metadata(sess, &krate.attrs[]);
|
||||
collect_crate_metadata(sess, &krate.attrs);
|
||||
|
||||
time(time_passes, "recursion limit", (), |_| {
|
||||
middle::recursion_limit::update_recursion_limit(sess, &krate);
|
||||
@ -724,7 +724,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
|
||||
time(sess.time_passes(), "LLVM passes", (), |_|
|
||||
write::run_passes(sess,
|
||||
trans,
|
||||
&sess.opts.output_types[],
|
||||
&sess.opts.output_types,
|
||||
outputs));
|
||||
}
|
||||
|
||||
@ -745,7 +745,7 @@ pub fn phase_6_link_output(sess: &Session,
|
||||
link::link_binary(sess,
|
||||
trans,
|
||||
outputs,
|
||||
&trans.link.crate_name[]));
|
||||
&trans.link.crate_name));
|
||||
|
||||
env::set_var("PATH", &old_path);
|
||||
}
|
||||
@ -799,7 +799,7 @@ fn write_out_deps(sess: &Session,
|
||||
// write Makefile-compatible dependency rules
|
||||
let files: Vec<String> = sess.codemap().files.borrow()
|
||||
.iter().filter(|fmap| fmap.is_real_file())
|
||||
.map(|fmap| escape_dep_filename(&fmap.name[]))
|
||||
.map(|fmap| escape_dep_filename(&fmap.name))
|
||||
.collect();
|
||||
let mut file = try!(old_io::File::create(&deps_filename));
|
||||
for path in &out_filenames {
|
||||
@ -813,7 +813,7 @@ fn write_out_deps(sess: &Session,
|
||||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
sess.fatal(&format!("error writing dependencies to `{}`: {}",
|
||||
deps_filename.display(), e)[]);
|
||||
deps_filename.display(), e));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -884,7 +884,7 @@ pub fn collect_crate_types(session: &Session,
|
||||
if !res {
|
||||
session.warn(&format!("dropping unsupported crate type `{}` \
|
||||
for target `{}`",
|
||||
*crate_type, session.opts.target_triple)[]);
|
||||
*crate_type, session.opts.target_triple));
|
||||
}
|
||||
|
||||
res
|
||||
|
@ -124,7 +124,7 @@ pub fn run_compiler<'a>(args: &[String],
|
||||
let sopts = config::build_session_options(&matches);
|
||||
|
||||
let (odir, ofile) = make_output(&matches);
|
||||
let (input, input_file_path) = match make_input(&matches.free[]) {
|
||||
let (input, input_file_path) = match make_input(&matches.free) {
|
||||
Some((input, input_file_path)) => callbacks.some_input(input, input_file_path),
|
||||
None => match callbacks.no_input(&matches, &sopts, &odir, &ofile, &descriptions) {
|
||||
Some((input, input_file_path)) => (input, input_file_path),
|
||||
@ -166,7 +166,7 @@ fn make_output(matches: &getopts::Matches) -> (Option<Path>, Option<Path>) {
|
||||
// Extract input (string or file and optional path) from matches.
|
||||
fn make_input(free_matches: &[String]) -> Option<(Input, Option<Path>)> {
|
||||
if free_matches.len() == 1 {
|
||||
let ifile = &free_matches[0][];
|
||||
let ifile = &free_matches[0][..];
|
||||
if ifile == "-" {
|
||||
let contents = old_io::stdin().read_to_end().unwrap();
|
||||
let src = String::from_utf8(contents).unwrap();
|
||||
@ -277,7 +277,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
|
||||
println!("{}", description);
|
||||
}
|
||||
None => {
|
||||
early_error(&format!("no extended information for {}", code)[]);
|
||||
early_error(&format!("no extended information for {}", code));
|
||||
}
|
||||
}
|
||||
return Compilation::Stop;
|
||||
@ -680,7 +680,7 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
|
||||
}
|
||||
|
||||
let matches =
|
||||
match getopts::getopts(&args[..], &config::optgroups()[]) {
|
||||
match getopts::getopts(&args[..], &config::optgroups()) {
|
||||
Ok(m) => m,
|
||||
Err(f_stable_attempt) => {
|
||||
// redo option parsing, including unstable options this time,
|
||||
@ -813,7 +813,7 @@ pub fn monitor<F:FnOnce()+Send+'static>(f: F) {
|
||||
Err(e) => {
|
||||
emitter.emit(None,
|
||||
&format!("failed to read internal \
|
||||
stderr: {}", e)[],
|
||||
stderr: {}", e),
|
||||
None,
|
||||
diagnostic::Error)
|
||||
}
|
||||
|
@ -312,7 +312,7 @@ impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> {
|
||||
try!(pp::word(&mut s.s,
|
||||
&ppaux::ty_to_string(
|
||||
tcx,
|
||||
ty::expr_ty(tcx, expr))[]));
|
||||
ty::expr_ty(tcx, expr))));
|
||||
s.pclose()
|
||||
}
|
||||
_ => Ok(())
|
||||
@ -602,7 +602,7 @@ pub fn pretty_print_input(sess: Session,
|
||||
debug!("pretty printing flow graph for {:?}", opt_uii);
|
||||
let uii = opt_uii.unwrap_or_else(|| {
|
||||
sess.fatal(&format!("`pretty flowgraph=..` needs NodeId (int) or
|
||||
unique path suffix (b::c::d)")[])
|
||||
unique path suffix (b::c::d)"))
|
||||
|
||||
});
|
||||
let ast_map = ast_map.expect("--pretty flowgraph missing ast_map");
|
||||
@ -610,7 +610,7 @@ pub fn pretty_print_input(sess: Session,
|
||||
|
||||
let node = ast_map.find(nodeid).unwrap_or_else(|| {
|
||||
sess.fatal(&format!("--pretty flowgraph couldn't find id: {}",
|
||||
nodeid)[])
|
||||
nodeid))
|
||||
});
|
||||
|
||||
let code = blocks::Code::from_node(node);
|
||||
|
@ -712,7 +712,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
method_id,
|
||||
None,
|
||||
&format!("method `{}`",
|
||||
string)[]));
|
||||
string)));
|
||||
}
|
||||
|
||||
// Checks that a path is in scope.
|
||||
@ -727,7 +727,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
self.ensure_public(span,
|
||||
def,
|
||||
Some(origdid),
|
||||
&format!("{} `{}`", tyname, name)[])
|
||||
&format!("{} `{}`", tyname, name))
|
||||
};
|
||||
|
||||
match self.last_private_map[path_id] {
|
||||
|
@ -220,14 +220,14 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
|
||||
self.resolve_error(sp,
|
||||
&format!("duplicate definition of {} `{}`",
|
||||
namespace_error_to_string(duplicate_type),
|
||||
token::get_name(name))[]);
|
||||
token::get_name(name)));
|
||||
{
|
||||
let r = child.span_for_namespace(ns);
|
||||
if let Some(sp) = r {
|
||||
self.session.span_note(sp,
|
||||
&format!("first definition of {} `{}` here",
|
||||
namespace_error_to_string(duplicate_type),
|
||||
token::get_name(name))[]);
|
||||
token::get_name(name)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -307,8 +307,8 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
|
||||
ViewPathSimple(binding, ref full_path) => {
|
||||
let source_name =
|
||||
full_path.segments.last().unwrap().identifier.name;
|
||||
if &token::get_name(source_name)[] == "mod" ||
|
||||
&token::get_name(source_name)[] == "self" {
|
||||
if &token::get_name(source_name)[..] == "mod" ||
|
||||
&token::get_name(source_name)[..] == "self" {
|
||||
self.resolve_error(view_path.span,
|
||||
"`self` imports are only allowed within a { } list");
|
||||
}
|
||||
@ -1192,7 +1192,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
|
||||
debug!("(building import directive) building import \
|
||||
directive: {}::{}",
|
||||
self.names_to_string(&module_.imports.borrow().last().unwrap().
|
||||
module_path[]),
|
||||
module_path),
|
||||
token::get_name(target));
|
||||
|
||||
let mut import_resolutions = module_.import_resolutions
|
||||
|
@ -1068,7 +1068,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
};
|
||||
let msg = format!("unresolved import `{}`{}",
|
||||
self.import_path_to_string(
|
||||
&import_directive.module_path[],
|
||||
&import_directive.module_path,
|
||||
import_directive.subclass),
|
||||
help);
|
||||
self.resolve_error(span, &msg[..]);
|
||||
@ -2247,7 +2247,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
true) {
|
||||
Failed(Some((span, msg))) =>
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]),
|
||||
msg)),
|
||||
Failed(None) => (), // Continue up the search chain.
|
||||
Indeterminate => {
|
||||
// We couldn't see through the higher scope because of an
|
||||
@ -2603,7 +2603,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
match def_like {
|
||||
DlDef(d @ DefUpvar(..)) => {
|
||||
self.session.span_bug(span,
|
||||
&format!("unexpected {:?} in bindings", d)[])
|
||||
&format!("unexpected {:?} in bindings", d))
|
||||
}
|
||||
DlDef(d @ DefLocal(_)) => {
|
||||
let node_id = d.def_id().node;
|
||||
@ -2931,7 +2931,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
|
||||
self.resolve_struct(item.id,
|
||||
generics,
|
||||
&struct_def.fields[]);
|
||||
&struct_def.fields);
|
||||
}
|
||||
|
||||
ItemMod(ref module_) => {
|
||||
@ -3019,7 +3019,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
parameter in this type \
|
||||
parameter list",
|
||||
token::get_name(
|
||||
name))[])
|
||||
name)))
|
||||
}
|
||||
seen_bindings.insert(name);
|
||||
|
||||
@ -3204,14 +3204,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
self.resolve_error(trait_reference.path.span,
|
||||
&format!("`{}` is not a trait",
|
||||
self.path_names_to_string(
|
||||
&trait_reference.path))[]);
|
||||
&trait_reference.path)));
|
||||
|
||||
// If it's a typedef, give a note
|
||||
if let DefTy(..) = def {
|
||||
self.session.span_note(
|
||||
trait_reference.path.span,
|
||||
&format!("`type` aliases cannot be used for traits")
|
||||
[]);
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3408,7 +3408,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
self.resolve_error(span,
|
||||
&format!("method `{}` is not a member of trait `{}`",
|
||||
token::get_name(name),
|
||||
path_str)[]);
|
||||
path_str));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3477,7 +3477,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
&format!("variable `{}` from pattern #1 is \
|
||||
not bound in pattern #{}",
|
||||
token::get_name(key),
|
||||
i + 1)[]);
|
||||
i + 1));
|
||||
}
|
||||
Some(binding_i) => {
|
||||
if binding_0.binding_mode != binding_i.binding_mode {
|
||||
@ -3486,7 +3486,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
&format!("variable `{}` is bound with different \
|
||||
mode in pattern #{} than in pattern #1",
|
||||
token::get_name(key),
|
||||
i + 1)[]);
|
||||
i + 1));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3499,7 +3499,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
&format!("variable `{}` from pattern {}{} is \
|
||||
not bound in pattern {}1",
|
||||
token::get_name(key),
|
||||
"#", i + 1, "#")[]);
|
||||
"#", i + 1, "#"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3698,7 +3698,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
&format!("declaration of `{}` shadows an enum \
|
||||
variant or unit-like struct in \
|
||||
scope",
|
||||
token::get_name(renamed))[]);
|
||||
token::get_name(renamed)));
|
||||
}
|
||||
FoundConst(ref def, lp) if mode == RefutableMode => {
|
||||
debug!("(resolving pattern) resolving `{}` to \
|
||||
@ -3750,7 +3750,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
list",
|
||||
token::get_ident(
|
||||
ident))
|
||||
[])
|
||||
)
|
||||
} else if bindings_list.get(&renamed) ==
|
||||
Some(&pat_id) {
|
||||
// Then this is a duplicate variable in the
|
||||
@ -3759,7 +3759,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
&format!("identifier `{}` is bound \
|
||||
more than once in the same \
|
||||
pattern",
|
||||
token::get_ident(ident))[]);
|
||||
token::get_ident(ident)));
|
||||
}
|
||||
// Else, not bound in the same pattern: do
|
||||
// nothing.
|
||||
@ -3883,7 +3883,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
match err {
|
||||
Some((span, msg)) => {
|
||||
self.resolve_error(span, &format!("failed to resolve: {}",
|
||||
msg)[]);
|
||||
msg));
|
||||
}
|
||||
None => ()
|
||||
}
|
||||
@ -4093,7 +4093,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
};
|
||||
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]);
|
||||
msg));
|
||||
return None;
|
||||
}
|
||||
Indeterminate => panic!("indeterminate unexpected"),
|
||||
@ -4152,7 +4152,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
};
|
||||
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]);
|
||||
msg));
|
||||
return None;
|
||||
}
|
||||
|
||||
@ -4193,7 +4193,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
}
|
||||
TypeNS => {
|
||||
let name = ident.name;
|
||||
self.search_ribs(&self.type_ribs[], name, span)
|
||||
self.search_ribs(&self.type_ribs, name, span)
|
||||
}
|
||||
};
|
||||
|
||||
@ -4248,7 +4248,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
match err {
|
||||
Some((span, msg)) =>
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]),
|
||||
msg)),
|
||||
None => ()
|
||||
}
|
||||
|
||||
@ -4410,7 +4410,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
values[smallest] != usize::MAX &&
|
||||
values[smallest] < name.len() + 2 &&
|
||||
values[smallest] <= max_distance &&
|
||||
name != &maybes[smallest][] {
|
||||
name != &maybes[smallest][..] {
|
||||
|
||||
Some(maybes[smallest].to_string())
|
||||
|
||||
@ -4502,7 +4502,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
false // Stop advancing
|
||||
});
|
||||
|
||||
if method_scope && &token::get_name(self.self_name)[]
|
||||
if method_scope && &token::get_name(self.self_name)[..]
|
||||
== path_name {
|
||||
self.resolve_error(
|
||||
expr.span,
|
||||
@ -4592,7 +4592,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
self.resolve_error(
|
||||
expr.span,
|
||||
&format!("use of undeclared label `{}`",
|
||||
token::get_ident(label))[])
|
||||
token::get_ident(label)))
|
||||
}
|
||||
Some(DlDef(def @ DefLabel(_))) => {
|
||||
// Since this def is a label, it is never read.
|
||||
@ -4731,7 +4731,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
then {:?}",
|
||||
node_id,
|
||||
*entry.get(),
|
||||
def)[]);
|
||||
def));
|
||||
},
|
||||
Vacant(entry) => { entry.insert(def); },
|
||||
}
|
||||
@ -4747,7 +4747,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
self.resolve_error(pat.span,
|
||||
&format!("cannot use `ref` binding mode \
|
||||
with {}",
|
||||
descr)[]);
|
||||
descr));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4783,7 +4783,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
return "???".to_string();
|
||||
}
|
||||
self.names_to_string(&names.into_iter().rev()
|
||||
.collect::<Vec<ast::Name>>()[])
|
||||
.collect::<Vec<ast::Name>>())
|
||||
}
|
||||
|
||||
#[allow(dead_code)] // useful for debugging
|
||||
|
@ -191,17 +191,17 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>,
|
||||
// to be independent of one another in the crate.
|
||||
|
||||
symbol_hasher.reset();
|
||||
symbol_hasher.input_str(&link_meta.crate_name[]);
|
||||
symbol_hasher.input_str(&link_meta.crate_name);
|
||||
symbol_hasher.input_str("-");
|
||||
symbol_hasher.input_str(link_meta.crate_hash.as_str());
|
||||
for meta in &*tcx.sess.crate_metadata.borrow() {
|
||||
symbol_hasher.input_str(&meta[..]);
|
||||
}
|
||||
symbol_hasher.input_str("-");
|
||||
symbol_hasher.input_str(&encoder::encoded_ty(tcx, t)[]);
|
||||
symbol_hasher.input_str(&encoder::encoded_ty(tcx, t));
|
||||
// Prefix with 'h' so that it never blends into adjacent digits
|
||||
let mut hash = String::from_str("h");
|
||||
hash.push_str(&truncated_hash_result(symbol_hasher)[]);
|
||||
hash.push_str(&truncated_hash_result(symbol_hasher));
|
||||
hash
|
||||
}
|
||||
|
||||
@ -288,7 +288,7 @@ pub fn mangle<PI: Iterator<Item=PathElem>>(path: PI,
|
||||
|
||||
fn push(n: &mut String, s: &str) {
|
||||
let sani = sanitize(s);
|
||||
n.push_str(&format!("{}{}", sani.len(), sani)[]);
|
||||
n.push_str(&format!("{}{}", sani.len(), sani));
|
||||
}
|
||||
|
||||
// First, connect each component with <len, name> pairs.
|
||||
@ -361,7 +361,7 @@ pub fn remove(sess: &Session, path: &Path) {
|
||||
Err(e) => {
|
||||
sess.err(&format!("failed to remove {}: {}",
|
||||
path.display(),
|
||||
e)[]);
|
||||
e));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -376,7 +376,7 @@ pub fn link_binary(sess: &Session,
|
||||
for &crate_type in &*sess.crate_types.borrow() {
|
||||
if invalid_output_for_target(sess, crate_type) {
|
||||
sess.bug(&format!("invalid output type `{:?}` for target os `{}`",
|
||||
crate_type, sess.opts.target_triple)[]);
|
||||
crate_type, sess.opts.target_triple));
|
||||
}
|
||||
let out_file = link_binary_output(sess, trans, crate_type, outputs,
|
||||
crate_name);
|
||||
@ -441,8 +441,8 @@ pub fn filename_for_input(sess: &Session,
|
||||
out_filename.with_filename(format!("lib{}.rlib", libname))
|
||||
}
|
||||
config::CrateTypeDylib => {
|
||||
let (prefix, suffix) = (&sess.target.target.options.dll_prefix[],
|
||||
&sess.target.target.options.dll_suffix[]);
|
||||
let (prefix, suffix) = (&sess.target.target.options.dll_prefix,
|
||||
&sess.target.target.options.dll_suffix);
|
||||
out_filename.with_filename(format!("{}{}{}",
|
||||
prefix,
|
||||
libname,
|
||||
@ -452,7 +452,7 @@ pub fn filename_for_input(sess: &Session,
|
||||
out_filename.with_filename(format!("lib{}.a", libname))
|
||||
}
|
||||
config::CrateTypeExecutable => {
|
||||
let suffix = &sess.target.target.options.exe_suffix[];
|
||||
let suffix = &sess.target.target.options.exe_suffix;
|
||||
out_filename.with_filename(format!("{}{}", libname, suffix))
|
||||
}
|
||||
}
|
||||
@ -481,12 +481,12 @@ fn link_binary_output(sess: &Session,
|
||||
if !out_is_writeable {
|
||||
sess.fatal(&format!("output file {} is not writeable -- check its \
|
||||
permissions.",
|
||||
out_filename.display())[]);
|
||||
out_filename.display()));
|
||||
}
|
||||
else if !obj_is_writeable {
|
||||
sess.fatal(&format!("object file {} is not writeable -- check its \
|
||||
permissions.",
|
||||
obj_filename.display())[]);
|
||||
obj_filename.display()));
|
||||
}
|
||||
|
||||
match crate_type {
|
||||
@ -588,12 +588,12 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
// the same filename for metadata (stomping over one another)
|
||||
let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir");
|
||||
let metadata = tmpdir.path().join(METADATA_FILENAME);
|
||||
match fs::File::create(&metadata).write_all(&trans.metadata[]) {
|
||||
match fs::File::create(&metadata).write_all(&trans.metadata) {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(&format!("failed to write {}: {}",
|
||||
metadata.display(),
|
||||
e)[]);
|
||||
e));
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -611,25 +611,25 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
// was exactly 16 bytes.
|
||||
let bc_filename = obj_filename.with_extension(&format!("{}.bc", i));
|
||||
let bc_deflated_filename = obj_filename.with_extension(
|
||||
&format!("{}.bytecode.deflate", i)[]);
|
||||
&format!("{}.bytecode.deflate", i));
|
||||
|
||||
let bc_data = match fs::File::open(&bc_filename).read_to_end() {
|
||||
Ok(buffer) => buffer,
|
||||
Err(e) => sess.fatal(&format!("failed to read bytecode: {}",
|
||||
e)[])
|
||||
e))
|
||||
};
|
||||
|
||||
let bc_data_deflated = match flate::deflate_bytes(&bc_data[..]) {
|
||||
Some(compressed) => compressed,
|
||||
None => sess.fatal(&format!("failed to compress bytecode from {}",
|
||||
bc_filename.display())[])
|
||||
bc_filename.display()))
|
||||
};
|
||||
|
||||
let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
|
||||
Ok(file) => file,
|
||||
Err(e) => {
|
||||
sess.fatal(&format!("failed to create compressed bytecode \
|
||||
file: {}", e)[])
|
||||
file: {}", e))
|
||||
}
|
||||
};
|
||||
|
||||
@ -638,7 +638,7 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
sess.err(&format!("failed to write compressed bytecode: \
|
||||
{}", e)[]);
|
||||
{}", e));
|
||||
sess.abort_if_errors()
|
||||
}
|
||||
};
|
||||
@ -729,7 +729,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
|
||||
let p = match *path {
|
||||
Some(ref p) => p.clone(), None => {
|
||||
sess.err(&format!("could not find rlib for: `{}`",
|
||||
name)[]);
|
||||
name));
|
||||
continue
|
||||
}
|
||||
};
|
||||
@ -755,7 +755,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
|
||||
cstore::NativeUnknown => "library",
|
||||
cstore::NativeFramework => "framework",
|
||||
};
|
||||
sess.note(&format!("{}: {}", name, *lib)[]);
|
||||
sess.note(&format!("{}: {}", name, *lib));
|
||||
}
|
||||
}
|
||||
|
||||
@ -771,10 +771,10 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
let pname = get_cc_prog(sess);
|
||||
let mut cmd = Command::new(&pname[..]);
|
||||
|
||||
cmd.args(&sess.target.target.options.pre_link_args[]);
|
||||
cmd.args(&sess.target.target.options.pre_link_args);
|
||||
link_args(&mut cmd, sess, dylib, tmpdir.path(),
|
||||
trans, obj_filename, out_filename);
|
||||
cmd.args(&sess.target.target.options.post_link_args[]);
|
||||
cmd.args(&sess.target.target.options.post_link_args);
|
||||
if !sess.target.target.options.no_compiler_rt {
|
||||
cmd.arg("-lcompiler-rt");
|
||||
}
|
||||
@ -794,10 +794,10 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
if !prog.status.success() {
|
||||
sess.err(&format!("linking with `{}` failed: {}",
|
||||
pname,
|
||||
prog.status)[]);
|
||||
sess.note(&format!("{:?}", &cmd)[]);
|
||||
prog.status));
|
||||
sess.note(&format!("{:?}", &cmd));
|
||||
let mut output = prog.error.clone();
|
||||
output.push_all(&prog.output[]);
|
||||
output.push_all(&prog.output);
|
||||
sess.note(str::from_utf8(&output[..]).unwrap());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
@ -807,7 +807,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
Err(e) => {
|
||||
sess.err(&format!("could not exec the linker `{}`: {}",
|
||||
pname,
|
||||
e)[]);
|
||||
e));
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -819,7 +819,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
match Command::new("dsymutil").arg(out_filename).output() {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(&format!("failed to run dsymutil: {}", e)[]);
|
||||
sess.err(&format!("failed to run dsymutil: {}", e));
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -1005,7 +1005,7 @@ fn link_args(cmd: &mut Command,
|
||||
// addl_lib_search_paths
|
||||
if sess.opts.cg.rpath {
|
||||
let sysroot = sess.sysroot();
|
||||
let target_triple = &sess.opts.target_triple[];
|
||||
let target_triple = &sess.opts.target_triple;
|
||||
let get_install_prefix_lib_path = || {
|
||||
let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
|
||||
let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
|
||||
@ -1022,13 +1022,13 @@ fn link_args(cmd: &mut Command,
|
||||
get_install_prefix_lib_path: get_install_prefix_lib_path,
|
||||
realpath: ::util::fs::realpath
|
||||
};
|
||||
cmd.args(&rpath::get_rpath_flags(rpath_config)[]);
|
||||
cmd.args(&rpath::get_rpath_flags(rpath_config));
|
||||
}
|
||||
|
||||
// Finally add all the linker arguments provided on the command line along
|
||||
// with any #[link_args] attributes found inside the crate
|
||||
let empty = Vec::new();
|
||||
cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]);
|
||||
cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty));
|
||||
cmd.args(&used_link_args[..]);
|
||||
}
|
||||
|
||||
@ -1189,7 +1189,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
let name = cratepath.filename_str().unwrap();
|
||||
let name = &name[3..name.len() - 5]; // chop off lib/.rlib
|
||||
time(sess.time_passes(),
|
||||
&format!("altering {}.rlib", name)[],
|
||||
&format!("altering {}.rlib", name),
|
||||
(), |()| {
|
||||
let dst = tmpdir.join(cratepath.filename().unwrap());
|
||||
match fs::copy(&cratepath, &dst) {
|
||||
@ -1198,7 +1198,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
sess.err(&format!("failed to copy {} to {}: {}",
|
||||
cratepath.display(),
|
||||
dst.display(),
|
||||
e)[]);
|
||||
e));
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -1210,7 +1210,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
Err(e) => {
|
||||
sess.err(&format!("failed to chmod {} when preparing \
|
||||
for LTO: {}", dst.display(),
|
||||
e)[]);
|
||||
e));
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -1224,9 +1224,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
maybe_ar_prog: sess.opts.cg.ar.clone()
|
||||
};
|
||||
let mut archive = Archive::open(config);
|
||||
archive.remove_file(&format!("{}.o", name)[]);
|
||||
archive.remove_file(&format!("{}.o", name));
|
||||
let files = archive.files();
|
||||
if files.iter().any(|s| s[].ends_with(".o")) {
|
||||
if files.iter().any(|s| s.ends_with(".o")) {
|
||||
cmd.arg(dst);
|
||||
}
|
||||
});
|
||||
|
@ -54,7 +54,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
Some(p) => p,
|
||||
None => {
|
||||
sess.fatal(&format!("could not find rlib for: `{}`",
|
||||
name)[]);
|
||||
name));
|
||||
}
|
||||
};
|
||||
|
||||
@ -68,7 +68,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
(),
|
||||
|_| {
|
||||
archive.read(&format!("{}.{}.bytecode.deflate",
|
||||
file, i)[])
|
||||
file, i))
|
||||
});
|
||||
let bc_encoded = match bc_encoded {
|
||||
Some(data) => data,
|
||||
@ -76,7 +76,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
if i == 0 {
|
||||
// No bitcode was found at all.
|
||||
sess.fatal(&format!("missing compressed bytecode in {}",
|
||||
path.display())[]);
|
||||
path.display()));
|
||||
}
|
||||
// No more bitcode files to read.
|
||||
break;
|
||||
@ -99,12 +99,12 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
Some(inflated) => inflated,
|
||||
None => {
|
||||
sess.fatal(&format!("failed to decompress bc of `{}`",
|
||||
name)[])
|
||||
name))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sess.fatal(&format!("Unsupported bytecode format version {}",
|
||||
version)[])
|
||||
version))
|
||||
}
|
||||
})
|
||||
} else {
|
||||
@ -115,7 +115,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
Some(bc) => bc,
|
||||
None => {
|
||||
sess.fatal(&format!("failed to decompress bc of `{}`",
|
||||
name)[])
|
||||
name))
|
||||
}
|
||||
}
|
||||
})
|
||||
@ -124,7 +124,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
let ptr = bc_decoded.as_ptr();
|
||||
debug!("linking {}, part {}", name, i);
|
||||
time(sess.time_passes(),
|
||||
&format!("ll link {}.{}", name, i)[],
|
||||
&format!("ll link {}.{}", name, i),
|
||||
(),
|
||||
|()| unsafe {
|
||||
if !llvm::LLVMRustLinkInExternalBitcode(llmod,
|
||||
|
@ -54,7 +54,7 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! {
libc::free(cstr as *mut _);
handler.fatal(&format!("{}: {}",
&msg[..],
&err[..])[]);
&err[..]));
}
}
}
@ -104,13 +104,13 @@ impl SharedEmitter {
match diag.code {
Some(ref code) => {
handler.emit_with_code(None,
&diag.msg[],
&diag.msg,
&code[..],
diag.lvl);
},
None => {
handler.emit(None,
&diag.msg[],
&diag.msg,
diag.lvl);
},
}
@ -166,7 +166,7 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel {
fn create_target_machine(sess: &Session) -> TargetMachineRef {
let reloc_model_arg = match sess.opts.cg.relocation_model {
Some(ref s) => &s[..],
None => &sess.target.target.options.relocation_model[]
None => &sess.target.target.options.relocation_model[..],
};
let reloc_model = match reloc_model_arg {
"pic" => llvm::RelocPIC,
@ -177,7 +177,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
sess.err(&format!("{:?} is not a valid relocation mode",
sess.opts
.cg
.relocation_model)[]);
.relocation_model));
sess.abort_if_errors();
unreachable!();
}
@ -199,7 +199,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
let code_model_arg = match sess.opts.cg.code_model {
Some(ref s) => &s[..],
None => &sess.target.target.options.code_model[]
None => &sess.target.target.options.code_model[..],
};
let code_model = match code_model_arg {
@ -212,13 +212,13 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
sess.err(&format!("{:?} is not a valid code model",
sess.opts
.cg
.code_model)[]);
.code_model));
sess.abort_if_errors();
unreachable!();
}
};
let triple = &sess.target.target.llvm_target[];
let triple = &sess.target.target.llvm_target;
let tm = unsafe {
let triple = CString::new(triple.as_bytes()).unwrap();
@ -526,14 +526,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
}
if config.emit_asm {
let path = output_names.with_extension(&format!("{}.s", name_extra)[]);
let path = output_names.with_extension(&format!("{}.s", name_extra));
with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType);
});
}
if config.emit_obj {
let path = output_names.with_extension(&format!("{}.o", name_extra)[]);
let path = output_names.with_extension(&format!("{}.o", name_extra));
with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType);
});
@ -647,7 +647,7 @@ pub fn run_passes(sess: &Session,
// Process the work items, optionally using worker threads.
if sess.opts.cg.codegen_units == 1 {
run_work_singlethreaded(sess, &trans.reachable[], work_items);
run_work_singlethreaded(sess, &trans.reachable, work_items);
} else {
run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units);
}
@ -679,7 +679,7 @@ pub fn run_passes(sess: &Session,
// 2) Multiple codegen units, with `-o some_name`. We have
// no good solution for this case, so warn the user.
sess.warn(&format!("ignoring -o because multiple .{} files were produced",
ext)[]);
ext));
} else {
// 3) Multiple codegen units, but no `-o some_name`. We
// just leave the `foo.0.x` files in place.
@ -713,18 +713,18 @@ pub fn run_passes(sess: &Session,
let pname = get_cc_prog(sess);
let mut cmd = Command::new(&pname[..]);
cmd.args(&sess.target.target.options.pre_link_args[]);
cmd.args(&sess.target.target.options.pre_link_args);
cmd.arg("-nostdlib");
for index in 0..trans.modules.len() {
cmd.arg(crate_output.with_extension(&format!("{}.o", index)[]));
cmd.arg(crate_output.with_extension(&format!("{}.o", index)));
}
cmd.arg("-r")
.arg("-o")
.arg(windows_output_path.as_ref().unwrap_or(output_path));
cmd.args(&sess.target.target.options.post_link_args[]);
cmd.args(&sess.target.target.options.post_link_args);
if sess.opts.debugging_opts.print_link_args {
println!("{:?}", &cmd);
@ -737,14 +737,14 @@ pub fn run_passes(sess: &Session,
Ok(status) => {
if !status.success() {
sess.err(&format!("linking of {} with `{:?}` failed",
output_path.display(), cmd)[]);
output_path.display(), cmd));
sess.abort_if_errors();
}
},
Err(e) => {
sess.err(&format!("could not exec the linker `{}`: {}",
pname,
e)[]);
e));
sess.abort_if_errors();
},
}
@ -971,10 +971,10 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
if !prog.status.success() {
sess.err(&format!("linking with `{}` failed: {}",
pname,
prog.status)[]);
sess.note(&format!("{:?}", &cmd)[]);
prog.status));
sess.note(&format!("{:?}", &cmd));
let mut note = prog.error.clone();
note.push_all(&prog.output[]);
note.push_all(&prog.output);
sess.note(str::from_utf8(&note[..]).unwrap());
sess.abort_if_errors();
}
@ -982,7 +982,7 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
Err(e) => {
sess.err(&format!("could not exec the linker `{}`: {}",
pname,
e)[]);
e));
sess.abort_if_errors();
}
}
@ -1018,7 +1018,7 @@ unsafe fn configure_llvm(sess: &Session) {
if sess.target.target.arch == "aarch64" { add("-fast-isel=0"); }
for arg in &sess.opts.cg.llvm_args {
add(&(*arg)[]);
add(&(*arg));
}
}
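The same rewrite applies to owned vectors: `&attrs[]` becomes `&attrs`, since `&Vec<T>` deref-coerces to `&[T]` wherever a slice is expected. A short sketch, with `contains_name` as a hypothetical stand-in for slice-taking helpers like `attr::contains_name`:

// Hypothetical stand-in for helpers that take a slice of attributes.
fn contains_name(attrs: &[String], name: &str) -> bool {
    attrs.iter().any(|a| a == name)
}

fn main() {
    let attrs = vec![String::from("inline"), String::from("no_builtins")];

    // Old style removed by these hunks: contains_name(&attrs[], "no_builtins")
    // A plain borrow now deref-coerces &Vec<String> to &[String]:
    let found = contains_name(&attrs, "no_builtins");
    println!("{}", found);
}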
@ -94,7 +94,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
|
||||
// dump info about all the external crates referenced from this crate
|
||||
self.sess.cstore.iter_crate_data(|n, cmd| {
|
||||
self.fmt.external_crate_str(krate.span, &cmd.name[], n);
|
||||
self.fmt.external_crate_str(krate.span, &cmd.name, n);
|
||||
});
|
||||
self.fmt.recorder.record("end_external_crates\n");
|
||||
}
|
||||
@ -216,7 +216,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
fn lookup_type_ref(&self, ref_id: NodeId) -> Option<DefId> {
|
||||
if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) {
|
||||
self.sess.bug(&format!("def_map has no key for {} in lookup_type_ref",
|
||||
ref_id)[]);
|
||||
ref_id));
|
||||
}
|
||||
let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id];
|
||||
match def {
|
||||
@ -229,7 +229,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
let def_map = self.analysis.ty_cx.def_map.borrow();
|
||||
if !def_map.contains_key(&ref_id) {
|
||||
self.sess.span_bug(span, &format!("def_map has no key for {} in lookup_def_kind",
|
||||
ref_id)[]);
|
||||
ref_id));
|
||||
}
|
||||
let def = (*def_map)[ref_id];
|
||||
match def {
|
||||
@ -258,7 +258,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
def::DefMethod(..) |
|
||||
def::DefPrimTy(_) => {
|
||||
self.sess.span_bug(span, &format!("lookup_def_kind for unexpected item: {:?}",
|
||||
def)[]);
|
||||
def));
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -279,7 +279,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
span_utils.span_for_last_ident(p.span),
|
||||
id,
|
||||
qualname,
|
||||
&path_to_string(p)[],
|
||||
&path_to_string(p),
|
||||
&typ[..]);
|
||||
}
|
||||
self.collected_paths.clear();
|
||||
@ -302,7 +302,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
match item.node {
|
||||
ast::ItemImpl(_, _, _, _, ref ty, _) => {
|
||||
let mut result = String::from_str("<");
|
||||
result.push_str(&ty_to_string(&**ty)[]);
|
||||
result.push_str(&ty_to_string(&**ty));
|
||||
|
||||
match ty::trait_of_item(&self.analysis.ty_cx,
|
||||
ast_util::local_def(method.id)) {
|
||||
@ -319,7 +319,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
_ => {
|
||||
self.sess.span_bug(method.span,
|
||||
&format!("Container {} for method {} not an impl?",
|
||||
impl_id.node, method.id)[]);
|
||||
impl_id.node, method.id));
|
||||
},
|
||||
}
|
||||
},
|
||||
@ -329,7 +329,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
"Container {} for method {} is not a node item {:?}",
|
||||
impl_id.node,
|
||||
method.id,
|
||||
self.analysis.ty_cx.map.get(impl_id.node))[]);
|
||||
self.analysis.ty_cx.map.get(impl_id.node)));
|
||||
},
|
||||
},
|
||||
None => match ty::trait_of_item(&self.analysis.ty_cx,
|
||||
@ -343,14 +343,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
_ => {
|
||||
self.sess.span_bug(method.span,
|
||||
&format!("Could not find container {} for method {}",
|
||||
def_id.node, method.id)[]);
|
||||
def_id.node, method.id));
|
||||
}
|
||||
}
|
||||
},
|
||||
None => {
|
||||
self.sess.span_bug(method.span,
|
||||
&format!("Could not find container for method {}",
|
||||
method.id)[]);
|
||||
method.id));
|
||||
},
|
||||
},
|
||||
};
|
||||
@ -442,7 +442,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
scope_id),
|
||||
None => self.sess.span_bug(field.span,
|
||||
&format!("Could not find sub-span for field {}",
|
||||
qualname)[]),
|
||||
qualname)),
|
||||
}
|
||||
},
|
||||
_ => (),
|
||||
@ -528,7 +528,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
&get_ident(item.ident),
|
||||
&qualname[..],
|
||||
&value[..],
|
||||
&ty_to_string(&*typ)[],
|
||||
&ty_to_string(&*typ),
|
||||
self.cur_scope);
|
||||
|
||||
// walk type and init value
|
||||
@ -551,7 +551,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
&get_ident(item.ident),
|
||||
&qualname[..],
|
||||
"",
|
||||
&ty_to_string(&*typ)[],
|
||||
&ty_to_string(&*typ),
|
||||
self.cur_scope);
|
||||
|
||||
// walk type and init value
|
||||
@ -603,7 +603,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
&val[..]),
|
||||
None => self.sess.span_bug(item.span,
|
||||
&format!("Could not find subspan for enum {}",
|
||||
enum_name)[]),
|
||||
enum_name)),
|
||||
}
|
||||
for variant in &enum_definition.variants {
|
||||
let name = get_ident(variant.node.name);
|
||||
@ -872,7 +872,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
&format!("Unexpected def kind while looking \
|
||||
up path in `{}`: `{:?}`",
|
||||
self.span.snippet(span),
|
||||
*def)[]),
|
||||
*def)),
|
||||
}
|
||||
// modules or types in the path prefix
|
||||
match *def {
|
||||
@ -1007,7 +1007,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
None => {
|
||||
self.sess.span_bug(p.span,
|
||||
&format!("Could not find struct_def for `{}`",
|
||||
self.span.snippet(p.span))[]);
|
||||
self.span.snippet(p.span)));
|
||||
}
|
||||
};
|
||||
for &Spanned { node: ref field, span } in fields {
|
||||
@ -1255,7 +1255,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
None => {
|
||||
self.sess.span_bug(method_type.span,
|
||||
&format!("Could not find trait for method {}",
|
||||
method_type.id)[]);
|
||||
method_type.id));
|
||||
},
|
||||
};
|
||||
|
||||
@ -1362,7 +1362,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
}
|
||||
}
|
||||
_ => self.sess.span_bug(ex.span,
|
||||
&format!("Expected struct type, found {:?}", ty)[]),
|
||||
&format!("Expected struct type, found {:?}", ty)),
|
||||
}
|
||||
},
|
||||
ast::ExprTupField(ref sub_ex, idx) => {
|
||||
@ -1391,7 +1391,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
ty::ty_tup(_) => {}
|
||||
_ => self.sess.span_bug(ex.span,
|
||||
&format!("Expected struct or tuple \
|
||||
type, found {:?}", ty)[]),
|
||||
type, found {:?}", ty)),
|
||||
}
|
||||
},
|
||||
ast::ExprClosure(_, ref decl, ref body) => {
|
||||
@ -1400,7 +1400,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
}
|
||||
|
||||
let mut id = String::from_str("$");
|
||||
id.push_str(&ex.id.to_string()[]);
|
||||
id.push_str(&ex.id.to_string());
|
||||
self.process_formals(&decl.inputs, &id[..]);
|
||||
|
||||
// walk arg and return types
|
||||
@ -1448,7 +1448,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
if !def_map.contains_key(&id) {
|
||||
self.sess.span_bug(p.span,
|
||||
&format!("def_map has no key for {} in visit_arm",
|
||||
id)[]);
|
||||
id));
|
||||
}
|
||||
let def = &(*def_map)[id];
|
||||
match *def {
|
||||
@ -1463,7 +1463,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
self.fmt.variable_str(p.span,
|
||||
Some(p.span),
|
||||
id,
|
||||
&path_to_string(p)[],
|
||||
&path_to_string(p),
|
||||
&value[..],
|
||||
"")
|
||||
}
|
||||
@ -1519,7 +1519,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
self.fmt.variable_str(p.span,
|
||||
sub_span,
|
||||
id,
|
||||
&path_to_string(p)[],
|
||||
&path_to_string(p),
|
||||
&value[..],
|
||||
&typ[..]);
|
||||
}
|
||||
@ -1540,7 +1540,7 @@ pub fn process_crate(sess: &Session,
|
||||
}
|
||||
|
||||
assert!(analysis.glob_map.is_some());
|
||||
let cratename = match attr::find_crate_name(&krate.attrs[]) {
|
||||
let cratename = match attr::find_crate_name(&krate.attrs) {
|
||||
Some(name) => name.to_string(),
|
||||
None => {
|
||||
info!("Could not find crate name, using 'unknown_crate'");
|
||||
@ -1561,7 +1561,7 @@ pub fn process_crate(sess: &Session,
|
||||
|
||||
match fs::mkdir_recursive(&root_path, old_io::USER_RWX) {
|
||||
Err(e) => sess.err(&format!("Could not create directory {}: {}",
|
||||
root_path.display(), e)[]),
|
||||
root_path.display(), e)),
|
||||
_ => (),
|
||||
}
|
||||
|
||||
@ -1578,7 +1578,7 @@ pub fn process_crate(sess: &Session,
|
||||
Ok(f) => box f,
|
||||
Err(e) => {
|
||||
let disp = root_path.display();
|
||||
sess.fatal(&format!("Could not open {}: {}", disp, e)[]);
|
||||
sess.fatal(&format!("Could not open {}: {}", disp, e));
|
||||
}
|
||||
};
|
||||
root_path.pop();
|
||||
|
@ -162,7 +162,7 @@ impl<'a> FmtStrs<'a> {
|
||||
if values.len() != fields.len() {
|
||||
self.span.sess.span_bug(span, &format!(
|
||||
"Mismatch between length of fields for '{}', expected '{}', found '{}'",
|
||||
kind, fields.len(), values.len())[]);
|
||||
kind, fields.len(), values.len()));
|
||||
}
|
||||
|
||||
let values = values.iter().map(|s| {
|
||||
@ -191,7 +191,7 @@ impl<'a> FmtStrs<'a> {
|
||||
if needs_span {
|
||||
self.span.sess.span_bug(span, &format!(
|
||||
"Called record_without_span for '{}' which does requires a span",
|
||||
label)[]);
|
||||
label));
|
||||
}
|
||||
assert!(!dump_spans);
|
||||
|
||||
@ -268,7 +268,7 @@ impl<'a> FmtStrs<'a> {
|
||||
// variable def's node id
|
||||
let mut qualname = String::from_str(name);
|
||||
qualname.push_str("$");
|
||||
qualname.push_str(&id.to_string()[]);
|
||||
qualname.push_str(&id.to_string());
|
||||
self.check_and_record(Variable,
|
||||
span,
|
||||
sub_span,
|
||||
|
@ -219,7 +219,7 @@ impl<'a> SpanUtils<'a> {
|
||||
let loc = self.sess.codemap().lookup_char_pos(span.lo);
|
||||
self.sess.span_bug(span,
|
||||
&format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
|
||||
self.snippet(span), loc.file.name, loc.line)[]);
|
||||
self.snippet(span), loc.file.name, loc.line));
|
||||
}
|
||||
if result.is_none() && prev.tok.is_ident() && bracket_count == 0 {
|
||||
return self.make_sub_span(span, Some(prev.sp));
|
||||
@ -245,7 +245,7 @@ impl<'a> SpanUtils<'a> {
|
||||
let loc = self.sess.codemap().lookup_char_pos(span.lo);
|
||||
self.sess.span_bug(span, &format!(
|
||||
"Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
|
||||
self.snippet(span), loc.file.name, loc.line)[]);
|
||||
self.snippet(span), loc.file.name, loc.line));
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
@ -444,7 +444,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
|
||||
let _indenter = indenter();
|
||||
|
||||
m.iter().filter_map(|br| {
|
||||
e(&br.pats[]).map(|pats| {
|
||||
e(&br.pats).map(|pats| {
|
||||
let this = br.pats[col];
|
||||
let mut bound_ptrs = br.bound_ptrs.clone();
|
||||
match this.node {
|
||||
@ -825,7 +825,7 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
|
||||
let did = langcall(cx,
|
||||
None,
|
||||
&format!("comparison of `{}`",
|
||||
cx.ty_to_string(rhs_t))[],
|
||||
cx.ty_to_string(rhs_t)),
|
||||
StrEqFnLangItem);
|
||||
let t = ty::mk_str_slice(cx.tcx(), cx.tcx().mk_region(ty::ReStatic), ast::MutImmutable);
|
||||
// The comparison function gets the slices by value, so we have to make copies here. Even
|
||||
@ -1375,7 +1375,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat,
|
||||
"__llmatch");
|
||||
trmode = TrByCopy(alloca_no_lifetime(bcx,
|
||||
llvariable_ty,
|
||||
&bcx.ident(ident)[]));
|
||||
&bcx.ident(ident)));
|
||||
}
|
||||
ast::BindByValue(_) => {
|
||||
// in this case, the final type of the variable will be T,
|
||||
@ -1383,13 +1383,13 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat,
|
||||
// above
|
||||
llmatch = alloca_no_lifetime(bcx,
|
||||
llvariable_ty.ptr_to(),
|
||||
&bcx.ident(ident)[]);
|
||||
&bcx.ident(ident));
|
||||
trmode = TrByMove;
|
||||
}
|
||||
ast::BindByRef(_) => {
|
||||
llmatch = alloca_no_lifetime(bcx,
|
||||
llvariable_ty,
|
||||
&bcx.ident(ident)[]);
|
||||
&bcx.ident(ident));
|
||||
trmode = TrByRef;
|
||||
}
|
||||
};
|
||||
@ -1610,7 +1610,7 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
|
||||
let var_ty = node_id_type(bcx, p_id);
|
||||
|
||||
// Allocate memory on stack for the binding.
|
||||
let llval = alloc_ty(bcx, var_ty, &bcx.ident(*ident)[]);
|
||||
let llval = alloc_ty(bcx, var_ty, &bcx.ident(*ident));
|
||||
|
||||
// Subtle: be sure that we *populate* the memory *before*
|
||||
// we schedule the cleanup.
|
||||
@ -1648,7 +1648,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
if bcx.sess().asm_comments() {
|
||||
add_comment(bcx, &format!("bind_irrefutable_pat(pat={})",
|
||||
pat.repr(bcx.tcx()))[]);
|
||||
pat.repr(bcx.tcx())));
|
||||
}
|
||||
|
||||
let _indenter = indenter();
|
||||
|
@ -177,7 +177,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
ty::ty_enum(def_id, substs) => {
|
||||
let cases = get_cases(cx.tcx(), def_id, substs);
|
||||
let hint = *ty::lookup_repr_hints(cx.tcx(), def_id)[].get(0)
|
||||
let hint = *ty::lookup_repr_hints(cx.tcx(), def_id).get(0)
|
||||
.unwrap_or(&attr::ReprAny);
|
||||
|
||||
let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
|
||||
@ -210,7 +210,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
cx.sess().bug(&format!("non-C-like enum {} with specified \
|
||||
discriminants",
|
||||
ty::item_path_str(cx.tcx(),
|
||||
def_id))[]);
|
||||
def_id)));
|
||||
}
|
||||
|
||||
if cases.len() == 1 {
|
||||
@ -228,7 +228,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let mut discr = 0;
|
||||
while discr < 2 {
|
||||
if cases[1 - discr].is_zerolen(cx, t) {
|
||||
let st = mk_struct(cx, &cases[discr].tys[],
|
||||
let st = mk_struct(cx, &cases[discr].tys,
|
||||
false, t);
|
||||
match cases[discr].find_ptr(cx) {
|
||||
Some(ref df) if df.len() == 1 && st.fields.len() == 1 => {
|
||||
@ -318,7 +318,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
let fields : Vec<_> = cases.iter().map(|c| {
|
||||
let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity));
|
||||
ftys.push_all(&c.tys[]);
|
||||
ftys.push_all(&c.tys);
|
||||
if dtor { ftys.push(cx.tcx().types.bool); }
|
||||
mk_struct(cx, &ftys[..], false, t)
|
||||
}).collect();
|
||||
@ -328,7 +328,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
General(ity, fields, dtor)
|
||||
}
|
||||
_ => cx.sess().bug(&format!("adt::represent_type called on non-ADT type: {}",
|
||||
ty_to_string(cx.tcx(), t))[])
|
||||
ty_to_string(cx.tcx(), t)))
|
||||
}
|
||||
}
|
||||
|
||||
@ -414,7 +414,7 @@ fn find_discr_field_candidate<'tcx>(tcx: &ty::ctxt<'tcx>,
|
||||
|
||||
impl<'tcx> Case<'tcx> {
|
||||
fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool {
|
||||
mk_struct(cx, &self.tys[], false, scapegoat).size == 0
|
||||
mk_struct(cx, &self.tys, false, scapegoat).size == 0
|
||||
}
|
||||
|
||||
fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option<DiscrField> {
|
||||
@ -504,7 +504,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp
|
||||
return ity;
|
||||
}
|
||||
attr::ReprExtern => {
|
||||
attempts = match &cx.sess().target.target.arch[] {
|
||||
attempts = match &cx.sess().target.target.arch[..] {
|
||||
// WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32`
|
||||
// appears to be used on Linux and NetBSD, but some systems may use the variant
|
||||
// corresponding to `choose_shortest`. However, we don't run on those yet...?
|
||||
@ -624,7 +624,7 @@ pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
match *r {
|
||||
CEnum(..) | General(..) | RawNullablePointer { .. } => { }
|
||||
Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } =>
|
||||
llty.set_struct_body(&struct_llfields(cx, st, false, false)[],
|
||||
llty.set_struct_body(&struct_llfields(cx, st, false, false),
|
||||
st.packed)
|
||||
}
|
||||
}
|
||||
@ -640,7 +640,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => {
|
||||
match name {
|
||||
None => {
|
||||
Type::struct_(cx, &struct_llfields(cx, st, sizing, dst)[],
|
||||
Type::struct_(cx, &struct_llfields(cx, st, sizing, dst),
|
||||
st.packed)
|
||||
}
|
||||
Some(name) => { assert_eq!(sizing, false); Type::named_struct(cx, name) }
|
||||
@ -965,7 +965,7 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
for (discr, case) in cases.iter().enumerate() {
|
||||
let mut variant_cx = fcx.new_temp_block(
|
||||
&format!("enum-variant-iter-{}", &discr.to_string())[]
|
||||
&format!("enum-variant-iter-{}", &discr.to_string())
|
||||
);
|
||||
let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true);
|
||||
AddCase(llswitch, rhs_val, variant_cx.llbb);
|
||||
@ -1070,7 +1070,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
|
||||
if discr == nndiscr {
|
||||
C_struct(ccx, &build_const_struct(ccx,
|
||||
nonnull,
|
||||
vals)[],
|
||||
vals),
|
||||
false)
|
||||
} else {
|
||||
let vals = nonnull.fields.iter().map(|&ty| {
|
||||
@ -1080,7 +1080,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
|
||||
}).collect::<Vec<ValueRef>>();
|
||||
C_struct(ccx, &build_const_struct(ccx,
|
||||
nonnull,
|
||||
&vals[..])[],
|
||||
&vals[..]),
|
||||
false)
|
||||
}
|
||||
}
|
||||
|
@ -365,7 +365,7 @@ fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
Err(s) => {
|
||||
bcx.sess().fatal(&format!("allocation of `{}` {}",
|
||||
bcx.ty_to_string(info_ty),
|
||||
s)[]);
|
||||
s));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -444,7 +444,7 @@ pub fn set_llvm_fn_attrs(ccx: &CrateContext, attrs: &[ast::Attribute], llfn: Val
|
||||
|
||||
for attr in attrs {
|
||||
let mut used = true;
|
||||
match &attr.name()[] {
|
||||
match &attr.name()[..] {
|
||||
"no_stack_check" => unset_split_stack(llfn),
|
||||
"no_split_stack" => {
|
||||
unset_split_stack(llfn);
|
||||
@ -486,7 +486,7 @@ pub fn unset_split_stack(f: ValueRef) {
|
||||
// silently mangles such symbols, breaking our linkage model.
|
||||
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) {
|
||||
if ccx.all_llvm_symbols().borrow().contains(&sym) {
|
||||
ccx.sess().bug(&format!("duplicate LLVM symbol: {}", sym)[]);
|
||||
ccx.sess().bug(&format!("duplicate LLVM symbol: {}", sym));
|
||||
}
|
||||
ccx.all_llvm_symbols().borrow_mut().insert(sym);
|
||||
}
|
||||
@ -541,7 +541,7 @@ pub fn bin_op_to_icmp_predicate(ccx: &CrateContext, op: ast::BinOp_, signed: boo
|
||||
ast::BiGe => if signed { llvm::IntSGE } else { llvm::IntUGE },
|
||||
op => {
|
||||
ccx.sess().bug(&format!("comparison_op_to_icmp_predicate: expected \
|
||||
comparison operator, found {:?}", op)[]);
|
||||
comparison operator, found {:?}", op));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -557,7 +557,7 @@ pub fn bin_op_to_fcmp_predicate(ccx: &CrateContext, op: ast::BinOp_)
|
||||
ast::BiGe => llvm::RealOGE,
|
||||
op => {
|
||||
ccx.sess().bug(&format!("comparison_op_to_fcmp_predicate: expected \
|
||||
comparison operator, found {:?}", op)[]);
|
||||
comparison operator, found {:?}", op));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -735,8 +735,8 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>,
|
||||
let variant_cx =
|
||||
fcx.new_temp_block(
|
||||
&format!("enum-iter-variant-{}",
|
||||
&variant.disr_val.to_string()[])
|
||||
[]);
|
||||
&variant.disr_val.to_string())
|
||||
);
|
||||
match adt::trans_case(cx, &*repr, variant.disr_val) {
|
||||
_match::SingleResult(r) => {
|
||||
AddCase(llswitch, r.val, variant_cx.llbb)
|
||||
@ -761,7 +761,7 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>,
|
||||
}
|
||||
_ => {
|
||||
cx.sess().unimpl(&format!("type in iter_structural_ty: {}",
|
||||
ty_to_string(cx.tcx(), t))[])
|
||||
ty_to_string(cx.tcx(), t)))
|
||||
}
|
||||
}
|
||||
return cx;
|
||||
@ -843,7 +843,7 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>(
|
||||
}
|
||||
_ => {
|
||||
cx.sess().bug(&format!("fail-if-zero on unexpected type: {}",
|
||||
ty_to_string(cx.tcx(), rhs_t))[]);
|
||||
ty_to_string(cx.tcx(), rhs_t)));
|
||||
}
|
||||
};
|
||||
let bcx = with_cond(cx, is_zero, |bcx| {
|
||||
@ -1116,7 +1116,7 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) {
|
||||
pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) {
|
||||
let _icx = push_ctxt("call_memcpy");
|
||||
let ccx = cx.ccx();
|
||||
let key = match &ccx.sess().target.target.target_pointer_width[] {
|
||||
let key = match &ccx.sess().target.target.target_pointer_width[..] {
|
||||
"32" => "llvm.memcpy.p0i8.p0i8.i32",
|
||||
"64" => "llvm.memcpy.p0i8.p0i8.i64",
|
||||
tws => panic!("Unsupported target word size for memcpy: {}", tws),
|
||||
@ -1163,7 +1163,7 @@ fn memzero<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>) {
|
||||
|
||||
let llty = type_of::type_of(ccx, ty);
|
||||
|
||||
let intrinsic_key = match &ccx.sess().target.target.target_pointer_width[] {
|
||||
let intrinsic_key = match &ccx.sess().target.target.target_pointer_width[..] {
|
||||
"32" => "llvm.memset.p0i8.i32",
|
||||
"64" => "llvm.memset.p0i8.i64",
|
||||
tws => panic!("Unsupported target word size for memset: {}", tws),
|
||||
@ -1833,14 +1833,14 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
closure::ClosureEnv::NotClosure => {
|
||||
copy_args_to_allocas(bcx,
|
||||
arg_scope,
|
||||
&decl.inputs[],
|
||||
&decl.inputs,
|
||||
arg_datums)
|
||||
}
|
||||
closure::ClosureEnv::Closure(_) => {
|
||||
copy_closure_args_to_allocas(
|
||||
bcx,
|
||||
arg_scope,
|
||||
&decl.inputs[],
|
||||
&decl.inputs,
|
||||
arg_datums,
|
||||
&monomorphized_arg_types[..])
|
||||
}
|
||||
@ -1964,7 +1964,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
_ => ccx.sess().bug(
|
||||
&format!("trans_enum_variant_constructor: \
|
||||
unexpected ctor return type {}",
|
||||
ctor_ty.repr(tcx))[])
|
||||
ctor_ty.repr(tcx)))
|
||||
};
|
||||
|
||||
// Get location to store the result. If the user does not care about
|
||||
@ -2042,7 +2042,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx
|
||||
_ => ccx.sess().bug(
|
||||
&format!("trans_enum_variant_or_tuple_like_struct: \
|
||||
unexpected ctor return type {}",
|
||||
ty_to_string(ccx.tcx(), ctor_ty))[])
|
||||
ty_to_string(ccx.tcx(), ctor_ty)))
|
||||
};
|
||||
|
||||
let (arena, fcx): (TypedArena<_>, FunctionContext);
|
||||
@ -2143,7 +2143,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span,
|
||||
*lvlsrc.unwrap(), Some(sp),
|
||||
&format!("enum variant is more than three times larger \
|
||||
({} bytes) than the next largest (ignoring padding)",
|
||||
largest)[]);
|
||||
largest));
|
||||
|
||||
ccx.sess().span_note(enum_def.variants[largest_index].span,
|
||||
"this variant is the largest");
|
||||
@ -2261,7 +2261,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
|
||||
match item.node {
|
||||
ast::ItemFn(ref decl, _fn_style, abi, ref generics, ref body) => {
|
||||
if !generics.is_type_parameterized() {
|
||||
let trans_everywhere = attr::requests_inline(&item.attrs[]);
|
||||
let trans_everywhere = attr::requests_inline(&item.attrs);
|
||||
// Ignore `trans_everywhere` for cross-crate inlined items
|
||||
// (`from_external`). `trans_item` will be called once for each
|
||||
// compilation unit that references the item, so it will still get
|
||||
@ -2273,7 +2273,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
|
||||
foreign::trans_rust_fn_with_foreign_abi(ccx,
|
||||
&**decl,
|
||||
&**body,
|
||||
&item.attrs[],
|
||||
&item.attrs,
|
||||
llfn,
|
||||
empty_substs,
|
||||
item.id,
|
||||
@ -2285,7 +2285,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
|
||||
llfn,
|
||||
empty_substs,
|
||||
item.id,
|
||||
&item.attrs[]);
|
||||
&item.attrs);
|
||||
}
|
||||
update_linkage(ccx,
|
||||
llfn,
|
||||
@ -2332,7 +2332,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
|
||||
|
||||
// Do static_assert checking. It can't really be done much earlier
|
||||
// because we need to get the value of the bool out of LLVM
|
||||
if attr::contains_name(&item.attrs[], "static_assert") {
|
||||
if attr::contains_name(&item.attrs, "static_assert") {
|
||||
if m == ast::MutMutable {
|
||||
ccx.sess().span_fatal(expr.span,
|
||||
"cannot have static_assert on a mutable \
|
||||
@ -2746,7 +2746,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
||||
let val = match item {
|
||||
ast_map::NodeItem(i) => {
|
||||
let ty = ty::node_id_to_type(ccx.tcx(), i.id);
|
||||
let sym = || exported_name(ccx, id, ty, &i.attrs[]);
|
||||
let sym = || exported_name(ccx, id, ty, &i.attrs);
|
||||
|
||||
let v = match i.node {
|
||||
ast::ItemStatic(_, _, ref expr) => {
|
||||
@ -2773,13 +2773,13 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
||||
if contains_null(&sym[..]) {
|
||||
ccx.sess().fatal(
|
||||
&format!("Illegal null byte in export_name \
|
||||
value: `{}`", sym)[]);
|
||||
value: `{}`", sym));
|
||||
}
|
||||
let buf = CString::new(sym.clone()).unwrap();
|
||||
let g = llvm::LLVMAddGlobal(ccx.llmod(), llty,
|
||||
buf.as_ptr());
|
||||
|
||||
if attr::contains_name(&i.attrs[],
|
||||
if attr::contains_name(&i.attrs,
|
||||
"thread_local") {
|
||||
llvm::set_thread_local(g, true);
|
||||
}
|
||||
@ -2798,19 +2798,19 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
||||
sym,
|
||||
i.id)
|
||||
};
|
||||
set_llvm_fn_attrs(ccx, &i.attrs[], llfn);
|
||||
set_llvm_fn_attrs(ccx, &i.attrs, llfn);
|
||||
llfn
|
||||
}
|
||||
|
||||
_ => panic!("get_item_val: weird result in table")
|
||||
};
|
||||
|
||||
match attr::first_attr_value_str_by_name(&i.attrs[],
|
||||
match attr::first_attr_value_str_by_name(&i.attrs,
|
||||
"link_section") {
|
||||
Some(sect) => {
|
||||
if contains_null(&sect) {
ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`",
&sect)[]);
&sect));
|
||||
}
|
||||
unsafe {
|
||||
let buf = CString::new(sect.as_bytes()).unwrap();
|
||||
@ -2876,7 +2876,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
||||
let sym = exported_name(ccx,
|
||||
id,
|
||||
ty,
|
||||
&enm.attrs[]);
|
||||
&enm.attrs);
|
||||
|
||||
llfn = match enm.node {
|
||||
ast::ItemEnum(_, _) => {
|
||||
@ -2903,7 +2903,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
||||
let sym = exported_name(ccx,
|
||||
id,
|
||||
ty,
|
||||
&struct_item.attrs[]);
|
||||
&struct_item.attrs);
|
||||
let llfn = register_fn(ccx, struct_item.span,
|
||||
sym, ctor_id, ty);
|
||||
set_inline_hint(llfn);
|
||||
@ -2912,7 +2912,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
||||
|
||||
ref variant => {
|
||||
ccx.sess().bug(&format!("get_item_val(): unexpected variant: {:?}",
|
||||
variant)[])
|
||||
variant))
|
||||
}
|
||||
};
|
||||
|
||||
@ -2933,10 +2933,10 @@ fn register_method(ccx: &CrateContext, id: ast::NodeId,
|
||||
m: &ast::Method) -> ValueRef {
|
||||
let mty = ty::node_id_to_type(ccx.tcx(), id);
|
||||
|
||||
let sym = exported_name(ccx, id, mty, &m.attrs[]);
|
||||
let sym = exported_name(ccx, id, mty, &m.attrs);
|
||||
|
||||
let llfn = register_fn(ccx, m.span, sym, id, mty);
|
||||
set_llvm_fn_attrs(ccx, &m.attrs[], llfn);
|
||||
set_llvm_fn_attrs(ccx, &m.attrs, llfn);
|
||||
llfn
|
||||
}
|
||||
|
||||
@ -3104,7 +3104,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>)
|
||||
let link_meta = link::build_link_meta(&tcx.sess, krate, name);
|
||||
|
||||
let codegen_units = tcx.sess.opts.cg.codegen_units;
|
||||
let shared_ccx = SharedCrateContext::new(&link_meta.crate_name[],
|
||||
let shared_ccx = SharedCrateContext::new(&link_meta.crate_name,
|
||||
codegen_units,
|
||||
tcx,
|
||||
export_map,
|
||||
@ -3206,7 +3206,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>)
|
||||
llmod: shared_ccx.metadata_llmod(),
|
||||
};
|
||||
let formats = shared_ccx.tcx().dependency_formats.borrow().clone();
|
||||
let no_builtins = attr::contains_name(&krate.attrs[], "no_builtins");
|
||||
let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
|
||||
|
||||
let translation = CrateTranslation {
|
||||
modules: modules,
|
||||
|
@ -109,7 +109,7 @@ pub fn compute_abi_info(ccx: &CrateContext,
|
||||
atys: &[Type],
|
||||
rty: Type,
|
||||
ret_def: bool) -> FnType {
|
||||
match &ccx.sess().target.target.arch[] {
|
||||
match &ccx.sess().target.target.arch[..] {
|
||||
"x86" => cabi_x86::compute_abi_info(ccx, atys, rty, ret_def),
|
||||
"x86_64" => if ccx.sess().target.target.options.is_like_windows {
|
||||
cabi_x86_win64::compute_abi_info(ccx, atys, rty, ret_def)
|
||||
@ -128,6 +128,6 @@ pub fn compute_abi_info(ccx: &CrateContext,
|
||||
"mips" => cabi_mips::compute_abi_info(ccx, atys, rty, ret_def),
|
||||
"powerpc" => cabi_powerpc::compute_abi_info(ccx, atys, rty, ret_def),
|
||||
a => ccx.sess().fatal(&format!("unrecognized arch \"{}\" in target specification", a)
|
||||
[]),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -118,7 +118,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
|
||||
expr.span,
|
||||
&format!("type of callee is neither bare-fn nor closure: \
|
||||
{}",
|
||||
bcx.ty_to_string(datum.ty))[]);
|
||||
bcx.ty_to_string(datum.ty)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -215,7 +215,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
|
||||
bcx.tcx().sess.span_bug(
|
||||
ref_expr.span,
|
||||
&format!("cannot translate def {:?} \
|
||||
to a callable thing!", def)[]);
|
||||
to a callable thing!", def));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -298,7 +298,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
|
||||
|
||||
_ => {
|
||||
tcx.sess.bug(&format!("trans_fn_pointer_shim invoked on invalid type: {}",
|
||||
bare_fn_ty.repr(tcx))[]);
|
||||
bare_fn_ty.repr(tcx)));
|
||||
}
|
||||
};
|
||||
let sig = ty::erase_late_bound_regions(tcx, sig);
|
||||
|
@ -513,7 +513,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
|
||||
|
||||
self.ccx.sess().bug(
|
||||
&format!("no cleanup scope {} found",
|
||||
self.ccx.tcx().map.node_to_string(cleanup_scope))[]);
|
||||
self.ccx.tcx().map.node_to_string(cleanup_scope)));
|
||||
}
|
||||
|
||||
/// Schedules a cleanup to occur in the top-most scope, which must be a temporary scope.
|
||||
@ -695,7 +695,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
|
||||
LoopExit(id, _) => {
|
||||
self.ccx.sess().bug(&format!(
|
||||
"cannot exit from scope {}, \
|
||||
not in scope", id)[]);
|
||||
not in scope", id));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1135,7 +1135,7 @@ pub fn temporary_scope(tcx: &ty::ctxt,
|
||||
}
|
||||
None => {
|
||||
tcx.sess.bug(&format!("no temporary scope available for expr {}",
|
||||
id)[])
|
||||
id))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -278,7 +278,7 @@ pub fn gensym_name(name: &str) -> PathElem {
|
||||
let num = token::gensym(name).usize();
|
||||
// use one colon which will get translated to a period by the mangler, and
|
||||
// we're guaranteed that `num` is globally unique for this crate.
|
||||
PathName(token::gensym(&format!("{}:{}", name, num)[]))
|
||||
PathName(token::gensym(&format!("{}:{}", name, num)))
|
||||
}
|
||||
|
||||
#[derive(Copy)]
|
||||
@ -606,7 +606,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
|
||||
Some(v) => v.clone(),
|
||||
None => {
|
||||
self.tcx().sess.bug(&format!(
|
||||
"no def associated with node id {}", nid)[]);
|
||||
"no def associated with node id {}", nid));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1011,7 +1011,7 @@ pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
span,
|
||||
&format!("Encountered error `{}` selecting `{}` during trans",
|
||||
e.repr(tcx),
|
||||
trait_ref.repr(tcx))[])
|
||||
trait_ref.repr(tcx)))
|
||||
}
|
||||
};
|
||||
|
||||
@ -1104,7 +1104,7 @@ pub fn drain_fulfillment_cx<'a,'tcx,T>(span: Span,
|
||||
infcx.tcx.sess.span_bug(
|
||||
span,
|
||||
&format!("Encountered errors `{}` fulfilling during trans",
|
||||
errors.repr(infcx.tcx))[]);
|
||||
errors.repr(infcx.tcx)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1144,7 +1144,7 @@ pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
if substs.types.any(|t| ty::type_needs_infer(*t)) {
|
||||
tcx.sess.bug(&format!("type parameters for node {:?} include inference types: {:?}",
|
||||
node, substs.repr(tcx))[]);
|
||||
node, substs.repr(tcx)));
|
||||
}
|
||||
|
||||
monomorphize::apply_param_substs(tcx,
|
||||
|
@ -54,7 +54,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit)
|
||||
_ => cx.sess().span_bug(lit.span,
|
||||
&format!("integer literal has type {} (expected int \
|
||||
or uint)",
|
||||
ty_to_string(cx.tcx(), lit_int_ty))[])
|
||||
ty_to_string(cx.tcx(), lit_int_ty)))
|
||||
}
|
||||
}
|
||||
ast::LitFloat(ref fs, t) => {
|
||||
@ -152,7 +152,7 @@ fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
None => {
|
||||
cx.sess().bug(&format!("unexpected dereferenceable type {}",
|
||||
ty_to_string(cx.tcx(), ty))[])
|
||||
ty_to_string(cx.tcx(), ty)))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -174,7 +174,7 @@ pub fn get_const_expr<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
} else {
|
||||
ccx.sess().span_bug(ref_expr.span,
|
||||
&format!("get_const_val given non-constant item {}",
|
||||
item.repr(ccx.tcx()))[]);
|
||||
item.repr(ccx.tcx())));
|
||||
}
|
||||
}
|
||||
|
||||
@ -301,7 +301,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
None => {
|
||||
cx.sess().bug(&format!("unexpected dereferenceable type {}",
|
||||
ty_to_string(cx.tcx(), ty))[])
|
||||
ty_to_string(cx.tcx(), ty)))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -309,7 +309,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
Some(autoref) => {
|
||||
cx.sess().span_bug(e.span,
|
||||
&format!("unimplemented const first autoref {:?}", autoref)[])
|
||||
&format!("unimplemented const first autoref {:?}", autoref))
|
||||
}
|
||||
};
|
||||
match second_autoref {
|
||||
@ -333,7 +333,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
Some(autoref) => {
|
||||
cx.sess().span_bug(e.span,
|
||||
&format!("unimplemented const second autoref {:?}", autoref)[])
|
||||
&format!("unimplemented const second autoref {:?}", autoref))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -351,7 +351,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
cx.sess().bug(&format!("const {} of type {} has size {} instead of {}",
|
||||
e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety_adjusted),
|
||||
csize, tsize)[]);
|
||||
csize, tsize));
|
||||
}
|
||||
(llconst, ety_adjusted)
|
||||
}
|
||||
@ -485,7 +485,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
_ => cx.sess().span_bug(base.span,
|
||||
&format!("index-expr base must be a vector \
|
||||
or string type, found {}",
|
||||
ty_to_string(cx.tcx(), bt))[])
|
||||
ty_to_string(cx.tcx(), bt)))
|
||||
},
|
||||
ty::ty_rptr(_, mt) => match mt.ty.sty {
|
||||
ty::ty_vec(_, Some(u)) => {
|
||||
@ -494,12 +494,12 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
_ => cx.sess().span_bug(base.span,
|
||||
&format!("index-expr base must be a vector \
|
||||
or string type, found {}",
|
||||
ty_to_string(cx.tcx(), bt))[])
|
||||
ty_to_string(cx.tcx(), bt)))
|
||||
},
|
||||
_ => cx.sess().span_bug(base.span,
|
||||
&format!("index-expr base must be a vector \
|
||||
or string type, found {}",
|
||||
ty_to_string(cx.tcx(), bt))[])
|
||||
ty_to_string(cx.tcx(), bt)))
|
||||
};
|
||||
|
||||
let len = llvm::LLVMConstIntGetZExtValue(len) as u64;
|
||||
|
@ -378,7 +378,7 @@ impl<'tcx> LocalCrateContext<'tcx> {
|
||||
.target
|
||||
.target
|
||||
.data_layout
|
||||
[]);
|
||||
);
|
||||
|
||||
let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo {
|
||||
Some(debuginfo::CrateDebugContext::new(llmod))
|
||||
@ -731,7 +731,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
|
||||
/// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable
|
||||
/// address space on 64-bit ARMv8 and x86_64.
|
||||
pub fn obj_size_bound(&self) -> u64 {
|
||||
match &self.sess().target.target.target_pointer_width[] {
|
||||
match &self.sess().target.target.target_pointer_width[..] {
|
||||
"32" => 1 << 31,
|
||||
"64" => 1 << 47,
|
||||
_ => unreachable!() // error handled by config::build_target_config
|
||||
@ -741,7 +741,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
|
||||
pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! {
|
||||
self.sess().fatal(
|
||||
&format!("the type `{}` is too big for the current architecture",
|
||||
obj.repr(self.tcx()))[])
|
||||
obj.repr(self.tcx())))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -41,7 +41,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
|
||||
debug!("trans_stmt({})", s.repr(cx.tcx()));
|
||||
|
||||
if cx.sess().asm_comments() {
|
||||
add_span_comment(cx, s.span, &s.repr(cx.tcx())[]);
|
||||
add_span_comment(cx, s.span, &s.repr(cx.tcx()));
|
||||
}
|
||||
|
||||
let mut bcx = cx;
|
||||
@ -310,7 +310,7 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
Some(&def::DefLabel(loop_id)) => loop_id,
|
||||
ref r => {
|
||||
bcx.tcx().sess.bug(&format!("{:?} in def-map for label",
|
||||
r)[])
|
||||
r))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -375,7 +375,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
let v_str = C_str_slice(ccx, fail_str);
|
||||
let loc = bcx.sess().codemap().lookup_char_pos(call_info.span.lo);
|
||||
let filename = token::intern_and_get_ident(&loc.file.name[]);
|
||||
let filename = token::intern_and_get_ident(&loc.file.name);
|
||||
let filename = C_str_slice(ccx, filename);
|
||||
let line = C_uint(ccx, loc.line);
|
||||
let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false);
|
||||
@ -402,7 +402,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
// Extract the file/line from the span
|
||||
let loc = bcx.sess().codemap().lookup_char_pos(call_info.span.lo);
|
||||
let filename = token::intern_and_get_ident(&loc.file.name[]);
|
||||
let filename = token::intern_and_get_ident(&loc.file.name);
|
||||
|
||||
// Invoke the lang item
|
||||
let filename = C_str_slice(ccx, filename);
|
||||
|
@ -557,7 +557,7 @@ impl<'tcx> Datum<'tcx, Lvalue> {
|
||||
}
|
||||
_ => bcx.tcx().sess.bug(
|
||||
&format!("Unexpected unsized type in get_element: {}",
|
||||
bcx.ty_to_string(self.ty))[])
|
||||
bcx.ty_to_string(self.ty)))
|
||||
};
|
||||
Datum {
|
||||
val: val,
|
||||
|
@ -286,7 +286,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
metadata: DIType) {
|
||||
if self.type_to_metadata.insert(type_, metadata).is_some() {
|
||||
cx.sess().bug(&format!("Type metadata for Ty '{}' is already in the TypeMap!",
|
||||
ppaux::ty_to_string(cx.tcx(), type_))[]);
|
||||
ppaux::ty_to_string(cx.tcx(), type_)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -299,7 +299,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
|
||||
let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id);
|
||||
cx.sess().bug(&format!("Type metadata for unique id '{}' is already in the TypeMap!",
|
||||
&unique_type_id_str[..])[]);
|
||||
&unique_type_id_str[..]));
|
||||
}
|
||||
}
|
||||
|
||||
@ -412,7 +412,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
ty::ty_vec(inner_type, optional_length) => {
|
||||
match optional_length {
|
||||
Some(len) => {
|
||||
unique_type_id.push_str(&format!("[{}]", len)[]);
|
||||
unique_type_id.push_str(&format!("[{}]", len));
|
||||
}
|
||||
None => {
|
||||
unique_type_id.push_str("[]");
|
||||
@ -481,8 +481,8 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
},
|
||||
_ => {
|
||||
cx.sess().bug(&format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}",
|
||||
&ppaux::ty_to_string(cx.tcx(), type_)[],
|
||||
type_.sty)[])
|
||||
&ppaux::ty_to_string(cx.tcx(), type_),
|
||||
type_.sty))
|
||||
}
|
||||
};
|
||||
|
||||
@ -525,7 +525,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
|
||||
output.push_str(crate_hash.as_str());
|
||||
output.push_str("/");
|
||||
output.push_str(&format!("{:x}", def_id.node)[]);
|
||||
output.push_str(&format!("{:x}", def_id.node));
|
||||
|
||||
// Maybe check that there is no self type here.
|
||||
|
||||
@ -600,7 +600,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
-> UniqueTypeId {
|
||||
let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type);
|
||||
let enum_variant_type_id = format!("{}::{}",
|
||||
&self.get_unique_type_id_as_string(enum_type_id)[],
|
||||
&self.get_unique_type_id_as_string(enum_type_id),
|
||||
variant_name);
|
||||
let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id));
|
||||
UniqueTypeId(interner_key)
|
||||
@ -783,19 +783,19 @@ pub fn create_global_var_metadata(cx: &CrateContext,
|
||||
create_global_var_metadata() -
|
||||
Captured var-id refers to \
|
||||
unexpected ast_item variant: {:?}",
|
||||
var_item)[])
|
||||
var_item))
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => cx.sess().bug(&format!("debuginfo::create_global_var_metadata() \
|
||||
- Captured var-id refers to unexpected \
|
||||
ast_map variant: {:?}",
|
||||
var_item)[])
|
||||
var_item))
|
||||
};
|
||||
|
||||
let (file_metadata, line_number) = if span != codemap::DUMMY_SP {
|
||||
let loc = span_start(cx, span);
|
||||
(file_metadata(cx, &loc.file.name[]), loc.line as c_uint)
|
||||
(file_metadata(cx, &loc.file.name), loc.line as c_uint)
|
||||
} else {
|
||||
(UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER)
|
||||
};
|
||||
@ -847,7 +847,7 @@ pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) {
|
||||
None => {
|
||||
bcx.sess().span_bug(span,
|
||||
&format!("no entry in lllocals table for {}",
|
||||
node_id)[]);
|
||||
node_id));
|
||||
}
|
||||
};
|
||||
|
||||
@ -903,7 +903,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
"debuginfo::create_captured_var_metadata() - \
|
||||
Captured var-id refers to unexpected \
|
||||
ast_map variant: {:?}",
|
||||
ast_item)[]);
|
||||
ast_item));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -913,7 +913,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
&format!("debuginfo::create_captured_var_metadata() - \
|
||||
Captured var-id refers to unexpected \
|
||||
ast_map variant: {:?}",
|
||||
ast_item)[]);
|
||||
ast_item));
|
||||
}
|
||||
};
|
||||
|
||||
@ -1025,7 +1025,7 @@ pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) {
|
||||
None => {
|
||||
bcx.sess().span_bug(span,
|
||||
&format!("no entry in lllocals table for {}",
|
||||
node_id)[]);
|
||||
node_id));
|
||||
}
|
||||
};
|
||||
|
||||
@ -1319,7 +1319,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
cx.sess()
|
||||
.bug(&format!("create_function_debug_context: \
|
||||
unexpected sort of node: {:?}",
|
||||
fnitem)[])
|
||||
fnitem))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1330,7 +1330,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
_ => cx.sess().bug(&format!("create_function_debug_context: \
|
||||
unexpected sort of node: {:?}",
|
||||
fnitem)[])
|
||||
fnitem))
|
||||
};
|
||||
|
||||
// This can be the case for functions inlined from another crate
|
||||
@ -1339,7 +1339,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
|
||||
let loc = span_start(cx, span);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name[]);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name);
|
||||
|
||||
let function_type_metadata = unsafe {
|
||||
let fn_signature = get_function_signature(cx,
|
||||
@ -1751,7 +1751,7 @@ fn scope_metadata(fcx: &FunctionContext,
|
||||
|
||||
fcx.ccx.sess().span_bug(error_reporting_span,
|
||||
&format!("debuginfo: Could not find scope info for node {:?}",
|
||||
node)[]);
|
||||
node));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1947,7 +1947,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> {
|
||||
cx.sess().bug(&format!("Forward declaration of potentially recursive type \
|
||||
'{}' was not found in TypeMap!",
|
||||
ppaux::ty_to_string(cx.tcx(), unfinished_type))
|
||||
[]);
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2370,7 +2370,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
.iter()
|
||||
.map(|&t| type_of::type_of(cx, t))
|
||||
.collect::<Vec<_>>()
|
||||
[],
|
||||
,
|
||||
struct_def.packed);
|
||||
// Could do some consistency checks here: size, align, field count, discr type
|
||||
|
||||
@ -2437,7 +2437,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
|
||||
let loc = span_start(cx, definition_span);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name[]);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name);
|
||||
|
||||
let variants = ty::enum_variants(cx.tcx(), enum_def_id);
|
||||
|
||||
@ -2624,7 +2624,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
|
||||
Please use a rustc built with a newer \
|
||||
version of LLVM.",
|
||||
llvm_version_major,
|
||||
llvm_version_minor)[]);
|
||||
llvm_version_minor));
|
||||
} else {
|
||||
cx.sess().bug("debuginfo::set_members_of_composite_type() - \
|
||||
Already completed forward declaration re-encountered.");
|
||||
@ -2786,7 +2786,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
assert!(member_descriptions.len() == member_llvm_types.len());
|
||||
|
||||
let loc = span_start(cx, span);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name[]);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name);
|
||||
|
||||
let metadata = composite_type_metadata(cx,
|
||||
slice_llvm_type,
|
||||
@ -2865,7 +2865,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type);
|
||||
cx.sess().bug(&format!("debuginfo: Unexpected trait-object type in \
|
||||
trait_pointer_metadata(): {}",
|
||||
&pp_type_name[..])[]);
|
||||
&pp_type_name[..]));
|
||||
}
|
||||
};
|
||||
|
||||
@ -3005,7 +3005,7 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
_ => {
|
||||
cx.sess().bug(&format!("debuginfo: unexpected type in type_metadata: {:?}",
|
||||
sty)[])
|
||||
sty))
|
||||
}
|
||||
};
|
||||
|
||||
@ -3248,7 +3248,7 @@ fn create_scope_map(cx: &CrateContext,
|
||||
{
|
||||
// Create a new lexical scope and push it onto the stack
|
||||
let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name[]);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name);
|
||||
let parent_scope = scope_stack.last().unwrap().scope_metadata;
|
||||
|
||||
let scope_metadata = unsafe {
|
||||
@ -3370,7 +3370,7 @@ fn create_scope_map(cx: &CrateContext,
|
||||
if need_new_scope {
|
||||
// Create a new lexical scope and push it onto the stack
|
||||
let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name[]);
|
||||
let file_metadata = file_metadata(cx, &loc.file.name);
|
||||
let parent_scope = scope_stack.last().unwrap().scope_metadata;
|
||||
|
||||
let scope_metadata = unsafe {
|
||||
@ -3832,7 +3832,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
ty::ty_projection(..) |
|
||||
ty::ty_param(_) => {
|
||||
cx.sess().bug(&format!("debuginfo: Trying to create type name for \
|
||||
unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t))[]);
|
||||
unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -3915,13 +3915,13 @@ impl NamespaceTreeNode {
|
||||
None => {}
|
||||
}
|
||||
let string = token::get_name(node.name);
|
||||
output.push_str(&format!("{}", string.len())[]);
|
||||
output.push_str(&format!("{}", string.len()));
|
||||
output.push_str(&string);
|
||||
}
|
||||
|
||||
let mut name = String::from_str("_ZN");
|
||||
fill_nested(self, &mut name);
|
||||
name.push_str(&format!("{}", item_name.len())[]);
|
||||
name.push_str(&format!("{}", item_name.len()));
|
||||
name.push_str(item_name);
|
||||
name.push('E');
|
||||
name
|
||||
@ -3929,7 +3929,7 @@ impl NamespaceTreeNode {
|
||||
}
|
||||
|
||||
fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str {
|
||||
&cx.link_meta().crate_name[]
|
||||
&cx.link_meta().crate_name
|
||||
}
|
||||
|
||||
fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTreeNode> {
|
||||
@ -4005,7 +4005,7 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTree
|
||||
None => {
|
||||
cx.sess().bug(&format!("debuginfo::namespace_for_item(): \
|
||||
path too short for {:?}",
|
||||
def_id)[]);
|
||||
def_id));
|
||||
}
|
||||
}
|
||||
})
|
||||
|
@ -308,7 +308,7 @@ pub fn unsized_info<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>,
|
||||
unsized_info(ccx, k, id, ty_substs[tp_index], param_substs, identity)
|
||||
}
|
||||
_ => ccx.sess().bug(&format!("UnsizeStruct with bad sty: {}",
|
||||
unadjusted_ty.repr(ccx.tcx()))[])
|
||||
unadjusted_ty.repr(ccx.tcx())))
|
||||
},
|
||||
&ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
|
||||
// Note that we preserve binding levels here:
|
||||
@ -524,7 +524,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
let unboxed_ty = match datum_ty.sty {
|
||||
ty::ty_uniq(t) => t,
|
||||
_ => bcx.sess().bug(&format!("Expected ty_uniq, found {}",
|
||||
bcx.ty_to_string(datum_ty))[])
|
||||
bcx.ty_to_string(datum_ty)))
|
||||
};
|
||||
let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span));
|
||||
|
||||
@ -696,7 +696,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
expr.span,
|
||||
&format!("trans_rvalue_datum_unadjusted reached \
|
||||
fall-through case: {:?}",
|
||||
expr.node)[]);
|
||||
expr.node));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1020,7 +1020,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
expr.span,
|
||||
&format!("trans_rvalue_stmt_unadjusted reached \
|
||||
fall-through case: {:?}",
|
||||
expr.node)[]);
|
||||
expr.node));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1216,7 +1216,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
expr.span,
|
||||
&format!("trans_rvalue_dps_unadjusted reached fall-through \
|
||||
case: {:?}",
|
||||
expr.node)[]);
|
||||
expr.node));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1266,7 +1266,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
_ => {
|
||||
bcx.tcx().sess.span_bug(ref_expr.span, &format!(
|
||||
"Non-DPS def {:?} referened by {}",
|
||||
def, bcx.node_id_to_string(ref_expr.id))[]);
|
||||
def, bcx.node_id_to_string(ref_expr.id)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1295,7 +1295,7 @@ pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
ccx.tcx().sess.span_bug(ref_expr.span, &format!(
|
||||
"trans_def_fn_unadjusted invoked on: {:?} for {}",
|
||||
def,
|
||||
ref_expr.repr(ccx.tcx()))[]);
|
||||
ref_expr.repr(ccx.tcx())));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1315,7 +1315,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
None => {
bcx.sess().bug(&format!(
"trans_local_var: no llval for upvar {} found",
nid)[]);
nid));
}
}
}
@ -1325,7 +1325,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
None => {
bcx.sess().bug(&format!(
"trans_local_var: no datum for local/arg {} found",
nid)[]);
nid));
}
};
debug!("take_local(nid={}, v={}, ty={})",
@ -1335,7 +1335,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
_ => {
bcx.sess().unimpl(&format!(
"unsupported def type in trans_local_var: {:?}",
def)[]);
def));
}
}
}
@ -1358,7 +1358,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
}

ty::ty_tup(ref v) => {
op(0, &tup_fields(&v[..])[])
op(0, &tup_fields(&v[..]))
}

ty::ty_enum(_, substs) => {
@ -1368,7 +1368,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
tcx.sess.bug(&format!(
"cannot get field types from the enum type {} \
without a node ID",
ty.repr(tcx))[]);
ty.repr(tcx)));
}
Some(node_id) => {
let def = tcx.def_map.borrow()[node_id].clone();
@ -1392,7 +1392,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
_ => {
tcx.sess.bug(&format!(
"cannot get field types from the type {}",
ty.repr(tcx))[]);
ty.repr(tcx)));
}
}
}
@ -2097,7 +2097,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t_in.repr(bcx.tcx()),
k_in,
t_out.repr(bcx.tcx()),
k_out)[])
k_out))
}
}
}
@ -2106,7 +2106,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t_in.repr(bcx.tcx()),
k_in,
t_out.repr(bcx.tcx()),
k_out)[])
k_out))
};
return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
}
@ -2272,7 +2272,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
bcx.tcx().sess.span_bug(
expr.span,
&format!("deref invoked on expr of illegal type {}",
datum.ty.repr(bcx.tcx()))[]);
datum.ty.repr(bcx.tcx())));
}
};

@ -111,7 +111,7 @@ pub fn register_static(ccx: &CrateContext,
let llty = type_of::type_of(ccx, ty);

let ident = link_name(foreign_item);
match attr::first_attr_value_str_by_name(&foreign_item.attrs[],
match attr::first_attr_value_str_by_name(&foreign_item.attrs,
"linkage") {
// If this is a static with a linkage specified, then we need to handle
// it a little specially. The typesystem prevents things like &T and
@ -240,11 +240,11 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);
let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[..]);
let fn_type = cabi::compute_abi_info(ccx,
&llsig.llarg_tys[],
&llsig.llarg_tys,
llsig.llret_ty,
llsig.ret_def);

let arg_tys: &[cabi::ArgType] = &fn_type.arg_tys[];
let arg_tys: &[cabi::ArgType] = &fn_type.arg_tys;

let mut llargs_foreign = Vec::new();
@ -439,7 +439,7 @@ fn gate_simd_ffi(tcx: &ty::ctxt, decl: &ast::FnDecl, ty: &ty::BareFnTy) {
tcx.sess.span_err(ast_ty.span,
&format!("use of SIMD type `{}` in FFI is highly experimental and \
may result in invalid code",
pprust::ty_to_string(ast_ty))[]);
pprust::ty_to_string(ast_ty)));
tcx.sess.span_help(ast_ty.span,
"add #![feature(simd_ffi)] to the crate attributes to enable");
}
@ -603,7 +603,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
ccx.sess().bug(&format!("build_rust_fn: extern fn {} has ty {}, \
expected a bare fn ty",
ccx.tcx().map.path_to_string(id),
t.repr(tcx))[]);
t.repr(tcx)));
}
};

@ -868,9 +868,9 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
// the massive simplifications that have occurred.

pub fn link_name(i: &ast::ForeignItem) -> InternedString {
match attr::first_attr_value_str_by_name(&i.attrs[], "link_name") {
match attr::first_attr_value_str_by_name(&i.attrs, "link_name") {
Some(ln) => ln.clone(),
None => match weak_lang_items::link_name(&i.attrs[]) {
None => match weak_lang_items::link_name(&i.attrs) {
Some(name) => name,
None => token::get_ident(i.ident),
}
@ -913,7 +913,7 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);
let llsig = foreign_signature(ccx, &fn_sig, &fn_sig.inputs);
let fn_ty = cabi::compute_abi_info(ccx,
&llsig.llarg_tys[],
&llsig.llarg_tys,
llsig.llret_ty,
llsig.ret_def);
debug!("foreign_types_for_fn_ty(\
@ -922,7 +922,7 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
fn_ty={} -> {}, \
ret_def={}",
ty.repr(ccx.tcx()),
ccx.tn().types_to_str(&llsig.llarg_tys[]),
ccx.tn().types_to_str(&llsig.llarg_tys),
ccx.tn().type_to_string(llsig.llret_ty),
ccx.tn().types_to_str(&fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>()),
ccx.tn().type_to_string(fn_ty.ret_ty.ty),

@ -243,7 +243,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
sig.inputs[0]
}
_ => bcx.sess().bug(&format!("Expected function type, found {}",
bcx.ty_to_string(fty))[])
bcx.ty_to_string(fty)))
};

let (struct_data, info) = if type_is_sized(bcx.tcx(), t) {
@ -370,7 +370,7 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info:
C_uint(bcx.ccx(), unit_align))
}
_ => bcx.sess().bug(&format!("Unexpected unsized type, found {}",
bcx.ty_to_string(t))[])
bcx.ty_to_string(t)))
}
}

@ -443,7 +443,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
bcx.sess().warn(&format!("Ignoring drop flag in destructor for {}\
because the struct is unsized. See issue\
#16758",
bcx.ty_to_string(t))[]);
bcx.ty_to_string(t)));
trans_struct_drop(bcx, t, v0, dtor, did, substs)
}
}
@ -521,7 +521,7 @@ pub fn declare_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>)
note_unique_llvm_symbol(ccx, name);

let ty_name = token::intern_and_get_ident(
&ppaux::ty_to_string(ccx.tcx(), t)[]);
&ppaux::ty_to_string(ccx.tcx(), t));
let ty_name = C_str_slice(ccx, ty_name);

debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t));
@ -540,7 +540,7 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>,
let fn_nm = mangle_internal_name_by_type_and_seq(
ccx,
t,
&format!("glue_{}", name)[]);
&format!("glue_{}", name));
let llfn = decl_cdecl_fn(ccx, &fn_nm[..], llfnty, ty::mk_nil(ccx.tcx()));
note_unique_llvm_symbol(ccx, fn_nm.clone());
return (fn_nm, llfn);
@ -36,7 +36,7 @@ use syntax::parse::token;
use util::ppaux::{Repr, ty_to_string};

pub fn get_simple_intrinsic(ccx: &CrateContext, item: &ast::ForeignItem) -> Option<ValueRef> {
let name = match &token::get_ident(item.ident)[] {
let name = match &token::get_ident(item.ident)[..] {
"sqrtf32" => "llvm.sqrt.f32",
"sqrtf64" => "llvm.sqrt.f64",
"powif32" => "llvm.powi.f32",

@ -79,7 +79,7 @@ pub fn trans_impl(ccx: &CrateContext,
match *impl_item {
ast::MethodImplItem(ref method) => {
if method.pe_generics().ty_params.len() == 0 {
let trans_everywhere = attr::requests_inline(&method.attrs[]);
let trans_everywhere = attr::requests_inline(&method.attrs);
for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
let llfn = get_item_val(ccx, method.id);
let empty_substs = tcx.mk_substs(Substs::trans_empty());
@ -305,7 +305,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
_ => {
tcx.sess.bug(&format!("static call to invalid vtable: {}",
vtbl.repr(tcx))[]);
vtbl.repr(tcx)));
}
}
}
@ -393,7 +393,7 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
traits::VtableParam(..) => {
bcx.sess().bug(
&format!("resolved vtable bad vtable {} in trans",
vtable.repr(bcx.tcx()))[]);
vtable.repr(bcx.tcx())));
}
}
}
@ -749,7 +749,7 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
tcx.sess.bug(
&format!("resolved vtable for {} to bad vtable {} in trans",
trait_ref.repr(tcx),
vtable.repr(tcx))[]);
vtable.repr(tcx)));
}
}
});
@ -177,7 +177,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
..
} => {
let d = mk_lldecl(abi);
let needs_body = setup_lldecl(d, &i.attrs[]);
let needs_body = setup_lldecl(d, &i.attrs);
if needs_body {
if abi != abi::Rust {
foreign::trans_rust_fn_with_foreign_abi(
@ -220,7 +220,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
match *ii {
ast::MethodImplItem(ref mth) => {
let d = mk_lldecl(abi::Rust);
let needs_body = setup_lldecl(d, &mth.attrs[]);
let needs_body = setup_lldecl(d, &mth.attrs);
if needs_body {
trans_fn(ccx,
mth.pe_fn_decl(),
@ -241,7 +241,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
match *method {
ast::ProvidedMethod(ref mth) => {
let d = mk_lldecl(abi::Rust);
let needs_body = setup_lldecl(d, &mth.attrs[]);
let needs_body = setup_lldecl(d, &mth.attrs);
if needs_body {
trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d,
psubsts, mth.id, &[]);
@ -250,7 +250,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
_ => {
ccx.sess().bug(&format!("can't monomorphize a {:?}",
map_node)[])
map_node))
}
}
}
@ -258,7 +258,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let d = mk_lldecl(abi::Rust);
set_inline_hint(d);
base::trans_tuple_struct(ccx,
&struct_def.fields[],
&struct_def.fields,
struct_def.ctor_id.expect("ast-mapped tuple struct \
didn't have a ctor id"),
psubsts,
@ -276,7 +276,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
ast_map::NodePat(..) |
ast_map::NodeLocal(..) => {
ccx.sess().bug(&format!("can't monomorphize a {:?}",
map_node)[])
map_node))
}
};
@ -109,7 +109,7 @@ impl Type {
}

pub fn int(ccx: &CrateContext) -> Type {
match &ccx.tcx().sess.target.target.target_pointer_width[] {
match &ccx.tcx().sess.target.target.target_pointer_width[..] {
"32" => Type::i32(ccx),
"64" => Type::i64(ccx),
tws => panic!("Unsupported target word size for int: {}", tws),

@ -185,7 +185,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ
let llsizingty = match t.sty {
_ if !lltype_is_sized(cx.tcx(), t) => {
cx.sess().bug(&format!("trying to take the sizing type of {}, an unsized type",
ppaux::ty_to_string(cx.tcx(), t))[])
ppaux::ty_to_string(cx.tcx(), t)))
}

ty::ty_bool => Type::bool(cx),
@ -238,7 +238,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ

ty::ty_projection(..) | ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => {
cx.sess().bug(&format!("fictitious type {} in sizing_type_of()",
ppaux::ty_to_string(cx.tcx(), t))[])
ppaux::ty_to_string(cx.tcx(), t)))
}
ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => panic!("unreachable")
};
@ -418,7 +418,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
}
ty::ty_trait(..) => Type::opaque_trait(cx),
_ => cx.sess().bug(&format!("ty_open with sized type: {}",
ppaux::ty_to_string(cx.tcx(), t))[])
ppaux::ty_to_string(cx.tcx(), t)))
},

ty::ty_infer(..) => cx.sess().bug("type_of with ty_infer"),
@ -195,7 +195,7 @@ pub fn opt_ast_region_to_region<'tcx>(
help_name
} else {
format!("one of {}'s {} elided lifetimes", help_name, n)
})[]);
})[..]);

if len == 2 && i == 0 {
m.push_str(" or ");
@ -876,7 +876,7 @@ pub fn ast_ty_to_builtin_ty<'tcx>(
.sess
.span_bug(ast_ty.span,
&format!("unbound path {}",
path.repr(this.tcx()))[])
path.repr(this.tcx())))
}
Some(&d) => d
};
@ -898,7 +898,7 @@ pub fn ast_ty_to_builtin_ty<'tcx>(
this.tcx().sess.span_bug(
path.span,
&format!("converting `Box` to `{}`",
ty.repr(this.tcx()))[]);
ty.repr(this.tcx())));
}
}
}
@ -1206,7 +1206,7 @@ pub fn ast_ty_to_ty<'tcx>(this: &AstConv<'tcx>,
tcx.sess
.span_bug(ast_ty.span,
&format!("unbound path {}",
path.repr(tcx))[])
path.repr(tcx)))
}
Some(&d) => d
};
@ -1419,7 +1419,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>,
let input_params = if self_ty.is_some() {
&decl.inputs[1..]
} else {
&decl.inputs[]
&decl.inputs[..]
};
let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None));
let input_pats: Vec<String> = input_params.iter()
@ -165,7 +165,7 @@ impl<'a, 'tcx> Implicator<'a, 'tcx> {
ty::ty_open(_) => {
self.tcx().sess.bug(
&format!("Unexpected type encountered while doing wf check: {}",
ty.repr(self.tcx()))[]);
ty.repr(self.tcx())));
}
}
}

@ -331,7 +331,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> {
self.tcx().sess.span_bug(
self.span,
&format!("self-type `{}` for ObjectPick never dereferenced to an object",
self_ty.repr(self.tcx()))[])
self_ty.repr(self.tcx())))
}
}
}
@ -386,7 +386,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> {
&format!(
"{} was a subtype of {} but now is not?",
self_ty.repr(self.tcx()),
method_self_ty.repr(self.tcx()))[]);
method_self_ty.repr(self.tcx())));
}
}
}
@ -272,7 +272,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span,
&format!(
"trait method is &self but first arg is: {}",
transformed_self_ty.repr(fcx.tcx()))[]);
transformed_self_ty.repr(fcx.tcx())));
}
}
}
@ -282,7 +282,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span,
&format!(
"unexpected explicit self type in operator method: {:?}",
method_ty.explicit_self)[]);
method_ty.explicit_self));
}
}
}

@ -878,7 +878,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> {
debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty));

debug!("searching inherent candidates");
match self.consider_candidates(self_ty, &self.inherent_candidates[]) {
match self.consider_candidates(self_ty, &self.inherent_candidates) {
None => {}
Some(pick) => {
return Some(pick);
@ -886,7 +886,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> {
}

debug!("searching extension candidates");
self.consider_candidates(self_ty, &self.extension_candidates[])
self.consider_candidates(self_ty, &self.extension_candidates)
}

fn consider_candidates(&self,
@ -71,7 +71,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
if is_field {
cx.sess.span_note(span,
&format!("use `(s.{0})(...)` if you meant to call the \
function stored in the `{0}` field", method_ustring)[]);
function stored in the `{0}` field", method_ustring));
}

if static_sources.len() > 0 {

@ -615,7 +615,7 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
let tcx = ccx.tcx;
let err_count_on_creation = tcx.sess.err_count();

let arg_tys = &fn_sig.inputs[];
let arg_tys = &fn_sig.inputs;
let ret_ty = fn_sig.output;

debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})",
@ -713,7 +713,7 @@ pub fn check_item<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx ast::Item) {
ast::ItemEnum(ref enum_definition, _) => {
check_enum_variants(ccx,
it.span,
&enum_definition.variants[],
&enum_definition.variants,
it.id);
}
ast::ItemFn(ref decl, _, _, _, ref body) => {
@ -1334,7 +1334,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.tcx().sess.span_bug(
span,
&format!("no type for local variable {}",
nid)[]);
nid));
}
}
}
@ -1707,7 +1707,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Some(&t) => t,
None => {
self.tcx().sess.bug(&format!("no type for expr in fcx {}",
self.tag())[]);
self.tag()));
}
}
}
@ -1739,7 +1739,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.tcx().sess.bug(
&format!("no type for node {}: {} in fcx {}",
id, self.tcx().map.node_to_string(id),
self.tag())[]);
self.tag()));
}
}
}
@ -2275,7 +2275,7 @@ fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
if arg_types.len() == 1 {""} else {"s"},
args.len(),
if args.len() == 1 {" was"} else {"s were"});
expected_arg_tys = &[][];
expected_arg_tys = &[];
err_args(fcx.tcx(), args.len())
} else {
expected_arg_tys = match expected_arg_tys.get(0) {
@ -2292,7 +2292,7 @@ fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span_err!(tcx.sess, sp, E0059,
"cannot use call notation; the first type parameter \
for the function trait is neither a tuple nor unit");
expected_arg_tys = &[][];
expected_arg_tys = &[];
err_args(fcx.tcx(), args.len())
}
}
@ -2309,7 +2309,7 @@ fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
if expected_arg_count == 1 {""} else {"s"},
supplied_arg_count,
if supplied_arg_count == 1 {" was"} else {"s were"});
expected_arg_tys = &[][];
expected_arg_tys = &[];
err_args(fcx.tcx(), supplied_arg_count)
}
} else {
@ -2319,7 +2319,7 @@ fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
if expected_arg_count == 1 {""} else {"s"},
supplied_arg_count,
if supplied_arg_count == 1 {" was"} else {"s were"});
expected_arg_tys = &[][];
expected_arg_tys = &[];
err_args(fcx.tcx(), supplied_arg_count)
};

@ -2809,7 +2809,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
};
let args = match rhs {
Some(rhs) => slice::ref_slice(rhs),
None => &[][]
None => &[][..]
};
match method {
Some(method) => {
@ -4584,7 +4584,7 @@ pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
}

let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id })
[].get(0).unwrap_or(&attr::ReprAny);
.get(0).unwrap_or(&attr::ReprAny);

if hint != attr::ReprAny && vs.len() <= 1 {
if vs.len() == 1 {
@ -188,7 +188,7 @@ fn region_of_def(fcx: &FnCtxt, def: def::Def) -> ty::Region {
}
_ => {
tcx.sess.bug(&format!("unexpected def in region_of_def: {:?}",
def)[])
def))
}
}
}
@ -288,7 +288,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> {
Some(f) => f,
None => {
self.tcx().sess.bug(
&format!("No fn-sig entry for id={}", id)[]);
&format!("No fn-sig entry for id={}", id));
}
};

@ -1013,7 +1013,7 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
rcx.tcx().sess.span_bug(
deref_expr.span,
&format!("bad overloaded deref type {}",
method.ty.repr(rcx.tcx()))[])
method.ty.repr(rcx.tcx())))
}
};

@ -1417,7 +1417,7 @@ fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
rcx.tcx().sess.span_bug(
span,
&format!("Illegal upvar id: {}",
upvar_id.repr(rcx.tcx()))[]);
upvar_id.repr(rcx.tcx())));
}
}
}
@ -1562,7 +1562,7 @@ fn generic_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
GenericKind::Param(..) => { }
GenericKind::Projection(ref projection_ty) => {
param_bounds.push_all(
&projection_bounds(rcx, origin.span(), projection_ty)[]);
&projection_bounds(rcx, origin.span(), projection_ty));
}
}
@ -393,7 +393,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
self.tcx().sess.span_err(
span,
&format!("parameter `{}` is never used",
param_name.user_string(self.tcx()))[]);
param_name.user_string(self.tcx())));

match suggested_marker_id {
Some(def_id) => {

@ -86,7 +86,7 @@ fn get_base_type_def_id<'a, 'tcx>(inference_context: &InferCtxt<'a, 'tcx>,
inference_context.tcx.sess.span_bug(
span,
&format!("coherence encountered unexpected type searching for base type: {}",
ty.repr(inference_context.tcx))[]);
ty.repr(inference_context.tcx)));
}
}
}

@ -899,7 +899,7 @@ fn get_trait_def<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item),
_ => {
tcx.sess.bug(&format!("get_trait_def({}): not an item",
trait_id.node)[])
trait_id.node))
}
}
}
@ -925,7 +925,7 @@ fn trait_def_of_item<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
ref s => {
tcx.sess.span_bug(
it.span,
&format!("trait_def_of_item invoked on {:?}", s)[]);
&format!("trait_def_of_item invoked on {:?}", s));
}
};

@ -1025,7 +1025,7 @@ fn convert_trait_predicates<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, it: &ast::Ite
ref s => {
tcx.sess.span_bug(
it.span,
&format!("trait_def_of_item invoked on {:?}", s)[]);
&format!("trait_def_of_item invoked on {:?}", s));
}
};

@ -1284,8 +1284,8 @@ fn ty_generics_for_type_or_impl<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
-> ty::Generics<'tcx> {
ty_generics(ccx,
subst::TypeSpace,
&generics.lifetimes[],
&generics.ty_params[],
&generics.lifetimes,
&generics.ty_params,
&generics.where_clause,
ty::Generics::empty())
}
@ -1314,8 +1314,8 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
let mut generics =
ty_generics(ccx,
subst::TypeSpace,
&ast_generics.lifetimes[],
&ast_generics.ty_params[],
&ast_generics.lifetimes,
&ast_generics.ty_params,
&ast_generics.where_clause,
ty::Generics::empty());
@ -1360,7 +1360,7 @@ fn ty_generics_for_fn_or_method<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>,
ty_generics(ccx,
subst::FnSpace,
&early_lifetimes[..],
&generics.ty_params[],
&generics.ty_params,
&generics.where_clause,
base_generics)
}
@ -1557,7 +1557,7 @@ fn get_or_create_type_parameter_def<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>,
let param_ty = ty::ParamTy::new(space, index, param.ident.name);
let bounds = compute_bounds(ccx,
param_ty.to_ty(ccx.tcx),
&param.bounds[],
&param.bounds,
SizedByDefault::Yes,
param.span);
let default = match param.default {
@ -1733,7 +1733,7 @@ fn check_bounds_compatible<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>,
if !param_bounds.builtin_bounds.contains(&ty::BoundSized) {
ty::each_bound_trait_and_supertraits(
ccx.tcx,
&param_bounds.trait_bounds[],
&param_bounds.trait_bounds,
|trait_ref| {
let trait_def = ccx.get_trait_def(trait_ref.def_id());
if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) {

@ -253,7 +253,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
&format!("main has a non-function type: found \
`{}`",
ppaux::ty_to_string(tcx,
main_t))[]);
main_t)));
}
}
}
@ -304,7 +304,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
tcx.sess.span_bug(start_span,
&format!("start has a non-function type: found \
`{}`",
ppaux::ty_to_string(tcx, start_t))[]);
ppaux::ty_to_string(tcx, start_t)));
}
}
}
@ -595,7 +595,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> {
let trait_def = ty::lookup_trait_def(tcx, did);
let predicates = ty::predicates(tcx, ty::mk_self_type(tcx), &trait_def.bounds);
self.add_constraints_from_predicates(&trait_def.generics,
&predicates[],
&predicates,
self.covariant);

let trait_items = ty::trait_items(tcx, did);
@ -652,7 +652,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
None => {
self.tcx().sess.bug(&format!(
"no inferred index entry for {}",
self.tcx().map.node_to_string(param_id))[]);
self.tcx().map.node_to_string(param_id)));
}
}
}
@ -941,7 +941,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
self.tcx().sess.bug(
&format!("unexpected type encountered in \
variance inference: {}",
ty.repr(self.tcx()))[]);
ty.repr(self.tcx())));
}
}
}
@ -1071,7 +1071,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
.sess
.bug(&format!("unexpected region encountered in variance \
inference: {}",
region.repr(self.tcx()))[]);
region.repr(self.tcx())));
}
}
}
@ -2169,7 +2169,7 @@ impl Clean<Vec<Item>> for doctree::Import {
// forcefully don't inline if this is not public or if the
// #[doc(no_inline)] attribute is present.
let denied = self.vis != ast::Public || self.attrs.iter().any(|a| {
&a.name()[] == "doc" && match a.meta_item_list() {
&a.name()[..] == "doc" && match a.meta_item_list() {
Some(l) => attr::contains_name(l, "no_inline"),
None => false,
}

@ -142,7 +142,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,

// keywords are also included in the identifier set
token::Ident(ident, _is_mod_sep) => {
match &token::get_ident(ident)[] {
match &token::get_ident(ident)[..] {
"ref" | "mut" => "kw-2",

"self" => "self",

@ -253,7 +253,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
let please_inline = item.attrs.iter().any(|item| {
match item.meta_item_list() {
Some(list) => {
list.iter().any(|i| &i.name()[] == "inline")
list.iter().any(|i| &i.name()[..] == "inline")
}
None => false,
}

@ -113,7 +113,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Some(('=', _)) => None,
Some(('+', operand)) => {
Some(token::intern_and_get_ident(&format!(
"={}", operand)[]))
"={}", operand)))
}
_ => {
cx.span_err(span, "output operand constraint lacks '=' or '+'");
@ -83,15 +83,15 @@ pub enum Annotatable {
impl Annotatable {
pub fn attrs(&self) -> &[ast::Attribute] {
match *self {
Annotatable::Item(ref i) => &i.attrs[],
Annotatable::Item(ref i) => &i.attrs,
Annotatable::TraitItem(ref i) => match *i {
ast::TraitItem::RequiredMethod(ref tm) => &tm.attrs[],
ast::TraitItem::ProvidedMethod(ref m) => &m.attrs[],
ast::TraitItem::TypeTraitItem(ref at) => &at.attrs[],
ast::TraitItem::RequiredMethod(ref tm) => &tm.attrs,
ast::TraitItem::ProvidedMethod(ref m) => &m.attrs,
ast::TraitItem::TypeTraitItem(ref at) => &at.attrs,
},
Annotatable::ImplItem(ref i) => match *i {
ast::ImplItem::MethodImplItem(ref m) => &m.attrs[],
ast::ImplItem::TypeImplItem(ref t) => &t.attrs[],
ast::ImplItem::MethodImplItem(ref m) => &m.attrs,
ast::ImplItem::TypeImplItem(ref t) => &t.attrs,
}
}
}
@ -639,7 +639,7 @@ impl<'a> ExtCtxt<'a> {
pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
pub fn mod_path(&self) -> Vec<ast::Ident> {
let mut v = Vec::new();
v.push(token::str_to_ident(&self.ecfg.crate_name[]));
v.push(token::str_to_ident(&self.ecfg.crate_name));
v.extend(self.mod_path.iter().cloned());
return v;
}
@ -648,7 +648,7 @@ impl<'a> ExtCtxt<'a> {
if self.recursion_count > self.ecfg.recursion_limit {
self.span_fatal(ei.call_site,
&format!("recursion limit reached while expanding the macro `{}`",
ei.callee.name)[]);
ei.callee.name));
}

let mut call_site = ei.call_site;
@ -773,7 +773,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
tts: &[ast::TokenTree],
name: &str) {
if tts.len() != 0 {
cx.span_err(sp, &format!("{} takes no arguments", name)[]);
cx.span_err(sp, &format!("{} takes no arguments", name));
}
}
@ -786,12 +786,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
-> Option<String> {
let mut p = cx.new_parser_from_tts(tts);
if p.token == token::Eof {
cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
cx.span_err(sp, &format!("{} takes 1 argument", name));
return None
}
let ret = cx.expander().fold_expr(p.parse_expr());
if p.token != token::Eof {
cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
cx.span_err(sp, &format!("{} takes 1 argument", name));
}
expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
s.to_string()

@ -762,7 +762,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
let loc = self.codemap().lookup_char_pos(span.lo);
let expr_file = self.expr_str(span,
token::intern_and_get_ident(&loc.file.name[]));
token::intern_and_get_ident(&loc.file.name));
let expr_line = self.expr_usize(span, loc.line);
let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);

@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
ast::LitInt(i, ast::UnsignedIntLit(_)) |
ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) |
ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => {
accumulator.push_str(&format!("{}", i)[]);
accumulator.push_str(&format!("{}", i));
}
ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) |
ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => {
accumulator.push_str(&format!("-{}", i)[]);
accumulator.push_str(&format!("-{}", i));
}
ast::LitBool(b) => {
accumulator.push_str(&format!("{}", b)[]);
accumulator.push_str(&format!("{}", b));
}
ast::LitByte(..) |
ast::LitBinary(..) => {
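// Editor's note (not part of the patch): every hunk above applies the same
// mechanical rewrite that this PR performs: the deprecated full-range index
// syntax `expr[]` is dropped in favour of either a plain borrow `&expr`
// (letting deref coercion produce the slice) or an explicit full-range slice
// `&expr[..]`. A minimal, self-contained sketch of the two replacement forms,
// using an assumed Vec<String> rather than any type from the compiler:
fn takes_slice(names: &[String]) -> usize {
    names.len()
}

fn main() {
    let names = vec!["a".to_string(), "b".to_string()];
    // Explicit full-range slice, matching the `[..]` replacements above.
    let via_slice = takes_slice(&names[..]);
    // Plain borrow relying on deref coercion, matching the `&expr` replacements.
    let via_deref = takes_slice(&names);
    assert_eq!(via_slice, via_deref);
}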