Auto merge of #21505 - GuillaumeGomez:interned_string, r=alexcrichton

This change is made in order to make the code more homogeneous.
This commit is contained in:
bors 2015-02-07 02:04:47 +00:00
commit 7ebf9bc5c2
67 changed files with 295 additions and 300 deletions

View File

@ -699,7 +699,7 @@ impl LintPass for UnusedAttributes {
if !attr::is_used(attr) {
cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute");
if CRATE_ATTRS.contains(&attr.name().get()) {
if CRATE_ATTRS.contains(&&attr.name()[]) {
let msg = match attr.node.style {
ast::AttrOuter => "crate-level attribute should be an inner \
attribute: add an exclamation mark: #![foo]",
@ -801,10 +801,10 @@ impl LintPass for UnusedResults {
None => {}
Some(s) => {
msg.push_str(": ");
msg.push_str(s.get());
msg.push_str(&s);
}
}
cx.span_lint(UNUSED_MUST_USE, sp, &msg[]);
cx.span_lint(UNUSED_MUST_USE, sp, &msg);
return true;
}
}
@ -826,8 +826,8 @@ impl NonCamelCaseTypes {
fn check_case(&self, cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
fn is_camel_case(ident: ast::Ident) -> bool {
let ident = token::get_ident(ident);
if ident.get().is_empty() { return true; }
let ident = ident.get().trim_matches('_');
if ident.is_empty() { return true; }
let ident = ident.trim_matches('_');
// start with a non-lowercase letter rather than non-uppercase
// ones (some scripts don't have a concept of upper/lowercase)
@ -844,7 +844,7 @@ impl NonCamelCaseTypes {
let s = token::get_ident(ident);
if !is_camel_case(ident) {
let c = to_camel_case(s.get());
let c = to_camel_case(&s);
let m = if c.is_empty() {
format!("{} `{}` should have a camel case name such as `CamelCase`", sort, s)
} else {
@ -977,8 +977,8 @@ impl NonSnakeCase {
fn check_snake_case(&self, cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
fn is_snake_case(ident: ast::Ident) -> bool {
let ident = token::get_ident(ident);
if ident.get().is_empty() { return true; }
let ident = ident.get().trim_left_matches('\'');
if ident.is_empty() { return true; }
let ident = ident.trim_left_matches('\'');
let ident = ident.trim_matches('_');
let mut allow_underscore = true;
@ -996,8 +996,8 @@ impl NonSnakeCase {
let s = token::get_ident(ident);
if !is_snake_case(ident) {
let sc = NonSnakeCase::to_snake_case(s.get());
if sc != s.get() {
let sc = NonSnakeCase::to_snake_case(&s);
if sc != &s[] {
cx.span_lint(NON_SNAKE_CASE, span,
&*format!("{} `{}` should have a snake case name such as `{}`",
sort, s, sc));
@ -1077,10 +1077,10 @@ impl NonUpperCaseGlobals {
fn check_upper_case(cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
let s = token::get_ident(ident);
if s.get().chars().any(|c| c.is_lowercase()) {
let uc: String = NonSnakeCase::to_snake_case(s.get()).chars()
if s.chars().any(|c| c.is_lowercase()) {
let uc: String = NonSnakeCase::to_snake_case(&s).chars()
.map(|c| c.to_uppercase()).collect();
if uc != s.get() {
if uc != &s[] {
cx.span_lint(NON_UPPER_CASE_GLOBALS, span,
&format!("{} `{}` should have an upper case name such as `{}`",
sort, s, uc));
@ -1241,7 +1241,7 @@ impl LintPass for UnusedImportBraces {
match items[0].node {
ast::PathListIdent {ref name, ..} => {
let m = format!("braces around {} is unnecessary",
token::get_ident(*name).get());
&token::get_ident(*name));
cx.span_lint(UNUSED_IMPORT_BRACES, item.span,
&m[]);
},
@ -1358,7 +1358,7 @@ impl UnusedMut {
pat_util::pat_bindings(&cx.tcx.def_map, &**p, |mode, id, _, path1| {
let ident = path1.node;
if let ast::BindByValue(ast::MutMutable) = mode {
if !token::get_ident(ident).get().starts_with("_") {
if !token::get_ident(ident).starts_with("_") {
match mutables.entry(ident.name.usize()) {
Vacant(entry) => { entry.insert(vec![id]); },
Occupied(mut entry) => { entry.get_mut().push(id); },

View File

@ -341,7 +341,7 @@ pub fn gather_attrs(attrs: &[ast::Attribute])
-> Vec<Result<(InternedString, Level, Span), Span>> {
let mut out = vec!();
for attr in attrs {
let level = match Level::from_str(attr.name().get()) {
let level = match Level::from_str(&attr.name()) {
None => continue,
Some(lvl) => lvl,
};
@ -499,10 +499,10 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
continue;
}
Ok((lint_name, level, span)) => {
match self.lints.find_lint(lint_name.get(), &self.tcx.sess, Some(span)) {
match self.lints.find_lint(&lint_name, &self.tcx.sess, Some(span)) {
Some(lint_id) => vec![(lint_id, level, span)],
None => {
match self.lints.lint_groups.get(lint_name.get()) {
match self.lints.lint_groups.get(&lint_name[]) {
Some(&(ref v, _)) => v.iter()
.map(|lint_id: &LintId|
(*lint_id, level, span))

View File

@ -170,7 +170,7 @@ impl<'a> CrateReader<'a> {
fn process_crate(&self, c: &ast::Crate) {
for a in c.attrs.iter().filter(|m| m.name() == "link_args") {
match a.value_str() {
Some(ref linkarg) => self.sess.cstore.add_used_link_args(linkarg.get()),
Some(ref linkarg) => self.sess.cstore.add_used_link_args(&linkarg),
None => { /* fallthrough */ }
}
}
@ -184,15 +184,15 @@ impl<'a> CrateReader<'a> {
ident, path_opt);
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
let name = path_str.to_string();
validate_crate_name(Some(self.sess), &name[],
Some(i.span));
name
}
None => ident.get().to_string(),
None => ident.to_string(),
};
Some(CrateInfo {
ident: ident.get().to_string(),
ident: ident.to_string(),
name: name,
id: i.id,
should_link: should_link(i),
@ -237,7 +237,7 @@ impl<'a> CrateReader<'a> {
.collect::<Vec<&ast::Attribute>>();
for m in &link_args {
match m.value_str() {
Some(linkarg) => self.sess.cstore.add_used_link_args(linkarg.get()),
Some(linkarg) => self.sess.cstore.add_used_link_args(&linkarg),
None => { /* fallthrough */ }
}
}
@ -289,7 +289,7 @@ impl<'a> CrateReader<'a> {
}
};
register_native_lib(self.sess, Some(m.span),
n.get().to_string(), kind);
n.to_string(), kind);
}
None => {}
}

View File

@ -383,7 +383,7 @@ pub fn is_staged_api(cstore: &cstore::CStore, def: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(def.krate);
let attrs = decoder::get_crate_attributes(cdata.data());
for attr in &attrs {
if attr.name().get() == "staged_api" {
if &attr.name()[] == "staged_api" {
match attr.node.value.node { ast::MetaWord(_) => return true, _ => (/*pass*/) }
}
}

View File

@ -86,11 +86,11 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
}
fn encode_name(rbml_w: &mut Encoder, name: ast::Name) {
rbml_w.wr_tagged_str(tag_paths_data_name, token::get_name(name).get());
rbml_w.wr_tagged_str(tag_paths_data_name, &token::get_name(name));
}
fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) {
rbml_w.wr_tagged_str(tag_item_impl_type_basename, token::get_ident(name).get());
rbml_w.wr_tagged_str(tag_item_impl_type_basename, &token::get_ident(name));
}
pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
@ -372,7 +372,7 @@ fn encode_path<PI: Iterator<Item=PathElem>>(rbml_w: &mut Encoder, path: PI) {
ast_map::PathMod(_) => tag_path_elem_mod,
ast_map::PathName(_) => tag_path_elem_name
};
rbml_w.wr_tagged_str(tag, token::get_name(pe.name()).get());
rbml_w.wr_tagged_str(tag, &token::get_name(pe.name()));
}
rbml_w.end_tag();
}
@ -915,7 +915,7 @@ fn encode_method_argument_names(rbml_w: &mut Encoder,
rbml_w.start_tag(tag_method_argument_name);
if let ast::PatIdent(_, ref path1, _) = arg.pat.node {
let name = token::get_ident(path1.node);
rbml_w.writer.write_all(name.get().as_bytes());
rbml_w.writer.write_all(name.as_bytes());
}
rbml_w.end_tag();
}
@ -1636,7 +1636,7 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) {
ast::MetaWord(ref name) => {
rbml_w.start_tag(tag_meta_item_word);
rbml_w.start_tag(tag_meta_item_name);
rbml_w.writer.write_all(name.get().as_bytes());
rbml_w.writer.write_all(name.as_bytes());
rbml_w.end_tag();
rbml_w.end_tag();
}
@ -1645,10 +1645,10 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) {
ast::LitStr(ref value, _) => {
rbml_w.start_tag(tag_meta_item_name_value);
rbml_w.start_tag(tag_meta_item_name);
rbml_w.writer.write_all(name.get().as_bytes());
rbml_w.writer.write_all(name.as_bytes());
rbml_w.end_tag();
rbml_w.start_tag(tag_meta_item_value);
rbml_w.writer.write_all(value.get().as_bytes());
rbml_w.writer.write_all(value.as_bytes());
rbml_w.end_tag();
rbml_w.end_tag();
}
@ -1658,7 +1658,7 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) {
ast::MetaList(ref name, ref items) => {
rbml_w.start_tag(tag_meta_item_list);
rbml_w.start_tag(tag_meta_item_name);
rbml_w.writer.write_all(name.get().as_bytes());
rbml_w.writer.write_all(name.as_bytes());
rbml_w.end_tag();
for inner_item in items {
encode_meta_item(rbml_w, &**inner_item);
@ -1695,7 +1695,7 @@ fn encode_paren_sugar(rbml_w: &mut Encoder, paren_sugar: bool) {
fn encode_associated_type_names(rbml_w: &mut Encoder, names: &[ast::Name]) {
rbml_w.start_tag(tag_associated_type_names);
for &name in names {
rbml_w.wr_tagged_str(tag_associated_type_name, token::get_name(name).get());
rbml_w.wr_tagged_str(tag_associated_type_name, &token::get_name(name));
}
rbml_w.end_tag();
}

View File

@ -249,11 +249,11 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat)
span_warn!(cx.tcx.sess, p.span, E0170,
"pattern binding `{}` is named the same as one \
of the variants of the type `{}`",
token::get_ident(ident.node).get(), ty_to_string(cx.tcx, pat_ty));
&token::get_ident(ident.node), ty_to_string(cx.tcx, pat_ty));
span_help!(cx.tcx.sess, p.span,
"if you meant to match on a variant, \
consider making the path in the pattern qualified: `{}::{}`",
ty_to_string(cx.tcx, pat_ty), token::get_ident(ident.node).get());
ty_to_string(cx.tcx, pat_ty), &token::get_ident(ident.node));
}
}
}

View File

@ -610,7 +610,7 @@ pub fn lit_to_const(lit: &ast::Lit) -> const_val {
ast::LitInt(n, ast::UnsignedIntLit(_)) => const_uint(n),
ast::LitFloat(ref n, _) |
ast::LitFloatUnsuffixed(ref n) => {
const_float(n.get().parse::<f64>().unwrap() as f64)
const_float(n.parse::<f64>().unwrap() as f64)
}
ast::LitBool(b) => const_bool(b)
}

View File

@ -321,7 +321,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool {
for attr in lint::gather_attrs(attrs) {
match attr {
Ok((ref name, lint::Allow, _))
if name.get() == dead_code => return true,
if &name[] == dead_code => return true,
_ => (),
}
}

View File

@ -514,7 +514,6 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
lifetime of captured variable `{}`...",
ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
.to_string());
note_and_explain_region(
self.tcx,
@ -526,7 +525,6 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
&format!("...but `{}` is only valid for ",
ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
.to_string())[],
sup,
"");
@ -570,8 +568,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
&format!("captured variable `{}` does not \
outlive the enclosing closure",
ty::local_var_name_str(self.tcx,
id).get()
.to_string())[]);
id).to_string())[]);
note_and_explain_region(
self.tcx,
"captured variable is valid for ",
@ -959,7 +956,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
// choice of lifetime name deterministic and thus easier to test.
let mut names = Vec::new();
for rn in region_names {
let lt_name = token::get_name(*rn).get().to_string();
let lt_name = token::get_name(*rn).to_string();
names.push(lt_name);
}
names.sort();
@ -1438,15 +1435,15 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
}
infer::EarlyBoundRegion(_, name) => {
format!(" for lifetime parameter `{}`",
token::get_name(name).get())
&token::get_name(name))
}
infer::BoundRegionInCoherence(name) => {
format!(" for lifetime parameter `{}` in coherence check",
token::get_name(name).get())
&token::get_name(name))
}
infer::UpvarRegion(ref upvar_id, _) => {
format!(" for capture of `{}` by closure",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_string())
ty::local_var_name_str(self.tcx, upvar_id.var_id).to_string())
}
};
@ -1527,7 +1524,6 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
&format!(
"...so that closure can access `{}`",
ty::local_var_name_str(self.tcx, upvar_id.var_id)
.get()
.to_string())[])
}
infer::InfStackClosure(span) => {
@ -1553,7 +1549,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
does not outlive the enclosing closure",
ty::local_var_name_str(
self.tcx,
id).get().to_string())[]);
id).to_string())[]);
}
infer::IndexSlice(span) => {
self.tcx.sess.span_note(
@ -1730,7 +1726,7 @@ impl LifeGiver {
fn with_taken(taken: &[ast::LifetimeDef]) -> LifeGiver {
let mut taken_ = HashSet::new();
for lt in taken {
let lt_name = token::get_name(lt.lifetime.name).get().to_string();
let lt_name = token::get_name(lt.lifetime.name).to_string();
taken_.insert(lt_name);
}
LifeGiver {

View File

@ -149,7 +149,7 @@ impl<'a, 'v> Visitor<'v> for LanguageItemCollector<'a> {
fn visit_item(&mut self, item: &ast::Item) {
match extract(&item.attrs) {
Some(value) => {
let item_index = self.item_refs.get(value.get()).map(|x| *x);
let item_index = self.item_refs.get(&value[]).map(|x| *x);
match item_index {
Some(item_index) => {

View File

@ -333,7 +333,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> {
fn variable_name(&self, var: Variable) -> String {
match self.var_kinds[var.get()] {
Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => {
token::get_ident(nm).get().to_string()
token::get_ident(nm).to_string()
},
ImplicitRet => "<implicit-ret>".to_string(),
CleanExit => "<clean-exit>".to_string()

View File

@ -1543,7 +1543,7 @@ impl<'tcx> Repr<'tcx> for InteriorKind {
fn repr(&self, _tcx: &ty::ctxt) -> String {
match *self {
InteriorField(NamedField(fld)) => {
token::get_name(fld).get().to_string()
token::get_name(fld).to_string()
}
InteriorField(PositionalField(i)) => format!("#{}", i),
InteriorElement(_) => "[]".to_string(),

View File

@ -180,7 +180,7 @@ impl Index {
pub fn new(krate: &Crate) -> Index {
let mut staged_api = false;
for attr in &krate.attrs {
if attr.name().get() == "staged_api" {
if &attr.name()[] == "staged_api" {
match attr.node.value.node {
ast::MetaWord(_) => {
attr::mark_used(attr);
@ -240,12 +240,12 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
if !self.active_features.contains(feature) {
let msg = match *reason {
Some(ref r) => format!("use of unstable library feature '{}': {}",
feature.get(), r.get()),
None => format!("use of unstable library feature '{}'", feature.get())
&feature, &r),
None => format!("use of unstable library feature '{}'", &feature)
};
emit_feature_warn(&self.tcx.sess.parse_sess.span_diagnostic,
feature.get(), span, &msg[]);
&feature, span, &msg);
}
}
Some(Stability { level, ref feature, .. }) => {

View File

@ -86,7 +86,7 @@ fn report_on_unimplemented<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>,
}).collect::<HashMap<String, String>>();
generic_map.insert("Self".to_string(),
trait_ref.self_ty().user_string(infcx.tcx));
let parser = Parser::new(istring.get());
let parser = Parser::new(&istring);
let mut errored = false;
let err: String = parser.filter_map(|p| {
match p {

View File

@ -4644,7 +4644,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
"no field named `{}` found in the list of fields `{:?}`",
token::get_name(name),
fields.iter()
.map(|f| token::get_name(f.name).get().to_string())
.map(|f| token::get_name(f.name).to_string())
.collect::<Vec<String>>())[]);
}

View File

@ -55,7 +55,7 @@ pub fn check_crate(krate: &ast::Crate,
pub fn link_name(attrs: &[ast::Attribute]) -> Option<InternedString> {
lang_items::extract(attrs).and_then(|name| {
$(if name.get() == stringify!($name) {
$(if &name[] == stringify!($name) {
Some(InternedString::new(stringify!($sym)))
} else)* {
None
@ -110,7 +110,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
match lang_items::extract(&i.attrs) {
None => {}
Some(lang_item) => self.register(lang_item.get(), i.span),
Some(lang_item) => self.register(&lang_item, i.span),
}
visit::walk_foreign_item(self, i)
}

View File

@ -109,7 +109,7 @@ impl<'a, 'v> Visitor<'v> for PluginLoader<'a> {
let mut reexport = HashSet::new();
for attr in &item.attrs {
let mut used = true;
match attr.name().get() {
match &attr.name()[] {
"phase" => {
self.sess.span_err(attr.span, "#[phase] is deprecated; use \
#[macro_use], #[plugin], and/or #[no_link]");

View File

@ -217,7 +217,7 @@ pub fn region_to_string(cx: &ctxt, prefix: &str, space: bool, region: Region) ->
match region {
ty::ReScope(_) => prefix.to_string(),
ty::ReEarlyBound(_, _, _, name) => {
token::get_name(name).get().to_string()
token::get_name(name).to_string()
}
ty::ReLateBound(_, br) => bound_region_to_string(cx, prefix, space, br),
ty::ReFree(ref fr) => bound_region_to_string(cx, prefix, space, fr.bound_region),
@ -277,7 +277,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
match ident {
Some(i) => {
s.push(' ');
s.push_str(token::get_ident(i).get());
s.push_str(&token::get_ident(i));
}
_ => { }
}
@ -1020,19 +1020,19 @@ impl<'tcx> Repr<'tcx> for ty::Method<'tcx> {
impl<'tcx> Repr<'tcx> for ast::Name {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_name(*self).get().to_string()
token::get_name(*self).to_string()
}
}
impl<'tcx> UserString<'tcx> for ast::Name {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(*self).get().to_string()
token::get_name(*self).to_string()
}
}
impl<'tcx> Repr<'tcx> for ast::Ident {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_ident(*self).get().to_string()
token::get_ident(*self).to_string()
}
}
@ -1220,7 +1220,7 @@ impl<'tcx, T> UserString<'tcx> for ty::Binder<T>
}
})
});
let names: Vec<_> = names.iter().map(|s| s.get()).collect();
let names: Vec<_> = names.iter().map(|s| &s[]).collect();
let value_str = unbound_value.user_string(tcx);
if names.len() == 0 {
@ -1248,7 +1248,7 @@ impl<'tcx> UserString<'tcx> for Ty<'tcx> {
impl<'tcx> UserString<'tcx> for ast::Ident {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(self.name).get().to_string()
token::get_name(self.name).to_string()
}
}

View File

@ -329,7 +329,7 @@ mod svh_visitor {
// macro invocations, namely macro_rules definitions,
// *can* appear as items, even in the expanded crate AST.
if macro_name(mac).get() == "macro_rules" {
if &macro_name(mac)[] == "macro_rules" {
// Pretty-printing definition to a string strips out
// surface artifacts (currently), such as the span
// information, yielding a content-based hash.

View File

@ -906,7 +906,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
match loan_path.kind {
LpUpvar(ty::UpvarId{ var_id: id, closure_expr_id: _ }) |
LpVar(id) => {
out.push_str(ty::local_var_name_str(self.tcx, id).get());
out.push_str(&ty::local_var_name_str(self.tcx, id));
}
LpDowncast(ref lp_base, variant_def_id) => {
@ -923,7 +923,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
match fname {
mc::NamedField(fname) => {
out.push('.');
out.push_str(token::get_name(fname).get());
out.push_str(&token::get_name(fname));
}
mc::PositionalField(idx) => {
out.push('.');

View File

@ -914,7 +914,7 @@ pub fn build_output_filenames(input: &Input,
// If a crate name is present, we use it as the link name
let stem = sess.opts.crate_name.clone().or_else(|| {
attr::find_crate_name(attrs).map(|n| n.get().to_string())
attr::find_crate_name(attrs).map(|n| n.to_string())
}).unwrap_or(input.filestem());
OutputFilenames {

View File

@ -307,8 +307,8 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
ViewPathSimple(binding, ref full_path) => {
let source_name =
full_path.segments.last().unwrap().identifier.name;
if token::get_name(source_name).get() == "mod" ||
token::get_name(source_name).get() == "self" {
if &token::get_name(source_name)[] == "mod" ||
&token::get_name(source_name)[] == "self" {
self.resolve_error(view_path.span,
"`self` imports are only allowed within a { } list");
}
@ -1020,7 +1020,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
self.handle_external_def(def,
def_visibility,
&*child_name_bindings,
token::get_name(name).get(),
&token::get_name(name),
name,
root);
}

View File

@ -1093,7 +1093,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
} else {
result.push_str("::")
}
result.push_str(token::get_name(*name).get());
result.push_str(&token::get_name(*name));
};
result
}
@ -1111,7 +1111,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
-> String {
match subclass {
SingleImport(_, source) => {
token::get_name(source).get().to_string()
token::get_name(source).to_string()
}
GlobImport => "*".to_string()
}
@ -1708,7 +1708,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
debug!("(resolving glob import) writing resolution `{}` in `{}` \
to `{}`",
token::get_name(name).get(),
&token::get_name(name),
self.module_to_string(&*containing_module),
self.module_to_string(module_));
@ -1725,7 +1725,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let msg = format!("a {} named `{}` has already been imported \
in this module",
namespace_name,
token::get_name(name).get());
&token::get_name(name));
span_err!(self.session, import_directive.span, E0251, "{}", msg);
} else {
let target = Target::new(containing_module.clone(),
@ -1757,7 +1757,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
name: Name,
namespace: Namespace) {
debug!("check_for_conflicting_import: {}; target exists: {}",
token::get_name(name).get(),
&token::get_name(name),
target.is_some());
match *target {
@ -1768,7 +1768,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
TypeNS => "type",
ValueNS => "value",
},
token::get_name(name).get());
&token::get_name(name));
span_err!(self.session, import_span, E0252, "{}", &msg[]);
}
Some(_) | None => {}
@ -1804,7 +1804,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let msg = format!("import `{0}` conflicts with imported \
crate in this module \
(maybe you meant `use {0}::*`?)",
token::get_name(name).get());
&token::get_name(name));
span_err!(self.session, import_span, E0254, "{}", &msg[]);
}
Some(_) | None => {}
@ -1826,7 +1826,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
if let Some(ref value) = *name_bindings.value_def.borrow() {
let msg = format!("import `{}` conflicts with value \
in this module",
token::get_name(name).get());
&token::get_name(name));
span_err!(self.session, import_span, E0255, "{}", &msg[]);
if let Some(span) = value.value_span {
self.session.span_note(span,
@ -1844,7 +1844,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
None => {
let msg = format!("import `{}` conflicts with type in \
this module",
token::get_name(name).get());
&token::get_name(name));
span_err!(self.session, import_span, E0256, "{}", &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
@ -1866,7 +1866,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
_ => {
let msg = format!("import `{}` conflicts with existing \
submodule",
token::get_name(name).get());
&token::get_name(name));
span_err!(self.session, import_span, E0258, "{}", &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
@ -1892,7 +1892,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
span_err!(self.session, span, E0259,
"an external crate named `{}` has already \
been imported into this module",
token::get_name(name).get());
&token::get_name(name));
}
}
@ -1906,7 +1906,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
"the name `{}` conflicts with an external \
crate that has been imported into this \
module",
token::get_name(name).get());
&token::get_name(name));
}
}
@ -1955,7 +1955,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let module_name = self.module_to_string(&*search_module);
let mut span = span;
let msg = if "???" == &module_name[] {
span.hi = span.lo + Pos::from_usize(segment_name.get().len());
span.hi = span.lo + Pos::from_usize(segment_name.len());
match search_parent_externals(name,
&self.current_module) {
@ -2368,11 +2368,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let mut containing_module;
let mut i;
let first_module_path_string = token::get_name(module_path[0]);
if "self" == first_module_path_string.get() {
if "self" == &first_module_path_string[] {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 1;
} else if "super" == first_module_path_string.get() {
} else if "super" == &first_module_path_string[] {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 0; // We'll handle `super` below.
@ -2383,7 +2383,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
// Now loop through all the `super`s we find.
while i < module_path.len() {
let string = token::get_name(module_path[i]);
if "super" != string.get() {
if "super" != &string[] {
break
}
debug!("(resolving module prefix) resolving `super` at {}",
@ -2417,7 +2417,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
allow_private_imports: bool)
-> ResolveResult<(Target, bool)> {
debug!("(resolving name in module) resolving `{}` in `{}`",
token::get_name(name).get(),
&token::get_name(name),
self.module_to_string(&*module_));
// First, check the direct children of the module.
@ -2493,7 +2493,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
// We're out of luck.
debug!("(resolving name in module) failed to resolve `{}`",
token::get_name(name).get());
&token::get_name(name));
return Failed(None);
}
@ -4372,7 +4372,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let mut smallest = 0;
for (i, other) in maybes.iter().enumerate() {
values[i] = lev_distance(name, other.get());
values[i] = lev_distance(name, &other);
if values[i] <= values[smallest] {
smallest = i;
@ -4383,9 +4383,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
values[smallest] != uint::MAX &&
values[smallest] < name.len() + 2 &&
values[smallest] <= max_distance &&
name != maybes[smallest].get() {
name != &maybes[smallest][] {
Some(maybes[smallest].get().to_string())
Some(maybes[smallest].to_string())
} else {
None
@ -4475,7 +4475,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
false // Stop advancing
});
if method_scope && token::get_name(self.self_name).get()
if method_scope && &token::get_name(self.self_name)[]
== path_name {
self.resolve_error(
expr.span,

View File

@ -141,7 +141,7 @@ pub fn find_crate_name(sess: Option<&Session>,
if let Some(sess) = sess {
if let Some(ref s) = sess.opts.crate_name {
if let Some((attr, ref name)) = attr_crate_name {
if *s != name.get() {
if *s != &name[] {
let msg = format!("--crate-name and #[crate_name] are \
required to match, but `{}` != `{}`",
s, name);
@ -153,7 +153,7 @@ pub fn find_crate_name(sess: Option<&Session>,
}
if let Some((attr, s)) = attr_crate_name {
return validate(s.get().to_string(), Some(attr.span));
return validate(s.to_string(), Some(attr.span));
}
if let Input::File(ref path) = *input {
if let Some(s) = path.filestem_str() {
@ -293,7 +293,7 @@ pub fn mangle<PI: Iterator<Item=PathElem>>(path: PI,
// First, connect each component with <len, name> pairs.
for e in path {
push(&mut n, &token::get_name(e.name()).get()[])
push(&mut n, &token::get_name(e.name()))
}
match hash {

View File

@ -355,7 +355,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
},
};
let qualname = format!("{}::{}", qualname, get_ident(method.pe_ident()).get());
let qualname = format!("{}::{}", qualname, &get_ident(method.pe_ident()));
let qualname = &qualname[];
// record the decl for this def (if it has one)
@ -436,7 +436,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
Some(sub_span) => self.fmt.field_str(field.span,
Some(sub_span),
field.node.id,
&name.get()[],
&name[],
&qualname[],
&typ[],
scope_id),
@ -525,7 +525,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.fmt.static_str(item.span,
sub_span,
item.id,
get_ident(item.ident).get(),
&get_ident(item.ident),
&qualname[],
&value[],
&ty_to_string(&*typ)[],
@ -548,7 +548,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.fmt.static_str(item.span,
sub_span,
item.id,
get_ident(item.ident).get(),
&get_ident(item.ident),
&qualname[],
"",
&ty_to_string(&*typ)[],
@ -607,7 +607,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
}
for variant in &enum_definition.variants {
let name = get_ident(variant.node.name);
let name = name.get();
let name = &name;
let mut qualname = enum_name.clone();
qualname.push_str("::");
qualname.push_str(name);
@ -1094,7 +1094,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
sub_span,
item.id,
mod_id,
get_ident(ident).get(),
&get_ident(ident),
self.cur_scope);
self.write_sub_paths_truncated(path, true);
}
@ -1149,9 +1149,9 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
}
ast::ItemExternCrate(ref s) => {
let name = get_ident(item.ident);
let name = name.get();
let name = &name;
let location = match *s {
Some((ref s, _)) => s.get().to_string(),
Some((ref s, _)) => s.to_string(),
None => name.to_string(),
};
let alias_span = self.span.span_for_last_ident(item.span);
@ -1259,7 +1259,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
},
};
qualname.push_str(get_ident(method_type.ident).get());
qualname.push_str(&get_ident(method_type.ident));
let qualname = &qualname[];
let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn);
@ -1541,7 +1541,7 @@ pub fn process_crate(sess: &Session,
assert!(analysis.glob_map.is_some());
let cratename = match attr::find_crate_name(&krate.attrs[]) {
Some(name) => name.get().to_string(),
Some(name) => name.to_string(),
None => {
info!("Could not find crate name, using 'unknown_crate'");
String::from_str("unknown_crate")

View File

@ -77,13 +77,13 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
fcx.pop_custom_cleanup_scope(temp_scope);
let mut constraints = constraints.iter()
.map(|s| s.get().to_string())
.map(|s| s.to_string())
.chain(ext_constraints.into_iter())
.collect::<Vec<String>>()
.connect(",");
let mut clobbers = ia.clobbers.iter()
.map(|s| format!("~{{{}}}", s.get()))
.map(|s| format!("~{{{}}}", &s))
.collect::<Vec<String>>()
.connect(",");
let more_clobbers = get_clobbers();
@ -120,7 +120,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
ast::AsmIntel => llvm::AD_Intel
};
let asm = CString::from_slice(ia.asm.get().as_bytes());
let asm = CString::from_slice(ia.asm.as_bytes());
let constraints = CString::from_slice(constraints.as_bytes());
let r = InlineAsmCall(bcx,
asm.as_ptr(),

View File

@ -444,7 +444,7 @@ pub fn set_llvm_fn_attrs(ccx: &CrateContext, attrs: &[ast::Attribute], llfn: Val
for attr in attrs {
let mut used = true;
match attr.name().get() {
match &attr.name()[] {
"no_stack_check" => unset_split_stack(llfn),
"no_split_stack" => {
unset_split_stack(llfn);
@ -2254,7 +2254,7 @@ pub fn update_linkage(ccx: &CrateContext,
let item = ccx.tcx().map.get(id);
if let ast_map::NodeItem(i) = item {
if let Some(name) = attr::first_attr_value_str_by_name(&i.attrs, "linkage") {
if let Some(linkage) = llvm_linkage_by_name(name.get()) {
if let Some(linkage) = llvm_linkage_by_name(&name) {
llvm::SetLinkage(llval, linkage);
} else {
ccx.sess().span_fatal(i.span, "invalid linkage specified");
@ -2727,7 +2727,7 @@ fn exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, id: ast::NodeId,
match attr::first_attr_value_str_by_name(attrs, "export_name") {
// Use provided name
Some(name) => name.get().to_string(),
Some(name) => name.to_string(),
_ => ccx.tcx().map.with_path(id, |path| {
if attr::contains_name(attrs, "no_mangle") {
@ -2735,7 +2735,7 @@ fn exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, id: ast::NodeId,
path.last().unwrap().to_string()
} else {
match weak_lang_items::link_name(attrs) {
Some(name) => name.get().to_string(),
Some(name) => name.to_string(),
None => {
// Usual name mangling
mangle_exported_name(ccx, path, ty, id)
@ -2830,12 +2830,12 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
match attr::first_attr_value_str_by_name(&i.attrs[],
"link_section") {
Some(sect) => {
if contains_null(sect.get()) {
if contains_null(&sect) {
ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`",
sect.get())[]);
&sect)[]);
}
unsafe {
let buf = CString::from_slice(sect.get().as_bytes());
let buf = CString::from_slice(sect.as_bytes());
llvm::LLVMSetSection(v, buf.as_ptr());
}
},
@ -2875,7 +2875,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
let abi = ccx.tcx().map.get_foreign_abi(id);
let ty = ty::node_id_to_type(ccx.tcx(), ni.id);
let name = foreign::link_name(&*ni);
foreign::register_foreign_item_fn(ccx, abi, ty, &name.get()[])
foreign::register_foreign_item_fn(ccx, abi, ty, &name)
}
ast::ForeignItemStatic(..) => {
foreign::register_static(ccx, &*ni)

View File

@ -590,7 +590,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }
pub fn ident(&self, ident: Ident) -> String {
token::get_ident(ident).get().to_string()
token::get_ident(ident).to_string()
}
pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
@ -834,8 +834,8 @@ pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> Va
}
let sc = llvm::LLVMConstStringInContext(cx.llcx(),
s.get().as_ptr() as *const c_char,
s.get().len() as c_uint,
s.as_ptr() as *const c_char,
s.len() as c_uint,
!null_terminated as Bool);
let gsym = token::gensym("str");
@ -853,7 +853,7 @@ pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> Va
// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
let len = s.get().len();
let len = s.len();
let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}

View File

@ -58,13 +58,13 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit)
}
}
ast::LitFloat(ref fs, t) => {
C_floating(fs.get(), Type::float_from_ty(cx, t))
C_floating(&fs, Type::float_from_ty(cx, t))
}
ast::LitFloatUnsuffixed(ref fs) => {
let lit_float_ty = ty::node_id_to_type(cx.tcx(), e.id);
match lit_float_ty.sty {
ty::ty_float(t) => {
C_floating(fs.get(), Type::float_from_ty(cx, t))
C_floating(&fs, Type::float_from_ty(cx, t))
}
_ => {
cx.sess().span_bug(lit.span,

View File

@ -802,7 +802,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
let variable_type = ty::node_id_to_type(cx.tcx(), node_id);
let type_metadata = type_metadata(cx, variable_type, span);
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
let var_name = token::get_ident(ident).get().to_string();
let var_name = token::get_ident(ident).to_string();
let linkage_name =
namespace_node.mangled_name_of_contained_item(&var_name[]);
let var_scope = namespace_node.scope;
@ -1350,7 +1350,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
// Get_template_parameters() will append a `<...>` clause to the function
// name if necessary.
let mut function_name = String::from_str(token::get_ident(ident).get());
let mut function_name = String::from_str(&token::get_ident(ident));
let template_parameters = get_template_parameters(cx,
generics,
param_substs,
@ -1499,7 +1499,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
let ident = special_idents::type_self;
let ident = token::get_ident(ident);
let name = CString::from_slice(ident.get().as_bytes());
let name = CString::from_slice(ident.as_bytes());
let param_metadata = unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -1533,7 +1533,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
if cx.sess().opts.debuginfo == FullDebugInfo {
let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP);
let ident = token::get_ident(ident);
let name = CString::from_slice(ident.get().as_bytes());
let name = CString::from_slice(ident.as_bytes());
let param_metadata = unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -1656,7 +1656,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
CapturedVariable => (0, DW_TAG_auto_variable)
};
let name = CString::from_slice(name.get().as_bytes());
let name = CString::from_slice(name.as_bytes());
match (variable_access, [].as_slice()) {
(DirectVariable { alloca }, address_operations) |
(IndirectVariable {alloca, address_operations}, _) => {
@ -1993,7 +1993,7 @@ impl<'tcx> StructMemberDescriptionFactory<'tcx> {
let name = if field.name == special_idents::unnamed_field.name {
"".to_string()
} else {
token::get_name(field.name).get().to_string()
token::get_name(field.name).to_string()
};
let offset = if self.is_simd {
@ -2223,7 +2223,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
// MemberDescription of the struct's single field.
let sole_struct_member_description = MemberDescription {
name: match non_null_variant.arg_names {
Some(ref names) => token::get_ident(names[0]).get().to_string(),
Some(ref names) => token::get_ident(names[0]).to_string(),
None => "".to_string()
},
llvm_type: non_null_llvm_type,
@ -2237,13 +2237,13 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
.get_unique_type_id_of_enum_variant(
cx,
self.enum_type,
non_null_variant_name.get());
&non_null_variant_name);
// Now we can create the metadata of the artificial struct
let artificial_struct_metadata =
composite_type_metadata(cx,
artificial_struct_llvm_type,
non_null_variant_name.get(),
&non_null_variant_name,
unique_type_id,
&[sole_struct_member_description],
self.containing_scope,
@ -2373,7 +2373,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
// Could do some consistency checks here: size, align, field count, discr type
let variant_name = token::get_name(variant_info.name);
let variant_name = variant_name.get();
let variant_name = &variant_name;
let unique_type_id = debug_context(cx).type_map
.borrow_mut()
.get_unique_type_id_of_enum_variant(
@ -2392,7 +2392,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
Some(ref names) => {
names.iter()
.map(|ident| {
token::get_ident(*ident).get().to_string()
token::get_ident(*ident).to_string()
}).collect()
}
None => variant_info.args.iter().map(|_| "".to_string()).collect()
@ -2443,7 +2443,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
.iter()
.map(|v| {
let token = token::get_name(v.name);
let name = CString::from_slice(token.get().as_bytes());
let name = CString::from_slice(token.as_bytes());
unsafe {
llvm::LLVMDIBuilderCreateEnumerator(
DIB(cx),
@ -2473,7 +2473,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
codemap::DUMMY_SP);
let discriminant_name = get_enum_discriminant_name(cx, enum_def_id);
let name = CString::from_slice(discriminant_name.get().as_bytes());
let name = CString::from_slice(discriminant_name.as_bytes());
let discriminant_type_metadata = unsafe {
llvm::LLVMDIBuilderCreateEnumerationType(
DIB(cx),
@ -3126,7 +3126,7 @@ fn contains_nodebug_attribute(attributes: &[ast::Attribute]) -> bool {
attributes.iter().any(|attr| {
let meta_item: &ast::MetaItem = &*attr.node.value;
match meta_item.node {
ast::MetaWord(ref value) => value.get() == "no_debug",
ast::MetaWord(ref value) => &value[] == "no_debug",
_ => false
}
})
@ -3847,7 +3847,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
let mut path_element_count = 0;
for path_element in path {
let name = token::get_name(path_element.name());
output.push_str(name.get());
output.push_str(&name);
output.push_str("::");
path_element_count += 1;
}
@ -3862,7 +3862,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
let name = token::get_name(path.last()
.expect("debuginfo: Empty item path?")
.name());
output.push_str(name.get());
output.push_str(&name);
}
});
}
@ -3912,8 +3912,8 @@ impl NamespaceTreeNode {
None => {}
}
let string = token::get_name(node.name);
output.push_str(&format!("{}", string.get().len())[]);
output.push_str(string.get());
output.push_str(&format!("{}", string.len())[]);
output.push_str(&string);
}
let mut name = String::from_str("_ZN");
@ -3970,7 +3970,7 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTree
};
let namespace_name = token::get_name(name);
let namespace_name = CString::from_slice(namespace_name
.get().as_bytes());
.as_bytes());
let scope = unsafe {
llvm::LLVMDIBuilderCreateNameSpace(
DIB(cx),

View File

@ -118,7 +118,7 @@ pub fn register_static(ccx: &CrateContext,
// static and call it a day. Some linkages (like weak) will make it such
// that the static actually has a null value.
Some(name) => {
let linkage = match llvm_linkage_by_name(name.get()) {
let linkage = match llvm_linkage_by_name(&name) {
Some(linkage) => linkage,
None => {
ccx.sess().span_fatal(foreign_item.span,
@ -134,7 +134,7 @@ pub fn register_static(ccx: &CrateContext,
};
unsafe {
// Declare a symbol `foo` with the desired linkage.
let buf = CString::from_slice(ident.get().as_bytes());
let buf = CString::from_slice(ident.as_bytes());
let g1 = llvm::LLVMAddGlobal(ccx.llmod(), llty2.to_ref(),
buf.as_ptr());
llvm::SetLinkage(g1, linkage);
@ -146,7 +146,7 @@ pub fn register_static(ccx: &CrateContext,
// `extern_with_linkage_foo` will instead be initialized to
// zero.
let mut real_name = "_rust_extern_with_linkage_".to_string();
real_name.push_str(ident.get());
real_name.push_str(&ident);
let real_name = CString::from_vec(real_name.into_bytes());
let g2 = llvm::LLVMAddGlobal(ccx.llmod(), llty.to_ref(),
real_name.as_ptr());
@ -157,7 +157,7 @@ pub fn register_static(ccx: &CrateContext,
}
None => unsafe {
// Generate an external declaration.
let buf = CString::from_slice(ident.get().as_bytes());
let buf = CString::from_slice(ident.as_bytes());
llvm::LLVMAddGlobal(ccx.llmod(), llty.to_ref(), buf.as_ptr())
}
}
@ -468,7 +468,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
}
register_foreign_item_fn(ccx, abi, ty,
&lname.get()[]);
&lname);
// Unlike for other items, we shouldn't call
// `base::update_linkage` here. Foreign items have
// special linkage requirements, which are handled
@ -478,7 +478,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
}
ccx.item_symbols().borrow_mut().insert(foreign_item.id,
lname.get().to_string());
lname.to_string());
}
}

View File

@ -36,7 +36,7 @@ use syntax::parse::token;
use util::ppaux::{Repr, ty_to_string};
pub fn get_simple_intrinsic(ccx: &CrateContext, item: &ast::ForeignItem) -> Option<ValueRef> {
let name = match token::get_ident(item.ident).get() {
let name = match &token::get_ident(item.ident)[] {
"sqrtf32" => "llvm.sqrt.f32",
"sqrtf64" => "llvm.sqrt.f64",
"powif32" => "llvm.powi.f32",
@ -166,7 +166,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let name = token::get_ident(foreign_item.ident);
// For `transmute` we can just trans the input expr directly into dest
if name.get() == "transmute" {
if &name[] == "transmute" {
let llret_ty = type_of::type_of(ccx, ret_ty.unwrap());
match args {
callee::ArgExprs(arg_exprs) => {
@ -274,13 +274,13 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let call_debug_location = DebugLoc::At(call_info.id, call_info.span);
// These are the only intrinsic functions that diverge.
if name.get() == "abort" {
if &name[] == "abort" {
let llfn = ccx.get_intrinsic(&("llvm.trap"));
Call(bcx, llfn, &[], None, call_debug_location);
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
Unreachable(bcx);
return Result::new(bcx, C_undef(Type::nil(ccx).ptr_to()));
} else if name.get() == "unreachable" {
} else if &name[] == "unreachable" {
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
Unreachable(bcx);
return Result::new(bcx, C_nil(ccx));
@ -307,7 +307,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
};
let simple = get_simple_intrinsic(ccx, &*foreign_item);
let llval = match (simple, name.get()) {
let llval = match (simple, &name[]) {
(Some(llfn), _) => {
Call(bcx, llfn, &llargs, None, call_debug_location)
}

View File

@ -209,7 +209,7 @@ pub fn trans_lit_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
match dest {
Ignore => bcx,
SaveIn(lldest) => {
let bytes = str_lit.get().len();
let bytes = str_lit.len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), str_lit, false);
let llcstr = consts::ptrcast(llcstr, Type::i8p(bcx.ccx()));
@ -242,7 +242,7 @@ pub fn write_content<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
match dest {
Ignore => return bcx,
SaveIn(lldest) => {
let bytes = s.get().len();
let bytes = s.len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), (*s).clone(), false);
base::call_memcpy(bcx,
@ -343,7 +343,7 @@ pub fn elements_required(bcx: Block, content_expr: &ast::Expr) -> uint {
match content_expr.node {
ast::ExprLit(ref lit) => {
match lit.node {
ast::LitStr(ref s, _) => s.get().len(),
ast::LitStr(ref s, _) => s.len(),
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"unexpected evec content")

View File

@ -1160,12 +1160,11 @@ pub fn ast_ty_to_ty<'tcx>(
using the syntax `<Type \
as {}>::{}`",
path_str,
token::get_ident(
&token::get_ident(
path.segments
.last()
.unwrap()
.identifier)
.get());
.identifier));
this.tcx().types.err
}
def::DefAssociatedPath(provenance, assoc_ident) => {

View File

@ -805,7 +805,7 @@ fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
a.check_name("rustc_on_unimplemented")
}) {
if let Some(ref istring) = attr.value_str() {
let parser = Parser::new(istring.get());
let parser = Parser::new(&istring);
let types = &*generics.ty_params;
for token in parser {
match token {
@ -3104,7 +3104,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
tcx : &ty::ctxt<'tcx>,
skip : Vec<&str>) {
let ident = token::get_ident(field.node);
let name = ident.get();
let name = &ident;
// only find fits with at least one matching letter
let mut best_dist = name.len();
let fields = ty::lookup_struct_fields(tcx, id);
@ -3286,7 +3286,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
let (_, seen) = class_field_map[name];
if !seen {
missing_fields.push(
format!("`{}`", token::get_name(name).get()))
format!("`{}`", &token::get_name(name)))
}
}
@ -5223,8 +5223,8 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
let tcx = ccx.tcx;
let name = token::get_ident(it.ident);
let (n_tps, inputs, output) = if name.get().starts_with("atomic_") {
let split : Vec<&str> = name.get().split('_').collect();
let (n_tps, inputs, output) = if name.starts_with("atomic_") {
let split : Vec<&str> = name.split('_').collect();
assert!(split.len() >= 2, "Atomic intrinsic not correct format");
//We only care about the operation here
@ -5253,10 +5253,10 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
}
};
(n_tps, inputs, ty::FnConverging(output))
} else if name.get() == "abort" || name.get() == "unreachable" {
} else if &name[] == "abort" || &name[] == "unreachable" {
(0, Vec::new(), ty::FnDiverging)
} else {
let (n_tps, inputs, output) = match name.get() {
let (n_tps, inputs, output) = match &name[] {
"breakpoint" => (0, Vec::new(), ty::mk_nil(tcx)),
"size_of" |
"pref_align_of" | "min_align_of" => (1, Vec::new(), ccx.tcx.types.uint),

View File

@ -405,7 +405,7 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
let span = self.reason.span(self.tcx);
span_err!(self.tcx.sess, span, E0104,
"cannot resolve lifetime for captured variable `{}`: {}",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_string(),
ty::local_var_name_str(self.tcx, upvar_id.var_id).to_string(),
infer::fixup_err_to_string(e));
}

View File

@ -410,12 +410,12 @@ pub enum Attribute {
impl Clean<Attribute> for ast::MetaItem {
fn clean(&self, cx: &DocContext) -> Attribute {
match self.node {
ast::MetaWord(ref s) => Word(s.get().to_string()),
ast::MetaWord(ref s) => Word(s.to_string()),
ast::MetaList(ref s, ref l) => {
List(s.get().to_string(), l.clean(cx))
List(s.to_string(), l.clean(cx))
}
ast::MetaNameValue(ref s, ref v) => {
NameValue(s.get().to_string(), lit_to_string(v))
NameValue(s.to_string(), lit_to_string(v))
}
}
}
@ -700,19 +700,19 @@ impl Lifetime {
impl Clean<Lifetime> for ast::Lifetime {
fn clean(&self, _: &DocContext) -> Lifetime {
Lifetime(token::get_name(self.name).get().to_string())
Lifetime(token::get_name(self.name).to_string())
}
}
impl Clean<Lifetime> for ast::LifetimeDef {
fn clean(&self, _: &DocContext) -> Lifetime {
Lifetime(token::get_name(self.lifetime.name).get().to_string())
Lifetime(token::get_name(self.lifetime.name).to_string())
}
}
impl Clean<Lifetime> for ty::RegionParameterDef {
fn clean(&self, _: &DocContext) -> Lifetime {
Lifetime(token::get_name(self.name).get().to_string())
Lifetime(token::get_name(self.name).to_string())
}
}
@ -721,7 +721,7 @@ impl Clean<Option<Lifetime>> for ty::Region {
match *self {
ty::ReStatic => Some(Lifetime::statik()),
ty::ReLateBound(_, ty::BrNamed(_, name)) =>
Some(Lifetime(token::get_name(name).get().to_string())),
Some(Lifetime(token::get_name(name).to_string())),
ty::ReEarlyBound(_, _, _, name) => Some(Lifetime(name.clean(cx))),
ty::ReLateBound(..) |
@ -1953,20 +1953,20 @@ fn path_to_string(p: &ast::Path) -> String {
} else {
first = false;
}
s.push_str(i.get());
s.push_str(&i);
}
s
}
impl Clean<String> for ast::Ident {
fn clean(&self, _: &DocContext) -> String {
token::get_ident(*self).get().to_string()
token::get_ident(*self).to_string()
}
}
impl Clean<String> for ast::Name {
fn clean(&self, _: &DocContext) -> String {
token::get_name(*self).get().to_string()
token::get_name(*self).to_string()
}
}
@ -2158,7 +2158,7 @@ impl Clean<Vec<Item>> for doctree::Import {
// forcefully don't inline if this is not public or if the
// #[doc(no_inline)] attribute is present.
let denied = self.vis != ast::Public || self.attrs.iter().any(|a| {
a.name().get() == "doc" && match a.meta_item_list() {
&a.name()[] == "doc" && match a.meta_item_list() {
Some(l) => attr::contains_name(l, "no_inline"),
None => false,
}
@ -2311,7 +2311,7 @@ impl ToSource for syntax::codemap::Span {
fn lit_to_string(lit: &ast::Lit) -> String {
match lit.node {
ast::LitStr(ref st, _) => st.get().to_string(),
ast::LitStr(ref st, _) => st.to_string(),
ast::LitBinary(ref data) => format!("{:?}", data),
ast::LitByte(b) => {
let mut res = String::from_str("b'");
@ -2323,8 +2323,8 @@ fn lit_to_string(lit: &ast::Lit) -> String {
},
ast::LitChar(c) => format!("'{}'", c),
ast::LitInt(i, _t) => i.to_string(),
ast::LitFloat(ref f, _t) => f.get().to_string(),
ast::LitFloatUnsuffixed(ref f) => f.get().to_string(),
ast::LitFloat(ref f, _t) => f.to_string(),
ast::LitFloatUnsuffixed(ref f) => f.to_string(),
ast::LitBool(b) => b.to_string(),
}
}
@ -2336,7 +2336,7 @@ fn name_from_pat(p: &ast::Pat) -> String {
match p.node {
PatWild(PatWildSingle) => "_".to_string(),
PatWild(PatWildMulti) => "..".to_string(),
PatIdent(_, ref p, _) => token::get_ident(p.node).get().to_string(),
PatIdent(_, ref p, _) => token::get_ident(p.node).to_string(),
PatEnum(ref p, _) => path_to_string(p),
PatStruct(ref name, ref fields, etc) => {
format!("{} {{ {}{} }}", path_to_string(name),
@ -2486,11 +2486,11 @@ impl Clean<Stability> for attr::Stability {
fn clean(&self, _: &DocContext) -> Stability {
Stability {
level: self.level,
feature: self.feature.get().to_string(),
feature: self.feature.to_string(),
since: self.since.as_ref().map_or("".to_string(),
|interned| interned.get().to_string()),
|interned| interned.to_string()),
reason: self.reason.as_ref().map_or("".to_string(),
|interned| interned.get().to_string()),
|interned| interned.to_string()),
}
}
}

View File

@ -142,7 +142,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
// keywords are also included in the identifier set
token::Ident(ident, _is_mod_sep) => {
match token::get_ident(ident).get() {
match &token::get_ident(ident)[] {
"ref" | "mut" => "kw-2",
"self" => "self",

View File

@ -237,7 +237,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
ast::ItemExternCrate(ref p) => {
let path = match *p {
None => None,
Some((ref x, _)) => Some(x.get().to_string()),
Some((ref x, _)) => Some(x.to_string()),
};
om.extern_crates.push(ExternCrate {
name: name,
@ -253,7 +253,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
let please_inline = item.attrs.iter().any(|item| {
match item.meta_item_list() {
Some(list) => {
list.iter().any(|i| i.name().get() == "inline")
list.iter().any(|i| &i.name()[] == "inline")
}
None => false,
}

View File

@ -112,13 +112,13 @@ impl fmt::Display for Ident {
impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Name(nm) = *self;
write!(f, "{:?}({})", token::get_name(*self).get(), nm)
write!(f, "{:?}({})", token::get_name(*self), nm)
}
}
impl fmt::Display for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(token::get_name(*self).get(), f)
fmt::Display::fmt(&token::get_name(*self), f)
}
}
@ -174,7 +174,7 @@ impl Name {
pub fn as_str<'a>(&'a self) -> &'a str {
unsafe {
// FIXME #12938: can't use copy_lifetime since &str isn't a &T
::std::mem::transmute::<&str,&str>(token::get_name(*self).get())
::std::mem::transmute::<&str,&str>(&token::get_name(*self))
}
}
@ -193,7 +193,7 @@ pub type Mrk = u32;
impl Encodable for Ident {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(token::get_ident(*self).get())
s.emit_str(&token::get_ident(*self))
}
}

View File

@ -27,7 +27,7 @@ use std::u32;
pub fn path_name_i(idents: &[Ident]) -> String {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
idents.iter().map(|i| {
token::get_ident(*i).get().to_string()
token::get_ident(*i).to_string()
}).collect::<Vec<String>>().connect("::")
}

View File

@ -44,7 +44,7 @@ pub fn is_used(attr: &Attribute) -> bool {
pub trait AttrMetaMethods {
fn check_name(&self, name: &str) -> bool {
name == self.name().get()
name == &self.name()[]
}
/// Retrieve the name of the meta item, e.g. `foo` in `#[foo]`,
@ -62,7 +62,7 @@ pub trait AttrMetaMethods {
impl AttrMetaMethods for Attribute {
fn check_name(&self, name: &str) -> bool {
let matches = name == self.name().get();
let matches = name == &self.name()[];
if matches {
mark_used(self);
}
@ -142,7 +142,7 @@ impl AttributeMethods for Attribute {
let meta = mk_name_value_item_str(
InternedString::new("doc"),
token::intern_and_get_ident(&strip_doc_comment_decoration(
comment.get())[]));
&comment)[]));
if self.node.style == ast::AttrOuter {
f(&mk_attr_outer(self.node.id, meta))
} else {
@ -209,7 +209,7 @@ pub fn mk_attr_outer(id: AttrId, item: P<MetaItem>) -> Attribute {
pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos,
hi: BytePos)
-> Attribute {
let style = doc_comment_style(text.get());
let style = doc_comment_style(&text);
let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr));
let attr = Attribute_ {
id: id,
@ -326,11 +326,11 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool {
/// Tests if a cfg-pattern matches the cfg set
pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::MetaItem) -> bool {
match cfg.node {
ast::MetaList(ref pred, ref mis) if pred.get() == "any" =>
ast::MetaList(ref pred, ref mis) if &pred[] == "any" =>
mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
ast::MetaList(ref pred, ref mis) if pred.get() == "all" =>
ast::MetaList(ref pred, ref mis) if &pred[] == "all" =>
mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
ast::MetaList(ref pred, ref mis) if pred.get() == "not" => {
ast::MetaList(ref pred, ref mis) if &pred[] == "not" => {
if mis.len() != 1 {
diagnostic.span_err(cfg.span, "expected 1 cfg-pattern");
return false;
@ -382,7 +382,7 @@ fn find_stability_generic<'a,
'outer: for attr in attrs {
let tag = attr.name();
let tag = tag.get();
let tag = &tag[];
if tag != "deprecated" && tag != "unstable" && tag != "stable" {
continue // not a stability level
}
@ -394,8 +394,8 @@ fn find_stability_generic<'a,
let mut feature = None;
let mut since = None;
let mut reason = None;
for meta in metas {
if meta.name().get() == "feature" {
for meta in metas.iter() {
if meta.name() == "feature" {
match meta.value_str() {
Some(v) => feature = Some(v),
None => {
@ -404,7 +404,7 @@ fn find_stability_generic<'a,
}
}
}
if meta.name().get() == "since" {
if &meta.name()[] == "since" {
match meta.value_str() {
Some(v) => since = Some(v),
None => {
@ -413,7 +413,7 @@ fn find_stability_generic<'a,
}
}
}
if meta.name().get() == "reason" {
if &meta.name()[] == "reason" {
match meta.value_str() {
Some(v) => reason = Some(v),
None => {
@ -521,11 +521,11 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAt
for item in items {
match item.node {
ast::MetaWord(ref word) => {
let hint = match word.get() {
let hint = match &word[] {
// Can't use "extern" because it's not a lexical identifier.
"C" => Some(ReprExtern),
"packed" => Some(ReprPacked),
_ => match int_type_of_word(word.get()) {
_ => match int_type_of_word(&word) {
Some(ity) => Some(ReprInt(item.span, ity)),
None => {
// Not a word we recognize

View File

@ -10,6 +10,7 @@
use std::cell::RefCell;
use std::collections::BTreeMap;
use ast;
use ast::{Ident, Name, TokenTree};
use codemap::Span;
@ -57,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
match diagnostics.insert(code.name, span) {
Some(previous_span) => {
ecx.span_warn(span, &format!(
"diagnostic code {} already used", token::get_ident(code).get()
"diagnostic code {} already used", &token::get_ident(code)
)[]);
ecx.span_note(previous_span, "previous invocation");
},
@ -68,7 +69,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
with_registered_diagnostics(|diagnostics| {
if !diagnostics.contains_key(&code.name) {
ecx.span_err(span, &format!(
"used diagnostic code {} not registered", token::get_ident(code).get()
"used diagnostic code {} not registered", &token::get_ident(code)
)[]);
}
});
@ -93,12 +94,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
with_registered_diagnostics(|diagnostics| {
if diagnostics.insert(code.name, description).is_some() {
ecx.span_err(span, &format!(
"diagnostic code {} already registered", token::get_ident(*code).get()
"diagnostic code {} already registered", &token::get_ident(*code)
)[]);
}
});
let sym = Ident::new(token::gensym(&(
"__register_diagnostic_".to_string() + token::get_ident(*code).get()
"__register_diagnostic_".to_string() + &token::get_ident(*code)
)[]));
MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
}

View File

@ -102,7 +102,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
// It's the opposite of '=&' which means that the memory
// cannot be shared with any other operand (usually when
// a register is clobbered early.)
let output = match constraint.get().slice_shift_char() {
let output = match constraint.slice_shift_char() {
Some(('=', _)) => None,
Some(('+', operand)) => {
Some(token::intern_and_get_ident(&format!(
@ -129,9 +129,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let (constraint, _str_style) = p.parse_str();
if constraint.get().starts_with("=") {
if constraint.starts_with("=") {
cx.span_err(p.last_span, "input operand constraint contains '='");
} else if constraint.get().starts_with("+") {
} else if constraint.starts_with("+") {
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
@ -213,7 +213,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
MacExpr::new(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprInlineAsm(ast::InlineAsm {
asm: token::intern_and_get_ident(asm.get()),
asm: token::intern_and_get_ident(&asm),
asm_str_style: asm_str_style.unwrap(),
outputs: outputs,
inputs: inputs,

View File

@ -790,7 +790,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
}
expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
s.get().to_string()
s.to_string()
})
}

View File

@ -21,7 +21,6 @@ use parse::token::InternedString;
use parse::token;
use ptr::P;
// Transitional reexports so qquote can find the paths it is looking for
mod syntax {
pub use ext;
@ -576,7 +575,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
let field_name = token::get_ident(ident);
let field_span = Span {
lo: sp.lo - Pos::from_usize(field_name.get().len()),
lo: sp.lo - Pos::from_usize(field_name.len()),
hi: sp.hi,
expn_id: sp.expn_id,
};

View File

@ -32,7 +32,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
ast::LitStr(ref s, _) |
ast::LitFloat(ref s, _) |
ast::LitFloatUnsuffixed(ref s) => {
accumulator.push_str(s.get());
accumulator.push_str(&s);
}
ast::LitChar(c) => {
accumulator.push(c);

View File

@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
} else {
match *e {
ast::TtToken(_, token::Ident(ident, _)) => {
res_str.push_str(token::get_ident(ident).get())
res_str.push_str(&token::get_ident(ident))
},
_ => {
cx.span_err(sp, "concat_idents! requires ident args.");

View File

@ -24,7 +24,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
{
let name = match mitem.node {
MetaWord(ref tname) => {
match tname.get() {
match &tname[] {
"Copy" => "Copy",
"Send" | "Sync" => {
return cx.span_err(span,

View File

@ -363,7 +363,7 @@ impl<'a> TraitDef<'a> {
// generated implementations are linted
let mut attrs = newitem.attrs.clone();
attrs.extend(item.attrs.iter().filter(|a| {
match a.name().get() {
match &a.name()[] {
"allow" | "warn" | "deny" | "forbid" => true,
_ => false,
}

View File

@ -74,7 +74,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
|i| push(i)))
}
match tname.get() {
match &tname[] {
"Clone" => expand!(clone::expand_deriving_clone),
"Hash" => expand!(hash::expand_deriving_hash),

View File

@ -72,7 +72,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
}
};
let mut format_string = String::from_str(token::get_ident(name).get());
let mut format_string = String::from_str(&token::get_ident(name));
// the internal fields we're actually formatting
let mut exprs = Vec::new();
@ -107,7 +107,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
let name = token::get_ident(field.name.unwrap());
format_string.push_str(" ");
format_string.push_str(name.get());
format_string.push_str(&name);
format_string.push_str(": {:?}");
exprs.push(field.self_.clone());

View File

@ -101,12 +101,12 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
}
}
let e = match env::var_string(var.get()) {
Err(..) => {
cx.span_err(sp, msg.get());
let e = match env::var_string(&var[]) {
Err(_) => {
cx.span_err(sp, &msg);
cx.expr_usize(sp, 0)
}
Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[]))
Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s))
};
MacExpr::new(e)
}

View File

@ -375,7 +375,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
fld.cx.span_err(
pth.span,
&format!("macro undefined: '{}!'",
extnamestr.get())[]);
&extnamestr)[]);
// let compilation continue
None
@ -385,7 +385,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
fld.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
name: extnamestr.to_string(),
format: MacroBang,
span: exp_span,
},
@ -411,7 +411,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
fld.cx.span_err(
pth.span,
&format!("non-expression macro in expression position: {}",
&extnamestr.get()[]
&extnamestr[]
)[]);
return None;
}
@ -422,7 +422,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
fld.cx.span_err(
pth.span,
&format!("'{}' is not a tt-style macro",
extnamestr.get())[]);
&extnamestr)[]);
None
}
}
@ -506,14 +506,14 @@ fn expand_item_modifiers(mut it: P<ast::Item>, fld: &mut MacroExpander)
for attr in &modifiers {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
match fld.cx.syntax_env.find(&intern(&mname)) {
Some(rc) => match *rc {
Modifier(ref mac) => {
attr::mark_used(attr);
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
name: mname.get().to_string(),
name: mname.to_string(),
format: MacroAttribute,
span: None,
}
@ -613,7 +613,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
name: extnamestr.to_string(),
format: MacroBang,
span: span
}
@ -626,13 +626,13 @@ pub fn expand_item_mac(it: P<ast::Item>,
if it.ident.name == parse::token::special_idents::invalid.name {
fld.cx.span_err(path_span,
&format!("macro {}! expects an ident argument",
extnamestr.get())[]);
&extnamestr)[]);
return SmallVector::zero();
}
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
name: extnamestr.to_string(),
format: MacroBang,
span: span
}
@ -651,7 +651,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
name: extnamestr.to_string(),
format: MacroBang,
span: None,
}
@ -677,7 +677,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
_ => {
fld.cx.span_err(it.span,
&format!("{}! is not legal in item position",
extnamestr.get())[]);
&extnamestr)[]);
return SmallVector::zero();
}
}
@ -696,7 +696,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
None => {
fld.cx.span_err(path_span,
&format!("non-item macro in item position: {}",
extnamestr.get())[]);
&extnamestr)[]);
return SmallVector::zero();
}
};
@ -950,7 +950,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
fld.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
name: extnamestr.to_string(),
format: MacroBang,
span: tt_span
}
@ -968,7 +968,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
pth.span,
&format!(
"non-pattern macro in pattern position: {}",
extnamestr.get()
&extnamestr
)[]
);
return DummyResult::raw_pat(span);
@ -981,7 +981,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
_ => {
fld.cx.span_err(span,
&format!("{}! is not legal in pattern position",
extnamestr.get())[]);
&extnamestr)[]);
return DummyResult::raw_pat(span);
}
}
@ -1065,7 +1065,7 @@ fn expand_annotatable(a: Annotatable,
for attr in a.attrs() {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
match fld.cx.syntax_env.find(&intern(&mname)) {
Some(rc) => match *rc {
Decorator(ref dec) => {
let it = match a {
@ -1079,7 +1079,7 @@ fn expand_annotatable(a: Annotatable,
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
name: mname.get().to_string(),
name: mname.to_string(),
format: MacroAttribute,
span: None
}
@ -1180,7 +1180,7 @@ fn modifiers(attrs: &Vec<ast::Attribute>,
fld: &MacroExpander)
-> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
attrs.iter().cloned().partition(|attr| {
match fld.cx.syntax_env.find(&intern(attr.name().get())) {
match fld.cx.syntax_env.find(&intern(&attr.name())) {
Some(rc) => match *rc {
Modifier(_) => true,
_ => false
@ -1195,7 +1195,7 @@ fn multi_modifiers(attrs: &[ast::Attribute],
fld: &MacroExpander)
-> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
attrs.iter().cloned().partition(|attr| {
match fld.cx.syntax_env.find(&intern(attr.name().get())) {
match fld.cx.syntax_env.find(&intern(&attr.name())) {
Some(rc) => match *rc {
MultiModifier(_) => true,
_ => false
@ -1220,14 +1220,14 @@ fn expand_item_multi_modifier(mut it: Annotatable,
for attr in &modifiers {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
match fld.cx.syntax_env.find(&intern(&mname)) {
Some(rc) => match *rc {
MultiModifier(ref mac) => {
attr::mark_used(attr);
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
name: mname.get().to_string(),
name: mname.to_string(),
format: MacroAttribute,
span: None,
}
@ -1862,7 +1862,7 @@ mod test {
.collect();
println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name);
let string = token::get_ident(final_varref_ident);
println!("varref's first segment's string: \"{}\"", string.get());
println!("varref's first segment's string: \"{}\"", &string[]);
println!("binding #{}: {}, resolves to {}",
binding_idx, bindings[binding_idx], binding_name);
mtwt::with_sctable(|x| mtwt::display_sctable(x));
@ -1915,7 +1915,7 @@ foo_module!();
let cxbinds: Vec<&ast::Ident> =
bindings.iter().filter(|b| {
let ident = token::get_ident(**b);
let string = ident.get();
let string = &ident[];
"xx" == string
}).collect();
let cxbinds: &[&ast::Ident] = &cxbinds[];
@ -1929,7 +1929,7 @@ foo_module!();
// the xx binding should bind all of the xx varrefs:
for (idx,v) in varrefs.iter().filter(|p| {
p.segments.len() == 1
&& "xx" == token::get_ident(p.segments[0].identifier).get()
&& "xx" == &token::get_ident(p.segments[0].identifier)[]
}).enumerate() {
if mtwt::resolve(v.segments[0].identifier) != resolved_binding {
println!("uh oh, xx binding didn't match xx varref:");

View File

@ -118,7 +118,8 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
}
};
let interned_name = token::get_ident(ident);
let name = interned_name.get();
let name = &interned_name[];
p.expect(&token::Eq);
let e = p.parse_expr();
match names.get(name) {
@ -672,7 +673,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
None => return DummyResult::raw_expr(sp)
};
let mut parser = parse::Parser::new(fmt.get());
let mut parser = parse::Parser::new(&fmt);
loop {
match parser.next() {
Some(piece) => {

View File

@ -161,7 +161,7 @@ pub mod rt {
impl ToSource for ast::Ident {
fn to_source(&self) -> String {
token::get_ident(*self).get().to_string()
token::get_ident(*self).to_string()
}
}

View File

@ -73,7 +73,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
base::check_zero_tts(cx, sp, tts, "module_path!");
let string = cx.mod_path()
.iter()
.map(|x| token::get_ident(*x).get().to_string())
.map(|x| token::get_ident(*x).to_string())
.collect::<Vec<String>>()
.connect("::");
base::MacExpr::new(cx.expr_str(

View File

@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
p_s.span_diagnostic
.span_fatal(sp,
&format!("duplicated bind name: {}",
string.get())[])
&string)[])
}
}
}
@ -487,8 +487,8 @@ pub fn parse(sess: &ParseSess,
let name_string = token::get_ident(name);
let match_cur = ei.match_cur;
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
parse_nt(&mut rust_parser, span, name_string.get()))));
ei.idx += 1;
parse_nt(&mut rust_parser, span, &name_string))));
ei.idx += 1us;
ei.match_cur += 1;
}
_ => panic!()

View File

@ -21,6 +21,7 @@
//! For the purpose of future feature-tracking, once code for detection of feature
//! gate usage is added, *do not remove it again* even once the feature
//! becomes stable.
use self::Status::*;
use abi::RustIntrinsic;
@ -255,7 +256,7 @@ impl<'a> PostExpansionVisitor<'a> {
impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
fn visit_name(&mut self, sp: Span, name: ast::Name) {
if !token::get_name(name).get().is_ascii() {
if !token::get_name(name).is_ascii() {
self.gate_feature("non_ascii_idents", sp,
"non-ascii idents are not fully supported.");
}
@ -382,7 +383,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs,
"link_name") {
Some(val) => val.get().starts_with("llvm."),
Some(val) => val.starts_with("llvm."),
_ => false
};
if links_to_llvm {

View File

@ -1201,19 +1201,19 @@ mod test {
let source = "/// doc comment\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc.get(), "/// doc comment");
assert_eq!(&doc[], "/// doc comment");
let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
let docs = item.attrs.iter().filter(|a| a.name().get() == "doc")
.map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>();
let docs = item.attrs.iter().filter(|a| &a.name()[] == "doc")
.map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
assert_eq!(&docs[], b);
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc.get(), "/** doc comment\n * with CRLF */");
assert_eq!(&doc[], "/** doc comment\n * with CRLF */");
}
#[test]

View File

@ -5133,7 +5133,7 @@ impl<'a> Parser<'a> {
outer_attrs, "path") {
Some(d) => (dir_path.join(d), true),
None => {
let mod_name = mod_string.get().to_string();
let mod_name = mod_string.to_string();
let default_path_str = format!("{}.rs", mod_name);
let secondary_path_str = format!("{}/mod.rs", mod_name);
let default_path = dir_path.join(&default_path_str[]);
@ -5145,7 +5145,7 @@ impl<'a> Parser<'a> {
self.span_err(id_sp,
"cannot declare a new module at this location");
let this_module = match self.mod_path_stack.last() {
Some(name) => name.get().to_string(),
Some(name) => name.to_string(),
None => self.root_module_name.as_ref().unwrap().clone(),
};
self.span_note(id_sp,
@ -5191,7 +5191,7 @@ impl<'a> Parser<'a> {
};
self.eval_src_mod_from_path(file_path, owns_directory,
mod_string.get().to_string(), id_sp)
mod_string.to_string(), id_sp)
}
fn eval_src_mod_from_path(&mut self,

View File

@ -625,11 +625,6 @@ impl InternedString {
string: string,
}
}
#[inline]
pub fn get<'a>(&'a self) -> &'a str {
&self.string[]
}
}
impl Deref for InternedString {
@ -644,7 +639,7 @@ impl BytesContainer for InternedString {
// of `BytesContainer`, which is itself a workaround for the lack of
// DST.
unsafe {
let this = self.get();
let this = &self[];
mem::transmute::<&[u8],&[u8]>(this.container_as_bytes())
}
}

View File

@ -258,7 +258,7 @@ pub fn token_to_string(tok: &Token) -> String {
}
/* Name components */
token::Ident(s, _) => token::get_ident(s).get().to_string(),
token::Ident(s, _) => token::get_ident(s).to_string(),
token::Lifetime(s) => format!("{}", token::get_ident(s)),
token::Underscore => "_".to_string(),
@ -798,7 +798,7 @@ impl<'a> State<'a> {
try!(self.head(&visibility_qualified(item.vis,
"extern crate")[]));
if let Some((ref p, style)) = *optional_path {
try!(self.print_string(p.get(), style));
try!(self.print_string(p, style));
try!(space(&mut self.s));
try!(word(&mut self.s, "as"));
try!(space(&mut self.s));
@ -1313,7 +1313,7 @@ impl<'a> State<'a> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(attr.span.lo));
if attr.node.is_sugared_doc {
word(&mut self.s, attr.value_str().unwrap().get())
word(&mut self.s, &attr.value_str().unwrap())
} else {
match attr.node.style {
ast::AttrInner => try!(word(&mut self.s, "#![")),
@ -1847,17 +1847,17 @@ impl<'a> State<'a> {
ast::ExprInlineAsm(ref a) => {
try!(word(&mut self.s, "asm!"));
try!(self.popen());
try!(self.print_string(a.asm.get(), a.asm_str_style));
try!(self.print_string(&a.asm, a.asm_str_style));
try!(self.word_space(":"));
try!(self.commasep(Inconsistent, &a.outputs[],
|s, &(ref co, ref o, is_rw)| {
match co.get().slice_shift_char() {
match co.slice_shift_char() {
Some(('=', operand)) if is_rw => {
try!(s.print_string(&format!("+{}", operand)[],
ast::CookedStr))
}
_ => try!(s.print_string(co.get(), ast::CookedStr))
_ => try!(s.print_string(&co, ast::CookedStr))
}
try!(s.popen());
try!(s.print_expr(&**o));
@ -1869,7 +1869,7 @@ impl<'a> State<'a> {
try!(self.commasep(Inconsistent, &a.inputs[],
|s, &(ref co, ref o)| {
try!(s.print_string(co.get(), ast::CookedStr));
try!(s.print_string(&co, ast::CookedStr));
try!(s.popen());
try!(s.print_expr(&**o));
try!(s.pclose());
@ -1880,7 +1880,7 @@ impl<'a> State<'a> {
try!(self.commasep(Inconsistent, &a.clobbers[],
|s, co| {
try!(s.print_string(co.get(), ast::CookedStr));
try!(s.print_string(&co, ast::CookedStr));
Ok(())
}));
@ -1954,7 +1954,7 @@ impl<'a> State<'a> {
let encoded = ident.encode_with_hygiene();
try!(word(&mut self.s, &encoded[]))
} else {
try!(word(&mut self.s, token::get_ident(ident).get()))
try!(word(&mut self.s, &token::get_ident(ident)))
}
self.ann.post(self, NodeIdent(&ident))
}
@ -1964,7 +1964,7 @@ impl<'a> State<'a> {
}
pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
try!(word(&mut self.s, token::get_name(name).get()));
try!(word(&mut self.s, &token::get_name(name)));
self.ann.post(self, NodeName(&name))
}
@ -2532,15 +2532,15 @@ impl<'a> State<'a> {
try!(self.ibox(indent_unit));
match item.node {
ast::MetaWord(ref name) => {
try!(word(&mut self.s, name.get()));
try!(word(&mut self.s, &name));
}
ast::MetaNameValue(ref name, ref value) => {
try!(self.word_space(name.get()));
try!(self.word_space(&name[]));
try!(self.word_space("="));
try!(self.print_literal(value));
}
ast::MetaList(ref name, ref items) => {
try!(word(&mut self.s, name.get()));
try!(word(&mut self.s, &name));
try!(self.popen());
try!(self.commasep(Consistent,
&items[],
@ -2731,7 +2731,7 @@ impl<'a> State<'a> {
_ => ()
}
match lit.node {
ast::LitStr(ref st, style) => self.print_string(st.get(), style),
ast::LitStr(ref st, style) => self.print_string(&st, style),
ast::LitByte(byte) => {
let mut res = String::from_str("b'");
ascii::escape_default(byte, |c| res.push(c as char));
@ -2772,10 +2772,10 @@ impl<'a> State<'a> {
word(&mut self.s,
&format!(
"{}{}",
f.get(),
&f,
&ast_util::float_ty_to_string(t)[])[])
}
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[]),
ast::LitBool(val) => {
if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") }
}

View File

@ -512,7 +512,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
});
let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
// building `use <ident> = __test::main`
let reexport_ident = token::str_to_ident(s.get());
let reexport_ident = token::str_to_ident(&s);
let use_path =
nospan(ast::ViewPathSimple(reexport_ident,
@ -575,7 +575,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crate_name(&krate.attrs[]) {
Some(ref s) if "test" == &s.get()[] => true,
Some(ref s) if "test" == &s[] => true,
_ => false
}
}

View File

@ -37,9 +37,9 @@ impl LintPass for Pass {
fn check_item(&mut self, cx: &Context, it: &ast::Item) {
let name = token::get_ident(it.ident);
if name.get() == "lintme" {
if &name[] == "lintme" {
cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
} else if name.get() == "pleaselintme" {
} else if &name[] == "pleaselintme" {
cx.span_lint(PLEASE_LINT, it.span, "item is named 'pleaselintme'");
}
}

View File

@ -35,7 +35,7 @@ impl LintPass for Pass {
fn check_item(&mut self, cx: &Context, it: &ast::Item) {
let name = token::get_ident(it.ident);
if name.get() == "lintme" {
if &name[] == "lintme" {
cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
}
}

View File

@ -1,3 +1,5 @@
// no-prefer-dynamic
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.