auto merge of #18368 : alexcrichton/rust/rollup, r=alexcrichton

bors 2014-10-27 23:02:55 +00:00
commit bd7138dd69
86 changed files with 1603 additions and 661 deletions

.mailmap

@ -5,8 +5,104 @@
# email addresses.
#
Elly Jones <elly@leptoquark.net>
ILyoan <ilyoan@gmail.com>
Aaron Todd <github@opprobrio.us>
Ahmed Charles <ahmedcharles@gmail.com> <acharles@outlook.com>
Alex Lyon <arcterus@mail.com> <Arcterus@mail.com>
Alex Rønne Petersen <alex@lycus.org>
Andreas Gal <gal@mozilla.com> <andreas.gal@gmail.com>
Andrew Poelstra <asp11@sfu.ca> <apoelstra@wpsoftware.net>
Anton Löfgren <anton.lofgren@gmail.com> <alofgren@op5.com>
Ariel Ben-Yehuda <arielb1@mail.tau.ac.il> <ariel.byd@gmail.com>
Austin Seipp <mad.one@gmail.com> <as@hacks.yi.org>
Ben Alpert <ben@benalpert.com> <spicyjalapeno@gmail.com>
Benjamin Jackman <ben@jackman.biz>
Björn Steinbrink <bsteinbr@gmail.com> <B.Steinbrink@gmx.de>
blake2-ppc <ulrik.sverdrup@gmail.com> <blake2-ppc>
Boris Egorov <jightuse@gmail.com> <egorov@linux.com>
Brian Anderson <banderson@mozilla.com> <andersrb@gmail.com>
Brian Dawn <brian.t.dawn@gmail.com>
Carl-Anton Ingmarsson <mail@carlanton.se> <ca.ingmarsson@gmail.com>
Carol Willing <carolcode@willingconsulting.com>
Chris Pressey <cpressey@gmail.com>
Clark Gaebel <cg.wowus.cg@gmail.com> <cgaebel@mozilla.com>
David Klein <david.klein@baesystemsdetica.com>
David Manescu <david.manescu@gmail.com> <dman2626@uni.sydney.edu.au>
Damien Schoof <damien.schoof@gmail.com>
Derek Chiang <derekchiang93@gmail.com> Derek Chiang (Enchi Jiang) <derekchiang93@gmail.com>
Dylan Braithwaite <dylanbraithwaite1@gmail.com> <mail@dylanb.me>
Eduardo Bautista <me@eduardobautista.com> <mail@eduardobautista.com>
Eduardo Bautista <me@eduardobautista.com> <=>
Elliott Slaughter <elliottslaughter@gmail.com> <eslaughter@mozilla.com>
Elly Fong-Jones <elly@leptoquark.net>
Eric Holk <eric.holk@gmail.com> <eholk@mozilla.com>
Eric Holk <eric.holk@gmail.com> <eholk@cs.indiana.edu>
Eric Holmes <eric@ejholmes.net>
Eric Reed <ecreed@cs.washington.edu> <ereed@mozilla.com>
Erick Tryzelaar <erick.tryzelaar@gmail.com> <etryzelaar@iqt.org>
Evgeny Sologubov
Falco Hirschenberger <falco.hirschenberger@gmail.com> <hirschen@itwm.fhg.de>
Gareth Daniel Smith <garethdanielsmith@gmail.com>
Georges Dubus <georges.dubus@gmail.com> <georges.dubus@compiletoi.net>
Graham Fawcett <fawcett@uwindsor.ca> <graham.fawcett@gmail.com>
Graydon Hoare <graydon@mozilla.com> <graydon@pobox.com>
Heather <heather@cynede.net> <Heather@cynede.net>
Heather <heather@cynede.net> <Cynede@Gentoo.org>
Ilyong Cho <ilyoan@gmail.com>
J. J. Weber <jjweber@gmail.com>
Jakub Bukaj <jakub@jakub.cc>
Jakub Bukaj <jakub@jakub.cc> <jakubw@jakubw.net>
James Deng <cnjamesdeng@gmail.com> <cnJamesDeng@gmail.com>
James Miller <bladeon@gmail.com> <james@aatch.net>
Jason Orendorff <jorendorff@mozilla.com> <jason@mozmac-2.local>
Jason Orendorff <jorendorff@mozilla.com> <jason.orendorff@gmail.com>
Jeremy Letang <letang.jeremy@gmail.com>
Jihyun Yu <jihyun@nclab.kaist.ac.kr> jihyun <jihyun@nablecomm.com>
Jihyun Yu <jihyun@nclab.kaist.ac.kr> <yjh0502@gmail.com>
John Clements <clements@racket-lang.org> <clements@brinckerhoff.org>
Jorge Aparicio <japaric@linux.com> <japaricious@gmail.com>
Jonathan Bailey <jbailey@mozilla.com> <jbailey@jbailey-20809.local>
Junyoung Cho <june0.cho@samsung.com>
Jyun-Yan You <jyyou.tw@gmail.com> <jyyou@cs.nctu.edu.tw>
Kang Seonghoon <kang.seonghoon@mearie.org> <public+git@mearie.org>
Keegan McAllister <kmcallister@mozilla.com> <mcallister.keegan@gmail.com>
Kyeongwoon Lee <kyeongwoon.lee@samsung.com>
Lee Wondong <wdlee91@gmail.com>
Lennart Kudling <github@kudling.de>
Lindsey Kuper <lindsey@composition.al> <lindsey@rockstargirl.org>
Lindsey Kuper <lindsey@composition.al> <lkuper@mozilla.com>
Luqman Aden <me@luqman.ca> <laden@mozilla.com>
Luqman Aden <me@luqman.ca> <laden@csclub.uwaterloo.ca>
Luke Metz <luke.metz@students.olin.edu>
Makoto Nakashima <makoto.nksm+github@gmail.com> <makoto.nksm@gmail.com>
Makoto Nakashima <makoto.nksm+github@gmail.com> gifnksm <makoto.nksm+github@gmail.com>
Margaret Meyerhofer <mmeyerho@andrew.cmu.edu> <mmeyerho@andrew>
Mark Sinclair <mark.edward.x@gmail.com>
Mark Sinclair <mark.edward.x@gmail.com> =Mark Sinclair <=125axel125@gmail.com>
Matt Brubeck <mbrubeck@limpet.net> <mbrubeck@cs.hmc.edu>
Matthew Auld <matthew.auld@intel.com>
Matthew McPherrin <matthew@mcpherrin.ca> <matt@mcpherrin.ca>
Matthijs Hofstra <thiezz@gmail.com>
Michael Williams <m.t.williams@live.com>
Michael Woerister <michaelwoerister@gmail> <michaelwoerister@gmail.com>
Michael Woerister <michaelwoerister@gmail> <michaelwoerister@posteo>
Neil Pankey <npankey@gmail.com> <neil@wire.im>
Philipp Brüschweiler <blei42@gmail.com> <blei42@gmail.com>
Philipp Brüschweiler <blei42@gmail.com> <bruphili@student.ethz.ch>
Pradeep Kumar <gohanpra@gmail.com>
Richard Diamond <wichard@vitalitystudios.com> <wichard@hahbee.co>
Rob Arnold <robarnold@cs.cmu.edu>
Robert Gawdzik <rgawdzik@hotmail.com> Robert Gawdzik ☢ <rgawdzik@hotmail.com>
Robert Millar <robert.millar@cantab.net>
Ryan Scheel <ryan.havvy@gmail.com>
Seonghyun Kim <sh8281.kim@samsung.com>
Simon Barber-Dueck <sbarberdueck@gmail.com> Simon BD <simon@server>
Simon Sapin <simon@exyr.org> <simon.sapin@exyr.org>
startling <tdixon51793@gmail.com>
Steven Fackler <sfackler@gmail.com> <sfackler@palantir.com>
Steven Stewart-Gallus <sstewartgallus00@langara.bc.ca> <sstewartgallus00@mylangara.bc.ca>
Tim Chevalier <chevalier@alum.wellesley.edu> <catamorphism@gmail.com>
Torsten Weber <TorstenWeber12@gmail.com> <torstenweber12@gmail.com>
William Ting <io@williamting.com> <william.h.ting@gmail.com>
Youngsoo Son <ysson83@gmail.com> <ysoo.son@samsung.com>
Zack Corr <zack@z0w0.me> <zackcorr95@gmail.com>
Zack Slayton <zack.slayton@gmail.com>

configure

@ -989,6 +989,7 @@ do
make_dir $h/test/doc-guide-container
make_dir $h/test/doc-guide-tasks
make_dir $h/test/doc-guide-plugin
make_dir $h/test/doc-guide-crates
make_dir $h/test/doc-rust
done


@ -27,7 +27,7 @@
######################################################################
DOCS := index intro tutorial guide guide-ffi guide-macros guide-lifetimes \
guide-tasks guide-container guide-pointers guide-testing \
guide-plugin complement-bugreport \
guide-plugin guide-crates complement-bugreport \
complement-lang-faq complement-design-faq complement-project-faq rust \
rustdoc guide-unsafe guide-strings reference


@ -452,7 +452,7 @@ fn main() {
Rust will give us a compile-time error:
```{rust,ignore}
```{notrust,ignore}
Compiling phrases v0.0.1 (file:///home/you/projects/phrases)
/home/you/projects/phrases/src/main.rs:4:5: 4:40 error: a value named `hello` has already been imported in this module
/home/you/projects/phrases/src/main.rs:4 use phrases::japanese::greetings::hello;


@ -56,7 +56,7 @@ extern crate rustc;
use syntax::codemap::Span;
use syntax::parse::token::{IDENT, get_ident};
use syntax::ast::{TokenTree, TTTok};
use syntax::ast::{TokenTree, TtToken};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
use syntax::ext::build::AstBuilder; // trait for expr_uint
use rustc::plugin::Registry;
@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
("I", 1)];
let text = match args {
[TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
[TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);
@ -151,8 +151,7 @@ higher-level syntax elements like expressions:
fn expand_foo(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-> Box<MacResult+'static> {
let mut parser =
parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), args.to_slice())
let mut parser = cx.new_parser_from_tts(args);
let expr: P<Expr> = parser.parse_expr();
```


@ -416,7 +416,7 @@ great detail, so if you want the full details, check that out.
In general, prefer stack allocation over heap allocation. Using references to
stack allocated information is preferred whenever possible. Therefore,
references are the default pointer type you should use, unless you have
references are the default pointer type you should use, unless you have a
specific reason to use a different type. The other types of pointers cover when
they're appropriate to use in their own best practices sections.
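As a small illustration of that advice (an added sketch, not part of the guide; the function and names are made up), borrowing the data keeps it where it already lives and leaves the caller free to keep using it:

```{rust}
// Borrow a slice of the data instead of moving it or boxing it.
fn sum(values: &[i32]) -> i32 {
    let mut total = 0;
    for v in values.iter() {
        total += *v;
    }
    total
}

fn main() {
    let numbers = vec![1i32, 2, 3];
    // Pass a reference; `numbers` is still owned and usable afterwards.
    println!("{}", sum(numbers.as_slice()));
    println!("{}", numbers.len());
}
```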


@ -14,8 +14,8 @@ Rust has two main types of strings: `&str` and `String`.
# &str
The first kind is a `&str`. This is pronounced a 'string slice.' String literals
are of the type `&str`:
The first kind is a `&str`. This is pronounced a 'string slice'.
String literals are of the type `&str`:
```{rust}
let string = "Hello there.";
@ -121,8 +121,8 @@ Both of these lines will print `12`.
To compare a String to a constant string, prefer `as_slice()`...
```{rust}
fn compare(string: String) {
if string.as_slice() == "Hello" {
fn compare(x: String) {
if x.as_slice() == "Hello" {
println!("yes");
}
}
@ -131,8 +131,8 @@ fn compare(string: String) {
... over `to_string()`:
```{rust}
fn compare(string: String) {
if string == "Hello".to_string() {
fn compare(x: String) {
if x == "Hello".to_string() {
println!("yes");
}
}
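For completeness, a usage sketch (not part of the guide): the `as_slice()` form borrows the `String`'s existing buffer for the comparison, whereas `to_string()` allocates a brand-new `String` just to compare it and throw it away.

```{rust}
fn compare(x: String) {
    // Borrow the buffer as a &str; no extra allocation happens here.
    if x.as_slice() == "Hello" {
        println!("yes");
    }
}

fn main() {
    compare("Hello".to_string());
}
```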


@ -1130,12 +1130,12 @@ fn main() {
let y = Missing;
match x {
Value(n) => println!("x is {:d}", n),
Value(n) => println!("x is {}", n),
Missing => println!("x is missing!"),
}
match y {
Value(n) => println!("y is {:d}", n),
Value(n) => println!("y is {}", n),
Missing => println!("y is missing!"),
}
}
@ -1301,7 +1301,7 @@ Instead, it looks like this:
```{rust}
for x in range(0i, 10i) {
println!("{:d}", x);
println!("{}", x);
}
```
@ -1408,7 +1408,7 @@ iteration: This will only print the odd numbers:
for x in range(0i, 10i) {
if x % 2 == 0 { continue; }
println!("{:d}", x);
println!("{}", x);
}
```
@ -1677,12 +1677,12 @@ fn main() {
let y = Missing;
match x {
Value(n) => println!("x is {:d}", n),
Value(n) => println!("x is {}", n),
Missing => println!("x is missing!"),
}
match y {
Value(n) => println!("y is {:d}", n),
Value(n) => println!("y is {}", n),
Missing => println!("y is missing!"),
}
}
@ -1793,7 +1793,7 @@ Finally, Cargo generated a hello, world for us. Check out `src/main.rs`:
```{rust}
fn main() {
println!("Hello, world!");
println!("Hello, world!")
}
```
@ -2682,12 +2682,12 @@ like this:
```
fn main() {
println!("Hello, world!");
println!("Hello, world!")
}
mod hello {
fn print_hello() {
println!("Hello, world!");
println!("Hello, world!")
}
}
```
@ -2721,7 +2721,7 @@ fn main() {
mod hello {
fn print_hello() {
println!("Hello, world!");
println!("Hello, world!")
}
}
```
@ -2744,7 +2744,7 @@ fn main() {
mod hello {
pub fn print_hello() {
println!("Hello, world!");
println!("Hello, world!")
}
}
```
@ -2921,15 +2921,11 @@ it `false`, so this test should fail. Let's try it!
```{notrust,ignore}
$ cargo test
Compiling testing v0.0.1 (file:///home/you/projects/testing)
/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default
/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: function is never used: `main`, #[warn(dead_code)] on by default
/home/you/projects/testing/src/main.rs:1 fn main() {
/home/you/projects/testing/src/main.rs:2 println!("Hello, world");
/home/you/projects/testing/src/main.rs:2 println!("Hello, world!")
/home/you/projects/testing/src/main.rs:3 }
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/lib-654ce120f310a3a5
running 1 test
test foo ... FAILED
@ -2946,7 +2942,7 @@ failures:
test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured
task '<main>' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:242
task '<main>' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:243
```
Lots of output! Let's break this down:
@ -2960,9 +2956,9 @@ You can run all of your tests with `cargo test`. This runs both your tests in
`tests`, as well as the tests you put inside of your crate.
```{notrust,ignore}
/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default
/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: function is never used: `main`, #[warn(dead_code)] on by default
/home/you/projects/testing/src/main.rs:1 fn main() {
/home/you/projects/testing/src/main.rs:2 println!("Hello, world");
/home/you/projects/testing/src/main.rs:2 println!("Hello, world!")
/home/you/projects/testing/src/main.rs:3 }
```
@ -2974,18 +2970,8 @@ We'll turn this lint off for just this function soon. For now, just ignore this
output.
```{notrust,ignore}
running 0 tests
Running target/lib-654ce120f310a3a5
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
```
Wait a minute, zero tests? Didn't we define one? Yup. This output is from
attempting to run the tests in our crate, of which we don't have any.
You'll note that Rust reports on several kinds of tests: passed, failed,
ignored, and measured. The 'measured' tests refer to benchmark tests, which
we'll cover soon enough!
```{notrust,ignore}
running 1 test
test foo ... FAILED
```
@ -3008,7 +2994,7 @@ failures:
test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured
task '<main>' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:242
task '<main>' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:243
```
After all the tests run, Rust will show us any output from our failed tests.
@ -3029,29 +3015,30 @@ And then try to run our tests again:
```{notrust,ignore}
$ cargo test
Compiling testing v0.0.1 (file:///home/you/projects/testing)
/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default
/home/you/projects/testing/src/main.rs:1 fn main() {
/home/you/projects/testing/src/main.rs:2 println!("Hello, world");
/home/you/projects/testing/src/main.rs:3 }
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/lib-654ce120f310a3a5
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-6d7518593c7c3ee5
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
```
Nice! Our test passes, as we expected. Let's get rid of that warning for our `main`
function. Change your `src/main.rs` to look like this:
Nice! Our test passes, as we expected. Note how we didn't get the
`main` warning this time? This is because `src/main.rs` didn't
need recompiling, but we'll get that warning again if we
change (and recompile) that file. Let's get rid of that
warning; change your `src/main.rs` to look like this:
```{rust}
#[cfg(not(test))]
fn main() {
println!("Hello, world");
println!("Hello, world!")
}
```
@ -3062,21 +3049,24 @@ our tests, it sets things up so that `cfg(test)` is true. But we want to only
include `main` when it's _not_ true. So we use `not` to negate things:
`cfg(not(test))` will only compile our code when the `cfg(test)` is false.
With this attribute, we won't get the warning:
With this attribute we won't get the warning (even
though `src/main.rs` gets recompiled this time):
```{notrust,ignore}
$ cargo test
Compiling testing v0.0.1 (file:///home/you/projects/testing)
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/lib-654ce120f310a3a5
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-6d7518593c7c3ee5
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
```
Nice. Okay, let's write a real test now. Change your `tests/lib.rs`
@ -3133,7 +3123,7 @@ extern crate testing;
#[cfg(not(test))]
fn main() {
println!("Hello, world");
println!("Hello, world!")
}
```
@ -3156,21 +3146,30 @@ Let's give it a run:
```{ignore,notrust}
$ cargo test
Compiling testing v0.0.1 (file:///home/you/projects/testing)
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/lib-654ce120f310a3a5
running 1 test
test math_checks_out ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-6d7518593c7c3ee5
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-8a94b31f7fd2e8fe
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Doc-tests testing
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
```
Great! One test passed. We've got an integration test showing that our public
@ -3196,21 +3195,30 @@ If you run `cargo test`, you should get the same output:
```{ignore,notrust}
$ cargo test
Compiling testing v0.0.1 (file:///home/you/projects/testing)
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/lib-654ce120f310a3a5
running 1 test
test math_checks_out ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-6d7518593c7c3ee5
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-8a94b31f7fd2e8fe
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Doc-tests testing
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
```
If we tried to write a test for these two new functions, it wouldn't
@ -3283,6 +3291,20 @@ Let's give it a shot:
```{ignore,notrust}
$ cargo test
Compiling testing v0.0.1 (file:///home/you/projects/testing)
Running target/lib-654ce120f310a3a5
running 1 test
test math_checks_out ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-6d7518593c7c3ee5
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Running target/testing-8a94b31f7fd2e8fe
running 2 tests
test test::test_times_four ... ok
@ -3290,16 +3312,11 @@ test test::test_add_three ... ok
test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured
Doc-tests testing
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test math_checks_out ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
```
Cool! We now have two tests of our internal functions. You'll note that there
@ -3637,40 +3654,72 @@ pub fn as_maybe_owned(&self) -> MaybeOwned<'a> { ... }
## Boxes
All of our references so far have been to variables we've created on the stack.
In Rust, the simplest way to allocate heap variables is using a *box*. To
create a box, use the `box` keyword:
Most of the types we've seen so far have a fixed size or number of components.
The compiler needs this fact to lay out values in memory. However, some data
structures, such as a linked list, do not have a fixed size. You might think to
implement a linked list with an enum that's either a `Node` or the end of the
list (`Nil`), like this:
```{rust}
let x = box 5i;
```{rust,ignore}
enum List { // error: illegal recursive enum type
Node(u32, List),
Nil
}
```
This allocates an integer `5` on the heap, and creates a binding `x` that
refers to it. The great thing about boxed pointers is that we don't have to
manually free this allocation! If we write
But the compiler complains that the type is recursive, that is, it could be
arbitrarily large. To remedy this, Rust provides a fixed-size container called
a **box** that can hold any type. You can box up any value with the `box`
keyword. Our boxed List gets the type `Box<List>` (more on the notation when we
get to generics):
```{rust}
enum List {
Node(u32, Box<List>),
Nil
}
fn main() {
let list = Node(0, box Node(1, box Nil));
}
```
A box dynamically allocates memory to hold its contents. The great thing about
Rust is that that memory is *automatically*, *efficiently*, and *predictably*
deallocated when you're done with the box.
A box is a pointer type, and you access what's inside using the `*` operator,
just like regular references. This (rather silly) example dynamically allocates
an integer `5` and makes `x` a pointer to it:
```{rust}
{
let x = box 5i;
// do stuff
println!("{}", *x); // Prints 5
}
```
then Rust will automatically free `x` at the end of the block. This isn't
because Rust has a garbage collector -- it doesn't. Instead, when `x` goes out
of scope, Rust `free`s `x`. This Rust code will do the same thing as the
following C code:
The great thing about boxes is that we don't have to manually free this
allocation! Instead, when `x` reaches the end of its lifetime -- in this case,
when it goes out of scope at the end of the block -- Rust `free`s `x`. This
isn't because Rust has a garbage collector (it doesn't). Instead, by tracking
the ownership and lifetime of a variable (with a little help from you, the
programmer), the compiler knows precisely when it is no longer used.
The Rust code above will do the same thing as the following C code:
```{c,ignore}
{
int *x = (int *)malloc(sizeof(int));
// do stuff
if (!x) abort();
*x = 5;
printf("%d\n", *x);
free(x);
}
```
This means we get the benefits of manual memory management, but the compiler
ensures that we don't do something wrong. We can't forget to `free` our memory.
We get the benefits of manual memory management, while ensuring we don't
introduce any bugs. We can't forget to `free` our memory.
Boxes are the sole owner of their contents, so you cannot take a mutable
reference to them and then use the original box:
@ -3706,48 +3755,50 @@ let mut x = box 5i;
*x;
```
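Spelled out a little more (a sketch in the same spirit as the snippet above, not taken from the guide): once the contents are mutably borrowed, the original box cannot be read through until that borrow ends.

```{rust}
let mut x = box 5i;
let y = &mut *x;   // mutably borrow the box's contents
*y += 1;
// println!("{}", *x); // error: `*x` cannot be used while it is mutably borrowed
```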
Boxes are simple and efficient pointers to dynamically allocated values with a
single owner. They are useful for tree-like structures where the lifetime of a
child depends solely on the lifetime of its (single) parent. If you need a
value that must persist as long as any of several referrers, read on.
## Rc and Arc
Sometimes, you need to allocate something on the heap, but give out multiple
references to the memory. Rust's `Rc<T>` (pronounced 'arr cee tee') and
`Arc<T>` types (again, the `T` is for generics, we'll learn more later) provide
you with this ability. **Rc** stands for 'reference counted,' and **Arc** for
'atomically reference counted.' This is how Rust keeps track of the multiple
owners: every time we make a new reference to the `Rc<T>`, we add one to its
internal 'reference count.' Every time a reference goes out of scope, we
subtract one from the count. When the count is zero, the `Rc<T>` can be safely
deallocated. `Arc<T>` is almost identical to `Rc<T>`, except for one thing: The
'atomically' in 'Arc' means that increasing and decreasing the count uses a
thread-safe mechanism to do so. Why two types? `Rc<T>` is faster, so if you're
not in a multi-threaded scenario, you can have that advantage. Since we haven't
talked about threading yet in Rust, we'll show you `Rc<T>` for the rest of this
section.
Sometimes, you need a variable that is referenced from multiple places
(immutably!), lasting as long as any of those places, and disappearing when it
is no longer referenced. For instance, in a graph-like data structure, a node
might be referenced from all of its neighbors. In this case, it is not possible
for the compiler to determine ahead of time when the value can be freed -- it
needs a little run-time support.
To create an `Rc<T>`, use `Rc::new()`:
Rust's **Rc** type provides shared ownership of a dynamically allocated value
that is automatically freed at the end of its last owner's lifetime. (`Rc`
stands for 'reference counted,' referring to the way these library types are
implemented.) This provides more flexibility than single-owner boxes, but has
some runtime overhead.
```{rust}
use std::rc::Rc;
let x = Rc::new(5i);
```
To create a second reference, use the `.clone()` method:
To create an `Rc` value, use `Rc::new()`. To create a second owner, use the
`.clone()` method:
```{rust}
use std::rc::Rc;
let x = Rc::new(5i);
let y = x.clone();
println!("{} {}", *x, *y); // Prints 5 5
```
The `Rc<T>` will live as long as any of its references are alive. After they
all go out of scope, the memory will be `free`d.
The `Rc` will live as long as any of its owners are alive. After that, the
memory will be `free`d.
If you use `Rc<T>` or `Arc<T>`, you have to be careful about introducing
cycles. If you have two `Rc<T>`s that point to each other, the reference counts
will never drop to zero, and you'll have a memory leak. To learn more, check
out [the section on `Rc<T>` and `Arc<T>` in the pointers
guide](guide-pointers.html#rc-and-arc).
**Arc** is an 'atomically reference counted' value, identical to `Rc` except
that ownership can be safely shared among multiple threads. Why two types?
`Arc` has more overhead, so if you're not in a multi-threaded scenario, you
don't have to pay the price.
If you use `Rc` or `Arc`, you have to be careful about introducing cycles. If
you have two `Rc`s that point to each other, they will happily keep each other
alive forever, creating a memory leak. To learn more, check out [the section on
`Rc` and `Arc` in the pointers guide](guide-pointers.html#rc-and-arc).
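To make the counting concrete, here is a small sketch (not from the guide): every `clone()` adds an owner, every owner that goes out of scope removes one, and the value is freed when the last owner is gone. In a cycle, the two values count as each other's owners, which is exactly why neither count can ever reach zero.

```{rust}
use std::rc::Rc;

fn main() {
    let x = Rc::new(5i);
    let y = x.clone();      // a second owner: the count is now two
    {
        let z = x.clone();  // a third owner, but only for this block
        println!("{}", *z);
    }                       // `z` goes out of scope: back to two owners
    println!("{} {}", *x, *y);
}                           // the last owners disappear and the 5 is freed
```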
# Patterns
@ -4220,7 +4271,7 @@ Remember Rust's `for` loop? Here's an example:
```{rust}
for x in range(0i, 10i) {
println!("{:d}", x);
println!("{}", x);
}
```
@ -4353,7 +4404,7 @@ is one:
```{rust}
let greater_than_forty_two = range(0i, 100i)
.find(|x| *x >= 42);
.find(|x| *x > 42);
match greater_than_forty_two {
Some(_) => println!("We got some numbers!"),
@ -4474,7 +4525,7 @@ range(1i, 100i).map(|x| x + 1i);
`map` is called upon another iterator, and produces a new iterator where each
element reference has the closure it's been given as an argument called on it.
So this would give us the numbers from `2-101`. Well, almost! If you
So this would give us the numbers from `2-100`. Well, almost! If you
compile the example, you'll get a warning:
```{notrust,ignore}
@ -5288,9 +5339,9 @@ There are two circumstances where Rust's safety provisions don't work well.
The first is when interfacing with C code, and the second is when building
certain kinds of abstractions.
Rust has support for FFI (which you can read about in the [FFI
Guide](guide-ffi.html)), but can't guarantee that the C code will be safe.
Therefore, Rust marks such functions with the `unsafe`
Rust has support for [FFI](http://en.wikipedia.org/wiki/Foreign_function_interface)
(which you can read about in the [FFI Guide](guide-ffi.html)), but can't guarantee
that the C code will be safe. Therefore, Rust marks such functions with the `unsafe`
keyword, which indicates that the function may not behave properly.
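As a rough illustration (a sketch, not part of the guide; it assumes the C library's `abs`, whose argument matches `i32` on common platforms), any call across that boundary has to be wrapped in `unsafe`, because the compiler cannot inspect the C side:

```{rust}
extern {
    fn abs(input: i32) -> i32;
}

fn main() {
    // The compiler can't verify what the C implementation does,
    // so the call site must be marked `unsafe`.
    let x = unsafe { abs(-3) };
    println!("{}", x);
}
```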
Second, if you'd like to create some sort of shared-memory data structure, Rust


@ -57,6 +57,7 @@ a guide that can help you out:
* [Strings](guide-strings.html)
* [Pointers](guide-pointers.html)
* [References and Lifetimes](guide-lifetimes.html)
* [Crates and modules](guide-crates.html)
* [Tasks and Communication](guide-tasks.html)
* [Foreign Function Interface](guide-ffi.html)
* [Writing Unsafe and Low-Level Code](guide-unsafe.html)


@ -19,6 +19,7 @@
[type: text] src/doc/guide-tasks.md $lang:doc/l10n/$lang/guide-tasks.md
[type: text] src/doc/guide-testing.md $lang:doc/l10n/$lang/guide-testing.md
[type: text] src/doc/guide-unsafe.md $lang:doc/l10n/$lang/guide-unsafe.md
[type: text] src/doc/guide-unsafe.md $lang:doc/l10n/$lang/guide-crates.md
[type: text] src/doc/guide.md $lang:doc/l10n/$lang/guide.md
[type: text] src/doc/index.md $lang:doc/l10n/$lang/index.md
[type: text] src/doc/intro.md $lang:doc/l10n/$lang/intro.md


@ -376,7 +376,7 @@ fn bar( a:int,
-> int
{ }
fn baz( a:int, // shoudl work with a comment here
fn baz( a:int, // should work with a comment here
b:char)
-> int
{ }


@ -15,6 +15,7 @@
use core::atomic;
use core::clone::Clone;
use core::fmt::{mod, Show};
use core::kinds::{Sync, Send};
use core::mem::{min_align_of, size_of, drop};
use core::mem;
@ -147,6 +148,12 @@ impl<T: Send + Sync> Deref<T> for Arc<T> {
}
}
impl<T: Send + Sync + Show> Show for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
(**self).fmt(f)
}
}
impl<T: Send + Sync + Clone> Arc<T> {
/// Acquires a mutable pointer to the inner contents by guaranteeing that
/// the reference count is one (no sharing is possible).
@ -280,6 +287,7 @@ mod tests {
use std::mem::drop;
use std::ops::Drop;
use std::option::{Option, Some, None};
use std::str::Str;
use std::sync::atomic;
use std::task;
use std::vec::Vec;
@ -426,4 +434,10 @@ mod tests {
assert!(canary.load(atomic::Acquire) == 1);
drop(arc_weak);
}
#[test]
fn show_arc() {
let a = Arc::new(5u32);
assert!(format!("{}", a).as_slice() == "5")
}
}


@ -41,10 +41,10 @@ use ringbuf::RingBuf;
/// the BST strategy.
///
/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
/// this, we reduce the number of allocations by a factor of B, and improve cache effeciency in
/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
/// searches. However, this does mean that searches will have to do *more* comparisons on average.
/// The precise number of comparisons depends on the node search strategy used. For optimal cache
/// effeciency, one could search the nodes linearly. For optimal comparisons, one could search
/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
/// the node using binary search. As a compromise, one could also perform a linear search
/// that initially only checks every i<sup>th</sup> element for some choice of i.
///


@ -53,7 +53,7 @@ pub struct Node<K, V> {
// hard. For now, we accept this cost in the name of correctness and simplicity.
//
// As a compromise, keys and vals could be merged into one Vec<(K, V)>, which would shave
// off 3 words, but possibly hurt our cache effeciency during search, which only cares about
// off 3 words, but possibly hurt our cache efficiency during search, which only cares about
// keys. This would also avoid the Zip we use in our iterator implementations. This is
// probably worth investigating.
//
@ -72,7 +72,7 @@ impl<K: Ord, V> Node<K, V> {
/// `GoDown` will be yielded with the index of the subtree the key must lie in.
pub fn search(&self, key: &K) -> SearchResult {
// FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
// For the B configured as of this writing (B = 6), binary search was *singnificantly*
// For the B configured as of this writing (B = 6), binary search was *significantly*
// worse for uints.
self.search_linear(key)
}
@ -375,7 +375,7 @@ impl<K, V> Node<K, V> {
}
}
/// Steal! Stealing is roughly analagous to a binary tree rotation.
/// Steal! Stealing is roughly analogous to a binary tree rotation.
/// In this case, we're "rotating" right.
unsafe fn steal_to_left(&mut self, underflowed_child_index: uint) {
// Take the biggest stuff off left
@ -387,7 +387,7 @@ impl<K, V> Node<K, V> {
}
};
// Swap the parent's seperating key-value pair with left's
// Swap the parent's separating key-value pair with left's
self.unsafe_swap(underflowed_child_index - 1, &mut key, &mut val);
// Put them at the start of right
@ -402,7 +402,7 @@ impl<K, V> Node<K, V> {
}
}
/// Steal! Stealing is roughly analagous to a binary tree rotation.
/// Steal! Stealing is roughly analogous to a binary tree rotation.
/// In this case, we're "rotating" left.
unsafe fn steal_to_right(&mut self, underflowed_child_index: uint) {
// Take the smallest stuff off right
@ -414,7 +414,7 @@ impl<K, V> Node<K, V> {
}
};
// Swap the parent's seperating key-value pair with right's
// Swap the parent's separating key-value pair with right's
self.unsafe_swap(underflowed_child_index, &mut key, &mut val);
// Put them at the end of left
@ -430,9 +430,9 @@ impl<K, V> Node<K, V> {
}
/// Merge! Left and right will be smooshed into one node, along with the key-value
/// pair that seperated them in their parent.
/// pair that separated them in their parent.
unsafe fn merge_children(&mut self, left_index: uint) {
// Permanently remove right's index, and the key-value pair that seperates
// Permanently remove right's index, and the key-value pair that separates
// left and right
let (key, val, right) = {
match (self.keys.remove(left_index),
@ -448,7 +448,7 @@ impl<K, V> Node<K, V> {
left.absorb(key, val, right);
}
/// Take all the values from right, seperated by the given key and value
/// Take all the values from right, separated by the given key and value
fn absorb(&mut self, key: K, val: V, right: Node<K, V>) {
// Just as a sanity check, make sure we can fit this guy in
debug_assert!(self.len() + right.len() <= self.capacity())


@ -292,7 +292,7 @@ impl<T> BoxedSlice<T> for Box<[T]> {
#[experimental]
fn into_vec(mut self) -> Vec<T> {
unsafe {
let xs = Vec::from_raw_parts(self.len(), self.len(), self.as_mut_ptr());
let xs = Vec::from_raw_parts(self.as_mut_ptr(), self.len(), self.len());
mem::forget(self);
xs
}


@ -780,7 +780,7 @@ pub mod raw {
#[inline]
pub unsafe fn from_parts(buf: *mut u8, length: uint, capacity: uint) -> String {
String {
vec: Vec::from_raw_parts(length, capacity, buf),
vec: Vec::from_raw_parts(buf, length, capacity),
}
}


@ -103,9 +103,9 @@ use slice::{Items, MutItems};
#[unsafe_no_drop_flag]
#[stable]
pub struct Vec<T> {
ptr: *mut T,
len: uint,
cap: uint,
ptr: *mut T
}
impl<T> Vec<T> {
@ -125,7 +125,7 @@ impl<T> Vec<T> {
// non-null value which is fine since we never call deallocate on the ptr
// if cap is 0. The reason for this is because the pointer of a slice
// being NULL would break the null pointer optimization for enums.
Vec { len: 0, cap: 0, ptr: EMPTY as *mut T }
Vec { ptr: EMPTY as *mut T, len: 0, cap: 0 }
}
/// Constructs a new, empty `Vec` with the specified capacity.
@ -159,14 +159,14 @@ impl<T> Vec<T> {
#[stable]
pub fn with_capacity(capacity: uint) -> Vec<T> {
if mem::size_of::<T>() == 0 {
Vec { len: 0, cap: uint::MAX, ptr: EMPTY as *mut T }
Vec { ptr: EMPTY as *mut T, len: 0, cap: uint::MAX }
} else if capacity == 0 {
Vec::new()
} else {
let size = capacity.checked_mul(&mem::size_of::<T>())
.expect("capacity overflow");
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
Vec { len: 0, cap: capacity, ptr: ptr as *mut T }
Vec { ptr: ptr as *mut T, len: 0, cap: capacity }
}
}
@ -231,15 +231,15 @@ impl<T> Vec<T> {
/// }
///
/// // Put everything back together into a Vec
/// let rebuilt = Vec::from_raw_parts(len, cap, p);
/// let rebuilt = Vec::from_raw_parts(p, len, cap);
/// assert_eq!(rebuilt, vec![4i, 5i, 6i]);
/// }
/// }
/// ```
#[experimental]
pub unsafe fn from_raw_parts(length: uint, capacity: uint,
ptr: *mut T) -> Vec<T> {
Vec { len: length, cap: capacity, ptr: ptr }
pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
capacity: uint) -> Vec<T> {
Vec { ptr: ptr, len: length, cap: capacity }
}
/// Consumes the `Vec`, partitioning it based on a predicate.
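To make the reordered signature concrete, here is a hedged end-to-end sketch (an editor's illustration, not code from this commit) that tears a `Vec` apart and rebuilds it with the new `from_raw_parts(ptr, len, cap)` argument order:

```{rust}
use std::mem;

fn main() {
    let mut v = vec![1i, 2, 3];
    let p = v.as_mut_ptr();
    let len = v.len();
    let cap = v.capacity();

    unsafe {
        // Prevent `v` from freeing its buffer; we now manage it by hand.
        mem::forget(v);
        // Pointer first, then length, then capacity.
        let rebuilt = Vec::from_raw_parts(p, len, cap);
        assert_eq!(rebuilt, vec![1i, 2, 3]);
    }
}
```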
@ -1680,7 +1680,7 @@ impl<'a, T> Drop for DerefVec<'a, T> {
pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
unsafe {
DerefVec {
x: Vec::from_raw_parts(x.len(), x.len(), x.as_ptr() as *mut T),
x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
l: ContravariantLifetime::<'a>
}
}
@ -1929,7 +1929,7 @@ impl<T> Vec<T> {
let vec_cap = pv.vec.capacity();
let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
mem::forget(pv);
Vec::from_raw_parts(vec_len, vec_cap, vec_ptr)
Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
}
} else {
// Put the `Vec` into the `PartialVecZeroSized` structure and


@ -21,18 +21,21 @@
use mem;
/// The representation of a Rust slice
#[repr(C)]
pub struct Slice<T> {
pub data: *const T,
pub len: uint,
}
/// The representation of a Rust closure
#[repr(C)]
pub struct Closure {
pub code: *mut (),
pub env: *mut (),
}
/// The representation of a Rust procedure (`proc()`)
#[repr(C)]
pub struct Procedure {
pub code: *mut (),
pub env: *mut (),
@ -42,6 +45,7 @@ pub struct Procedure {
///
/// This struct does not have a `Repr` implementation
/// because there is no way to refer to all trait objects generically.
#[repr(C)]
pub struct TraitObject {
pub data: *mut (),
pub vtable: *mut (),
@ -60,4 +64,3 @@ pub trait Repr<T> {
impl<'a, T> Repr<Slice<T>> for &'a [T] {}
impl<'a> Repr<Slice<u8>> for &'a str {}


@ -173,7 +173,7 @@ impl<'a> SeedableRng<&'a [u32]> for ChaChaRng {
fn reseed(&mut self, seed: &'a [u32]) {
// reset state
self.init(&[0u32, ..KEY_WORDS]);
// set key inplace
// set key in place
let key = self.state.slice_mut(4, 4+KEY_WORDS);
for (k, s) in key.iter_mut().zip(seed.iter()) {
*k = *s;


@ -714,7 +714,13 @@ pub fn run_passes(sess: &Session,
.stdout(::std::io::process::InheritFd(1))
.stderr(::std::io::process::InheritFd(2));
match cmd.status() {
Ok(_) => {},
Ok(status) => {
if !status.success() {
sess.err(format!("linking of {} with `{}` failed",
output_path.display(), cmd).as_slice());
sess.abort_if_errors();
}
},
Err(e) => {
sess.err(format!("could not exec the linker `{}`: {}",
pname,


@ -36,6 +36,7 @@ register_diagnostics!(
E0015,
E0016,
E0017,
E0018,
E0019,
E0020,
E0022,


@ -777,13 +777,28 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
// Otherwise, just a plain error.
match assignee_cmt.note {
mc::NoteClosureEnv(upvar_id) => {
self.bccx.span_err(
assignment_span,
format!("cannot assign to {}",
self.bccx.cmt_to_string(&*assignee_cmt)).as_slice());
self.bccx.span_note(
self.tcx().map.span(upvar_id.closure_expr_id),
"consider changing this closure to take self by mutable reference");
// If this is an `Fn` closure, it simply can't mutate upvars.
// If it's an `FnMut` closure, the original variable was declared immutable.
// We need to determine which is the case here.
let kind = match assignee_cmt.upvar().unwrap().cat {
mc::cat_upvar(mc::Upvar { kind, .. }) => kind,
_ => unreachable!()
};
if kind == ty::FnUnboxedClosureKind {
self.bccx.span_err(
assignment_span,
format!("cannot assign to {}",
self.bccx.cmt_to_string(&*assignee_cmt)).as_slice());
self.bccx.span_note(
self.tcx().map.span(upvar_id.closure_expr_id),
"consider changing this closure to take self by mutable reference");
} else {
self.bccx.span_err(
assignment_span,
format!("cannot assign to {} {}",
assignee_cmt.mutbl.to_user_str(),
self.bccx.cmt_to_string(&*assignee_cmt)).as_slice());
}
}
_ => match opt_loan_path(&assignee_cmt) {
Some(lp) => {
@ -825,12 +840,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
mc::cat_rvalue(..) |
mc::cat_static_item |
mc::cat_deref(_, _, mc::UnsafePtr(..)) |
mc::cat_deref(_, _, mc::BorrowedPtr(..)) |
mc::cat_deref(_, _, mc::Implicit(..)) => {
assert_eq!(cmt.mutbl, mc::McDeclared);
return;
}
mc::cat_deref(_, _, mc::BorrowedPtr(..)) => {
assert_eq!(cmt.mutbl, mc::McDeclared);
// We need to drill down to upvar if applicable
match cmt.upvar() {
Some(b) => cmt = b,
None => return
}
}
mc::cat_deref(b, _, mc::OwnedPtr) => {
assert_eq!(cmt.mutbl, mc::McInherited);
cmt = b;


@ -625,7 +625,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
match err.code {
err_mutbl => {
let descr = match err.cmt.note {
mc::NoteClosureEnv(_) => {
mc::NoteClosureEnv(_) | mc::NoteUpvarRef(_) => {
self.cmt_to_string(&*err.cmt)
}
_ => match opt_loan_path(&err.cmt) {
@ -761,11 +761,20 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
match code {
err_mutbl(..) => {
match err.cmt.note {
mc::NoteClosureEnv(upvar_id) => {
self.tcx.sess.span_note(
self.tcx.map.span(upvar_id.closure_expr_id),
"consider changing this closure to take \
self by mutable reference");
mc::NoteClosureEnv(upvar_id) | mc::NoteUpvarRef(upvar_id) => {
// If this is an `Fn` closure, it simply can't mutate upvars.
// If it's an `FnMut` closure, the original variable was declared immutable.
// We need to determine which is the case here.
let kind = match err.cmt.upvar().unwrap().cat {
mc::cat_upvar(mc::Upvar { kind, .. }) => kind,
_ => unreachable!()
};
if kind == ty::FnUnboxedClosureKind {
self.tcx.sess.span_note(
self.tcx.map.span(upvar_id.closure_expr_id),
"consider changing this closure to take \
self by mutable reference");
}
}
_ => {}
}


@ -119,12 +119,18 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr) -> bool {
}
}
ExprLit(_) => (),
ExprCast(_, _) => {
let ety = ty::expr_ty(v.tcx, e);
if !ty::type_is_numeric(ety) && !ty::type_is_unsafe_ptr(ety) {
ExprCast(ref from, _) => {
let toty = ty::expr_ty(v.tcx, e);
let fromty = ty::expr_ty(v.tcx, &**from);
if !ty::type_is_numeric(toty) && !ty::type_is_unsafe_ptr(toty) {
span_err!(v.tcx.sess, e.span, E0012,
"can not cast to `{}` in a constant expression",
ppaux::ty_to_string(v.tcx, ety));
ppaux::ty_to_string(v.tcx, toty));
}
if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) {
span_err!(v.tcx.sess, e.span, E0018,
"can not cast a pointer to an integer in a constant \
expression");
}
}
ExprPath(ref pth) => {
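For orientation, a hedged sketch of the kind of code the new `E0018` check targets (an illustration, not a test from this commit): casting a raw pointer to an integer inside a constant expression.

```{rust,ignore}
// error: can not cast a pointer to an integer in a constant expression (E0018)
static PTR_BITS: uint = 0 as *const int as uint;
```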


@ -421,7 +421,7 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor,
node: FieldPat {
ident: Ident::new(field.name),
pat: pat,
is_shorthand: true,
is_shorthand: false,
}
}).collect();
let has_more_fields = field_pats.len() < pats_len;


@ -341,7 +341,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P<Pat> {
node: FieldPat {
ident: field.ident.node,
pat: const_expr_to_pat(tcx, &*field.expr),
is_shorthand: true,
is_shorthand: false,
},
}).collect();
PatStruct(path.clone(), field_pats, false)


@ -655,52 +655,55 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
// FnOnce | copied | upvar -> &'up bk
// old stack | N/A | upvar -> &'env mut -> &'up bk
// old proc/once | copied | N/A
let var_ty = if_ok!(self.node_ty(var_id));
let upvar_id = ty::UpvarId { var_id: var_id,
closure_expr_id: fn_node_id };
// Do we need to deref through an env reference?
let has_env_deref = kind != ty::FnOnceUnboxedClosureKind;
// Mutability of original variable itself
let var_mutbl = MutabilityCategory::from_local(self.tcx(), var_id);
// Mutability of environment dereference
let env_mutbl = match kind {
ty::FnOnceUnboxedClosureKind => var_mutbl,
ty::FnMutUnboxedClosureKind => McInherited,
ty::FnUnboxedClosureKind => McImmutable
// Construct information about env pointer dereference, if any
let mutbl = match kind {
ty::FnOnceUnboxedClosureKind => None, // None, env is by-value
ty::FnMutUnboxedClosureKind => match mode { // Depends on capture type
ast::CaptureByValue => Some(var_mutbl), // Mutable if the original var is
ast::CaptureByRef => Some(McDeclared) // Mutable regardless
},
ty::FnUnboxedClosureKind => Some(McImmutable) // Never mutable
};
let env_info = mutbl.map(|env_mutbl| {
// Look up the node ID of the closure body so we can construct
// a free region within it
let fn_body_id = {
let fn_expr = match self.tcx().map.find(fn_node_id) {
Some(ast_map::NodeExpr(e)) => e,
_ => unreachable!()
};
// Look up the node ID of the closure body so we can construct
// a free region within it
let fn_body_id = {
let fn_expr = match self.tcx().map.find(fn_node_id) {
Some(ast_map::NodeExpr(e)) => e,
_ => unreachable!()
match fn_expr.node {
ast::ExprFnBlock(_, _, ref body) |
ast::ExprProc(_, ref body) |
ast::ExprUnboxedFn(_, _, _, ref body) => body.id,
_ => unreachable!()
}
};
match fn_expr.node {
ast::ExprFnBlock(_, _, ref body) |
ast::ExprProc(_, ref body) |
ast::ExprUnboxedFn(_, _, _, ref body) => body.id,
_ => unreachable!()
}
};
// Region of environment pointer
let env_region = ty::ReFree(ty::FreeRegion {
scope_id: fn_body_id,
bound_region: ty::BrEnv
});
// Region of environment pointer
let env_region = ty::ReFree(ty::FreeRegion {
scope_id: fn_body_id,
bound_region: ty::BrEnv
let env_ptr = BorrowedPtr(if env_mutbl.is_mutable() {
ty::MutBorrow
} else {
ty::ImmBorrow
}, env_region);
(env_mutbl, env_ptr)
});
let env_ptr = BorrowedPtr(if env_mutbl.is_mutable() {
ty::MutBorrow
} else {
ty::ImmBorrow
}, env_region);
let var_ty = if_ok!(self.node_ty(var_id));
// First, switch by capture mode
Ok(match mode {
ast::CaptureByValue => {
@ -717,25 +720,27 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
note: NoteNone
};
if has_env_deref {
// We need to add the env deref. This means that
// the above is actually immutable and has a ref
// type. However, nothing should actually look at
// the type, so we can get away with stuffing a
// `ty_err` in there instead of bothering to
// construct a proper one.
base.mutbl = McImmutable;
base.ty = ty::mk_err();
Rc::new(cmt_ {
id: id,
span: span,
cat: cat_deref(Rc::new(base), 0, env_ptr),
mutbl: env_mutbl,
ty: var_ty,
note: NoteClosureEnv(upvar_id)
})
} else {
Rc::new(base)
match env_info {
Some((env_mutbl, env_ptr)) => {
// We need to add the env deref. This means
// that the above is actually immutable and
// has a ref type. However, nothing should
// actually look at the type, so we can get
// away with stuffing a `ty_err` in there
// instead of bothering to construct a proper
// one.
base.mutbl = McImmutable;
base.ty = ty::mk_err();
Rc::new(cmt_ {
id: id,
span: span,
cat: cat_deref(Rc::new(base), 0, env_ptr),
mutbl: env_mutbl,
ty: var_ty,
note: NoteClosureEnv(upvar_id)
})
}
None => Rc::new(base)
}
},
ast::CaptureByRef => {
@ -755,16 +760,18 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
note: NoteNone
};
// As in the by-value case, add env deref if needed
if has_env_deref {
base = cmt_ {
id: id,
span: span,
cat: cat_deref(Rc::new(base), 0, env_ptr),
mutbl: env_mutbl,
ty: ty::mk_err(),
note: NoteClosureEnv(upvar_id)
};
match env_info {
Some((env_mutbl, env_ptr)) => {
base = cmt_ {
id: id,
span: span,
cat: cat_deref(Rc::new(base), 0, env_ptr),
mutbl: env_mutbl,
ty: ty::mk_err(),
note: NoteClosureEnv(upvar_id)
};
}
None => {}
}
// Look up upvar borrow so we can get its region


@ -279,4 +279,132 @@ selection. This is because it must account for the transformed self
type of the receiver and various other complications. The procedure is
described in `select.rs` in the "METHOD MATCHING" section.
# Caching and subtle considerations therewith
In general we attempt to cache the results of trait selection. This
is a somewhat complex process. Part of the reason for this is that we
want to be able to cache results even when all the types in the trait
reference are not fully known. In that case, it may happen that the
trait selection process is also influencing type variables, so we have
to be able to not only cache the *result* of the selection process,
but *replay* its effects on the type variables.
## An example
The high-level idea of how the cache works is that we first replace
all unbound inference variables with skolemized versions. Therefore,
if we had a trait reference `uint : Foo<$1>`, where `$n` is an unbound
inference variable, we might replace it with `uint : Foo<%0>`, where
`%n` is a skolemized type. We would then look this up in the cache.
If we found a hit, the hit would tell us the immediate next step to
take in the selection process: i.e., apply impl #22, or apply where
clause `X : Foo<Y>`. Let's say in this case there is no hit.
Therefore, we search through impls and where clauses and so forth, and
we come to the conclusion that the only possible impl is this one,
with def-id 22:
impl Foo<int> for uint { ... } // Impl #22
We would then record in the cache `uint : Foo<%0> ==>
ImplCandidate(22)`. Next we would confirm `ImplCandidate(22)`, which
would (as a side-effect) unify `$1` with `int`.
Now, at some later time, we might come along and see a `uint :
Foo<$3>`. When skolemized, this would yield `uint : Foo<%0>`, just as
before, and hence the cache lookup would succeed, yielding
`ImplCandidate(22)`. We would confirm `ImplCandidate(22)` which would
(as a side-effect) unify `$3` with `int`.
## Where clauses and the local vs global cache
One subtle interaction is that the results of trait lookup will vary
depending on what where clauses are in scope. Therefore, we actually
have *two* caches, a local and a global cache. The local cache is
attached to the `ParameterEnvironment` and the global cache attached
to the `tcx`. We use the local cache whenever the result might depend
on the where clauses that are in scope. The determination of which
cache to use is done by the method `pick_candidate_cache` in
`select.rs`.
There are two cases where we currently use the local cache. The
current rules are probably more conservative than necessary.
### Trait references that involve parameter types
The most obvious case where you need the local environment is
when the trait reference includes parameter types. For example,
consider the following function:
impl<T> Vec<T> {
fn foo(x: T)
where T : Foo
{ ... }
fn bar(x: T)
{ ... }
}
If there is an obligation `T : Foo`, or `int : Bar<T>`, or whatever,
clearly the results from `foo` and `bar` are potentially different,
since the set of where clauses in scope are different.
### Trait references with unbound variables when where clauses are in scope
There is another less obvious interaction which involves unbound variables
where *only* where clauses are in scope (no impls). This manifested as
issue #18209 (`run-pass/trait-cache-issue-18209.rs`). Consider
this snippet:
```
pub trait Foo {
fn load_from() -> Box<Self>;
fn load() -> Box<Self> {
Foo::load_from()
}
}
```
The default method will incur an obligation `$0 : Foo` from the call
to `load_from`. If there are no impls, this can be eagerly resolved to
`VtableParam(Self : Foo)` and cached. Because the trait reference
doesn't involve any parameters types (only the resolution does), this
result was stored in the global cache, causing later calls to
`Foo::load_from()` to get nonsense.
To fix this, we always use the local cache if there are unbound
variables and where clauses in scope. This is more conservative than
necessary as far as I can tell. However, it still seems to be a simple
rule and I observe ~99% hit rate on rustc, so it doesn't seem to hurt
us in particular.
Here is an example of the kind of subtle case that I would be worried
about with a more complex rule (although this particular case works
out ok). Imagine the trait reference doesn't directly reference a
where clause, but the where clause plays a role in the winnowing
phase. Something like this:
```
pub trait Foo<T> { ... }
pub trait Bar { ... }
impl<U,T:Bar> Foo<U> for T { ... } // Impl A
impl Foo<char> for uint { ... } // Impl B
```
Now, in some function, we have no where clauses in scope, and we have
an obligation `$1 : Foo<$0>`. We might then conclude that `$0=char`
and `$1=uint`: this is because for impl A to apply, `uint:Bar` would
have to hold, and we know it does not or else the coherence check
would have failed. So we might enter into our global cache: `$1 :
Foo<$0> => Impl B`. Then we come along in a different scope, where a
generic type `A` is around with the bound `A:Bar`. Now suddenly the
impl is viable.
The flaw in this imaginary DOOMSDAY SCENARIO is that we would not
currently conclude that `$1 : Foo<$0>` implies that `$0 == uint` and
`$1 == char`, even though it is true that (absent type parameters)
there is no other type the user could enter. However, it is not
*completely* implausible that we *could* draw this conclusion in the
future; we wouldn't have to guess types, in particular, we could be
led by the impls.
*/


@ -211,7 +211,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// can be applied to particular types. It skips the "confirmation"
// step and hence completely ignores output type parameters.
//
// The result is "true" if the obliation *may* hold and "false" if
// The result is "true" if the obligation *may* hold and "false" if
// we can be sure it does not.
pub fn evaluate_obligation_intercrate(&mut self,
@ -844,19 +844,36 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
cache_skol_trait_ref: &Rc<ty::TraitRef>)
-> &SelectionCache
{
// High-level idea: we have to decide whether to consult the
// cache that is specific to this scope, or to consult the
// global cache. We want the cache that is specific to this
// scope whenever where clauses might affect the result.
// If the trait refers to any parameters in scope, then use
// the cache of the param-environment. This is because the
// result will depend on the where clauses that are in
// scope. Otherwise, use the generic tcx cache, since the
// result holds across all environments.
// the cache of the param-environment.
if
cache_skol_trait_ref.input_types().iter().any(
|&t| ty::type_has_self(t) || ty::type_has_params(t))
{
&self.param_env.selection_cache
} else {
&self.tcx().selection_cache
return &self.param_env.selection_cache;
}
// If the trait refers to unbound type variables, and there
// are where clauses in scope, then use the local environment.
// If there are no where clauses in scope, which is a very
// common case, then we can use the global environment.
// See the discussion in doc.rs for more details.
if
!self.param_env.caller_obligations.is_empty()
&&
cache_skol_trait_ref.input_types().iter().any(
|&t| ty::type_has_ty_infer(t))
{
return &self.param_env.selection_cache;
}
// Otherwise, we can use the global cache.
&self.tcx().selection_cache
}
fn check_candidate_cache(&mut self,
@ -1935,26 +1952,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
util::obligations_for_generics(self.tcx(), cause, recursion_depth,
&impl_generics, impl_substs)
}
fn contains_skolemized_types(&self,
ty: ty::t)
-> bool
{
/*!
* True if the type contains skolemized variables.
*/
let mut found_skol = false;
ty::walk_ty(ty, |t| {
match ty::get(t).sty {
ty::ty_infer(ty::SkolemizedTy(_)) => { found_skol = true; }
_ => { }
}
});
found_skol
}
}
impl Repr for Candidate {


@ -940,6 +940,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
controlflow::trans_loop(bcx, expr.id, &**body)
}
ast::ExprAssign(ref dst, ref src) => {
let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &**dst, "assign"));
if ty::type_needs_drop(bcx.tcx(), dst_datum.ty) {
@ -960,7 +961,6 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// We could avoid this intermediary with some analysis
// to determine whether `dst` may possibly own `src`.
debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
let src_datum = unpack_datum!(
bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign"));
bcx = glue::drop_ty(bcx,
@ -969,7 +969,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
Some(NodeInfo { id: expr.id, span: expr.span }));
src_datum.store_to(bcx, dst_datum.val)
} else {
trans_into(bcx, &**src, SaveIn(dst_datum.to_llref()))
src_datum.store_to(bcx, dst_datum.val)
}
}
ast::ExprAssignOp(op, ref dst, ref src) => {
@ -2117,7 +2117,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
deref_owned_pointer(bcx, expr, datum, content_ty)
} else {
// A fat pointer and an opened DST value have the same
// represenation just different types. Since there is no
// representation just different types. Since there is no
// temporary for `*e` here (because it is unsized), we cannot
// emulate the sized object code path for running drop glue and
// free. Instead, we schedule cleanup for `e`, turning it into
@ -2142,7 +2142,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// owner (or, in the case of *T, by the user).
DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
} else {
// A fat pointer and an opened DST value have the same represenation
// A fat pointer and an opened DST value have the same representation
// just different types.
DatumBlock::new(bcx, Datum::new(datum.val,
ty::mk_open(bcx.tcx(), content_ty),


@ -585,17 +585,22 @@ pub struct ctxt<'tcx> {
pub repr_hint_cache: RefCell<DefIdMap<Rc<Vec<attr::ReprAttr>>>>,
}
pub enum tbox_flag {
has_params = 1,
has_self = 2,
needs_infer = 4,
has_regions = 8,
has_ty_err = 16,
has_ty_bot = 32,
// a meta-pub flag: subst may be required if the type has parameters, a self
// type, or references bound regions
needs_subst = 1 | 2 | 8
// Flags that we track on types. These flags are propagated upwards
// through the type during type construction, so that we can quickly
// check whether the type has various kinds of types in it without
// recursing over the type itself.
bitflags! {
flags TypeFlags: u32 {
const NO_TYPE_FLAGS = 0b0,
const HAS_PARAMS = 0b1,
const HAS_SELF = 0b10,
const HAS_TY_INFER = 0b100,
const HAS_RE_INFER = 0b1000,
const HAS_REGIONS = 0b10000,
const HAS_TY_ERR = 0b100000,
const HAS_TY_BOT = 0b1000000,
const NEEDS_SUBST = HAS_PARAMS.bits | HAS_SELF.bits | HAS_REGIONS.bits,
}
}
pub type t_box = &'static t_box_;
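The shape of the change, in miniature (a sketch using plain `u32` constants rather than the `bitflags!`-generated type, since the point is only the bit arithmetic): flags are ORed together as a type is constructed and then tested with a cheap mask, which is what `intersects` does for `TypeFlags`.

```{rust}
const HAS_PARAMS: u32  = 0b1;
const HAS_SELF: u32    = 0b10;
const HAS_REGIONS: u32 = 0b10000;
const HAS_TY_ERR: u32  = 0b100000;
const NEEDS_SUBST: u32 = HAS_PARAMS | HAS_SELF | HAS_REGIONS;

fn main() {
    // Built up while constructing a type, like `flags = flags | ...` above.
    let flags = HAS_PARAMS | HAS_REGIONS;
    // The equivalent of `TypeFlags::intersects`: is any shared bit set?
    assert!((flags & NEEDS_SUBST) != 0);
    assert!((flags & HAS_TY_ERR) == 0);
}
```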
@ -604,7 +609,13 @@ pub type t_box = &'static t_box_;
pub struct t_box_ {
pub sty: sty,
pub id: uint,
pub flags: uint,
pub flags: TypeFlags,
}
impl fmt::Show for TypeFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.bits)
}
}
// To reduce refcounting cost, we're representing types as unsafe pointers
@ -631,15 +642,16 @@ pub fn get(t: t) -> t_box {
}
}
pub fn tbox_has_flag(tb: t_box, flag: tbox_flag) -> bool {
(tb.flags & (flag as uint)) != 0u
fn tbox_has_flag(tb: t_box, flag: TypeFlags) -> bool {
tb.flags.intersects(flag)
}
pub fn type_has_params(t: t) -> bool {
tbox_has_flag(get(t), has_params)
tbox_has_flag(get(t), HAS_PARAMS)
}
pub fn type_has_self(t: t) -> bool { tbox_has_flag(get(t), has_self) }
pub fn type_has_self(t: t) -> bool { tbox_has_flag(get(t), HAS_SELF) }
pub fn type_has_ty_infer(t: t) -> bool { tbox_has_flag(get(t), HAS_TY_INFER) }
pub fn type_needs_infer(t: t) -> bool {
tbox_has_flag(get(t), needs_infer)
tbox_has_flag(get(t), HAS_TY_INFER | HAS_RE_INFER)
}
pub fn type_id(t: t) -> uint { get(t).id }
@ -886,7 +898,7 @@ mod primitives {
pub static $name: t_box_ = t_box_ {
sty: $sty,
id: $id,
flags: 0,
flags: super::NO_TYPE_FLAGS,
};
)
)
@ -910,13 +922,13 @@ mod primitives {
pub static TY_BOT: t_box_ = t_box_ {
sty: super::ty_bot,
id: 16,
flags: super::has_ty_bot as uint,
flags: super::HAS_TY_BOT,
};
pub static TY_ERR: t_box_ = t_box_ {
sty: super::ty_err,
id: 17,
flags: super::has_ty_err as uint,
flags: super::HAS_TY_ERR,
};
pub const LAST_PRIMITIVE_ID: uint = 18;
@ -1577,32 +1589,32 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t {
_ => ()
}
let mut flags = 0u;
fn rflags(r: Region) -> uint {
(has_regions as uint) | {
let mut flags = NO_TYPE_FLAGS;
fn rflags(r: Region) -> TypeFlags {
HAS_REGIONS | {
match r {
ty::ReInfer(_) => needs_infer as uint,
_ => 0u
ty::ReInfer(_) => HAS_RE_INFER,
_ => NO_TYPE_FLAGS,
}
}
}
fn sflags(substs: &Substs) -> uint {
let mut f = 0u;
fn sflags(substs: &Substs) -> TypeFlags {
let mut f = NO_TYPE_FLAGS;
let mut i = substs.types.iter();
for tt in i {
f |= get(*tt).flags;
f = f | get(*tt).flags;
}
match substs.regions {
subst::ErasedRegions => {}
subst::NonerasedRegions(ref regions) => {
for r in regions.iter() {
f |= rflags(*r)
f = f | rflags(*r)
}
}
}
return f;
}
fn flags_for_bounds(bounds: &ExistentialBounds) -> uint {
fn flags_for_bounds(bounds: &ExistentialBounds) -> TypeFlags {
rflags(bounds.region_bound)
}
match &st {
@ -1610,58 +1622,58 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t {
&ty_str => {}
// You might think that we could just return ty_err for
// any type containing ty_err as a component, and get
// rid of the has_ty_err flag -- likewise for ty_bot (with
// rid of the HAS_TY_ERR flag -- likewise for ty_bot (with
// the exception of function types that return bot).
// But doing so caused sporadic memory corruption, and
// neither I (tjc) nor nmatsakis could figure out why,
// so we're doing it this way.
&ty_bot => flags |= has_ty_bot as uint,
&ty_err => flags |= has_ty_err as uint,
&ty_bot => flags = flags | HAS_TY_BOT,
&ty_err => flags = flags | HAS_TY_ERR,
&ty_param(ref p) => {
if p.space == subst::SelfSpace {
flags |= has_self as uint;
flags = flags | HAS_SELF;
} else {
flags |= has_params as uint;
flags = flags | HAS_PARAMS;
}
}
&ty_unboxed_closure(_, ref region) => flags |= rflags(*region),
&ty_infer(_) => flags |= needs_infer as uint,
&ty_unboxed_closure(_, ref region) => flags = flags | rflags(*region),
&ty_infer(_) => flags = flags | HAS_TY_INFER,
&ty_enum(_, ref substs) | &ty_struct(_, ref substs) => {
flags |= sflags(substs);
flags = flags | sflags(substs);
}
&ty_trait(box TyTrait { ref substs, ref bounds, .. }) => {
flags |= sflags(substs);
flags |= flags_for_bounds(bounds);
flags = flags | sflags(substs);
flags = flags | flags_for_bounds(bounds);
}
&ty_uniq(tt) | &ty_vec(tt, _) | &ty_open(tt) => {
flags |= get(tt).flags
flags = flags | get(tt).flags
}
&ty_ptr(ref m) => {
flags |= get(m.ty).flags;
flags = flags | get(m.ty).flags;
}
&ty_rptr(r, ref m) => {
flags |= rflags(r);
flags |= get(m.ty).flags;
flags = flags | rflags(r);
flags = flags | get(m.ty).flags;
}
&ty_tup(ref ts) => for tt in ts.iter() { flags |= get(*tt).flags; },
&ty_tup(ref ts) => for tt in ts.iter() { flags = flags | get(*tt).flags; },
&ty_bare_fn(ref f) => {
for a in f.sig.inputs.iter() { flags |= get(*a).flags; }
flags |= get(f.sig.output).flags;
for a in f.sig.inputs.iter() { flags = flags | get(*a).flags; }
flags = flags | get(f.sig.output).flags;
// T -> _|_ is *not* _|_ !
flags &= !(has_ty_bot as uint);
flags = flags - HAS_TY_BOT;
}
&ty_closure(ref f) => {
match f.store {
RegionTraitStore(r, _) => {
flags |= rflags(r);
flags = flags | rflags(r);
}
_ => {}
}
for a in f.sig.inputs.iter() { flags |= get(*a).flags; }
flags |= get(f.sig.output).flags;
for a in f.sig.inputs.iter() { flags = flags | get(*a).flags; }
flags = flags | get(f.sig.output).flags;
// T -> _|_ is *not* _|_ !
flags &= !(has_ty_bot as uint);
flags |= flags_for_bounds(&f.bounds);
flags = flags - HAS_TY_BOT;
flags = flags | flags_for_bounds(&f.bounds);
}
}
@ -1976,18 +1988,20 @@ impl ItemSubsts {
// Type utilities
pub fn type_is_nil(ty: t) -> bool { get(ty).sty == ty_nil }
pub fn type_is_nil(ty: t) -> bool {
get(ty).sty == ty_nil
}
pub fn type_is_bot(ty: t) -> bool {
(get(ty).flags & (has_ty_bot as uint)) != 0
get(ty).flags.intersects(HAS_TY_BOT)
}
pub fn type_is_error(ty: t) -> bool {
(get(ty).flags & (has_ty_err as uint)) != 0
get(ty).flags.intersects(HAS_TY_ERR)
}
pub fn type_needs_subst(ty: t) -> bool {
tbox_has_flag(get(ty), needs_subst)
tbox_has_flag(get(ty), NEEDS_SUBST)
}
pub fn trait_ref_contains_error(tref: &ty::TraitRef) -> bool {
@ -3605,7 +3619,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
// Special case: A unit like struct's constructor must be called without () at the
// end (like `UnitStruct`) which means this is an ExprPath to a DefFn. But in case
// of unit structs this is should not be interpretet as function pointer but as
// of unit structs this should not be interpreted as a function pointer but as a
// call to the constructor.
def::DefFn(_, _, true) => RvalueDpsExpr,
@ -5409,7 +5423,7 @@ impl BorrowKind {
MutBorrow => ast::MutMutable,
ImmBorrow => ast::MutImmutable,
// We have no type correponding to a unique imm borrow, so
// We have no type corresponding to a unique imm borrow, so
// use `&mut`. It gives all the capabilities of an `&uniq`
// and hence is a safe "over approximation".
UniqueImmBorrow => ast::MutMutable,


@ -74,7 +74,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
let const_did = tcx.def_map.borrow().get_copy(&pat.id).def_id();
let const_pty = ty::lookup_item_type(tcx, const_did);
fcx.write_ty(pat.id, const_pty.ty);
demand::eqtype(fcx, pat.span, expected, const_pty.ty);
demand::suptype(fcx, pat.span, expected, const_pty.ty);
}
ast::PatIdent(bm, ref path, ref sub) if pat_is_binding(&tcx.def_map, pat) => {
let typ = fcx.local_ty(pat.span, pat.id);


@ -88,7 +88,7 @@ use middle::ty;
use middle::typeck::astconv::AstConv;
use middle::typeck::check::{FnCtxt, NoPreference, PreferMutLvalue};
use middle::typeck::check::{impl_self_ty};
use middle::typeck::check::vtable2::select_fcx_obligations_where_possible;
use middle::typeck::check::vtable::select_fcx_obligations_where_possible;
use middle::typeck::check;
use middle::typeck::infer;
use middle::typeck::{MethodCall, MethodCallee};
@ -223,17 +223,37 @@ pub fn report_error(fcx: &FnCtxt,
{
match error {
NoMatch(static_sources) => {
let cx = fcx.tcx();
let method_ustring = method_name.user_string(cx);
// True if the type is a struct and contains a field with
// the same name as the not-found method
let is_field = match ty::get(rcvr_ty).sty {
ty_struct(did, _) =>
ty::lookup_struct_fields(cx, did)
.iter()
.any(|f| f.name.user_string(cx) == method_ustring),
_ => false
};
fcx.type_error_message(
span,
|actual| {
format!("type `{}` does not implement any \
method in scope named `{}`",
actual,
method_name.user_string(fcx.tcx()))
method_ustring)
},
rcvr_ty,
None);
// If the method has the name of a field, give a help note
if is_field {
cx.sess.span_note(span,
format!("use `(s.{0})(...)` if you meant to call the \
function stored in the `{0}` field", method_ustring).as_slice());
}
if static_sources.len() > 0 {
fcx.tcx().sess.fileline_note(
span,
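The new `is_field` branch above makes the "does not implement any method in scope" error notice a struct field with the same name and suggest the `(s.field)(...)` call form. A small sketch of the situation that note targets, with a hypothetical `Obj` type holding a boxed closure (modern syntax, not the rustc-internal types):

struct Obj {
    closure: Box<dyn Fn() -> u32>, // a field, not a method
}

fn main() {
    let o = Obj { closure: Box::new(|| 42) };
    // `o.closure()` would search for a *method* named `closure` and hit the
    // diagnostic above; parenthesizing reads the field first, then calls it.
    let n = (o.closure)();
    assert_eq!(n, 42);
}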


@ -139,7 +139,7 @@ use syntax::visit::Visitor;
use syntax;
pub mod _match;
pub mod vtable2; // New trait code
pub mod vtable;
pub mod writeback;
pub mod regionmanip;
pub mod regionck;
@ -409,7 +409,7 @@ fn check_bare_fn(ccx: &CrateCtxt,
let fcx = check_fn(ccx, fn_ty.fn_style, id, &fn_ty.sig,
decl, id, body, &inh);
vtable2::select_all_fcx_obligations_or_error(&fcx);
vtable::select_all_fcx_obligations_or_error(&fcx);
regionck::regionck_fn(&fcx, id, body);
writeback::resolve_type_vars_in_fn(&fcx, decl, body);
}
@ -1372,7 +1372,7 @@ fn check_cast(fcx: &FnCtxt,
if ty::type_is_trait(t_1) {
// This will be looked up later on.
vtable2::check_object_cast(fcx, cast_expr, e, t_1);
vtable::check_object_cast(fcx, cast_expr, e, t_1);
fcx.write_ty(id, t_1);
return
}
@ -1677,7 +1677,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ty::UnsizeVtable(ref ty_trait, self_ty) => {
// If the type is `Foo+'a`, ensures that the type
// being cast to `Foo+'a` implements `Foo`:
vtable2::register_object_cast_obligations(self,
vtable::register_object_cast_obligations(self,
span,
ty_trait,
self_ty);
@ -2564,7 +2564,7 @@ fn check_argument_types<'a>(fcx: &FnCtxt,
// an "opportunistic" vtable resolution of any trait
// bounds on the call.
if check_blocks {
vtable2::select_fcx_obligations_where_possible(fcx);
vtable::select_fcx_obligations_where_possible(fcx);
}
// For variadic functions, we don't have a declared type for all of
@ -4036,7 +4036,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
ast::ExprForLoop(ref pat, ref head, ref block, _) => {
check_expr(fcx, &**head);
let typ = lookup_method_for_for_loop(fcx, &**head, expr.id);
vtable2::select_fcx_obligations_where_possible(fcx);
vtable::select_fcx_obligations_where_possible(fcx);
let pcx = pat_ctxt {
fcx: fcx,
@ -4743,7 +4743,7 @@ pub fn check_const_with_ty(fcx: &FnCtxt,
check_expr_with_hint(fcx, e, declty);
demand::coerce(fcx, e.span, declty, e);
vtable2::select_all_fcx_obligations_or_error(fcx);
vtable::select_all_fcx_obligations_or_error(fcx);
regionck::regionck_expr(fcx, e);
writeback::resolve_type_vars_in_expr(fcx, e);
}


@ -126,7 +126,7 @@ use middle::ty;
use middle::typeck::astconv::AstConv;
use middle::typeck::check::FnCtxt;
use middle::typeck::check::regionmanip;
use middle::typeck::check::vtable2;
use middle::typeck::check::vtable;
use middle::typeck::infer::resolve_and_force_all_but_regions;
use middle::typeck::infer::resolve_type;
use middle::typeck::infer;
@ -172,7 +172,7 @@ pub fn regionck_fn(fcx: &FnCtxt, id: ast::NodeId, blk: &ast::Block) {
// Region checking a fn can introduce new trait obligations,
// particularly around closure bounds.
vtable2::select_all_fcx_obligations_or_error(fcx);
vtable::select_all_fcx_obligations_or_error(fcx);
fcx.infcx().resolve_regions_and_report_errors();
}
@ -1674,7 +1674,7 @@ fn link_reborrowed_region(rcx: &Rcx,
//
// If mutability was inferred from an upvar, we may be
// forced to revisit this decision later if processing
// another borrow or nested closure ends up coverting the
// another borrow or nested closure ends up converting the
// upvar borrow kind to mutable/unique. Record the
// information needed to perform the recursive link in the
// maybe link map.


@ -14,7 +14,7 @@ use middle::traits;
use middle::ty;
use middle::ty_fold::{TypeFolder, TypeFoldable};
use middle::typeck::astconv::AstConv;
use middle::typeck::check::{FnCtxt, Inherited, blank_fn_ctxt, vtable2, regionck};
use middle::typeck::check::{FnCtxt, Inherited, blank_fn_ctxt, vtable, regionck};
use middle::typeck::check::regionmanip::replace_late_bound_regions;
use middle::typeck::CrateCtxt;
use util::ppaux::Repr;
@ -100,7 +100,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
let inh = Inherited::new(ccx.tcx, param_env);
let fcx = blank_fn_ctxt(ccx, &inh, polytype.ty, item.id);
f(self, &fcx);
vtable2::select_all_fcx_obligations_or_error(&fcx);
vtable::select_all_fcx_obligations_or_error(&fcx);
regionck::regionck_item(&fcx, item);
}


@ -17,6 +17,7 @@ Core encoding and decoding interfaces.
use std::path;
use std::rc::Rc;
use std::cell::{Cell, RefCell};
use std::sync::Arc;
pub trait Encoder<E> {
// Primitive types:
@ -556,6 +557,18 @@ impl<E, D: Decoder<E>, T: Decodable<D, E>> Decodable<D, E> for RefCell<T> {
}
}
impl<E, S:Encoder<E>, T:Encodable<S, E>+Send+Sync> Encodable<S, E> for Arc<T> {
fn encode(&self, s: &mut S) -> Result<(), E> {
(**self).encode(s)
}
}
impl<E, D:Decoder<E>,T:Decodable<D, E>+Send+Sync> Decodable<D, E> for Arc<T> {
fn decode(d: &mut D) -> Result<Arc<T>, E> {
Ok(Arc::new(try!(Decodable::decode(d))))
}
}
// ___________________________________________________________________________
// Helper routines
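The `Arc<T>` impls above simply delegate to the pointed-to value: encoding goes through `(**self)`, and decoding rebuilds a fresh `Arc`. A toy sketch of that delegation shape, with made-up `Encode`/`Decode` traits standing in for the real `Encodable`/`Decodable`:

use std::sync::Arc;

trait Encode { fn encode(&self) -> String; }
trait Decode: Sized { fn decode(s: &str) -> Option<Self>; }

impl Encode for u32 { fn encode(&self) -> String { self.to_string() } }
impl Decode for u32 { fn decode(s: &str) -> Option<Self> { s.parse().ok() } }

// An Arc encodes exactly like its contents...
impl<T: Encode> Encode for Arc<T> {
    fn encode(&self) -> String { (**self).encode() }
}
// ...and decoding allocates a new Arc around the decoded value.
impl<T: Decode> Decode for Arc<T> {
    fn decode(s: &str) -> Option<Self> { T::decode(s).map(Arc::new) }
}

fn main() {
    let a = Arc::new(7u32);
    let b = <Arc<u32> as Decode>::decode(&a.encode()).unwrap();
    assert_eq!(*b, 7);
}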


@ -247,8 +247,7 @@ impl OwnedAsciiCast for String {
#[inline]
unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> {
let v: Vec<u8> = mem::transmute(self);
v.into_ascii_nocheck()
self.into_bytes().into_ascii_nocheck()
}
}
@ -260,7 +259,14 @@ impl OwnedAsciiCast for Vec<u8> {
#[inline]
unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> {
mem::transmute(self)
let v = Vec::from_raw_parts(self.as_ptr() as *mut Ascii,
self.len(),
self.capacity());
// We forget `self` to avoid freeing it at the end of the scope
// Otherwise, the returned `Vec` would point to freed memory
mem::forget(self);
v
}
}
@ -338,7 +344,16 @@ pub trait IntoBytes {
impl IntoBytes for Vec<Ascii> {
fn into_bytes(self) -> Vec<u8> {
unsafe { mem::transmute(self) }
unsafe {
let v = Vec::from_raw_parts(self.as_ptr() as *mut u8,
self.len(),
self.capacity());
// We forget `self` to avoid freeing it at the end of the scope
// Otherwise, the returned `Vec` would point to freed memory
mem::forget(self);
v
}
}
}
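Both hunks above replace a `transmute` of the whole `Vec` with an explicit `Vec::from_raw_parts` over the same buffer, plus a `mem::forget(self)` so the original vector never frees that buffer. A standalone sketch of the reinterpret-in-place pattern, assuming a `#[repr(transparent)]` byte wrapper so the element layout really is identical (modern Rust, with `ManuallyDrop` playing the role of `mem::forget`):

use std::mem::ManuallyDrop;

#[repr(transparent)]
#[derive(Debug, PartialEq)]
struct Ascii(u8);

fn into_ascii_unchecked(v: Vec<u8>) -> Vec<Ascii> {
    // Keep the original Vec from dropping its buffer (the diff's `mem::forget(self)`),
    // then rebuild a Vec over the same pointer/len/capacity with the new element type.
    let mut v = ManuallyDrop::new(v);
    unsafe { Vec::from_raw_parts(v.as_mut_ptr() as *mut Ascii, v.len(), v.capacity()) }
}

fn main() {
    let ascii = into_ascii_unchecked(vec![72u8, 105u8]);
    assert_eq!(ascii, [Ascii(72), Ascii(105)]);
}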


@ -186,7 +186,7 @@ impl<T: Eq + Hash<S>, S, H: Hasher<S>> HashSet<T, H> {
/// # Example
///
/// This is a slightly silly example where we define the number's
/// parity as the equivilance class. It is important that the
/// parity as the equivalence class. It is important that the
/// values hash the same, which is why we implement `Hash`.
///
/// ```


@ -22,7 +22,7 @@ use slice;
use slice::AsSlice;
use vec::Vec;
static BUF_CAPACITY: uint = 128;
const BUF_CAPACITY: uint = 128;
fn combine(seek: SeekStyle, cur: uint, end: uint, offset: i64) -> IoResult<u64> {
// compute offset as signed and clamp to prevent overflow
@ -71,7 +71,12 @@ impl MemWriter {
/// the internal buffer.
#[inline]
pub fn with_capacity(n: uint) -> MemWriter {
MemWriter { buf: Vec::with_capacity(n) }
MemWriter::from_vec(Vec::with_capacity(n))
}
/// Create a new `MemWriter` that will append to an existing `Vec`.
#[inline]
pub fn from_vec(buf: Vec<u8>) -> MemWriter {
MemWriter { buf: buf }
}
/// Acquires an immutable reference to the underlying buffer of this


@ -24,6 +24,9 @@ use std::fmt::Show;
use std::rc::Rc;
use serialize::{Encodable, Decodable, Encoder, Decoder};
#[cfg(stage0)]
pub use self::TtToken as TTTok;
// FIXME #6993: in librustc, uses of "ident" should be replaced
// by just "Name".
@ -436,7 +439,7 @@ pub enum Stmt_ {
/// Expr with trailing semi-colon (may have any type):
StmtSemi(P<Expr>, NodeId),
/// bool: is there a trailing sem-colon?
/// bool: is there a trailing semi-colon?
StmtMac(Mac, bool),
}
@ -592,6 +595,28 @@ pub enum CaptureClause {
CaptureByRef,
}
/// A token that delimits a sequence of token trees
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Delimiter {
pub span: Span,
pub token: ::parse::token::Token,
}
impl Delimiter {
/// Convert the delimiter to a `TtToken`
pub fn to_tt(&self) -> TokenTree {
TtToken(self.span, self.token.clone())
}
}
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub enum KleeneOp {
ZeroOrMore,
OneOrMore,
}
/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can
@ -600,9 +625,9 @@ pub enum CaptureClause {
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS "matchers" against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
/// macro_parser::matched_nonterminals into the TTNonterminals it finds.
/// `macro_parser::matched_nonterminals` into the `TtNonterminal`s it finds.
///
/// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq
/// The RHS of an MBE macro is the only place a `TtNonterminal` or `TtSequence`
/// makes any real sense. You could write them elsewhere but nothing
/// else knows what to do with them, so you'll probably get a syntax
/// error.
@ -610,22 +635,29 @@ pub enum CaptureClause {
#[doc="For macro invocations; parsing is delegated to the macro"]
pub enum TokenTree {
/// A single token
TTTok(Span, ::parse::token::Token),
/// A delimited sequence (the delimiters appear as the first
/// and last elements of the vector)
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
TTDelim(Rc<Vec<TokenTree>>),
TtToken(Span, ::parse::token::Token),
/// A delimited sequence of token trees
TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
// These only make sense for right-hand-sides of MBE macros:
/// A kleene-style repetition sequence with a span, a TTForest,
/// an optional separator, and a boolean where true indicates
/// zero or more (..), and false indicates one or more (+).
/// A Kleene-style repetition sequence with an optional separator.
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
TTSeq(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),
TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp),
/// A syntactic variable that will be filled in by macro expansion.
TTNonterminal(Span, Ident)
TtNonterminal(Span, Ident)
}
impl TokenTree {
/// Returns the `Span` corresponding to this token tree.
pub fn get_span(&self) -> Span {
match *self {
TtToken(span, _) => span,
TtDelimited(span, _) => span,
TtSequence(span, _, _, _) => span,
TtNonterminal(span, _) => span,
}
}
}
// Matchers are nodes defined-by and recognized-by the main rust parser and
@ -684,9 +716,9 @@ pub type Matcher = Spanned<Matcher_>;
pub enum Matcher_ {
/// Match one token
MatchTok(::parse::token::Token),
/// Match repetitions of a sequence: body, separator, zero ok?,
/// Match repetitions of a sequence: body, separator, Kleene operator,
/// lo, hi position-in-match-array used:
MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, bool, uint, uint),
MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, KleeneOp, uint, uint),
/// Parse a Rust NT: name to bind, name of NT, position in match array:
MatchNonterminal(Ident, Ident, uint)
}
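The `ast.rs` hunks above replace `TTDelim`'s "delimiters live inside the vector" encoding with `TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>)`, add a `KleeneOp` enum, and give every variant a leading `Span` so `get_span` is a plain match. A simplified, self-contained sketch of that shape (the `Span` and `Token` stand-ins are illustrative, not the real syntax-crate types):

use std::rc::Rc;

type Span = (u32, u32);

#[derive(Clone)]
enum Token { Ident(String), LParen, RParen }

#[derive(Clone, Copy, PartialEq)]
enum KleeneOp { ZeroOrMore, OneOrMore }

#[derive(Clone)]
struct Delimiter { span: Span, token: Token }

#[derive(Clone)]
enum TokenTree {
    TtToken(Span, Token),
    // One span for the whole group, plus explicit open and close delimiters.
    TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
    TtSequence(Span, Rc<Vec<TokenTree>>, Option<Token>, KleeneOp),
    TtNonterminal(Span, String),
}

impl TokenTree {
    // Every variant now starts with its span, so this needs no recursion.
    fn get_span(&self) -> Span {
        match *self {
            TokenTree::TtToken(span, _) => span,
            TokenTree::TtDelimited(span, _) => span,
            TokenTree::TtSequence(span, _, _, _) => span,
            TokenTree::TtNonterminal(span, _) => span,
        }
    }
}

fn main() {
    let open = Delimiter { span: (0, 1), token: Token::LParen };
    let close = Delimiter { span: (3, 4), token: Token::RParen };
    let inner = vec![TokenTree::TtToken((1, 3), Token::Ident("x".into()))];
    let group = TokenTree::TtDelimited((0, 4), Rc::new((open, inner, close)));
    assert_eq!(group.get_span(), (0, 4));
}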


@ -250,30 +250,18 @@ fn impl_item_in_cfg(cx: &mut Context, impl_item: &ast::ImplItem) -> bool {
// Determine if an item should be translated in the current crate
// configuration based on the item's attributes
fn in_cfg(diagnostic: &SpanHandler, cfg: &[P<ast::MetaItem>], attrs: &[ast::Attribute]) -> bool {
let mut in_cfg = false;
let mut seen_cfg = false;
for attr in attrs.iter() {
attrs.iter().all(|attr| {
let mis = match attr.node.value.node {
ast::MetaList(_, ref mis) if attr.check_name("cfg") => mis,
_ => continue
_ => return true
};
if mis.len() != 1 {
diagnostic.span_err(attr.span, "expected 1 cfg-pattern");
return false;
return true;
}
if seen_cfg {
diagnostic.span_err(attr.span, "The semantics of multiple `#[cfg(..)]` attributes on \
same item are changing from the union of the cfgs to \
the intersection of the cfgs. Change `#[cfg(a)] \
#[cfg(b)]` to `#[cfg(any(a, b))]`.");
return false;
}
seen_cfg = true;
in_cfg |= attr::cfg_matches(diagnostic, cfg, &*mis[0]);
}
in_cfg | !seen_cfg
attr::cfg_matches(diagnostic, cfg, &*mis[0])
})
}
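The rewrite above folds the old `seen_cfg`/`in_cfg` bookkeeping into a single `attrs.iter().all(..)`, which also changes the semantics from the union of the cfgs to their intersection. A toy model of the new rule, with attributes and the active configuration as plain strings rather than `MetaItem`s:

fn in_cfg(active: &[&str], cfg_attrs: &[&str]) -> bool {
    // `all` is vacuously true for an empty iterator, so an item with no
    // #[cfg(..)] attributes is always kept - the old `!seen_cfg` case.
    cfg_attrs.iter().all(|attr| active.contains(attr))
}

fn main() {
    let active = ["unix", "feature_x"];
    assert!(in_cfg(&active, &[]));                   // no cfg attributes: keep
    assert!(in_cfg(&active, &["unix"]));             // matches: keep
    assert!(!in_cfg(&active, &["unix", "windows"])); // one attribute fails: drop
}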


@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match token_tree {
[ast::TTTok(_, token::IDENT(code, _))] => code,
[ast::TtToken(_, token::IDENT(code, _))] => code,
_ => unreachable!()
};
with_registered_diagnostics(|diagnostics| {
@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let (code, description) = match token_tree {
[ast::TTTok(_, token::IDENT(ref code, _))] => {
[ast::TtToken(_, token::IDENT(ref code, _))] => {
(code, None)
},
[ast::TTTok(_, token::IDENT(ref code, _)),
ast::TTTok(_, token::COMMA),
ast::TTTok(_, token::LIT_STR_RAW(description, _))] => {
[ast::TtToken(_, token::IDENT(ref code, _)),
ast::TtToken(_, token::COMMA),
ast::TtToken(_, token::LIT_STR_RAW(description, _))] => {
(code, Some(description))
}
_ => unreachable!()
@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let name = match token_tree {
[ast::TTTok(_, token::IDENT(ref name, _))] => name,
[ast::TtToken(_, token::IDENT(ref name, _))] => name,
_ => unreachable!()
};


@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
} else {
match tts[0] {
ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
ast::TtToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
ast::TtToken(_, token::LIT_STR_RAW(ident, _)) => {
return Some(parse::raw_str_lit(ident.as_str()))
}
_ => {


@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
for (i, e) in tts.iter().enumerate() {
if i & 1 == 1 {
match *e {
ast::TTTok(_, token::COMMA) => (),
ast::TtToken(_, token::COMMA) => (),
_ => {
cx.span_err(sp, "concat_idents! expecting comma.");
return DummyResult::expr(sp);
@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
}
} else {
match *e {
ast::TTTok(_, token::IDENT(ident,_)) => {
ast::TtToken(_, token::IDENT(ident,_)) => {
res_str.push_str(token::get_ident(ident).get())
}
_ => {


@ -1250,7 +1250,7 @@ impl<'a> TraitDef<'a> {
// id is guaranteed to be Some
codemap::Spanned {
span: pat.span,
node: ast::FieldPat { ident: id.unwrap(), pat: pat, is_shorthand: true },
node: ast::FieldPat { ident: id.unwrap(), pat: pat, is_shorthand: false },
}
}).collect();
cx.pat_struct(self.span, matching_path, field_pats)


@ -13,16 +13,14 @@ use codemap;
use ext::base;
use print;
use std::rc::Rc;
pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
sp: codemap::Span,
tt: &[ast::TokenTree])
tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
cx.print_backtrace();
println!("{}", print::pprust::tt_to_string(&ast::TTDelim(
Rc::new(tt.iter().map(|x| (*x).clone()).collect()))));
println!("{}", print::pprust::tts_to_string(tts));
// any so that `log_syntax` can be invoked as an expression and item.
base::DummyResult::any(sp)


@ -23,7 +23,7 @@ use ptr::P;
*
* This is registered as a set of expression syntax extension called quote!
* that lifts its argument token-tree to an AST representing the
* construction of the same token tree, with ast::TTNonterminal nodes
* construction of the same token tree, with ast::TtNonterminal nodes
* interpreted as antiquotes (splices).
*
*/
@ -637,12 +637,12 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
}
fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
match *tt {
ast::TTTok(sp, ref tok) => {
ast::TtToken(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let e_tok = cx.expr_call(sp,
mk_ast_path(cx, sp, "TTTok"),
mk_ast_path(cx, sp, "TtToken"),
vec!(e_sp, mk_token(cx, sp, tok)));
let e_push =
cx.expr_method_call(sp,
@ -650,13 +650,16 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
id_ext("push"),
vec!(e_tok));
vec!(cx.stmt_expr(e_push))
}
ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()),
ast::TTSeq(..) => fail!("TTSeq in quote!"),
ast::TTNonterminal(sp, ident) => {
},
ast::TtDelimited(sp, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
mk_tt(cx, sp, &open.to_tt()).into_iter()
.chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
.chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
.collect()
},
ast::TtSequence(..) => fail!("TtSequence in quote!"),
ast::TtNonterminal(sp, ident) => {
// tt.extend($ident.to_tokens(ext_cx).into_iter())
let e_to_toks =
@ -674,7 +677,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
vec!(e_to_toks));
vec!(cx.stmt_expr(e_push))
}
},
}
}
@ -690,7 +693,7 @@ fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> (P<ast::Expr>, P<ast::Expr>) {
// NB: It appears that the main parser loses its mind if we consider
// $foo as a TTNonterminal during the main parse, so we have to re-parse
// $foo as a TtNonterminal during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
// it has to do with transition away from supporting old-style macros, so
// try removing it when enough of them are gone.
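In the `TtDelimited` arm above, the generated statements come from chaining the open delimiter's tokens, a `flat_map` over the inner trees, and the close delimiter's tokens into one flat list. The same iterator shape on plain strings, as a quick illustration:

fn main() {
    let open = vec!["push LPAREN"];
    let inner = vec![vec!["push IDENT(a)"], vec!["push COMMA", "push IDENT(b)"]];
    let close = vec!["push RPAREN"];

    // open tokens, then every inner tree's tokens, then the close tokens.
    let stmts: Vec<&str> = open.into_iter()
        .chain(inner.into_iter().flat_map(|v| v.into_iter()))
        .chain(close.into_iter())
        .collect();

    assert_eq!(stmts, ["push LPAREN", "push IDENT(a)", "push COMMA",
                       "push IDENT(b)", "push RPAREN"]);
}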


@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
tt: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
match tt {
[ast::TTTok(_, ref tok)] if is_keyword(keywords::True, tok) => {
[ast::TtToken(_, ref tok)] if is_keyword(keywords::True, tok) => {
cx.set_trace_macros(true);
}
[ast::TTTok(_, ref tok)] if is_keyword(keywords::False, tok) => {
[ast::TtToken(_, ref tok)] if is_keyword(keywords::False, tok) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),


@ -323,9 +323,9 @@ pub fn parse(sess: &ParseSess,
} else {
match ei.elts[idx].node.clone() {
/* need to descend into sequence */
MatchSeq(ref matchers, ref sep, zero_ok,
MatchSeq(ref matchers, ref sep, kleene_op,
match_idx_lo, match_idx_hi) => {
if zero_ok {
if kleene_op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.idx += 1u;
//we specifically matched zero repeats.


@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim};
use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TtDelimited};
use ast;
use codemap::{Span, Spanned, DUMMY_SP};
use ext::base::{ExtCtxt, MacResult, MacroDef};
@ -147,13 +147,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
rhses: &[Rc<NamedMatch>])
-> Box<MacResult+'cx> {
if cx.trace_macros() {
println!("{}! {} {} {}",
println!("{}! {{ {} }}",
token::get_ident(name),
"{",
print::pprust::tt_to_string(&TTDelim(Rc::new(arg.iter()
.map(|x| (*x).clone())
.collect()))),
"}");
print::pprust::tts_to_string(arg));
}
// Which arm's failure should we report? (the one furthest along)
@ -175,15 +171,12 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
// okay, what's your transcriber?
MatchedNonterminal(NtTT(ref tt)) => {
match **tt {
// cut off delimiters; don't parse 'em
TTDelim(ref tts) => {
(*tts).slice(1u,(*tts).len()-1u)
.iter()
.map(|x| (*x).clone())
.collect()
}
_ => cx.span_fatal(
sp, "macro rhs must be delimited")
// ignore delimiters
TtDelimited(_, ref delimed) => {
let (_, ref tts, _) = **delimed;
tts.clone()
},
_ => cx.span_fatal(sp, "macro rhs must be delimited"),
}
},
_ => cx.span_bug(sp, "bad thing in rhs")
@ -239,10 +232,11 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
ms(MatchSeq(vec!(
ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
ms(MatchTok(FAT_ARROW)),
ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), false, 0u, 2u)),
ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI),
ast::OneOrMore, 0u, 2u)),
//to phase into semicolon-termination instead of
//semicolon-separation
ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, true, 2u, 2u)));
ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, ast::ZeroOrMore, 2u, 2u)));
// Parse the macro_rules! invocation (`none` is for no interpolations):


@ -9,7 +9,7 @@
// except according to those terms.
use ast;
use ast::{TokenTree, TTDelim, TTTok, TTSeq, TTNonterminal, Ident};
use ast::{TokenTree, TtDelimited, TtToken, TtSequence, TtNonterminal, Ident};
use codemap::{Span, DUMMY_SP};
use diagnostic::SpanHandler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@ -18,6 +18,7 @@ use parse::token;
use parse::lexer::TokenAndSpan;
use std::rc::Rc;
use std::ops::Add;
use std::collections::HashMap;
///an unzipping of `TokenTree`s
@ -44,7 +45,7 @@ pub struct TtReader<'a> {
}
/// This can do Macro-By-Example transcription. On the other hand, if
/// `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and
/// `src` contains no `TtSequence`s and `TtNonterminal`s, `interp` can (and
/// should) be none.
pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
@ -104,37 +105,45 @@ enum LockstepIterSize {
LisContradiction(String),
}
fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
match lhs {
LisUnconstrained => rhs.clone(),
LisContradiction(_) => lhs.clone(),
LisConstraint(l_len, l_id) => match rhs {
LisUnconstrained => lhs.clone(),
LisContradiction(_) => rhs.clone(),
LisConstraint(r_len, _) if l_len == r_len => lhs.clone(),
LisConstraint(r_len, r_id) => {
let l_n = token::get_ident(l_id);
let r_n = token::get_ident(r_id);
LisContradiction(format!("inconsistent lockstep iteration: \
'{}' has {} items, but '{}' has {}",
l_n, l_len, r_n, r_len).to_string())
}
impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {
fn add(&self, other: &LockstepIterSize) -> LockstepIterSize {
match *self {
LisUnconstrained => other.clone(),
LisContradiction(_) => self.clone(),
LisConstraint(l_len, l_id) => match *other {
LisUnconstrained => self.clone(),
LisContradiction(_) => other.clone(),
LisConstraint(r_len, _) if l_len == r_len => self.clone(),
LisConstraint(r_len, r_id) => {
let l_n = token::get_ident(l_id);
let r_n = token::get_ident(r_id);
LisContradiction(format!("inconsistent lockstep iteration: \
'{}' has {} items, but '{}' has {}",
l_n, l_len, r_n, r_len).to_string())
}
},
}
}
}
fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
match *t {
TTDelim(ref tts) | TTSeq(_, ref tts, _, _) => {
tts.iter().fold(LisUnconstrained, |lis, tt| {
lis_merge(lis, lockstep_iter_size(tt, r))
TtDelimited(_, ref delimed) => {
let (_, ref tts, _) = **delimed;
tts.iter().fold(LisUnconstrained, |size, tt| {
size + lockstep_iter_size(tt, r)
})
}
TTTok(..) => LisUnconstrained,
TTNonterminal(_, name) => match *lookup_cur_matched(r, name) {
},
TtSequence(_, ref tts, _, _) => {
tts.iter().fold(LisUnconstrained, |size, tt| {
size + lockstep_iter_size(tt, r)
})
},
TtToken(..) => LisUnconstrained,
TtNonterminal(_, name) => match *lookup_cur_matched(r, name) {
MatchedNonterminal(_) => LisUnconstrained,
MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
}
},
}
}
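The hunk above turns the free function `lis_merge` into an `Add` impl so `lockstep_iter_size` can fold sub-trees with `size + lockstep_iter_size(tt, r)`. A self-contained sketch of that merge-as-operator idea with simplified variants (plain strings instead of interned idents):

use std::ops::Add;

#[derive(Clone, Debug, PartialEq)]
enum LockstepIterSize {
    Unconstrained,
    Constraint(usize, String), // repetition length and the $var that imposed it
    Contradiction(String),
}

impl Add for LockstepIterSize {
    type Output = LockstepIterSize;
    fn add(self, other: LockstepIterSize) -> LockstepIterSize {
        use LockstepIterSize::*;
        match (self, other) {
            (Unconstrained, x) | (x, Unconstrained) => x,
            (c @ Contradiction(_), _) | (_, c @ Contradiction(_)) => c,
            (Constraint(l, name), Constraint(r, _)) if l == r => Constraint(l, name),
            (Constraint(l, ln), Constraint(r, rn)) => Contradiction(format!(
                "inconsistent lockstep iteration: '{}' has {} items, but '{}' has {}",
                ln, l, rn, r)),
        }
    }
}

fn main() {
    let sizes = vec![
        LockstepIterSize::Unconstrained,
        LockstepIterSize::Constraint(3, "a".into()),
        LockstepIterSize::Constraint(3, "b".into()),
    ];
    // Same shape as the fold in lockstep_iter_size.
    let merged = sizes.into_iter()
        .fold(LockstepIterSize::Unconstrained, |acc, s| acc + s);
    assert_eq!(merged, LockstepIterSize::Constraint(3, "a".into()));
}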
@ -189,32 +198,38 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
}
}
loop { /* because it's easiest, this handles `TTDelim` not starting
with a `TTTok`, even though it won't happen */
loop { /* because it's easiest, this handles `TtDelimited` not starting
with a `TtToken`, even though it won't happen */
let t = {
let frame = r.stack.last().unwrap();
// FIXME(pcwalton): Bad copy.
(*frame.forest)[frame.idx].clone()
};
match t {
TTDelim(tts) => {
TtDelimited(_, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
let mut forest = Vec::with_capacity(1 + tts.len() + 1);
forest.push(open.to_tt());
forest.extend(tts.iter().map(|x| (*x).clone()));
forest.push(close.to_tt());
r.stack.push(TtFrame {
forest: tts,
forest: Rc::new(forest),
idx: 0,
dotdotdoted: false,
sep: None
});
// if this could be 0-length, we'd need to potentially recur here
}
TTTok(sp, tok) => {
TtToken(sp, tok) => {
r.cur_span = sp;
r.cur_tok = tok;
r.stack.last_mut().unwrap().idx += 1;
return ret_val;
}
TTSeq(sp, tts, sep, zerok) => {
TtSequence(sp, tts, sep, kleene_op) => {
// FIXME(pcwalton): Bad copy.
match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) {
match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op), r) {
LisUnconstrained => {
r.sp_diag.span_fatal(
sp.clone(), /* blame macro writer */
@ -228,7 +243,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
LisConstraint(len, _) => {
if len == 0 {
if !zerok {
if kleene_op == ast::OneOrMore {
// FIXME #2887 blame invoker
r.sp_diag.span_fatal(sp.clone(),
"this must repeat at least once");
@ -249,7 +264,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
}
// FIXME #2887: think about span stuff here
TTNonterminal(sp, ident) => {
TtNonterminal(sp, ident) => {
r.stack.last_mut().unwrap().idx += 1;
match *lookup_cur_matched(r, ident) {
/* sidestep the interpolation tricks for ident because


@ -569,16 +569,29 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
match *tt {
TTTok(span, ref tok) =>
TTTok(span, fld.fold_token(tok.clone())),
TTDelim(ref tts) => TTDelim(Rc::new(fld.fold_tts(tts.as_slice()))),
TTSeq(span, ref pattern, ref sep, is_optional) =>
TTSeq(span,
Rc::new(fld.fold_tts(pattern.as_slice())),
sep.clone().map(|tok| fld.fold_token(tok)),
is_optional),
TTNonterminal(sp,ref ident) =>
TTNonterminal(sp,fld.fold_ident(*ident))
TtToken(span, ref tok) =>
TtToken(span, fld.fold_token(tok.clone())),
TtDelimited(span, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
TtDelimited(span, Rc::new((
Delimiter {
span: open.span,
token: fld.fold_token(open.token.clone())
},
fld.fold_tts(tts.as_slice()),
Delimiter {
span: close.span,
token: fld.fold_token(close.token.clone())
},
)))
},
TtSequence(span, ref pattern, ref sep, is_optional) =>
TtSequence(span,
Rc::new(fld.fold_tts(pattern.as_slice())),
sep.clone().map(|tok| fld.fold_token(tok)),
is_optional),
TtNonterminal(sp,ref ident) =>
TtNonterminal(sp,fld.fold_ident(*ident))
}
}


@ -74,7 +74,7 @@ impl<T> OwnedSlice<T> {
pub fn into_vec(self) -> Vec<T> {
// null is ok, because len == 0 in that case, as required by Vec.
unsafe {
let ret = Vec::from_raw_parts(self.len, self.len, self.data);
let ret = Vec::from_raw_parts(self.data, self.len, self.len);
// the vector owns the allocation now
mem::forget(self);
ret
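The one-line change above is an argument-order fix: `Vec::from_raw_parts` takes the pointer, then the length, then the capacity. A tiny round-trip showing that order (essentially the standard-library doc pattern):

use std::mem;

fn main() {
    let mut v = vec![10u8, 20, 30];
    let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());
    mem::forget(v); // the rebuilt Vec below owns the allocation now
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [10, 20, 30]);
}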


@ -788,65 +788,57 @@ mod test {
}
// check the token-tree-ization of macros
#[test] fn string_to_tts_macro () {
#[test]
fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
let tts: &[ast::TokenTree] = tts.as_slice();
match tts {
[ast::TTTok(_,_),
ast::TTTok(_,token::NOT),
ast::TTTok(_,_),
ast::TTDelim(ref delim_elts)] => {
let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
match delim_elts {
[ast::TTTok(_,token::LPAREN),
ast::TTDelim(ref first_set),
ast::TTTok(_,token::FAT_ARROW),
ast::TTDelim(ref second_set),
ast::TTTok(_,token::RPAREN)] => {
let first_set: &[ast::TokenTree] =
first_set.as_slice();
match first_set {
[ast::TTTok(_,token::LPAREN),
ast::TTTok(_,token::DOLLAR),
ast::TTTok(_,_),
ast::TTTok(_,token::RPAREN)] => {
let second_set: &[ast::TokenTree] =
second_set.as_slice();
match second_set {
[ast::TTTok(_,token::LPAREN),
ast::TTTok(_,token::DOLLAR),
ast::TTTok(_,_),
ast::TTTok(_,token::RPAREN)] => {
assert_eq!("correct","correct")
}
_ => assert_eq!("wrong 4","correct")
}
},
_ => {
error!("failing value 3: {}",first_set);
assert_eq!("wrong 3","correct")
}
[ast::TtToken(_, token::IDENT(name_macro_rules, false)),
ast::TtToken(_, token::NOT),
ast::TtToken(_, token::IDENT(name_zip, false)),
ast::TtDelimited(_, ref macro_delimed)]
if name_macro_rules.as_str() == "macro_rules"
&& name_zip.as_str() == "zip" => {
let (ref macro_open, ref macro_tts, ref macro_close) = **macro_delimed;
match (macro_open, macro_tts.as_slice(), macro_close) {
(&ast::Delimiter { token: token::LPAREN, .. },
[ast::TtDelimited(_, ref first_delimed),
ast::TtToken(_, token::FAT_ARROW),
ast::TtDelimited(_, ref second_delimed)],
&ast::Delimiter { token: token::RPAREN, .. }) => {
let (ref first_open, ref first_tts, ref first_close) = **first_delimed;
match (first_open, first_tts.as_slice(), first_close) {
(&ast::Delimiter { token: token::LPAREN, .. },
[ast::TtToken(_, token::DOLLAR),
ast::TtToken(_, token::IDENT(name, false))],
&ast::Delimiter { token: token::RPAREN, .. })
if name.as_str() == "a" => {},
_ => fail!("value 3: {}", **first_delimed),
}
let (ref second_open, ref second_tts, ref second_close) = **second_delimed;
match (second_open, second_tts.as_slice(), second_close) {
(&ast::Delimiter { token: token::LPAREN, .. },
[ast::TtToken(_, token::DOLLAR),
ast::TtToken(_, token::IDENT(name, false))],
&ast::Delimiter { token: token::RPAREN, .. })
if name.as_str() == "a" => {},
_ => fail!("value 4: {}", **second_delimed),
}
},
_ => {
error!("failing value 2: {}",delim_elts);
assert_eq!("wrong","correct");
}
_ => fail!("value 2: {}", **macro_delimed),
}
},
_ => {
error!("failing value: {}",tts);
assert_eq!("wrong 1","correct");
}
_ => fail!("value: {}",tts),
}
}
#[test] fn string_to_tts_1 () {
#[test]
fn string_to_tts_1 () {
let tts = string_to_tts("fn a (b : int) { b; }".to_string());
assert_eq!(json::encode(&tts),
"[\
{\
\"variant\":\"TTTok\",\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
{\
@ -859,7 +851,7 @@ mod test {
]\
},\
{\
\"variant\":\"TTTok\",\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
{\
@ -872,96 +864,90 @@ mod test {
]\
},\
{\
\"variant\":\"TTDelim\",\
\"variant\":\"TtDelimited\",\
\"fields\":[\
null,\
[\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"LPAREN\"\
]\
\"span\":null,\
\"token\":\"LPAREN\"\
},\
[\
{\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"b\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
\"COLON\"\
]\
},\
{\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"int\",\
false\
]\
}\
]\
}\
],\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"b\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"COLON\"\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"int\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"RPAREN\"\
]\
\"span\":null,\
\"token\":\"RPAREN\"\
}\
]\
]\
},\
{\
\"variant\":\"TTDelim\",\
\"variant\":\"TtDelimited\",\
\"fields\":[\
null,\
[\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"LBRACE\"\
]\
\"span\":null,\
\"token\":\"LBRACE\"\
},\
[\
{\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"b\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
\"SEMI\"\
]\
}\
],\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"b\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"SEMI\"\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"RBRACE\"\
]\
\"span\":null,\
\"token\":\"RBRACE\"\
}\
]\
]\


@ -48,8 +48,8 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
use ast::{StructVariantKind, BiSub};
use ast::StrStyle;
use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
use ast::{TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok};
use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
use ast::{TtNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
use ast::{TyTypeof, TyInfer, TypeMethod};
use ast::{TyNil, TyParam, TyParamBound, TyParen, TyPath, TyPtr, TyQPath};
@ -2497,27 +2497,30 @@ impl<'a> Parser<'a> {
return e;
}
/// Parse an optional separator followed by a kleene-style
/// Parse an optional separator followed by a Kleene-style
/// repetition token (+ or *).
pub fn parse_sep_and_zerok(&mut self) -> (Option<token::Token>, bool) {
fn parse_zerok(parser: &mut Parser) -> Option<bool> {
pub fn parse_sep_and_kleene_op(&mut self) -> (Option<token::Token>, ast::KleeneOp) {
fn parse_kleene_op(parser: &mut Parser) -> Option<ast::KleeneOp> {
match parser.token {
token::BINOP(token::STAR) | token::BINOP(token::PLUS) => {
let zerok = parser.token == token::BINOP(token::STAR);
token::BINOP(token::STAR) => {
parser.bump();
Some(zerok)
Some(ast::ZeroOrMore)
},
token::BINOP(token::PLUS) => {
parser.bump();
Some(ast::OneOrMore)
},
_ => None
}
};
match parse_zerok(self) {
Some(zerok) => return (None, zerok),
match parse_kleene_op(self) {
Some(kleene_op) => return (None, kleene_op),
None => {}
}
let separator = self.bump_and_get();
match parse_zerok(self) {
match parse_kleene_op(self) {
Some(zerok) => (Some(separator), zerok),
None => self.fatal("expected `*` or `+`")
}
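`parse_sep_and_kleene_op` above first tries to read a `*` or `+` directly, and only if that fails treats the next token as a separator and insists the one after it is the operator. The same two-step control flow over a plain character iterator, just for illustration (none of these names are the real parser API):

#[derive(Debug, PartialEq, Clone, Copy)]
enum KleeneOp { ZeroOrMore, OneOrMore }

fn kleene_op(c: char) -> Option<KleeneOp> {
    match c {
        '*' => Some(KleeneOp::ZeroOrMore),
        '+' => Some(KleeneOp::OneOrMore),
        _ => None,
    }
}

// (optional separator, repeat operator), or an error if no operator shows up.
fn parse_sep_and_kleene_op(input: &mut impl Iterator<Item = char>)
                           -> Result<(Option<char>, KleeneOp), String> {
    let first = input.next().ok_or("expected `*` or `+`")?;
    if let Some(op) = kleene_op(first) {
        return Ok((None, op));             // bare operator, e.g. $(..)*
    }
    match input.next().and_then(kleene_op) {
        Some(op) => Ok((Some(first), op)), // separator then operator, e.g. $(..),+
        None => Err("expected `*` or `+`".into()),
    }
}

fn main() {
    assert_eq!(parse_sep_and_kleene_op(&mut "*".chars()),
               Ok((None, KleeneOp::ZeroOrMore)));
    assert_eq!(parse_sep_and_kleene_op(&mut ",+".chars()),
               Ok((Some(','), KleeneOp::OneOrMore)));
}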
@ -2526,8 +2529,8 @@ impl<'a> Parser<'a> {
/// parse a single token tree from the input.
pub fn parse_token_tree(&mut self) -> TokenTree {
// FIXME #6994: currently, this is too eager. It
// parses token trees but also identifies TTSeq's
// and TTNonterminal's; it's too early to know yet
// parses token trees but also identifies TtSequence's
// and TtNonterminal's; it's too early to know yet
// whether something will be a nonterminal or a seq
// yet.
maybe_whole!(deref self, NtTT);
@ -2564,26 +2567,21 @@ impl<'a> Parser<'a> {
seq_sep_none(),
|p| p.parse_token_tree()
);
let (s, z) = p.parse_sep_and_zerok();
let (sep, repeat) = p.parse_sep_and_kleene_op();
let seq = match seq {
Spanned { node, .. } => node,
};
TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat)
} else {
TTNonterminal(sp, p.parse_ident())
TtNonterminal(sp, p.parse_ident())
}
}
_ => {
parse_any_tt_tok(p)
TtToken(p.span, p.bump_and_get())
}
}
}
// turn the next token into a TTTok:
fn parse_any_tt_tok(p: &mut Parser) -> TokenTree {
TTTok(p.span, p.bump_and_get())
}
match (&self.token, token::close_delimiter_for(&self.token)) {
(&token::EOF, _) => {
let open_braces = self.open_braces.clone();
@ -2595,21 +2593,32 @@ impl<'a> Parser<'a> {
self.fatal("this file contains an un-closed delimiter ");
}
(_, Some(close_delim)) => {
// The span for the beginning of the delimited section
let pre_span = self.span;
// Parse the open delimiter.
self.open_braces.push(self.span);
let mut result = vec!(parse_any_tt_tok(self));
let open = Delimiter {
span: self.span,
token: self.bump_and_get(),
};
let trees =
self.parse_seq_to_before_end(&close_delim,
seq_sep_none(),
|p| p.parse_token_tree());
result.extend(trees.into_iter());
// Parse the token trees within the delimiters
let tts = self.parse_seq_to_before_end(
&close_delim, seq_sep_none(), |p| p.parse_token_tree()
);
// Parse the close delimiter.
result.push(parse_any_tt_tok(self));
let close = Delimiter {
span: self.span,
token: self.bump_and_get(),
};
self.open_braces.pop().unwrap();
TTDelim(Rc::new(result))
// Expand to cover the entire delimited token tree
let span = Span { hi: self.span.hi, ..pre_span };
TtDelimited(span, Rc::new((open, tts, close)))
}
_ => parse_non_delim_tt_tok(self)
}
@ -2673,8 +2682,8 @@ impl<'a> Parser<'a> {
if ms.len() == 0u {
self.fatal("repetition body must be nonempty");
}
let (sep, zerok) = self.parse_sep_and_zerok();
MatchSeq(ms, sep, zerok, name_idx_lo, *name_idx)
let (sep, kleene_op) = self.parse_sep_and_kleene_op();
MatchSeq(ms, sep, kleene_op, name_idx_lo, *name_idx)
} else {
let bound_to = self.parse_ident();
self.expect(&token::COLON);


@ -224,6 +224,10 @@ pub fn item_to_string(i: &ast::Item) -> String {
$to_string(|s| s.print_item(i))
}
pub fn view_item_to_string(i: &ast::ViewItem) -> String {
$to_string(|s| s.print_view_item(i))
}
pub fn generics_to_string(generics: &ast::Generics) -> String {
$to_string(|s| s.print_generics(generics))
}
@ -1020,8 +1024,15 @@ impl<'a> State<'a> {
/// expression arguments as expressions). It can be done! I think.
pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
match *tt {
ast::TTDelim(ref tts) => self.print_tts(tts.as_slice()),
ast::TTTok(_, ref tk) => {
ast::TtDelimited(_, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
try!(space(&mut self.s));
try!(self.print_tts(tts.as_slice()));
try!(space(&mut self.s));
word(&mut self.s, parse::token::to_string(&close.token).as_slice())
},
ast::TtToken(_, ref tk) => {
try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
match *tk {
parse::token::DOC_COMMENT(..) => {
@ -1030,22 +1041,25 @@ impl<'a> State<'a> {
_ => Ok(())
}
}
ast::TTSeq(_, ref tts, ref sep, zerok) => {
ast::TtSequence(_, ref tts, ref separator, kleene_op) => {
try!(word(&mut self.s, "$("));
for tt_elt in (*tts).iter() {
try!(self.print_tt(tt_elt));
}
try!(word(&mut self.s, ")"));
match *sep {
match *separator {
Some(ref tk) => {
try!(word(&mut self.s,
parse::token::to_string(tk).as_slice()));
}
None => ()
}
word(&mut self.s, if zerok { "*" } else { "+" })
match kleene_op {
ast::ZeroOrMore => word(&mut self.s, "*"),
ast::OneOrMore => word(&mut self.s, "+"),
}
}
ast::TTNonterminal(_, name) => {
ast::TtNonterminal(_, name) => {
try!(word(&mut self.s, "$"));
self.print_ident(name)
}
@ -1983,8 +1997,10 @@ impl<'a> State<'a> {
Consistent, fields.as_slice(),
|s, f| {
try!(s.cbox(indent_unit));
try!(s.print_ident(f.node.ident));
try!(s.word_nbsp(":"));
if !f.node.is_shorthand {
try!(s.print_ident(f.node.ident));
try!(s.word_nbsp(":"));
}
try!(s.print_pat(&*f.node.pat));
s.end()
},
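The pretty-printer hunk above (together with the `is_shorthand: false` fix in the deriving code earlier) only prints the `ident:` part of a field pattern when the pattern is not shorthand. For reference, what "shorthand" means at the source level:

struct Foo { a: u32, x: u32 }

fn main() {
    let foo = Foo { a: 1, x: 2 };
    // `a` is a shorthand field pattern (no `ident:` printed for it),
    // `x: y` is the explicit form the printer keeps as-is.
    let Foo { a, x: y } = foo;
    assert_eq!((a, y), (1, 2));
}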


@ -71,7 +71,8 @@ fn color_to_bits(color: color::Color) -> u16 {
}
fn bits_to_color(bits: u16) -> color::Color {
let color = match bits & 0x7 {
let bits = bits & 0x7;
let color = match bits {
0 => color::BLACK,
0x1 => color::BLUE,
0x2 => color::GREEN,


@ -1,3 +1,12 @@
S 2014-10-22 d44ea72
freebsd-x86_64 8bf5ee7c1ca8ab880800cf3a535e16bb7ffbf9e8
linux-i386 1fc8302b405406a3fc183b23c8397bef5a56c52a
linux-x86_64 3e04d8197a96b0c858e4e2763b3893df35ae2fb3
macos-i386 b9823771ae6237a3c1c19eb2e98a2372ce23439d
macos-x86_64 3cf9fc1cd252a80430d8673e35a1256674e122ae
winnt-i386 5a6d2ad82a31deffad5b6a17487a8cd5c21f7636
winnt-x86_64 7468b87eb5be238993ccd41ad74bbd88dd176d31
S 2014-10-10 78a7676
freebsd-x86_64 511061af382e2e837a6d615823e1a952e8281483
linux-i386 0644637db852db8a6c603ded0531ccaa60291bd3


@ -18,7 +18,7 @@ extern crate rustc;
use syntax::codemap::Span;
use syntax::parse::token::{IDENT, get_ident};
use syntax::ast::{TokenTree, TTTok};
use syntax::ast::{TokenTree, TtToken};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
use syntax::ext::build::AstBuilder; // trait for expr_uint
use rustc::plugin::Registry;
@ -39,7 +39,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
("I", 1)];
let text = match args {
[TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
[TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);


@ -0,0 +1,17 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
mod foo { pub fn bar() {} }
fn main() {
match () {
foo::bar => {} //~ ERROR `bar` is not an enum variant, struct or const
}
}


@ -0,0 +1,15 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let foo = "str";
println!("{}", foo.desc); //~ ERROR attempted access of field `desc` on type `&str`,
// but no field with that name was found
}


@ -0,0 +1,18 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait T<'a> {
fn a(&'a self) -> &'a bool;
fn b(&self) {
self.a(); //~ ERROR mismatched types: expected `&'a Self`, found `&Self` (lifetime mismatch)
}
}
fn main() {}


@ -9,6 +9,7 @@
// except according to those terms.
static X: uint = 0 as *const uint as uint;
//~^ ERROR: can not cast a pointer to an integer in a constant expression
fn main() {
assert_eq!(X, 0);


@ -0,0 +1,18 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
struct B<T>;
fn main() {
let foo = B; //~ ERROR unable to infer enough type information to locate the impl of the trait
let closure = |:| foo;
}


@ -0,0 +1,17 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
static z: &'static int = {
let p = 3;
&p
//~^ ERROR cannot borrow a local variable inside a static block, define a separate static instead
};
}


@ -0,0 +1,19 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(struct_variant)]
enum Foo {
Variant { x: uint }
}
fn main() {
let f = Variant(42u); //~ ERROR expected function, found `Foo`
}


@ -0,0 +1,15 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
const X: u32 = 1;
const Y: uint = &X as *const u32 as uint; //~ ERROR E0018
println!("{}", Y);
}


@ -0,0 +1,19 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Obj<'a> {
closure: ||: 'a -> u32
}
fn main() {
let o = Obj { closure: || 42 };
o.closure(); //~ ERROR type `Obj<'_>` does not implement any method in scope named `closure`
//~^ NOTE use `(s.closure)(...)` if you meant to call the function stored in the `closure` field
}


@ -0,0 +1,15 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
static x: &'static uint = &1;
static y: uint = *x;
//~^ ERROR cannot refer to other statics by value,
// use the address-of operator or a constant instead
fn main() {}


@ -0,0 +1,27 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum A { B, C }
mod foo { pub fn bar() {} }
fn main() {
match (true, false) {
B => (), //~ ERROR expected `(bool,bool)`, found `A` (expected tuple, found enum A)
_ => ()
}
match &Some(42i) {
Some(x) => (), //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<<generic #4>>`
None => () //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<<generic #5>>`
}
}


@ -0,0 +1,31 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
// Test that even unboxed closures that are capable of mutating their
// environment cannot mutate captured variables that have not been
// declared mutable (#18335)
fn set(x: &mut uint) { *x = 0; }
fn main() {
let x = 0u;
move |&mut:| x = 1; //~ ERROR cannot assign
move |&mut:| set(&mut x); //~ ERROR cannot borrow
move |:| x = 1; //~ ERROR cannot assign
move |:| set(&mut x); //~ ERROR cannot borrow
|&mut:| x = 1; //~ ERROR cannot assign
// FIXME: this should be `cannot borrow` (issue #18330)
|&mut:| set(&mut x); //~ ERROR cannot assign
|:| x = 1; //~ ERROR cannot assign
// FIXME: this should be `cannot borrow` (issue #18330)
|:| set(&mut x); //~ ERROR cannot assign
}


@ -0,0 +1,15 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pp-exact
// pretty-compare-only
// Testing that shorthand struct patterns are preserved
fn main() { let Foo { a, ref b, mut c, x: y, z: z } = foo; }


@ -9,6 +9,9 @@
// except according to those terms.
#![allow(dead_code)]
#![feature(unboxed_closures, unboxed_closure_sugar)]
// compile-flags:-g
fn foo<T>() {}
@ -82,6 +85,9 @@ fn bar<'b>() {
// issue #13490
let _ = || -> ! loop {};
let _ = proc() -> ! loop {};
// issue #17021
let c = box |&:| {};
}
struct B<T>;


@ -28,7 +28,7 @@ pub fn main() {
let x: *mut S = &mut S;
// Test we can chnage the mutability from mut to const.
// Test we can change the mutability from mut to const.
let x: &T = &mut S;
let x: *const T = &mut S;
}

@ -0,0 +1,44 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Hash<H> {
fn hash2(&self, hasher: &H) -> u64;
}
trait Stream {
fn input(&mut self, bytes: &[u8]);
fn result(&self) -> u64;
}
trait StreamHasher<S: Stream> {
fn stream(&self) -> S;
}
//////////////////////////////////////////////////////////////////////////////
trait StreamHash<S: Stream, H: StreamHasher<S>>: Hash<H> {
fn input_stream(&self, stream: &mut S);
}
impl<S: Stream, H: StreamHasher<S>> Hash<H> for u8 {
fn hash2(&self, hasher: &H) -> u64 {
let mut stream = hasher.stream();
self.input_stream(&mut stream);
stream.result()
}
}
impl<S: Stream, H: StreamHasher<S>> StreamHash<S, H> for u8 {
fn input_stream(&self, stream: &mut S) {
stream.input([*self]);
}
}
fn main() {}
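
The test above declares the trait chain but no concrete implementors; a minimal sketch of how it is meant to fit together, with `XorStream` and `XorHasher` as assumed, illustration-only types:

struct XorStream { state: u64 }
impl Stream for XorStream {
    fn input(&mut self, bytes: &[u8]) {
        for b in bytes.iter() { self.state ^= *b as u64; }
    }
    fn result(&self) -> u64 { self.state }
}
struct XorHasher;
impl StreamHasher<XorStream> for XorHasher {
    fn stream(&self) -> XorStream { XorStream { state: 0 } }
}
// A call such as `3u8.hash2(&XorHasher)` would then go through the blanket
// `Hash` impl above and into `StreamHash::input_stream` (assuming the 2014
// trait selection can infer the stream type `S`).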

@ -0,0 +1,27 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io::Reader;
enum Wrapper<'a> {
WrapReader(&'a Reader + 'a)
}
trait Wrap<'a> {
fn wrap(self) -> Wrapper<'a>;
}
impl<'a, R: Reader> Wrap<'a> for &'a mut R {
fn wrap(self) -> Wrapper<'a> {
WrapReader(self as &'a mut Reader)
}
}
pub fn main() {}

@ -0,0 +1,25 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
use std::mem;
fn main() {
let y = 0u8;
let closure = move |&: x| y + x;
// Check that both closures are capturing by value
assert_eq!(1, mem::size_of_val(&closure));
spawn(proc() {
let ok = closure;
})
}
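
A sketch of the size reasoning behind the assertion above, assuming a by-value unboxed closure stores exactly its captured payload inline (the 16-byte case and all names here are illustration-only, not asserted by the original test):

#![feature(unboxed_closures)]
fn main() {
    use std::mem;
    let small = 0u8;
    let big = [0u8, ..16];
    let by_value_small = move |&:| small;
    let by_value_big = move |&:| big;
    assert_eq!(1, mem::size_of_val(&by_value_small));
    assert_eq!(16, mem::size_of_val(&by_value_big));
}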

@ -0,0 +1,32 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-pretty
#![feature(unboxed_closures)]
struct Parser<'a, I, O> {
parse: Box<FnMut<(I,), Result<O, String>> + 'a>
}
impl<'a, I, O: 'a> Parser<'a, I, O> {
fn compose<K: 'a>(mut self, mut rhs: Parser<O, K>) -> Parser<'a, I, K> {
Parser {
parse: box move |&mut: x: I| {
match self.parse.call_mut((x,)) {
Ok(r) => rhs.parse.call_mut((r,)),
Err(e) => Err(e)
}
}
}
}
}
fn main() {}

@ -0,0 +1,22 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
const X: &'static str = "12345";
fn test(s: String) -> bool {
match s.as_slice() {
X => true,
_ => false
}
}
fn main() {
assert!(test("12345".to_string()));
}

@ -0,0 +1,30 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub struct Foo {
f1: int,
_f2: int,
}
#[inline(never)]
pub fn foo(f: &mut Foo) -> Foo {
let ret = *f;
f.f1 = 0;
ret
}
pub fn main() {
let mut f = Foo {
f1: 8,
_f2: 9,
};
f = foo(&mut f);
assert_eq!(f.f1, 8);
}
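
A minimal sketch, with `foo_spelled_out` as an assumed illustration-only name, spelling out the whole-struct copy that `let ret = *f;` performs implicitly: the copy is taken before the write, which is why the caller must still observe `f1 == 8`.

fn foo_spelled_out(f: &mut Foo) -> Foo {
    let snapshot = Foo { f1: f.f1, _f2: f._f2 }; // copy of the whole struct, taken first
    f.f1 = 0;                                    // later write cannot reach `snapshot`
    snapshot
}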

@ -30,7 +30,7 @@ unsafe fn test_triangle() -> bool {
let ascend = ascend.as_mut_slice();
static ALIGN : uint = 1;
- // Checks that `ascend` forms triangle of acending size formed
+ // Checks that `ascend` forms triangle of ascending size formed
// from pairs of rows (where each pair of rows is equally sized),
// and the elements of the triangle match their row-pair index.
unsafe fn sanity_check(ascend: &[*mut u8]) {

@ -0,0 +1,27 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the cache results from the default method do not pollute
// the cache for the later call in `load()`.
//
// See issue #18209.
pub trait Foo {
fn load_from() -> Box<Self>;
fn load() -> Box<Self> {
Foo::load_from()
}
}
pub fn load<M: Foo>() -> Box<M> {
Foo::load()
}
fn main() { }
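
The trait is exercised here only at the type level; a minimal sketch of a concrete implementor, with `Cached` as an assumed, illustration-only type, showing the path the comment describes, where `load()` defaults through `load_from()`:

struct Cached;
impl Foo for Cached {
    fn load_from() -> Box<Cached> { box Cached }
}
fn main() { let _c: Box<Cached> = load(); }   // goes through the default `load()`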

@ -0,0 +1,28 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
#![deny(unused_mut)]
// Test that mutating a mutable upvar in a capture-by-value unboxed
// closure does not ice (issue #18238) and marks the upvar as used
// mutably so we do not get a spurious warning about it not needing to
// be declared mutable (issue #18336).
fn main() {
{
let mut x = 0u;
move |&mut:| x += 1;
}
{
let mut x = 0u;
move |:| x += 1;
}
}

@ -10,7 +10,7 @@
fn sub_expr() {
// Test for a &[T] => &&[T] coercion in sub-expression position
- // (surpisingly, this can cause errors which are not caused by either of:
+ // (surprisingly, this can cause errors which are not caused by either of:
// `let x = vec.slice_mut(0, 2);`
// `foo(vec.slice_mut(0, 2));` ).
let mut vec: Vec<int> = vec!(1, 2, 3, 4);