Mirror of https://github.com/rust-lang/rust.git
auto merge of #10477 : ktt3ja/rust/dead-code, r=alexcrichton
PR for issue #1749, mainly to get some feedback and suggestions. This adds a pass that warns if a function, struct, enum, or static item is never used. For the following code,

```rust
pub static pub_static: int = 0;
static priv_static: int = 0;
static used_static: int = 0;

pub fn pub_fn() { used_fn(); }
fn priv_fn() { let unused_struct = PrivStruct; }
fn used_fn() {}

pub struct PubStruct();
struct PrivStruct();
struct UsedStruct1 { x: int }
struct UsedStruct2(int);
struct UsedStruct3();

pub enum pub_enum { foo1, bar1 }
enum priv_enum { foo2, bar2 }
enum used_enum { foo3, bar3 }

fn foo() {
    bar();
    let unused_enum = foo2;
}

fn bar() {
    foo();
}

fn main() {
    let used_struct1 = UsedStruct1 { x: 1 };
    let used_struct2 = UsedStruct2(1);
    let used_struct3 = UsedStruct3;
    let t = used_static;
    let e = foo3;
}
```

it would add the following warnings:

```
/home/ktt3ja/test.rs:2:0: 2:28 warning: code is never used: `priv_static`, #[warn(dead_code)] on by default
/home/ktt3ja/test.rs:2 static priv_static: int = 0;
                       ^~~~~~~~~~~~~~~~~~~~~~~~~~~
/home/ktt3ja/test.rs:6:0: 6:48 warning: code is never used: `priv_fn`, #[warn(dead_code)] on by default
/home/ktt3ja/test.rs:6 fn priv_fn() { let unused_struct = PrivStruct; }
                       ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/home/ktt3ja/test.rs:10:0: 10:20 warning: code is never used: `PrivStruct`, #[warn(dead_code)] on by default
/home/ktt3ja/test.rs:10 struct PrivStruct();
                        ^~~~~~~~~~~~~~~~~~~
/home/ktt3ja/test.rs:16:0: 16:29 warning: code is never used: `priv_enum`, #[warn(dead_code)] on by default
/home/ktt3ja/test.rs:16 enum priv_enum { foo2, bar2 }
                        ^~~~~~~~~~~~~~~~~~~~~~~~~~~~
/home/ktt3ja/test.rs:19:0: 22:1 warning: code is never used: `foo`, #[warn(dead_code)] on by default
/home/ktt3ja/test.rs:19 fn foo() {
/home/ktt3ja/test.rs:20     bar();
/home/ktt3ja/test.rs:21     let unused_enum = foo2;
/home/ktt3ja/test.rs:22 }
/home/ktt3ja/test.rs:24:0: 26:1 warning: code is never used: `bar`, #[warn(dead_code)] on by default
/home/ktt3ja/test.rs:24 fn bar() {
/home/ktt3ja/test.rs:25     foo();
/home/ktt3ja/test.rs:26 }
```

Furthermore, I would like to solicit some test cases, since I haven't tested extensively and I'm still unclear about some of the things in here. For example, I'm not sure how reexports would affect this, and I just assumed that LiveContext (which is a copy of reachable::ReachableContext) does enough work to handle it. Also, the test case above doesn't include any impls or methods, etc.
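Since the warning goes through the regular lint machinery (see the `dead_code` entry added to the lint table further down in this diff), it should be controllable with the usual lint attributes. A minimal sketch of silencing it; the helper name here is made up for illustration and is not part of the patch:

```rust
// Hypothetical example: keep an intentionally unused helper around
// without tripping the new dead_code warning.
#[allow(dead_code)]
fn debug_helper() {}

fn main() {}
```

The patch itself relies on the same attribute, for example `#[allow(dead_code)];` as a crate-level attribute in `extra::btree` and `#[allow(dead_code)] // useful for debugging` on `Resolver::dump_module`.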
This commit is contained in commit a6310f6ad3.
@ -192,10 +192,6 @@ pub fn opt_str2(maybestr: Option<~str>) -> ~str {
    match maybestr { None => ~"(none)", Some(s) => { s } }
}

pub fn str_opt(maybestr: ~str) -> Option<~str> {
    if maybestr != ~"(none)" { Some(maybestr) } else { None }
}

pub fn str_mode(s: ~str) -> mode {
    match s {
        ~"compile-fail" => mode_compile_fail,

@ -18,6 +18,7 @@ use header::TestProps;
use header::load_props;
use procsrv;
use util::logv;
#[cfg(target_os = "win32")]
use util;

use std::io::File;

@ -482,6 +483,7 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
        format!("{}:{}:", testfile.display(), ee.line)
    }).collect::<~[~str]>();

#[cfg(target_os = "win32")]
fn to_lower( s : &str ) -> ~str {
    let i = s.chars();
    let c : ~[char] = i.map( |c| {

@ -822,6 +824,7 @@ fn make_cmdline(libpath: &str, prog: &str, args: &[~str]) -> ~str {

// Build the LD_LIBRARY_PATH variable as it would be seen on the command line
// for diagnostic purposes
#[cfg(target_os = "win32")]
fn lib_path_cmd_prefix(path: &str) -> ~str {
    format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
}

@ -10,6 +10,7 @@

use common::config;

#[cfg(target_os = "win32")]
use std::os::getenv;

/// Conversion table from triple OS name to Rust SYSNAME

@ -31,6 +32,7 @@ pub fn get_os(triple: &str) -> &'static str {
    fail!("Cannot determine OS from triple");
}

#[cfg(target_os = "win32")]
pub fn make_new_path(path: &str) -> ~str {

    // Windows just uses PATH as the library search path, so we have to

@ -43,21 +45,9 @@ pub fn make_new_path(path: &str) -> ~str {
    }
}

#[cfg(target_os = "linux")]
#[cfg(target_os = "freebsd")]
pub fn lib_path_env_var() -> ~str { ~"LD_LIBRARY_PATH" }

#[cfg(target_os = "macos")]
pub fn lib_path_env_var() -> ~str { ~"DYLD_LIBRARY_PATH" }

#[cfg(target_os = "win32")]
pub fn lib_path_env_var() -> ~str { ~"PATH" }

#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
pub fn path_div() -> ~str { ~":" }

#[cfg(target_os = "win32")]
pub fn path_div() -> ~str { ~";" }

@ -64,6 +64,7 @@ while cur < len(lines):
#[ allow(dead_assignment) ];\n
#[ allow(unused_mut) ];\n
#[ allow(attribute_usage) ];\n
#[ allow(dead_code) ];\n
#[ feature(macro_rules, globs, struct_variant, managed_boxes) ];\n
""" + block
if xfail:

@ -382,11 +382,9 @@ impl Bitv {
    #[inline]
    pub fn negate(&mut self) {
        match self.rep {
            Small(ref mut b) => b.negate(),
            Big(ref mut s) => {
                s.each_storage(|w| { *w = !*w; true });
            }
        }
            Small(ref mut s) => s.negate(),
            Big(ref mut b) => b.negate(),
        }
    }

    /**

@ -14,15 +14,14 @@
|
||||
//! Starting implementation of a btree for rust.
|
||||
//! Structure inspired by github user davidhalperin's gist.
|
||||
|
||||
|
||||
#[allow(dead_code)];
|
||||
use std::util::replace;
|
||||
|
||||
|
||||
///A B-tree contains a root node (which contains a vector of elements),
|
||||
///a length (the height of the tree), and lower and upper bounds on the
|
||||
///number of elements that a given node can contain.
|
||||
#[allow(missing_doc)]
|
||||
pub struct BTree<K, V>{
|
||||
pub struct BTree<K, V> {
|
||||
root: Node<K, V>,
|
||||
len: uint,
|
||||
lower_bound: uint,
|
||||
@ -34,11 +33,11 @@ pub struct BTree<K, V>{
|
||||
//especially during insertions and deletions.
|
||||
//Using the swap or replace methods is one option for replacing dependence on Clone, or
|
||||
//changing the way in which the BTree is stored could also potentially work.
|
||||
impl<K: Clone + TotalOrd, V: Clone> BTree<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> BTree<K, V> {
|
||||
|
||||
///Returns new BTree with root node (leaf) and user-supplied lower bound
|
||||
fn new(k: K, v: V, lb: uint) -> BTree<K, V>{
|
||||
BTree{
|
||||
pub fn new(k: K, v: V, lb: uint) -> BTree<K, V> {
|
||||
BTree {
|
||||
root: Node::new_leaf(~[LeafElt::new(k, v)]),
|
||||
len: 1,
|
||||
lower_bound: lb,
|
||||
@ -48,8 +47,10 @@ impl<K: Clone + TotalOrd, V: Clone> BTree<K, V>{
|
||||
|
||||
///Helper function for clone: returns new BTree with supplied root node,
|
||||
///length, and lower bound. For use when the length is known already.
|
||||
fn new_with_node_len(n: Node<K, V>, length: uint, lb: uint) -> BTree<K, V>{
|
||||
BTree{
|
||||
pub fn new_with_node_len(n: Node<K, V>,
|
||||
length: uint,
|
||||
lb: uint) -> BTree<K, V> {
|
||||
BTree {
|
||||
root: n,
|
||||
len: length,
|
||||
lower_bound: lb,
|
||||
@ -59,35 +60,31 @@ impl<K: Clone + TotalOrd, V: Clone> BTree<K, V>{
|
||||
|
||||
///Implements the Clone trait for the BTree.
|
||||
///Uses a helper function/constructor to produce a new BTree.
|
||||
fn clone(&self) -> BTree<K, V>{
|
||||
pub fn clone(&self) -> BTree<K, V> {
|
||||
return BTree::new_with_node_len(self.root.clone(), self.len, self.lower_bound);
|
||||
}
|
||||
|
||||
///Returns the value of a given key, which may not exist in the tree.
|
||||
///Calls the root node's get method.
|
||||
fn get(self, k: K) -> Option<V>{
|
||||
pub fn get(self, k: K) -> Option<V> {
|
||||
return self.root.get(k);
|
||||
}
|
||||
|
||||
///Checks to see if the key already exists in the tree, and if it is not,
|
||||
///the key-value pair is added to the tree by calling add on the root node.
|
||||
fn add(self, k: K, v: V) -> bool{
|
||||
pub fn add(self, k: K, v: V) -> bool {
|
||||
let is_get = &self.clone().get(k.clone());
|
||||
if is_get.is_some(){ return false; }
|
||||
else{
|
||||
else {
|
||||
replace(&mut self.root.clone(),self.root.add(k.clone(), v));
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for BTree<K, V>{
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for BTree<K, V> {
|
||||
///Returns a string representation of the BTree
|
||||
fn to_str(&self) -> ~str{
|
||||
fn to_str(&self) -> ~str {
|
||||
let ret = self.root.to_str();
|
||||
ret
|
||||
}
|
||||
@ -99,14 +96,14 @@ impl<K: ToStr + TotalOrd, V: ToStr> ToStr for BTree<K, V>{
|
||||
//Branches contain BranchElts, which contain a left child (another node) and a key-value
|
||||
//pair. Branches also contain the rightmost child of the elements in the array.
|
||||
//Leaves contain LeafElts, which do not have children.
|
||||
enum Node<K, V>{
|
||||
enum Node<K, V> {
|
||||
LeafNode(Leaf<K, V>),
|
||||
BranchNode(Branch<K, V>)
|
||||
}
|
||||
|
||||
|
||||
//Node functions/methods
|
||||
impl<K: Clone + TotalOrd, V: Clone> Node<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> Node<K, V> {
|
||||
|
||||
///Differentiates between leaf and branch nodes.
|
||||
fn is_leaf(&self) -> bool{
|
||||
@ -117,20 +114,20 @@ impl<K: Clone + TotalOrd, V: Clone> Node<K, V>{
|
||||
}
|
||||
|
||||
///Creates a new leaf node given a vector of elements.
|
||||
fn new_leaf(vec: ~[LeafElt<K, V>]) -> Node<K,V>{
|
||||
fn new_leaf(vec: ~[LeafElt<K, V>]) -> Node<K,V> {
|
||||
LeafNode(Leaf::new(vec))
|
||||
}
|
||||
|
||||
///Creates a new branch node given a vector of an elements and a pointer to a rightmost child.
|
||||
fn new_branch(vec: ~[BranchElt<K, V>], right: ~Node<K, V>) -> Node<K, V>{
|
||||
fn new_branch(vec: ~[BranchElt<K, V>], right: ~Node<K, V>) -> Node<K, V> {
|
||||
BranchNode(Branch::new(vec, right))
|
||||
}
|
||||
|
||||
|
||||
///Returns the corresponding value to the provided key.
|
||||
///get() is called in different ways on a branch or a leaf.
|
||||
fn get(&self, k: K) -> Option<V>{
|
||||
match *self{
|
||||
fn get(&self, k: K) -> Option<V> {
|
||||
match *self {
|
||||
LeafNode(ref leaf) => return leaf.get(k),
|
||||
BranchNode(ref branch) => return branch.get(k)
|
||||
}
|
||||
@ -138,31 +135,35 @@ impl<K: Clone + TotalOrd, V: Clone> Node<K, V>{
|
||||
|
||||
///A placeholder for add
|
||||
///Currently returns a leaf node with a single value (the added one)
|
||||
fn add(self, k: K, v: V) -> Node<K, V>{
|
||||
fn add(self, k: K, v: V) -> Node<K, V> {
|
||||
return Node::new_leaf(~[LeafElt::new(k, v)]);
|
||||
}
|
||||
}
|
||||
|
||||
//Again, this might not be necessary in the future.
|
||||
impl<K: Clone + TotalOrd, V: Clone> Clone for Node<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> Clone for Node<K, V> {
|
||||
|
||||
///Returns a new node based on whether or not it is a branch or a leaf.
|
||||
fn clone(&self) -> Node<K, V>{
|
||||
match *self{
|
||||
LeafNode(ref leaf) => return Node::new_leaf(leaf.elts.clone()),
|
||||
BranchNode(ref branch) => return Node::new_branch(branch.elts.clone(),
|
||||
branch.rightmost_child.clone())
|
||||
fn clone(&self) -> Node<K, V> {
|
||||
match *self {
|
||||
LeafNode(ref leaf) => {
|
||||
return Node::new_leaf(leaf.elts.clone());
|
||||
}
|
||||
BranchNode(ref branch) => {
|
||||
return Node::new_branch(branch.elts.clone(),
|
||||
branch.rightmost_child.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//The following impl is unfinished. Old iterations of code are left in for
|
||||
//future reference when implementing this trait (commented-out).
|
||||
impl<K: Clone + TotalOrd, V: Clone> TotalOrd for Node<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> TotalOrd for Node<K, V> {
|
||||
|
||||
///Placeholder for an implementation of TotalOrd for Nodes.
|
||||
#[allow(unused_variable)]
|
||||
fn cmp(&self, other: &Node<K, V>) -> Ordering{
|
||||
fn cmp(&self, other: &Node<K, V>) -> Ordering {
|
||||
//Requires a match statement--defer these procs to branch and leaf.
|
||||
/* if self.elts[0].less_than(other.elts[0]) { return Less}
|
||||
if self.elts[0].greater_than(other.elts[0]) {return Greater}
|
||||
@ -174,11 +175,11 @@ impl<K: Clone + TotalOrd, V: Clone> TotalOrd for Node<K, V>{
|
||||
|
||||
//The following impl is unfinished. Old iterations of code are left in for
|
||||
//future reference when implementing this trait (commented-out).
|
||||
impl<K: Clone + TotalOrd, V: Clone> TotalEq for Node<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> TotalEq for Node<K, V> {
|
||||
|
||||
///Placeholder for an implementation of TotalEq for Nodes.
|
||||
#[allow(unused_variable)]
|
||||
fn equals(&self, other: &Node<K, V>) -> bool{
|
||||
fn equals(&self, other: &Node<K, V>) -> bool {
|
||||
/* put in a match and defer this stuff to branch and leaf
|
||||
|
||||
let mut shorter = 0;
|
||||
@ -202,11 +203,11 @@ impl<K: Clone + TotalOrd, V: Clone> TotalEq for Node<K, V>{
|
||||
}
|
||||
|
||||
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for Node<K, V>{
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for Node<K, V> {
|
||||
///Returns a string representation of a Node.
|
||||
///The Branch's to_str() is not implemented yet.
|
||||
fn to_str(&self) -> ~str{
|
||||
match *self{
|
||||
fn to_str(&self) -> ~str {
|
||||
match *self {
|
||||
LeafNode(ref leaf) => leaf.to_str(),
|
||||
BranchNode(..) => ~""
|
||||
}
|
||||
@ -216,31 +217,31 @@ impl<K: ToStr + TotalOrd, V: ToStr> ToStr for Node<K, V>{
|
||||
|
||||
//A leaf is a vector with elements that contain no children. A leaf also
|
||||
//does not contain a rightmost child.
|
||||
struct Leaf<K, V>{
|
||||
struct Leaf<K, V> {
|
||||
elts: ~[LeafElt<K, V>]
|
||||
}
|
||||
|
||||
//Vector of values with children, plus a rightmost child (greater than all)
|
||||
struct Branch<K, V>{
|
||||
struct Branch<K, V> {
|
||||
elts: ~[BranchElt<K,V>],
|
||||
rightmost_child: ~Node<K, V>
|
||||
}
|
||||
|
||||
|
||||
impl<K: Clone + TotalOrd, V: Clone> Leaf<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> Leaf<K, V> {
|
||||
|
||||
///Creates a new Leaf from a vector of LeafElts.
|
||||
fn new(vec: ~[LeafElt<K, V>]) -> Leaf<K, V>{
|
||||
Leaf{
|
||||
fn new(vec: ~[LeafElt<K, V>]) -> Leaf<K, V> {
|
||||
Leaf {
|
||||
elts: vec
|
||||
}
|
||||
}
|
||||
|
||||
///Returns the corresponding value to the supplied key.
|
||||
fn get(&self, k: K) -> Option<V>{
|
||||
for s in self.elts.iter(){
|
||||
fn get(&self, k: K) -> Option<V> {
|
||||
for s in self.elts.iter() {
|
||||
let order = s.key.cmp(&k);
|
||||
match order{
|
||||
match order {
|
||||
Equal => return Some(s.value.clone()),
|
||||
_ => {}
|
||||
}
|
||||
@ -250,18 +251,18 @@ impl<K: Clone + TotalOrd, V: Clone> Leaf<K, V>{
|
||||
|
||||
///Placeholder for add method in progress.
|
||||
///Currently returns a new Leaf containing a single LeafElt.
|
||||
fn add(&self, k: K, v: V) -> Node<K, V>{
|
||||
fn add(&self, k: K, v: V) -> Node<K, V> {
|
||||
return Node::new_leaf(~[LeafElt::new(k, v)]);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for Leaf<K, V>{
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for Leaf<K, V> {
|
||||
|
||||
///Returns a string representation of a Leaf.
|
||||
fn to_str(&self) -> ~str{
|
||||
fn to_str(&self) -> ~str {
|
||||
let mut ret = ~"";
|
||||
for s in self.elts.iter(){
|
||||
for s in self.elts.iter() {
|
||||
ret = ret + " // " + s.to_str();
|
||||
}
|
||||
ret
|
||||
@ -270,11 +271,11 @@ impl<K: ToStr + TotalOrd, V: ToStr> ToStr for Leaf<K, V>{
|
||||
}
|
||||
|
||||
|
||||
impl<K: Clone + TotalOrd, V: Clone> Branch<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> Branch<K, V> {
|
||||
|
||||
///Creates a new Branch from a vector of BranchElts and a rightmost child (a node).
|
||||
fn new(vec: ~[BranchElt<K, V>], right: ~Node<K, V>) -> Branch<K, V>{
|
||||
Branch{
|
||||
fn new(vec: ~[BranchElt<K, V>], right: ~Node<K, V>) -> Branch<K, V> {
|
||||
Branch {
|
||||
elts: vec,
|
||||
rightmost_child: right
|
||||
}
|
||||
@ -282,10 +283,10 @@ impl<K: Clone + TotalOrd, V: Clone> Branch<K, V>{
|
||||
|
||||
///Returns the corresponding value to the supplied key.
|
||||
///If the key is not there, find the child that might hold it.
|
||||
fn get(&self, k: K) -> Option<V>{
|
||||
for s in self.elts.iter(){
|
||||
fn get(&self, k: K) -> Option<V> {
|
||||
for s in self.elts.iter() {
|
||||
let order = s.key.cmp(&k);
|
||||
match order{
|
||||
match order {
|
||||
Less => return s.left.get(k),
|
||||
Equal => return Some(s.value.clone()),
|
||||
_ => {}
|
||||
@ -296,29 +297,29 @@ impl<K: Clone + TotalOrd, V: Clone> Branch<K, V>{
|
||||
|
||||
|
||||
///Placeholder for add method in progress
|
||||
fn add(&self, k: K, v: V) -> Node<K, V>{
|
||||
fn add(&self, k: K, v: V) -> Node<K, V> {
|
||||
return Node::new_leaf(~[LeafElt::new(k, v)]);
|
||||
}
|
||||
}
|
||||
|
||||
//A LeafElt containts no left child, but a key-value pair.
|
||||
struct LeafElt<K, V>{
|
||||
struct LeafElt<K, V> {
|
||||
key: K,
|
||||
value: V
|
||||
}
|
||||
|
||||
//A BranchElt has a left child in addition to a key-value pair.
|
||||
struct BranchElt<K, V>{
|
||||
struct BranchElt<K, V> {
|
||||
left: Node<K, V>,
|
||||
key: K,
|
||||
value: V
|
||||
}
|
||||
|
||||
impl<K: Clone + TotalOrd, V> LeafElt<K, V>{
|
||||
impl<K: Clone + TotalOrd, V> LeafElt<K, V> {
|
||||
|
||||
///Creates a new LeafElt from a supplied key-value pair.
|
||||
fn new(k: K, v: V) -> LeafElt<K, V>{
|
||||
LeafElt{
|
||||
fn new(k: K, v: V) -> LeafElt<K, V> {
|
||||
LeafElt {
|
||||
key: k,
|
||||
value: v
|
||||
}
|
||||
@ -326,9 +327,9 @@ impl<K: Clone + TotalOrd, V> LeafElt<K, V>{
|
||||
|
||||
///Compares another LeafElt against itself and determines whether
|
||||
///the original LeafElt's key is less than the other one's key.
|
||||
fn less_than(&self, other: LeafElt<K, V>) -> bool{
|
||||
fn less_than(&self, other: LeafElt<K, V>) -> bool {
|
||||
let order = self.key.cmp(&other.key);
|
||||
match order{
|
||||
match order {
|
||||
Less => true,
|
||||
_ => false
|
||||
}
|
||||
@ -336,9 +337,9 @@ impl<K: Clone + TotalOrd, V> LeafElt<K, V>{
|
||||
|
||||
///Compares another LeafElt against itself and determines whether
|
||||
///the original LeafElt's key is greater than the other one's key.
|
||||
fn greater_than(&self, other: LeafElt<K, V>) -> bool{
|
||||
fn greater_than(&self, other: LeafElt<K, V>) -> bool {
|
||||
let order = self.key.cmp(&other.key);
|
||||
match order{
|
||||
match order {
|
||||
Greater => true,
|
||||
_ => false
|
||||
}
|
||||
@ -346,40 +347,40 @@ impl<K: Clone + TotalOrd, V> LeafElt<K, V>{
|
||||
|
||||
///Takes a key and determines whether its own key and the supplied key
|
||||
///are the same.
|
||||
fn has_key(&self, other: K) -> bool{
|
||||
fn has_key(&self, other: K) -> bool {
|
||||
let order = self.key.cmp(&other);
|
||||
match order{
|
||||
match order {
|
||||
Equal => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
//This may be eliminated in the future to perserve efficiency by adjusting the way
|
||||
//the BTree as a whole is stored in memory.
|
||||
impl<K: Clone + TotalOrd, V: Clone> Clone for LeafElt<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> Clone for LeafElt<K, V> {
|
||||
|
||||
///Returns a new LeafElt by cloning the key and value.
|
||||
fn clone(&self) -> LeafElt<K, V>{
|
||||
fn clone(&self) -> LeafElt<K, V> {
|
||||
return LeafElt::new(self.key.clone(), self.value.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for LeafElt<K, V>{
|
||||
impl<K: ToStr + TotalOrd, V: ToStr> ToStr for LeafElt<K, V> {
|
||||
|
||||
///Returns a string representation of a LeafElt.
|
||||
fn to_str(&self) -> ~str{
|
||||
return "Key: " + self.key.to_str() + ", value: "+ self.value.to_str() + "; ";
|
||||
fn to_str(&self) -> ~str {
|
||||
return "Key: " + self.key.to_str() + ", value: "
|
||||
+ self.value.to_str() + "; ";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<K: Clone + TotalOrd, V: Clone> BranchElt<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> BranchElt<K, V> {
|
||||
|
||||
///Creates a new BranchElt from a supplied key, value, and left child.
|
||||
fn new(k: K, v: V, n: Node<K, V>) -> BranchElt<K, V>{
|
||||
BranchElt{
|
||||
fn new(k: K, v: V, n: Node<K, V>) -> BranchElt<K, V> {
|
||||
BranchElt {
|
||||
left: n,
|
||||
key: k,
|
||||
value: v
|
||||
@ -388,16 +389,18 @@ impl<K: Clone + TotalOrd, V: Clone> BranchElt<K, V>{
|
||||
|
||||
///Placeholder for add method in progress.
|
||||
///Overall implementation will determine the actual return value of this method.
|
||||
fn add(&self, k: K, v: V) -> LeafElt<K, V>{
|
||||
fn add(&self, k: K, v: V) -> LeafElt<K, V> {
|
||||
return LeafElt::new(k, v);
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Clone + TotalOrd, V: Clone> Clone for BranchElt<K, V>{
|
||||
impl<K: Clone + TotalOrd, V: Clone> Clone for BranchElt<K, V> {
|
||||
|
||||
///Returns a new BranchElt by cloning the key, value, and left child.
|
||||
fn clone(&self) -> BranchElt<K, V>{
|
||||
return BranchElt::new(self.key.clone(), self.value.clone(), self.left.clone());
|
||||
fn clone(&self) -> BranchElt<K, V> {
|
||||
return BranchElt::new(self.key.clone(),
|
||||
self.value.clone(),
|
||||
self.left.clone());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -17,17 +17,6 @@ use std::str;
|
||||
// http://www.matroska.org/technical/specs/rfc/index.html
|
||||
|
||||
// Common data structures
|
||||
struct EbmlTag {
|
||||
id: uint,
|
||||
size: uint,
|
||||
}
|
||||
|
||||
struct EbmlState {
|
||||
ebml_tag: EbmlTag,
|
||||
tag_pos: uint,
|
||||
data_pos: uint,
|
||||
}
|
||||
|
||||
#[deriving(Clone)]
|
||||
pub struct Doc {
|
||||
data: @~[u8],
|
||||
|
@ -39,10 +39,7 @@ pub mod rustrt {
|
||||
}
|
||||
}
|
||||
|
||||
static LZ_NONE : c_int = 0x0; // Huffman-coding only.
|
||||
static LZ_FAST : c_int = 0x1; // LZ with only one probe
|
||||
static LZ_NORM : c_int = 0x80; // LZ with 128 probes, "normal"
|
||||
static LZ_BEST : c_int = 0xfff; // LZ with 4095 probes, "best"
|
||||
static TINFL_FLAG_PARSE_ZLIB_HEADER : c_int = 0x1; // parse zlib header and adler32 checksum
|
||||
static TDEFL_WRITE_ZLIB_HEADER : c_int = 0x01000; // write zlib header and adler32 checksum
|
||||
|
||||
|
@ -57,7 +57,6 @@ pub mod BigDigit {
|
||||
pub static bits: uint = 32;
|
||||
|
||||
pub static base: uint = 1 << bits;
|
||||
static hi_mask: uint = (-1 as uint) << bits;
|
||||
static lo_mask: uint = (-1 as uint) >> bits;
|
||||
|
||||
#[inline]
|
||||
|
@ -100,7 +100,7 @@ impl<T: Clone + Integer + Ord>
|
||||
}
|
||||
|
||||
/// Return a `reduce`d copy of self.
|
||||
fn reduced(&self) -> Ratio<T> {
|
||||
pub fn reduced(&self) -> Ratio<T> {
|
||||
let mut ret = self.clone();
|
||||
ret.reduce();
|
||||
ret
|
||||
|
@ -179,7 +179,6 @@ impl<'self, T:Clone + Ord + Eq> Sort for &'self mut [T] {
|
||||
|
||||
static MIN_MERGE: uint = 64;
|
||||
static MIN_GALLOP: uint = 7;
|
||||
static INITIAL_TMP_STORAGE: uint = 128;
|
||||
|
||||
#[allow(missing_doc)]
|
||||
pub fn tim_sort<T:Clone + Ord>(array: &mut [T]) {
|
||||
|
@ -364,16 +364,6 @@ fn split_char_first(s: &str, c: char) -> (~str, ~str) {
|
||||
}
|
||||
}
|
||||
|
||||
fn userinfo_from_str(uinfo: &str) -> UserInfo {
|
||||
let (user, p) = split_char_first(uinfo, ':');
|
||||
let pass = if p.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(p)
|
||||
};
|
||||
return UserInfo::new(user, pass);
|
||||
}
|
||||
|
||||
fn userinfo_to_str(userinfo: &UserInfo) -> ~str {
|
||||
match userinfo.pass {
|
||||
Some(ref pass) => format!("{}:{}@", userinfo.user, *pass),
|
||||
|
@ -310,6 +310,10 @@ pub fn phase_3_run_analysis_passes(sess: Session,
    time(time_passes, "reachability checking", (), |_|
         reachable::find_reachable(ty_cx, method_map, &exported_items));

    time(time_passes, "death checking", (), |_|
         middle::dead::check_crate(ty_cx, method_map,
                                   &exported_items, reachable_map, crate));

    time(time_passes, "lint checking", (), |_|
         lint::check_crate(ty_cx, &exported_items, crate));

@ -510,19 +514,6 @@ pub fn pretty_print_input(sess: Session,
                          cfg: ast::CrateConfig,
                          input: &input,
                          ppm: PpMode) {
    fn ann_typed_post(tcx: ty::ctxt, node: pprust::ann_node) {
        match node {
            pprust::node_expr(s, expr) => {
                pp::space(s.s);
                pp::word(s.s, "as");
                pp::space(s.s);
                pp::word(s.s, ppaux::ty_to_str(tcx, ty::expr_ty(tcx, expr)));
                pprust::pclose(s);
            }
            _ => ()
        }
    }

    let crate = phase_1_parse_input(sess, cfg.clone(), input);

    let (crate, is_expanded) = match ppm {

@ -77,6 +77,7 @@ pub mod middle {
    pub mod reachable;
    pub mod graph;
    pub mod cfg;
    pub mod dead;
}

pub mod front {

@ -30,7 +30,6 @@ use std::u64;
|
||||
use std::io;
|
||||
use std::io::extensions::u64_from_be_bytes;
|
||||
use std::option;
|
||||
use std::str;
|
||||
use std::vec;
|
||||
use extra::ebml::reader;
|
||||
use extra::ebml;
|
||||
@ -523,212 +522,6 @@ pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool {
|
||||
})
|
||||
}
|
||||
|
||||
struct EachItemContext<'self> {
|
||||
intr: @ident_interner,
|
||||
cdata: Cmd,
|
||||
get_crate_data: GetCrateDataCb<'self>,
|
||||
path_builder: &'self mut ~str,
|
||||
callback: 'self |&str, DefLike, ast::visibility| -> bool,
|
||||
}
|
||||
|
||||
impl<'self> EachItemContext<'self> {
|
||||
// Pushes the given name and returns the old length.
|
||||
fn push_name(&mut self, string: &str) -> uint {
|
||||
let path_len = self.path_builder.len();
|
||||
if path_len != 0 {
|
||||
self.path_builder.push_str("::")
|
||||
}
|
||||
self.path_builder.push_str(string);
|
||||
path_len
|
||||
}
|
||||
|
||||
// Pops the given name.
|
||||
fn pop_name(&mut self, old_len: uint) {
|
||||
// XXX(pcwalton): There's no safe function to do this. :(
|
||||
unsafe {
|
||||
str::raw::set_len(self.path_builder, old_len)
|
||||
}
|
||||
}
|
||||
|
||||
fn process_item_and_pop_name(&mut self,
|
||||
doc: ebml::Doc,
|
||||
def_id: ast::DefId,
|
||||
old_len: uint,
|
||||
vis: ast::visibility)
|
||||
-> bool {
|
||||
let def_like = item_to_def_like(doc, def_id, self.cdata.cnum);
|
||||
match def_like {
|
||||
DlDef(def) => {
|
||||
debug!("(iterating over each item of a module) processing \
|
||||
`{}` (def {:?})",
|
||||
*self.path_builder,
|
||||
def);
|
||||
}
|
||||
_ => {
|
||||
debug!("(iterating over each item of a module) processing \
|
||||
`{}` ({}:{})",
|
||||
*self.path_builder,
|
||||
def_id.crate,
|
||||
def_id.node);
|
||||
}
|
||||
}
|
||||
|
||||
let mut continue_ = (self.callback)(*self.path_builder, def_like, vis);
|
||||
|
||||
let family = item_family(doc);
|
||||
if family == ForeignMod {
|
||||
// These are unnamed; pop the name now.
|
||||
self.pop_name(old_len)
|
||||
}
|
||||
|
||||
if continue_ {
|
||||
// Recurse if necessary.
|
||||
match family {
|
||||
Mod | ForeignMod | Trait | Impl => {
|
||||
continue_ = self.each_item_of_module(def_id);
|
||||
}
|
||||
ImmStatic | MutStatic | Struct | UnsafeFn | Fn | ForeignFn |
|
||||
UnsafeStaticMethod | StaticMethod | Type | ForeignType |
|
||||
TupleVariant | StructVariant | Enum | PublicField |
|
||||
PrivateField | InheritedField => {}
|
||||
}
|
||||
}
|
||||
|
||||
if family != ForeignMod {
|
||||
self.pop_name(old_len)
|
||||
}
|
||||
|
||||
continue_
|
||||
}
|
||||
|
||||
fn each_item_of_module(&mut self, def_id: ast::DefId) -> bool {
|
||||
// This item might not be in this crate. If it's not, look it up.
|
||||
let items = if def_id.crate == self.cdata.cnum {
|
||||
reader::get_doc(reader::Doc(self.cdata.data), tag_items)
|
||||
} else {
|
||||
let crate_data = (self.get_crate_data)(def_id.crate);
|
||||
let root = reader::Doc(crate_data.data);
|
||||
reader::get_doc(root, tag_items)
|
||||
};
|
||||
|
||||
// Look up the item.
|
||||
let item_doc = match maybe_find_item(def_id.node, items) {
|
||||
None => return false,
|
||||
Some(item_doc) => item_doc,
|
||||
};
|
||||
|
||||
self.each_child_of_module_or_crate(item_doc)
|
||||
}
|
||||
|
||||
fn each_child_of_module_or_crate(&mut self, item_doc: ebml::Doc) -> bool {
|
||||
let mut continue_ = true;
|
||||
|
||||
// Iterate over all children.
|
||||
reader::tagged_docs(item_doc, tag_mod_child, |child_info_doc| {
|
||||
let child_def_id = reader::with_doc_data(child_info_doc,
|
||||
parse_def_id);
|
||||
let child_def_id = translate_def_id(self.cdata, child_def_id);
|
||||
|
||||
// This item may be in yet another crate, if it was the child of
|
||||
// a reexport.
|
||||
let other_crates_items = if child_def_id.crate ==
|
||||
self.cdata.cnum {
|
||||
reader::get_doc(reader::Doc(self.cdata.data), tag_items)
|
||||
} else {
|
||||
let crate_data = (self.get_crate_data)(child_def_id.crate);
|
||||
let root = reader::Doc(crate_data.data);
|
||||
reader::get_doc(root, tag_items)
|
||||
};
|
||||
|
||||
debug!("(iterating over each item of a module) looking up item \
|
||||
{}:{} in `{}`, crate {}",
|
||||
child_def_id.crate,
|
||||
child_def_id.node,
|
||||
*self.path_builder,
|
||||
self.cdata.cnum);
|
||||
|
||||
// Get the item.
|
||||
match maybe_find_item(child_def_id.node, other_crates_items) {
|
||||
None => {}
|
||||
Some(child_item_doc) => {
|
||||
// Push the name.
|
||||
let child_name = item_name(self.intr, child_item_doc);
|
||||
debug!("(iterating over each item of a module) pushing \
|
||||
name `{}` onto `{}`",
|
||||
token::ident_to_str(&child_name),
|
||||
*self.path_builder);
|
||||
let old_len =
|
||||
self.push_name(token::ident_to_str(&child_name));
|
||||
|
||||
// Process this item.
|
||||
|
||||
let vis = item_visibility(child_item_doc);
|
||||
continue_ = self.process_item_and_pop_name(child_item_doc,
|
||||
child_def_id,
|
||||
old_len,
|
||||
vis);
|
||||
}
|
||||
}
|
||||
continue_
|
||||
});
|
||||
|
||||
if !continue_ {
|
||||
return false
|
||||
}
|
||||
|
||||
// Iterate over reexports.
|
||||
each_reexport(item_doc, |reexport_doc| {
|
||||
let def_id_doc = reader::get_doc(
|
||||
reexport_doc,
|
||||
tag_items_data_item_reexport_def_id);
|
||||
let orig_def_id = reader::with_doc_data(def_id_doc, parse_def_id);
|
||||
|
||||
// NB: was "cdata"
|
||||
let def_id = translate_def_id(self.cdata, orig_def_id);
|
||||
|
||||
let name_doc = reader::get_doc(reexport_doc,
|
||||
tag_items_data_item_reexport_name);
|
||||
let name = name_doc.as_str_slice();
|
||||
|
||||
// Push the name.
|
||||
debug!("(iterating over each item of a module) pushing \
|
||||
reexported name `{}` onto `{}` (crate {}, orig {}, \
|
||||
in crate {})",
|
||||
name,
|
||||
*self.path_builder,
|
||||
def_id.crate,
|
||||
orig_def_id.crate,
|
||||
self.cdata.cnum);
|
||||
let old_len = self.push_name(name);
|
||||
|
||||
// This reexport may be in yet another crate.
|
||||
let other_crates_items = if def_id.crate == self.cdata.cnum {
|
||||
reader::get_doc(reader::Doc(self.cdata.data), tag_items)
|
||||
} else {
|
||||
let crate_data = (self.get_crate_data)(def_id.crate);
|
||||
let root = reader::Doc(crate_data.data);
|
||||
reader::get_doc(root, tag_items)
|
||||
};
|
||||
|
||||
// Get the item.
|
||||
match maybe_find_item(def_id.node, other_crates_items) {
|
||||
None => { self.pop_name(old_len); }
|
||||
Some(reexported_item_doc) => {
|
||||
continue_ = self.process_item_and_pop_name(
|
||||
reexported_item_doc,
|
||||
def_id,
|
||||
old_len,
|
||||
ast::public);
|
||||
}
|
||||
}
|
||||
|
||||
continue_
|
||||
});
|
||||
|
||||
continue_
|
||||
}
|
||||
}
|
||||
|
||||
fn each_child_of_item_or_crate(intr: @ident_interner,
|
||||
cdata: Cmd,
|
||||
item_doc: ebml::Doc,
|
||||
@ -1259,62 +1052,6 @@ pub fn get_item_visibility(cdata: Cmd, id: ast::NodeId)
|
||||
item_visibility(lookup_item(id, cdata.data))
|
||||
}
|
||||
|
||||
fn family_has_type_params(fam: Family) -> bool {
|
||||
match fam {
|
||||
ImmStatic | ForeignType | Mod | ForeignMod | PublicField | PrivateField
|
||||
| ForeignFn | MutStatic => false,
|
||||
_ => true
|
||||
}
|
||||
}
|
||||
|
||||
fn family_names_type(fam: Family) -> bool {
|
||||
match fam { Type | Mod | Trait => true, _ => false }
|
||||
}
|
||||
|
||||
fn read_path(d: ebml::Doc) -> (~str, uint) {
|
||||
reader::with_doc_data(d, |desc| {
|
||||
let pos = u64_from_be_bytes(desc, 0u, 4u) as uint;
|
||||
let pathbytes = desc.slice_from(4u).to_owned();
|
||||
let path = str::from_utf8_owned(pathbytes);
|
||||
|
||||
(path, pos)
|
||||
})
|
||||
}
|
||||
|
||||
fn describe_def(items: ebml::Doc, id: ast::DefId) -> ~str {
|
||||
if id.crate != ast::LOCAL_CRATE { return ~"external"; }
|
||||
let it = match maybe_find_item(id.node, items) {
|
||||
Some(it) => it,
|
||||
None => fail!("describe_def: item not found {:?}", id)
|
||||
};
|
||||
return item_family_to_str(item_family(it));
|
||||
}
|
||||
|
||||
fn item_family_to_str(fam: Family) -> ~str {
|
||||
match fam {
|
||||
ImmStatic => ~"static",
|
||||
MutStatic => ~"static mut",
|
||||
Fn => ~"fn",
|
||||
UnsafeFn => ~"unsafe fn",
|
||||
StaticMethod => ~"static method",
|
||||
UnsafeStaticMethod => ~"unsafe static method",
|
||||
ForeignFn => ~"foreign fn",
|
||||
Type => ~"type",
|
||||
ForeignType => ~"foreign type",
|
||||
Mod => ~"mod",
|
||||
ForeignMod => ~"foreign mod",
|
||||
Enum => ~"enum",
|
||||
StructVariant => ~"struct variant",
|
||||
TupleVariant => ~"tuple variant",
|
||||
Impl => ~"impl",
|
||||
Trait => ~"trait",
|
||||
Struct => ~"struct",
|
||||
PublicField => ~"public field",
|
||||
PrivateField => ~"private field",
|
||||
InheritedField => ~"inherited field",
|
||||
}
|
||||
}
|
||||
|
||||
fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
|
||||
let mut items: ~[@ast::MetaItem] = ~[];
|
||||
reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| {
|
||||
@ -1370,15 +1107,6 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
|
||||
return attrs;
|
||||
}
|
||||
|
||||
fn list_meta_items(intr: @ident_interner,
|
||||
meta_items: ebml::Doc,
|
||||
out: @mut io::Writer) {
|
||||
let r = get_meta_items(meta_items);
|
||||
for mi in r.iter() {
|
||||
write!(out, "{}\n", pprust::meta_item_to_str(*mi, intr));
|
||||
}
|
||||
}
|
||||
|
||||
fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
|
||||
out: @mut io::Writer) {
|
||||
write!(out, "=Crate Attributes ({})=\n", hash);
|
||||
|
@ -128,20 +128,6 @@ struct entry<T> {
|
||||
pos: u64
|
||||
}
|
||||
|
||||
fn add_to_index(ebml_w: &mut writer::Encoder,
|
||||
path: &[Ident],
|
||||
index: &mut ~[entry<~str>],
|
||||
name: Ident) {
|
||||
let mut full_path = ~[];
|
||||
full_path.push_all(path);
|
||||
full_path.push(name);
|
||||
index.push(
|
||||
entry {
|
||||
val: ast_util::path_name_i(full_path),
|
||||
pos: ebml_w.writer.tell()
|
||||
});
|
||||
}
|
||||
|
||||
fn encode_trait_ref(ebml_w: &mut writer::Encoder,
|
||||
ecx: &EncodeContext,
|
||||
trait_ref: &ty::TraitRef,
|
||||
@ -1442,10 +1428,6 @@ fn encode_index<T:'static>(
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
||||
fn write_str(writer: @mut MemWriter, s: ~str) {
|
||||
writer.write(s.as_bytes());
|
||||
}
|
||||
|
||||
fn write_i64(writer: @mut MemWriter, &n: &i64) {
|
||||
let wr: &mut MemWriter = writer;
|
||||
assert!(n < 0x7fff_ffff);
|
||||
|
@ -23,7 +23,6 @@ use syntax::abi::AbiSet;
|
||||
use syntax::abi;
|
||||
use syntax::ast;
|
||||
use syntax::ast::*;
|
||||
use syntax::codemap::dummy_sp;
|
||||
use syntax::opt_vec;
|
||||
|
||||
// Compact string representation for ty::t values. API ty_str &
|
||||
@ -130,34 +129,6 @@ pub fn parse_trait_ref_data(data: &[u8], crate_num: ast::CrateNum, pos: uint, tc
|
||||
parse_trait_ref(&mut st, conv)
|
||||
}
|
||||
|
||||
fn parse_path(st: &mut PState) -> @ast::Path {
|
||||
let mut idents: ~[ast::Ident] = ~[];
|
||||
fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
|
||||
idents.push(parse_ident_(st, is_last));
|
||||
loop {
|
||||
match peek(st) {
|
||||
':' => { next(st); next(st); }
|
||||
c => {
|
||||
if c == '(' {
|
||||
return @ast::Path {
|
||||
span: dummy_sp(),
|
||||
global: false,
|
||||
segments: idents.move_iter().map(|identifier| {
|
||||
ast::PathSegment {
|
||||
identifier: identifier,
|
||||
lifetimes: opt_vec::Empty,
|
||||
types: opt_vec::Empty,
|
||||
}
|
||||
}).collect()
|
||||
};
|
||||
} else {
|
||||
idents.push(parse_ident_(st, is_last));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn parse_sigil(st: &mut PState) -> ast::Sigil {
|
||||
match next(st) {
|
||||
'@' => ast::ManagedSigil,
|
||||
|
@ -57,13 +57,6 @@ fn mywrite(w: @mut MemWriter, fmt: &fmt::Arguments) {
|
||||
fmt::write(&mut *w as &mut io::Writer, fmt);
|
||||
}
|
||||
|
||||
fn cx_uses_abbrevs(cx: @ctxt) -> bool {
|
||||
match cx.abbrevs {
|
||||
ac_no_abbrevs => return false,
|
||||
ac_use_abbrevs(_) => return true
|
||||
}
|
||||
}
|
||||
|
||||
pub fn enc_ty(w: @mut MemWriter, cx: @ctxt, t: ty::t) {
|
||||
match cx.abbrevs {
|
||||
ac_no_abbrevs => {
|
||||
|
@ -390,10 +390,6 @@ fn renumber_ast(xcx: @ExtendedDecodeContext, ii: ast::inlined_item)
|
||||
// ______________________________________________________________________
|
||||
// Encoding and decoding of ast::def
|
||||
|
||||
fn encode_def(ebml_w: &mut writer::Encoder, def: ast::Def) {
|
||||
def.encode(ebml_w)
|
||||
}
|
||||
|
||||
fn decode_def(xcx: @ExtendedDecodeContext, doc: ebml::Doc) -> ast::Def {
|
||||
let mut dsr = reader::Decoder(doc);
|
||||
let def: ast::Def = Decodable::decode(&mut dsr);
|
||||
|
@ -84,44 +84,6 @@ enum MoveError {
|
||||
}
|
||||
|
||||
impl<'self> CheckLoanCtxt<'self> {
|
||||
fn check_by_move_capture(&self,
|
||||
closure_id: ast::NodeId,
|
||||
cap_var: &moves::CaptureVar,
|
||||
move_path: @LoanPath) {
|
||||
let move_err = self.analyze_move_out_from(closure_id, move_path);
|
||||
match move_err {
|
||||
MoveOk => {}
|
||||
MoveWhileBorrowed(loan_path, loan_span) => {
|
||||
self.bccx.span_err(
|
||||
cap_var.span,
|
||||
format!("cannot move `{}` into closure \
|
||||
because it is borrowed",
|
||||
self.bccx.loan_path_to_str(move_path)));
|
||||
self.bccx.span_note(
|
||||
loan_span,
|
||||
format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_str(loan_path)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_captured_variables(&self, closure_id: ast::NodeId, span: Span) {
|
||||
let cap_vars = self.bccx.capture_map.get(&closure_id);
|
||||
for cap_var in cap_vars.iter() {
|
||||
let var_id = ast_util::def_id_of_def(cap_var.def).node;
|
||||
let var_path = @LpVar(var_id);
|
||||
self.check_if_path_is_moved(closure_id, span,
|
||||
MovedInCapture, var_path);
|
||||
match cap_var.mode {
|
||||
moves::CapRef | moves::CapCopy => {}
|
||||
moves::CapMove => {
|
||||
self.check_by_move_capture(closure_id, cap_var, var_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
pub fn tcx(&self) -> ty::ctxt { self.bccx.tcx }
|
||||
|
||||
pub fn each_issued_loan(&self, scope_id: ast::NodeId, op: |&Loan| -> bool)
|
||||
|
@ -818,10 +818,6 @@ impl<'self> GatherLoanCtxt<'self> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pat_is_variant_or_struct(&self, pat: @ast::Pat) -> bool {
|
||||
pat_util::pat_is_variant_or_struct(self.bccx.tcx.def_map, pat)
|
||||
}
|
||||
|
||||
pub fn pat_is_binding(&self, pat: @ast::Pat) -> bool {
|
||||
pat_util::pat_is_binding(self.bccx.tcx.def_map, pat)
|
||||
}
|
||||
|
@ -50,10 +50,6 @@ struct RestrictionsContext<'self> {
|
||||
}
|
||||
|
||||
impl<'self> RestrictionsContext<'self> {
|
||||
fn tcx(&self) -> ty::ctxt {
|
||||
self.bccx.tcx
|
||||
}
|
||||
|
||||
fn restrict(&self,
|
||||
cmt: mc::cmt,
|
||||
restrictions: RestrictionSet) -> RestrictionResult {
|
||||
@ -251,14 +247,4 @@ impl<'self> RestrictionsContext<'self> {
|
||||
cause);
|
||||
}
|
||||
}
|
||||
|
||||
fn check_no_mutability_control(&self,
|
||||
cmt: mc::cmt,
|
||||
restrictions: RestrictionSet) {
|
||||
if restrictions.intersects(RESTR_MUTATE | RESTR_FREEZE | RESTR_CLAIM) {
|
||||
self.bccx.report(BckError {span: self.span,
|
||||
cmt: cmt,
|
||||
code: err_freeze_aliasable_const});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
352 src/librustc/middle/dead.rs (new file)
@ -0,0 +1,352 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// This implements the dead-code warning pass. It follows middle::reachable
// closely. The idea is that all reachable symbols are live, codes called
// from live codes are live, and everything else is dead.

use middle::ty;
use middle::typeck;
use middle::privacy;
use middle::lint::dead_code;

use std::hashmap::HashSet;
use syntax::ast;
use syntax::ast_map;
use syntax::ast_util::{local_def, def_id_of_def, is_local};
use syntax::codemap;
use syntax::parse::token;
use syntax::visit::Visitor;
use syntax::visit;

// Any local node that may call something in its body block should be
// explored. For example, if it's a live node_item that is a
// function, then we should explore its block to check for codes that
// may need to be marked as live.
fn should_explore(tcx: ty::ctxt, def_id: ast::DefId) -> bool {
    if !is_local(def_id) {
        return false;
    }
    match tcx.items.find(&def_id.node) {
        Some(&ast_map::node_item(..))
        | Some(&ast_map::node_method(..))
        | Some(&ast_map::node_trait_method(..)) => true,
        _ => false
    }
}

struct MarkSymbolVisitor {
    worklist: ~[ast::NodeId],
    method_map: typeck::method_map,
    tcx: ty::ctxt,
    live_symbols: ~HashSet<ast::NodeId>,
}

impl MarkSymbolVisitor {
    fn new(tcx: ty::ctxt,
           method_map: typeck::method_map,
           worklist: ~[ast::NodeId]) -> MarkSymbolVisitor {
        MarkSymbolVisitor {
            worklist: worklist,
            method_map: method_map,
            tcx: tcx,
            live_symbols: ~HashSet::new(),
        }
    }

    fn lookup_and_handle_definition(&mut self, id: &ast::NodeId,
                                    span: codemap::Span) {
        let def = match self.tcx.def_map.find(id) {
            Some(&def) => def,
            None => self.tcx.sess.span_bug(span, "def ID not in def map?!"),
        };
        let def_id = match def {
            ast::DefVariant(enum_id, _, _) => Some(enum_id),
            ast::DefPrimTy(_) => None,
            _ => Some(def_id_of_def(def)),
        };
        match def_id {
            Some(def_id) => {
                if should_explore(self.tcx, def_id) {
                    self.worklist.push(def_id.node);
                }
                self.live_symbols.insert(def_id.node);
            }
            None => (),
        }
    }

    fn mark_live_symbols(&mut self) {
        let mut scanned = HashSet::new();
        while self.worklist.len() > 0 {
            let id = self.worklist.pop();
            if scanned.contains(&id) {
                continue
            }
            scanned.insert(id);
            match self.tcx.items.find(&id) {
                Some(node) => {
                    self.live_symbols.insert(id);
                    self.visit_node(node);
                }
                None => (),
            }
        }
    }

    fn visit_node(&mut self, node: &ast_map::ast_node) {
        match *node {
            ast_map::node_item(item, _) => {
                match item.node {
                    ast::item_fn(..)
                    | ast::item_ty(..)
                    | ast::item_static(..)
                    | ast::item_foreign_mod(_) => {
                        visit::walk_item(self, item, ());
                    }
                    _ => ()
                }
            }
            ast_map::node_trait_method(trait_method, _, _) => {
                visit::walk_trait_method(self, trait_method, ());
            }
            ast_map::node_method(method, _, _) => {
                visit::walk_block(self, method.body, ());
            }
            _ => ()
        }
    }
}

impl Visitor<()> for MarkSymbolVisitor {

    fn visit_expr(&mut self, expr: @ast::Expr, _: ()) {
        match expr.node {
            ast::ExprPath(_) | ast::ExprStruct(..) => {
                self.lookup_and_handle_definition(&expr.id, expr.span);
            }
            ast::ExprMethodCall(..) => {
                match self.method_map.find(&expr.id) {
                    Some(&typeck::method_map_entry {
                        origin: typeck::method_static(def_id),
                        ..
                    }) => {
                        if should_explore(self.tcx, def_id) {
                            self.worklist.push(def_id.node);
                        }
                        self.live_symbols.insert(def_id.node);
                    }
                    Some(_) => (),
                    None => {
                        self.tcx.sess.span_bug(expr.span,
                                               "method call expression not \
                                                in method map?!")
                    }
                }
            }
            _ => ()
        }

        visit::walk_expr(self, expr, ())
    }

    fn visit_ty(&mut self, typ: &ast::Ty, _: ()) {
        match typ.node {
            ast::ty_path(_, _, ref id) => {
                self.lookup_and_handle_definition(id, typ.span);
            }
            _ => visit::walk_ty(self, typ, ()),
        }
    }

    fn visit_item(&mut self, _item: @ast::item, _: ()) {
        // Do not recurse into items. These items will be added to the
        // worklist and recursed into manually if necessary.
    }
}

// This visitor is used to mark the implemented methods of a trait. Since we
// can not be sure if such methods are live or dead, we simply mark them
// as live.
struct TraitMethodSeeder {
    worklist: ~[ast::NodeId],
}

impl Visitor<()> for TraitMethodSeeder {
    fn visit_item(&mut self, item: @ast::item, _: ()) {
        match item.node {
            ast::item_impl(_, Some(ref _trait_ref), _, ref methods) => {
                for method in methods.iter() {
                    self.worklist.push(method.id);
                }
            }
            ast::item_mod(..) | ast::item_fn(..) => {
                visit::walk_item(self, item, ());
            }
            _ => ()
        }
    }
}

fn create_and_seed_worklist(tcx: ty::ctxt,
                            exported_items: &privacy::ExportedItems,
                            reachable_symbols: &HashSet<ast::NodeId>,
                            crate: &ast::Crate) -> ~[ast::NodeId] {
    let mut worklist = ~[];

    // Preferably, we would only need to seed the worklist with reachable
    // symbols. However, since the set of reachable symbols differs
    // depending on whether a crate is built as bin or lib, and we want
    // the warning to be consistent, we also seed the worklist with
    // exported symbols.
    for &id in exported_items.iter() {
        worklist.push(id);
    }
    for &id in reachable_symbols.iter() {
        worklist.push(id);
    }

    // Seed entry point
    match *tcx.sess.entry_fn {
        Some((id, _)) => worklist.push(id),
        None => ()
    }

    // Seed implemeneted trait methods
    let mut trait_method_seeder = TraitMethodSeeder {
        worklist: worklist
    };
    visit::walk_crate(&mut trait_method_seeder, crate, ());

    return trait_method_seeder.worklist;
}

fn find_live(tcx: ty::ctxt,
             method_map: typeck::method_map,
             exported_items: &privacy::ExportedItems,
             reachable_symbols: &HashSet<ast::NodeId>,
             crate: &ast::Crate)
             -> ~HashSet<ast::NodeId> {
    let worklist = create_and_seed_worklist(tcx, exported_items,
                                            reachable_symbols, crate);
    let mut symbol_visitor = MarkSymbolVisitor::new(tcx, method_map, worklist);
    symbol_visitor.mark_live_symbols();
    symbol_visitor.live_symbols
}

fn should_warn(item: @ast::item) -> bool {
    match item.node {
        ast::item_static(..)
        | ast::item_fn(..)
        | ast::item_enum(..)
        | ast::item_struct(..) => true,
        _ => false
    }
}

fn get_struct_ctor_id(item: &ast::item) -> Option<ast::NodeId> {
    match item.node {
        ast::item_struct(struct_def, _) => struct_def.ctor_id,
        _ => None
    }
}

struct DeadVisitor {
    tcx: ty::ctxt,
    live_symbols: ~HashSet<ast::NodeId>,
}

impl DeadVisitor {
    // id := node id of an item's definition.
    // ctor_id := `Some` if the item is a struct_ctor (tuple struct),
    //            `None` otherwise.
    // If the item is a struct_ctor, then either its `id` or
    // `ctor_id` (unwrapped) is in the live_symbols set. More specifically,
    // DefMap maps the ExprPath of a struct_ctor to the node referred by
    // `ctor_id`. On the other hand, in a statement like
    // `type <ident> <generics> = <ty>;` where <ty> refers to a struct_ctor,
    // DefMap maps <ty> to `id` instead.
    fn symbol_is_live(&mut self, id: ast::NodeId,
                      ctor_id: Option<ast::NodeId>) -> bool {
        if self.live_symbols.contains(&id)
           || ctor_id.map_default(false,
                                  |ctor| self.live_symbols.contains(&ctor)) {
            return true;
        }
        // If it's a type whose methods are live, then it's live, too.
        // This is done to handle the case where, for example, the static
        // method of a private type is used, but the type itself is never
        // called directly.
        let def_id = local_def(id);
        match self.tcx.inherent_impls.find(&def_id) {
            None => (),
            Some(ref impl_list) => {
                for impl_ in impl_list.iter() {
                    for method in impl_.methods.iter() {
                        if self.live_symbols.contains(&method.def_id.node) {
                            return true;
                        }
                    }
                }
            }
        }
        false
    }
}

impl Visitor<()> for DeadVisitor {
    fn visit_item(&mut self, item: @ast::item, _: ()) {
        let ctor_id = get_struct_ctor_id(item);
        if !self.symbol_is_live(item.id, ctor_id) && should_warn(item) {
            self.tcx.sess.add_lint(dead_code, item.id, item.span,
                                   format!("code is never used: `{}`",
                                           token::ident_to_str(&item.ident)));
        }
        visit::walk_item(self, item, ());
    }

    fn visit_fn(&mut self, fk: &visit::fn_kind,
                _: &ast::fn_decl, block: ast::P<ast::Block>,
                span: codemap::Span, id: ast::NodeId, _: ()) {
        // Have to warn method here because methods are not ast::item
        match *fk {
            visit::fk_method(..) => {
                let ident = visit::name_of_fn(fk);
                if !self.symbol_is_live(id, None) {
                    self.tcx.sess
                        .add_lint(dead_code, id, span,
                                  format!("code is never used: `{}`",
                                          token::ident_to_str(&ident)));
                }
            }
            _ => ()
        }
        visit::walk_block(self, block, ());
    }

    // Overwrite so that we don't warn the trait method itself.
    fn visit_trait_method(&mut self, trait_method :&ast::trait_method, _: ()) {
        match *trait_method {
            ast::provided(method) => visit::walk_block(self, method.body, ()),
            ast::required(_) => ()
        }
    }
}

pub fn check_crate(tcx: ty::ctxt,
                   method_map: typeck::method_map,
                   exported_items: &privacy::ExportedItems,
                   reachable_symbols: &HashSet<ast::NodeId>,
                   crate: &ast::Crate) {
    let live_symbols = find_live(tcx, method_map, exported_items,
                                 reachable_symbols, crate);
    let mut visitor = DeadVisitor { tcx: tcx, live_symbols: live_symbols };
    visit::walk_crate(&mut visitor, crate, ());
}

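To summarize the marking algorithm in `dead.rs` above: the pass seeds a worklist with exported items, reachable symbols, the entry point, and implemented trait methods, then transitively marks everything those items refer to as live; whatever never gets marked is reported through the `dead_code` lint. The following stand-alone sketch of that worklist idea uses modern Rust syntax and an invented `Graph` type purely for illustration; it is not the compiler code above.

```rust
use std::collections::HashSet;

// Toy stand-in for what MarkSymbolVisitor discovers by walking item bodies:
// refs[id] lists the items referenced from item `id`.
struct Graph {
    refs: Vec<Vec<usize>>,
}

// Worklist marking: everything transitively referenced from the seeds is live.
fn find_live(graph: &Graph, seeds: &[usize]) -> HashSet<usize> {
    let mut live = HashSet::new();
    let mut worklist: Vec<usize> = seeds.to_vec();
    while let Some(id) = worklist.pop() {
        if !live.insert(id) {
            continue; // already scanned this item
        }
        for &target in &graph.refs[id] {
            worklist.push(target);
        }
    }
    live
}

fn main() {
    // Items: 0 = main, 1 = used_fn, 2 = priv_fn (referenced by nobody).
    let graph = Graph { refs: vec![vec![1], vec![], vec![1]] };
    let live = find_live(&graph, &[0]);
    // Item 2 never becomes live, so the real pass would report it as dead code.
    assert!(!live.contains(&2));
}
```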
@ -411,20 +411,6 @@ pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,
|
||||
});
|
||||
}
|
||||
|
||||
fn is_nullary_variant(cx: &Context, ex: @Expr) -> bool {
|
||||
match ex.node {
|
||||
ExprPath(_) => {
|
||||
match cx.tcx.def_map.get_copy(&ex.id) {
|
||||
DefVariant(edid, vdid, _) => {
|
||||
ty::enum_variant_with_id(cx.tcx, edid, vdid).args.is_empty()
|
||||
}
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
fn check_imm_free_var(cx: &Context, def: Def, sp: Span) {
|
||||
match def {
|
||||
DefLocal(_, BindByValue(MutMutable)) => {
|
||||
@ -585,8 +571,4 @@ pub fn check_cast_for_escaping_regions(
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
fn is_subregion_of(cx: &Context, r_sub: ty::Region, r_sup: ty::Region) -> bool {
|
||||
cx.tcx.region_maps.is_subregion_of(r_sub, r_sup)
|
||||
}
|
||||
}
|
||||
|
@ -88,6 +88,7 @@ pub enum lint {
    dead_assignment,
    unused_mut,
    unnecessary_allocation,
    dead_code,

    missing_doc,
    unreachable_code,

@ -282,6 +283,13 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
        default: warn
     }),

    ("dead_code",
     LintSpec {
        lint: dead_code,
        desc: "detect piece of code that will never be used",
        default: warn
     }),

    ("missing_doc",
     LintSpec {
        lint: missing_doc,

@ -578,7 +578,7 @@ static ACC_USE: uint = 4u;
|
||||
|
||||
type LiveNodeMap = @mut HashMap<NodeId, LiveNode>;
|
||||
|
||||
struct Liveness {
|
||||
pub struct Liveness {
|
||||
tcx: ty::ctxt,
|
||||
ir: @mut IrMaps,
|
||||
s: Specials,
|
||||
@ -625,37 +625,10 @@ impl Liveness {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn variable_from_path(&self, expr: &Expr) -> Option<Variable> {
|
||||
match expr.node {
|
||||
ExprPath(_) => {
|
||||
let def = self.tcx.def_map.get_copy(&expr.id);
|
||||
moves::moved_variable_node_id_from_def(def).map(|rdef| {
|
||||
self.variable(rdef, expr.span)
|
||||
})
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn variable(&self, node_id: NodeId, span: Span) -> Variable {
|
||||
self.ir.variable(node_id, span)
|
||||
}
|
||||
|
||||
pub fn variable_from_def_map(&self, node_id: NodeId, span: Span)
|
||||
-> Option<Variable> {
|
||||
match self.tcx.def_map.find(&node_id) {
|
||||
Some(&def) => {
|
||||
moves::moved_variable_node_id_from_def(def).map(|rdef| {
|
||||
self.variable(rdef, span)
|
||||
})
|
||||
}
|
||||
None => {
|
||||
self.tcx.sess.span_bug(
|
||||
span, "Not present in def map")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pat_bindings(&self,
|
||||
pat: @Pat,
|
||||
f: |LiveNode, Variable, Span, NodeId|) {
|
||||
@ -730,13 +703,6 @@ impl Liveness {
|
||||
self.assigned_on_entry(self.successors[*ln], var)
|
||||
}
|
||||
|
||||
pub fn indices(&self, ln: LiveNode, op: |uint|) {
|
||||
let node_base_idx = self.idx(ln, Variable(0));
|
||||
for var_idx in range(0u, self.ir.num_vars) {
|
||||
op(node_base_idx + var_idx)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn indices2(&self,
|
||||
ln: LiveNode,
|
||||
succ_ln: LiveNode,
|
||||
|
@ -57,13 +57,6 @@ fn item_might_be_inlined(item: @ast::item) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
// Returns true if the given type method must be inlined because it may be
|
||||
// monomorphized or it was marked with `#[inline]`.
|
||||
fn ty_method_might_be_inlined(ty_method: &ast::TypeMethod) -> bool {
|
||||
attributes_specify_inlining(ty_method.attrs) ||
|
||||
generics_require_inlining(&ty_method.generics)
|
||||
}
|
||||
|
||||
fn method_might_be_inlined(tcx: ty::ctxt, method: &ast::method,
|
||||
impl_src: ast::DefId) -> bool {
|
||||
if attributes_specify_inlining(method.attrs) ||
|
||||
@ -83,15 +76,6 @@ fn method_might_be_inlined(tcx: ty::ctxt, method: &ast::method,
|
||||
}
|
||||
}
|
||||
|
||||
// Returns true if the given trait method must be inlined because it may be
|
||||
// monomorphized or it was marked with `#[inline]`.
|
||||
fn trait_method_might_be_inlined(trait_method: &ast::trait_method) -> bool {
|
||||
match *trait_method {
|
||||
ast::required(ref ty_method) => ty_method_might_be_inlined(ty_method),
|
||||
ast::provided(_) => true
|
||||
}
|
||||
}
|
||||
|
||||
// Information needed while computing reachability.
|
||||
struct ReachableContext {
|
||||
// The type context.
|
||||
|
@ -52,12 +52,6 @@ type BindingMap = HashMap<Name,binding_info>;
|
||||
// Trait method resolution
|
||||
pub type TraitMap = HashMap<NodeId,@mut ~[DefId]>;
|
||||
|
||||
// A summary of the generics on a trait.
|
||||
struct TraitGenerics {
|
||||
has_lifetime: bool,
|
||||
type_parameter_count: uint,
|
||||
}
|
||||
|
||||
// This is the replacement export map. It maps a module to all of the exports
|
||||
// within.
|
||||
pub type ExportMap2 = @mut HashMap<NodeId, ~[Export2]>;
|
||||
@ -141,12 +135,6 @@ enum NameDefinition {
|
||||
ImportNameDefinition(Def, LastPrivate) //< The name identifies an import.
|
||||
}
|
||||
|
||||
#[deriving(Eq)]
|
||||
enum Mutability {
|
||||
Mutable,
|
||||
Immutable
|
||||
}
|
||||
|
||||
enum SelfBinding {
|
||||
NoSelfBinding,
|
||||
HasSelfBinding(NodeId, explicit_self)
|
||||
@ -192,9 +180,6 @@ enum ResolveResult<T> {
|
||||
}
|
||||
|
||||
impl<T> ResolveResult<T> {
|
||||
fn failed(&self) -> bool {
|
||||
match *self { Failed => true, _ => false }
|
||||
}
|
||||
fn indeterminate(&self) -> bool {
|
||||
match *self { Indeterminate => true, _ => false }
|
||||
}
|
||||
@ -5432,6 +5417,7 @@ impl Resolver {
|
||||
return self.idents_to_str(idents.move_rev_iter().collect::<~[ast::Ident]>());
|
||||
}
|
||||
|
||||
#[allow(dead_code)] // useful for debugging
|
||||
fn dump_module(&mut self, module_: @mut Module) {
|
||||
debug!("Dump of module `{}`:", self.module_to_str(module_));
|
||||
|
||||
|
@ -49,8 +49,6 @@ use middle::trans::type_::Type;
|
||||
use syntax::ast;
|
||||
use syntax::abi::AbiSet;
|
||||
use syntax::ast_map;
|
||||
use syntax::visit;
|
||||
use syntax::visit::Visitor;
|
||||
|
||||
// Represents a (possibly monomorphized) top-level fn item or method
|
||||
// item. Note that this is just the fn-ptr and is not a Rust closure
|
||||
@ -569,27 +567,6 @@ pub fn trans_lang_call_with_type_params(bcx: @mut Block,
|
||||
ArgVals(args), Some(dest), DontAutorefArg).bcx;
|
||||
}
|
||||
|
||||
|
||||
struct CalleeTranslationVisitor {
|
||||
flag: bool,
|
||||
}
|
||||
|
||||
impl Visitor<()> for CalleeTranslationVisitor {
|
||||
|
||||
fn visit_item(&mut self, _:@ast::item, _:()) { }
|
||||
|
||||
fn visit_expr(&mut self, e:@ast::Expr, _:()) {
|
||||
|
||||
if !self.flag {
|
||||
match e.node {
|
||||
ast::ExprRet(_) => self.flag = true,
|
||||
_ => visit::walk_expr(self, e, ()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub fn trans_call_inner(in_cx: @mut Block,
|
||||
call_info: Option<NodeInfo>,
|
||||
callee_ty: ty::t,
|
||||
|
@ -117,12 +117,12 @@ use syntax::parse::token::special_idents;
|
||||
static DW_LANG_RUST: c_uint = 0x9000;
|
||||
|
||||
static DW_TAG_auto_variable: c_uint = 0x100;
|
||||
static DW_TAG_arg_variable: c_uint = 0x101;
|
||||
// static DW_TAG_arg_variable: c_uint = 0x101;
|
||||
|
||||
static DW_ATE_boolean: c_uint = 0x02;
|
||||
static DW_ATE_float: c_uint = 0x04;
|
||||
static DW_ATE_signed: c_uint = 0x05;
|
||||
static DW_ATE_signed_char: c_uint = 0x06;
|
||||
// static DW_ATE_signed_char: c_uint = 0x06;
|
||||
static DW_ATE_unsigned: c_uint = 0x07;
|
||||
static DW_ATE_unsigned_char: c_uint = 0x08;
|
||||
|
||||
@ -1169,13 +1169,6 @@ enum RecursiveTypeDescription {
|
||||
|
||||
impl RecursiveTypeDescription {
|
||||
|
||||
fn metadata(&self) -> DICompositeType {
|
||||
match *self {
|
||||
UnfinishedMetadata { metadata_stub, .. } => metadata_stub,
|
||||
FinalMetadata(metadata) => metadata
|
||||
}
|
||||
}
|
||||
|
||||
fn finalize(&self, cx: &mut CrateContext) -> DICompositeType {
|
||||
match *self {
|
||||
FinalMetadata(metadata) => metadata,
|
||||
@ -1982,24 +1975,6 @@ fn trait_metadata(cx: &mut CrateContext,
|
||||
definition_span);
|
||||
}
|
||||
|
||||
fn unimplemented_type_metadata(cx: &mut CrateContext, t: ty::t) -> DIType {
|
||||
debug!("unimplemented_type_metadata: {:?}", ty::get(t));
|
||||
|
||||
let name = ppaux::ty_to_str(cx.tcx, t);
|
||||
let metadata = format!("NYI<{}>", name).with_c_str(|name| {
|
||||
unsafe {
|
||||
llvm::LLVMDIBuilderCreateBasicType(
|
||||
DIB(cx),
|
||||
name,
|
||||
0_u64,
|
||||
8_u64,
|
||||
DW_ATE_unsigned as c_uint)
|
||||
}
|
||||
});
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
fn cache_id_for_type(t: ty::t) -> uint {
|
||||
ty::type_id(t)
|
||||
}
|
||||
@ -2179,11 +2154,6 @@ fn set_debug_location(cx: &mut CrateContext, debug_location: DebugLocation) {
|
||||
// Utility Functions
|
||||
//=-------------------------------------------------------------------------------------------------
|
||||
|
||||
#[inline]
|
||||
fn roundup(x: uint, a: uint) -> uint {
|
||||
((x + (a - 1)) / a) * a
|
||||
}
|
||||
|
||||
/// Return codemap::Loc corresponding to the beginning of the span
|
||||
fn span_start(cx: &CrateContext, span: Span) -> codemap::Loc {
|
||||
cx.sess.codemap.lookup_char_pos(span.lo)
|
||||
|
@ -176,12 +176,6 @@ impl Dest {
|
||||
}
|
||||
}
|
||||
|
||||
fn drop_and_cancel_clean(bcx: @mut Block, dat: Datum) -> @mut Block {
|
||||
let bcx = dat.drop_val(bcx);
|
||||
dat.cancel_clean(bcx);
|
||||
return bcx;
|
||||
}
|
||||
|
||||
pub fn trans_to_datum(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
|
||||
debug!("trans_to_datum(expr={})", bcx.expr_to_str(expr));
|
||||
|
||||
@ -1792,10 +1786,6 @@ fn trans_assign_op(bcx: @mut Block,
|
||||
return result_datum.copy_to_datum(bcx, DROP_EXISTING, dst_datum);
|
||||
}
|
||||
|
||||
fn shorten(x: &str) -> @str {
|
||||
(if x.char_len() > 60 {x.slice_chars(0, 60)} else {x}).to_managed()
|
||||
}
|
||||
|
||||
pub fn trans_log_level(bcx: @mut Block) -> DatumBlock {
|
||||
let _icx = push_ctxt("trans_log_level");
|
||||
let ccx = bcx.ccx();
|
||||
|
@ -1765,7 +1765,7 @@ def_type_content_sets!(
|
||||
|
||||
// Things that are interior to the value (first nibble):
|
||||
InteriorUnsized = 0b0000__00000000__0001,
|
||||
InteriorAll = 0b0000__00000000__1111,
|
||||
// InteriorAll = 0b0000__00000000__1111,
|
||||
|
||||
// Things that are owned by the value (second and third nibbles):
|
||||
OwnsOwned = 0b0000__00000001__0000,
|
||||
@ -1777,7 +1777,7 @@ def_type_content_sets!(
|
||||
// Things that are reachable by the value in any way (fourth nibble):
|
||||
ReachesNonsendAnnot = 0b0001__00000000__0000,
|
||||
ReachesBorrowed = 0b0010__00000000__0000,
|
||||
ReachesManaged /* see [1] below */ = 0b0100__00000000__0000,
|
||||
// ReachesManaged /* see [1] below */ = 0b0100__00000000__0000,
|
||||
ReachesMutable = 0b1000__00000000__0000,
|
||||
ReachesAll = 0b1111__00000000__0000,
|
||||
|
||||
@ -3631,30 +3631,6 @@ pub fn ty_to_def_id(ty: t) -> Option<ast::DefId> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the def ID of the constructor for the given tuple-like struct, or
|
||||
/// None if the struct is not tuple-like. Fails if the given def ID does not
|
||||
/// refer to a struct at all.
|
||||
fn struct_ctor_id(cx: ctxt, struct_did: ast::DefId) -> Option<ast::DefId> {
|
||||
if struct_did.crate != ast::LOCAL_CRATE {
|
||||
// XXX: Cross-crate functionality.
|
||||
cx.sess.unimpl("constructor ID of cross-crate tuple structs");
|
||||
}
|
||||
|
||||
match cx.items.find(&struct_did.node) {
|
||||
Some(&ast_map::node_item(item, _)) => {
|
||||
match item.node {
|
||||
ast::item_struct(struct_def, _) => {
|
||||
struct_def.ctor_id.map(|ctor_id| {
|
||||
ast_util::local_def(ctor_id)
|
||||
})
|
||||
}
|
||||
_ => cx.sess.bug("called struct_ctor_id on non-struct")
|
||||
}
|
||||
}
|
||||
_ => cx.sess.bug("called struct_ctor_id on non-struct")
|
||||
}
|
||||
}
|
||||
|
||||
// Enum information
|
||||
#[deriving(Clone)]
|
||||
pub struct VariantInfo {
|
||||
|
@ -692,13 +692,6 @@ impl RegionVarBindings {
|
||||
}
|
||||
}
|
||||
|
||||
fn report_type_error(&mut self,
|
||||
origin: SubregionOrigin,
|
||||
terr: &ty::type_err) {
|
||||
let terr_str = ty::type_err_to_str(self.tcx, terr);
|
||||
self.tcx.sess.span_err(origin.span(), terr_str);
|
||||
}
|
||||
|
||||
fn intersect_scopes(&self,
|
||||
region_a: ty::Region,
|
||||
region_b: ty::Region,
|
||||
|
@ -12,7 +12,7 @@
|
||||
use metadata::encoder;
|
||||
use middle::ty::{ReSkolemized, ReVar};
|
||||
use middle::ty::{BoundRegion, BrAnon, BrNamed};
|
||||
use middle::ty::{BrFresh, ctxt, field};
|
||||
use middle::ty::{BrFresh, ctxt};
|
||||
use middle::ty::{mt, t, param_ty};
|
||||
use middle::ty::{ReFree, ReScope, ReInfer, ReStatic, Region,
|
||||
ReEmpty};
|
||||
@ -432,16 +432,6 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
|
||||
}
|
||||
}
|
||||
}
|
||||
fn method_to_str(cx: ctxt, m: ty::Method) -> ~str {
|
||||
bare_fn_to_str(cx,
|
||||
m.fty.purity,
|
||||
m.fty.abis,
|
||||
Some(m.ident),
|
||||
&m.fty.sig) + ";"
|
||||
}
|
||||
fn field_to_str(cx: ctxt, f: field) -> ~str {
|
||||
return format!("{}: {}", cx.sess.str_of(f.ident), mt_to_str(cx, &f.mt));
|
||||
}
|
||||
|
||||
// if there is an id, print that instead of the structural type:
|
||||
/*for def_id in ty::type_def_id(typ).iter() {
|
||||
|
@ -305,15 +305,6 @@ pub struct Generics {
|
||||
type_params: ~[TyParam]
|
||||
}
|
||||
|
||||
impl Generics {
|
||||
fn new() -> Generics {
|
||||
Generics {
|
||||
lifetimes: ~[],
|
||||
type_params: ~[]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clean<Generics> for ast::Generics {
|
||||
fn clean(&self) -> Generics {
|
||||
Generics {
|
||||
|
@ -122,10 +122,6 @@ pub fn render<T: fmt::Default, S: fmt::Default>(
|
||||
);
|
||||
}
|
||||
|
||||
fn boolstr(b: bool) -> &'static str {
|
||||
if b { "true" } else { "false" }
|
||||
}
|
||||
|
||||
fn nonestr<'a>(s: &'a str) -> &'a str {
|
||||
if s == "" { "none" } else { s }
|
||||
}
|
||||
|
@ -38,9 +38,6 @@ static MKDEXT_TABLES: libc::c_uint = 1 << 1;
|
||||
static MKDEXT_FENCED_CODE: libc::c_uint = 1 << 2;
|
||||
static MKDEXT_AUTOLINK: libc::c_uint = 1 << 3;
|
||||
static MKDEXT_STRIKETHROUGH: libc::c_uint = 1 << 4;
|
||||
static MKDEXT_SPACE_HEADERS: libc::c_uint = 1 << 6;
|
||||
static MKDEXT_SUPERSCRIPT: libc::c_uint = 1 << 7;
|
||||
static MKDEXT_LAX_SPACING: libc::c_uint = 1 << 8;
|
||||
|
||||
type sd_markdown = libc::c_void; // this is opaque to us
|
||||
|
||||
|
@ -19,22 +19,10 @@ condition! {
|
||||
pub bad_path: (Path, ~str) -> Path;
|
||||
}
|
||||
|
||||
condition! {
|
||||
pub bad_stat: (Path, ~str) -> FileStat;
|
||||
}
|
||||
|
||||
condition! {
|
||||
pub bad_kind: (~str) -> ();
|
||||
}
|
||||
|
||||
condition! {
|
||||
pub nonexistent_package: (PkgId, ~str) -> Path;
|
||||
}
|
||||
|
||||
condition! {
|
||||
pub copy_failed: (Path, Path) -> ();
|
||||
}
|
||||
|
||||
condition! {
|
||||
pub missing_pkg_files: (PkgId) -> ();
|
||||
}
|
||||
@ -43,10 +31,6 @@ condition! {
|
||||
pub bad_pkg_id: (Path, ~str) -> PkgId;
|
||||
}
|
||||
|
||||
condition! {
|
||||
pub no_rust_path: (~str) -> Path;
|
||||
}
|
||||
|
||||
condition! {
|
||||
pub failed_to_create_temp_dir: (~str) -> Path;
|
||||
}
|
||||
|
@ -8,6 +8,8 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
#[allow(dead_code)];
|
||||
|
||||
use std::path::Path;
|
||||
use std::vec;
|
||||
|
||||
|
@ -63,10 +63,9 @@ mod crate;
|
||||
pub mod exit_codes;
|
||||
mod installed_packages;
|
||||
mod messages;
|
||||
mod package_id;
|
||||
mod package_source;
|
||||
pub mod package_id;
|
||||
pub mod package_source;
|
||||
mod path_util;
|
||||
mod search;
|
||||
mod sha1;
|
||||
mod source_control;
|
||||
mod target;
|
||||
@ -189,10 +188,6 @@ impl<'self> PkgScript<'self> {
|
||||
(cfgs, output.status)
|
||||
}
|
||||
}
|
||||
|
||||
fn hash(&self) -> ~str {
|
||||
self.id.hash()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CtxMethods {
|
||||
@ -924,12 +919,3 @@ pub fn main_args(args: &[~str]) -> int {
|
||||
if result.is_err() { return COPY_FAILED_CODE; }
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn declare_package_script_dependency(prep: &mut workcache::Prep, pkg_src: &PkgSrc) {
|
||||
match pkg_src.package_script_option() {
|
||||
// FIXME (#9639): This needs to handle non-utf8 paths
|
||||
Some(ref p) => prep.declare_input("file", p.as_str().unwrap(),
|
||||
workcache_support::digest_file_with_date(p)),
|
||||
None => ()
|
||||
}
|
||||
}
|
||||
|
@ -321,11 +321,6 @@ impl PkgSrc {
|
||||
}
|
||||
}
|
||||
|
||||
/// True if the given path's stem is self's pkg ID's stem
|
||||
fn stem_matches(&self, p: &Path) -> bool {
|
||||
p.filestem().map_default(false, |p| { p == self.id.short_name.as_bytes() })
|
||||
}
|
||||
|
||||
pub fn push_crate(cs: &mut ~[Crate], prefix: uint, p: &Path) {
|
||||
let mut it = p.components().peekable();
|
||||
if prefix > 0 {
|
||||
|
@ -10,6 +10,8 @@
|
||||
|
||||
// rustpkg utilities having to do with paths and directories
|
||||
|
||||
#[allow(dead_code)];
|
||||
|
||||
pub use package_id::PkgId;
|
||||
pub use target::{OutputType, Main, Lib, Test, Bench, Target, Build, Install};
|
||||
pub use version::{Version, NoVersion, split_version_general, try_parsing_version};
|
||||
|
@ -1,29 +0,0 @@
|
||||
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use path_util::{installed_library_in_workspace, rust_path};
|
||||
use version::Version;
|
||||
|
||||
/// If some workspace `p` in the RUST_PATH contains a package matching short_name,
|
||||
/// return Some(p) (returns the first one of there are multiple matches.) Return
|
||||
/// None if there's no such path.
|
||||
/// FIXME #8711: This ignores the desired version.
|
||||
pub fn find_installed_library_in_rust_path(pkg_path: &Path, _version: &Version) -> Option<Path> {
|
||||
let rp = rust_path();
|
||||
debug!("find_installed_library_in_rust_path: looking for path {}",
|
||||
pkg_path.display());
|
||||
for p in rp.iter() {
|
||||
match installed_library_in_workspace(pkg_path, p) {
|
||||
Some(path) => return Some(path),
|
||||
None => ()
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
@ -79,6 +79,7 @@ fn file_is(p: &Path, stem: &str) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn lib_name_of(p: &Path) -> Path {
|
||||
p.join("lib.rs")
|
||||
}
|
||||
|
@ -8,6 +8,8 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
#[allow(dead_code)];
|
||||
|
||||
use std::libc;
|
||||
use std::os;
|
||||
use std::io;
|
||||
|
@ -54,10 +54,6 @@ pub fn pkg_parent_workspaces(cx: &Context, pkgid: &PkgId) -> ~[Path] {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_workspace(p: &Path) -> bool {
|
||||
p.join("src").is_dir()
|
||||
}
|
||||
|
||||
/// Construct a workspace and package-ID name based on the current directory.
|
||||
/// This gets used when rustpkg gets invoked without a package-ID argument.
|
||||
pub fn cwd_to_workspace() -> Option<(Path, PkgId)> {
|
||||
|
@ -370,14 +370,6 @@ impl Drop for TcpListener {
|
||||
}
|
||||
}
|
||||
|
||||
extern fn listener_close_cb(handle: *uvll::uv_handle_t) {
|
||||
let tcp: &mut TcpListener = unsafe { UvHandle::from_uv_handle(&handle) };
|
||||
unsafe { uvll::free_handle(handle) }
|
||||
|
||||
let sched: ~Scheduler = Local::take();
|
||||
sched.resume_blocked_task_immediately(tcp.closing_task.take_unwrap());
|
||||
}
|
||||
|
||||
// TCP acceptors (bound servers)
|
||||
|
||||
impl HomingIO for TcpAcceptor {
|
||||
|
@ -141,10 +141,6 @@ impl Drop for PipeWatcher {
|
||||
}
|
||||
}
|
||||
|
||||
extern fn pipe_close_cb(handle: *uvll::uv_handle_t) {
|
||||
unsafe { uvll::free_handle(handle) }
|
||||
}
|
||||
|
||||
// PipeListener implementation and traits
|
||||
|
||||
impl PipeListener {
|
||||
|
@ -91,107 +91,6 @@ impl<A:IterBytes> Hash for A {
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_keyed_2<A: IterBytes,
|
||||
B: IterBytes>(a: &A, b: &B, k0: u64, k1: u64) -> u64 {
|
||||
let mut s = State::new(k0, k1);
|
||||
a.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
b.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
s.result_u64()
|
||||
}
|
||||
|
||||
fn hash_keyed_3<A: IterBytes,
|
||||
B: IterBytes,
|
||||
C: IterBytes>(a: &A, b: &B, c: &C, k0: u64, k1: u64) -> u64 {
|
||||
let mut s = State::new(k0, k1);
|
||||
a.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
b.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
c.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
s.result_u64()
|
||||
}
|
||||
|
||||
fn hash_keyed_4<A: IterBytes,
|
||||
B: IterBytes,
|
||||
C: IterBytes,
|
||||
D: IterBytes>(
|
||||
a: &A,
|
||||
b: &B,
|
||||
c: &C,
|
||||
d: &D,
|
||||
k0: u64,
|
||||
k1: u64)
|
||||
-> u64 {
|
||||
let mut s = State::new(k0, k1);
|
||||
a.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
b.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
c.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
d.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
s.result_u64()
|
||||
}
|
||||
|
||||
fn hash_keyed_5<A: IterBytes,
|
||||
B: IterBytes,
|
||||
C: IterBytes,
|
||||
D: IterBytes,
|
||||
E: IterBytes>(
|
||||
a: &A,
|
||||
b: &B,
|
||||
c: &C,
|
||||
d: &D,
|
||||
e: &E,
|
||||
k0: u64,
|
||||
k1: u64)
|
||||
-> u64 {
|
||||
let mut s = State::new(k0, k1);
|
||||
a.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
b.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
c.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
d.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
e.iter_bytes(true, |bytes| {
|
||||
s.input(bytes);
|
||||
true
|
||||
});
|
||||
s.result_u64()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn default_state() -> State {
|
||||
State::new(0, 0)
|
||||
|
@ -12,7 +12,7 @@ use option::Option;
|
||||
use comm::{GenericPort, GenericChan};
|
||||
use super::{Reader, Writer};
|
||||
|
||||
struct PortReader<P>;
|
||||
pub struct PortReader<P>;
|
||||
|
||||
impl<P: GenericPort<~[u8]>> PortReader<P> {
|
||||
pub fn new(_port: P) -> PortReader<P> { fail!() }
|
||||
@ -24,7 +24,7 @@ impl<P: GenericPort<~[u8]>> Reader for PortReader<P> {
|
||||
fn eof(&mut self) -> bool { fail!() }
|
||||
}
|
||||
|
||||
struct ChanWriter<C>;
|
||||
pub struct ChanWriter<C>;
|
||||
|
||||
impl<C: GenericChan<~[u8]>> ChanWriter<C> {
|
||||
pub fn new(_chan: C) -> ChanWriter<C> { fail!() }
|
||||
@ -34,7 +34,7 @@ impl<C: GenericChan<~[u8]>> Writer for ChanWriter<C> {
|
||||
fn write(&mut self, _buf: &[u8]) { fail!() }
|
||||
}
|
||||
|
||||
struct ReaderPort<R>;
|
||||
pub struct ReaderPort<R>;
|
||||
|
||||
impl<R: Reader> ReaderPort<R> {
|
||||
pub fn new(_reader: R) -> ReaderPort<R> { fail!() }
|
||||
@ -46,7 +46,7 @@ impl<R: Reader> GenericPort<~[u8]> for ReaderPort<R> {
|
||||
fn try_recv(&self) -> Option<~[u8]> { fail!() }
|
||||
}
|
||||
|
||||
struct WriterChan<W>;
|
||||
pub struct WriterChan<W>;
|
||||
|
||||
impl<W: Writer> WriterChan<W> {
|
||||
pub fn new(_writer: W) -> WriterChan<W> { fail!() }
|
||||
|
@ -756,10 +756,6 @@ pub fn link(src: &CString, dst: &CString) -> IoResult<()> {
|
||||
#[cfg(windows)]
|
||||
fn mkstat(stat: &libc::stat, path: &CString) -> io::FileStat {
|
||||
let path = unsafe { CString::new(path.with_ref(|p| p), false) };
|
||||
|
||||
// FileStat times are in milliseconds
|
||||
fn mktime(secs: u64, nsecs: u64) -> u64 { secs * 1000 + nsecs / 1000000 }
|
||||
|
||||
let kind = match (stat.st_mode as c_int) & libc::S_IFMT {
|
||||
libc::S_IFREG => io::TypeFile,
|
||||
libc::S_IFDIR => io::TypeDirectory,
|
||||
|
@ -100,6 +100,7 @@ fn mkerr_libc(ret: libc::c_int) -> IoResult<()> {
|
||||
}
|
||||
|
||||
// windows has zero values as errors
|
||||
#[cfg(windows)]
|
||||
fn mkerr_winbool(ret: libc::c_int) -> IoResult<()> {
|
||||
if ret == 0 {
|
||||
Err(last_error())
|
||||
|
@ -1690,7 +1690,7 @@ impl<T> Fuse<T> {
    /// Resets the fuse such that the next call to .next() or .next_back() will
    /// call the underlying iterator again even if it prevously returned None.
    #[inline]
    fn reset_fuse(&mut self) {
    pub fn reset_fuse(&mut self) {
        self.done = false
    }
}
|
||||
|
@ -57,7 +57,7 @@ delegate!(
|
||||
fn log2(n: f32) -> f32 = intrinsics::log2f32,
|
||||
fn mul_add(a: f32, b: f32, c: f32) -> f32 = intrinsics::fmaf32,
|
||||
fn pow(n: f32, e: f32) -> f32 = intrinsics::powf32,
|
||||
fn powi(n: f32, e: c_int) -> f32 = intrinsics::powif32,
|
||||
// fn powi(n: f32, e: c_int) -> f32 = intrinsics::powif32,
|
||||
fn sin(n: f32) -> f32 = intrinsics::sinf32,
|
||||
fn sqrt(n: f32) -> f32 = intrinsics::sqrtf32,
|
||||
|
||||
@ -79,25 +79,25 @@ delegate!(
|
||||
fn cbrt(n: c_float) -> c_float = c_float_utils::cbrt,
|
||||
fn copysign(x: c_float, y: c_float) -> c_float = c_float_utils::copysign,
|
||||
fn cosh(n: c_float) -> c_float = c_float_utils::cosh,
|
||||
fn erf(n: c_float) -> c_float = c_float_utils::erf,
|
||||
fn erfc(n: c_float) -> c_float = c_float_utils::erfc,
|
||||
// fn erf(n: c_float) -> c_float = c_float_utils::erf,
|
||||
// fn erfc(n: c_float) -> c_float = c_float_utils::erfc,
|
||||
fn exp_m1(n: c_float) -> c_float = c_float_utils::exp_m1,
|
||||
fn abs_sub(a: c_float, b: c_float) -> c_float = c_float_utils::abs_sub,
|
||||
fn next_after(x: c_float, y: c_float) -> c_float = c_float_utils::next_after,
|
||||
fn frexp(n: c_float, value: &mut c_int) -> c_float = c_float_utils::frexp,
|
||||
fn hypot(x: c_float, y: c_float) -> c_float = c_float_utils::hypot,
|
||||
fn ldexp(x: c_float, n: c_int) -> c_float = c_float_utils::ldexp,
|
||||
fn lgamma(n: c_float, sign: &mut c_int) -> c_float = c_float_utils::lgamma,
|
||||
fn log_radix(n: c_float) -> c_float = c_float_utils::log_radix,
|
||||
// fn lgamma(n: c_float, sign: &mut c_int) -> c_float = c_float_utils::lgamma,
|
||||
// fn log_radix(n: c_float) -> c_float = c_float_utils::log_radix,
|
||||
fn ln_1p(n: c_float) -> c_float = c_float_utils::ln_1p,
|
||||
fn ilog_radix(n: c_float) -> c_int = c_float_utils::ilog_radix,
|
||||
fn modf(n: c_float, iptr: &mut c_float) -> c_float = c_float_utils::modf,
|
||||
// fn ilog_radix(n: c_float) -> c_int = c_float_utils::ilog_radix,
|
||||
// fn modf(n: c_float, iptr: &mut c_float) -> c_float = c_float_utils::modf,
|
||||
fn round(n: c_float) -> c_float = c_float_utils::round,
|
||||
fn ldexp_radix(n: c_float, i: c_int) -> c_float = c_float_utils::ldexp_radix,
|
||||
// fn ldexp_radix(n: c_float, i: c_int) -> c_float = c_float_utils::ldexp_radix,
|
||||
fn sinh(n: c_float) -> c_float = c_float_utils::sinh,
|
||||
fn tan(n: c_float) -> c_float = c_float_utils::tan,
|
||||
fn tanh(n: c_float) -> c_float = c_float_utils::tanh,
|
||||
fn tgamma(n: c_float) -> c_float = c_float_utils::tgamma
|
||||
fn tanh(n: c_float) -> c_float = c_float_utils::tanh
|
||||
// fn tgamma(n: c_float) -> c_float = c_float_utils::tgamma
|
||||
)
|
||||
|
||||
// These are not defined inside consts:: for consistency with
|
||||
|
@ -59,7 +59,7 @@ delegate!(
|
||||
fn log2(n: f64) -> f64 = intrinsics::log2f64,
|
||||
fn mul_add(a: f64, b: f64, c: f64) -> f64 = intrinsics::fmaf64,
|
||||
fn pow(n: f64, e: f64) -> f64 = intrinsics::powf64,
|
||||
fn powi(n: f64, e: c_int) -> f64 = intrinsics::powif64,
|
||||
// fn powi(n: f64, e: c_int) -> f64 = intrinsics::powif64,
|
||||
fn sin(n: f64) -> f64 = intrinsics::sinf64,
|
||||
fn sqrt(n: f64) -> f64 = intrinsics::sqrtf64,
|
||||
|
||||
@ -81,8 +81,8 @@ delegate!(
|
||||
fn cbrt(n: c_double) -> c_double = c_double_utils::cbrt,
|
||||
fn copysign(x: c_double, y: c_double) -> c_double = c_double_utils::copysign,
|
||||
fn cosh(n: c_double) -> c_double = c_double_utils::cosh,
|
||||
fn erf(n: c_double) -> c_double = c_double_utils::erf,
|
||||
fn erfc(n: c_double) -> c_double = c_double_utils::erfc,
|
||||
// fn erf(n: c_double) -> c_double = c_double_utils::erf,
|
||||
// fn erfc(n: c_double) -> c_double = c_double_utils::erfc,
|
||||
fn exp_m1(n: c_double) -> c_double = c_double_utils::exp_m1,
|
||||
fn abs_sub(a: c_double, b: c_double) -> c_double = c_double_utils::abs_sub,
|
||||
fn next_after(x: c_double, y: c_double) -> c_double = c_double_utils::next_after,
|
||||
@ -90,12 +90,12 @@ delegate!(
|
||||
fn hypot(x: c_double, y: c_double) -> c_double = c_double_utils::hypot,
|
||||
fn ldexp(x: c_double, n: c_int) -> c_double = c_double_utils::ldexp,
|
||||
fn lgamma(n: c_double, sign: &mut c_int) -> c_double = c_double_utils::lgamma,
|
||||
fn log_radix(n: c_double) -> c_double = c_double_utils::log_radix,
|
||||
// fn log_radix(n: c_double) -> c_double = c_double_utils::log_radix,
|
||||
fn ln_1p(n: c_double) -> c_double = c_double_utils::ln_1p,
|
||||
fn ilog_radix(n: c_double) -> c_int = c_double_utils::ilog_radix,
|
||||
fn modf(n: c_double, iptr: &mut c_double) -> c_double = c_double_utils::modf,
|
||||
// fn ilog_radix(n: c_double) -> c_int = c_double_utils::ilog_radix,
|
||||
// fn modf(n: c_double, iptr: &mut c_double) -> c_double = c_double_utils::modf,
|
||||
fn round(n: c_double) -> c_double = c_double_utils::round,
|
||||
fn ldexp_radix(n: c_double, i: c_int) -> c_double = c_double_utils::ldexp_radix,
|
||||
// fn ldexp_radix(n: c_double, i: c_int) -> c_double = c_double_utils::ldexp_radix,
|
||||
fn sinh(n: c_double) -> c_double = c_double_utils::sinh,
|
||||
fn tan(n: c_double) -> c_double = c_double_utils::tan,
|
||||
fn tanh(n: c_double) -> c_double = c_double_utils::tanh,
|
||||
|
@ -32,6 +32,7 @@
|
||||
use c_str::CString;
|
||||
use clone::Clone;
|
||||
use container::Container;
|
||||
#[cfg(target_os = "macos")]
|
||||
use iter::range;
|
||||
use libc;
|
||||
use libc::{c_char, c_void, c_int, size_t};
|
||||
@ -338,12 +339,6 @@ pub fn pipe() -> Pipe {
|
||||
}
|
||||
}
|
||||
|
||||
fn dup2(src: c_int, dst: c_int) -> c_int {
|
||||
unsafe {
|
||||
libc::dup2(src, dst)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the proper dll filename for the given basename of a file.
|
||||
pub fn dll_filename(base: &str) -> ~str {
|
||||
format!("{}{}{}", DLL_PREFIX, base, DLL_SUFFIX)
|
||||
@ -708,6 +703,7 @@ pub fn set_exit_status(code: int) {
|
||||
rt::set_exit_status(code);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
unsafe fn load_argc_and_argv(argc: c_int, argv: **c_char) -> ~[~str] {
|
||||
let mut args = ~[];
|
||||
for i in range(0u, argc as uint) {
|
||||
@ -787,10 +783,6 @@ extern "system" {
|
||||
fn CommandLineToArgvW(lpCmdLine: LPCWSTR, pNumArgs: *mut c_int) -> **u16;
|
||||
}
|
||||
|
||||
struct OverriddenArgs {
|
||||
val: ~[~str]
|
||||
}
|
||||
|
||||
/// Returns the arguments which this program was started with (normally passed
|
||||
/// via the command line).
|
||||
pub fn args() -> ~[~str] {
|
||||
|
@ -1049,11 +1049,6 @@ fn prefix_len(p: Option<PathPrefix>) -> uint {
|
||||
}
|
||||
}
|
||||
|
||||
fn prefix_is_sep(p: Option<PathPrefix>, c: u8) -> bool {
|
||||
c.is_ascii() && if !prefix_is_verbatim(p) { is_sep(c as char) }
|
||||
else { is_sep_verbatim(c as char) }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -886,6 +886,7 @@ mod test {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
static RAND_BENCH_N: u64 = 100;
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -37,17 +37,6 @@ struct BasicLoop {
|
||||
|
||||
enum Message { RunRemote(uint), RemoveRemote(uint) }
|
||||
|
||||
struct Time {
|
||||
sec: u64,
|
||||
nsec: u64,
|
||||
}
|
||||
|
||||
impl Ord for Time {
|
||||
fn lt(&self, other: &Time) -> bool {
|
||||
self.sec < other.sec || self.nsec < other.nsec
|
||||
}
|
||||
}
|
||||
|
||||
impl BasicLoop {
|
||||
fn new() -> BasicLoop {
|
||||
BasicLoop {
|
||||
@ -238,14 +227,3 @@ impl Drop for BasicPausible {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn time() -> Time {
|
||||
extern {
|
||||
fn rust_get_time(sec: &mut i64, nsec: &mut i32);
|
||||
}
|
||||
let mut sec = 0;
|
||||
let mut nsec = 0;
|
||||
unsafe { rust_get_time(&mut sec, &mut nsec) }
|
||||
|
||||
Time { sec: sec as u64, nsec: nsec as u64 }
|
||||
}
|
||||
|
@ -25,7 +25,9 @@ use unstable::intrinsics::TyDesc;
|
||||
use unstable::raw;
|
||||
|
||||
// This has no meaning with out rtdebug also turned on.
|
||||
#[cfg(rtdebug)]
|
||||
static TRACK_ALLOCATIONS: int = 0;
|
||||
#[cfg(rtdebug)]
|
||||
static MAGIC: u32 = 0xbadc0ffe;
|
||||
|
||||
pub type Box = raw::Box<()>;
|
||||
|
@ -15,6 +15,8 @@
|
||||
//! XXX: Add runtime checks for usage of inconsistent pointer types.
|
||||
//! and for overwriting an existing pointer.
|
||||
|
||||
#[allow(dead_code)];
|
||||
|
||||
use cast;
|
||||
use cell::Cell;
|
||||
use unstable::finally::Finally;
|
||||
|
@ -8,6 +8,8 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
#[allow(dead_code)];
|
||||
|
||||
use libc::c_void;
|
||||
#[cfg(unix)]
|
||||
use libc::c_int;
|
||||
|
@ -183,7 +183,10 @@ impl Process {
|
||||
self.inner.io[0].take();
|
||||
}
|
||||
|
||||
fn close_outputs(&mut self) {
|
||||
/**
|
||||
* Closes the handle to stdout and stderr.
|
||||
*/
|
||||
pub fn close_outputs(&mut self) {
|
||||
self.inner.io[1].take();
|
||||
self.inner.io[2].take();
|
||||
}
|
||||
|
@ -12,6 +12,7 @@
|
||||
|
||||
#[allow(missing_doc)];
|
||||
#[allow(non_uppercase_statics)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
pub mod general_category {
|
||||
|
||||
|
@ -35,6 +35,7 @@ pub enum UnsafeArcUnwrap<T> {
|
||||
UnsafeArcT(T)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl<T> UnsafeArcUnwrap<T> {
|
||||
fn expect_t(self, msg: &'static str) -> T {
|
||||
match self {
|
||||
|
@ -448,13 +448,6 @@ impl CodeMap {
|
||||
};
|
||||
}
|
||||
|
||||
fn span_to_str_no_adj(&self, sp: Span) -> ~str {
|
||||
let lo = self.lookup_char_pos(sp.lo);
|
||||
let hi = self.lookup_char_pos(sp.hi);
|
||||
return format!("{}:{}:{}: {}:{}", lo.file.name,
|
||||
lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint())
|
||||
}
|
||||
|
||||
fn lookup_byte_offset(&self, bpos: BytePos)
|
||||
-> FileMapAndBytePos {
|
||||
let idx = self.lookup_filemap_idx(bpos);
|
||||
|
@ -201,15 +201,7 @@ pub fn syntax_expander_table() -> SyntaxEnv {
|
||||
} as @SyntaxExpanderTTTrait,
|
||||
None))
|
||||
}
|
||||
// utility function to simplify creating IdentTT syntax extensions
|
||||
// that ignore their contexts
|
||||
fn builtin_item_tt_no_ctxt(f: SyntaxExpanderTTItemFunNoCtxt) -> @Transformer {
|
||||
@SE(IdentTT(@SyntaxExpanderTTItem {
|
||||
expander: SyntaxExpanderTTItemExpanderWithoutContext(f),
|
||||
span: None,
|
||||
} as @SyntaxExpanderTTItemTrait,
|
||||
None))
|
||||
}
|
||||
|
||||
let mut syntax_expanders = HashMap::new();
|
||||
// NB identifier starts with space, and can't conflict with legal idents
|
||||
syntax_expanders.insert(intern(&" block"),
|
||||
|
@ -8,7 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use ast::{P, Block, Crate, DeclLocal, Expr_, ExprMac, SyntaxContext};
|
||||
use ast::{P, Block, Crate, DeclLocal, ExprMac, SyntaxContext};
|
||||
use ast::{Local, Ident, mac_invoc_tt};
|
||||
use ast::{item_mac, Mrk, Stmt, StmtDecl, StmtMac, StmtExpr, StmtSemi};
|
||||
use ast::{token_tree};
|
||||
@ -21,7 +21,6 @@ use codemap;
|
||||
use codemap::{Span, Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
|
||||
use ext::base::*;
|
||||
use fold::*;
|
||||
use opt_vec;
|
||||
use parse;
|
||||
use parse::{parse_item_from_source_str};
|
||||
use parse::token;
|
||||
@ -140,29 +139,6 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
|
||||
|
||||
let span = e.span;
|
||||
|
||||
fn mk_expr(_: @ExtCtxt, span: Span, node: Expr_)
|
||||
-> @ast::Expr {
|
||||
@ast::Expr {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: node,
|
||||
span: span,
|
||||
}
|
||||
}
|
||||
|
||||
fn mk_simple_path(ident: ast::Ident, span: Span) -> ast::Path {
|
||||
ast::Path {
|
||||
span: span,
|
||||
global: false,
|
||||
segments: ~[
|
||||
ast::PathSegment {
|
||||
identifier: ident,
|
||||
lifetimes: opt_vec::Empty,
|
||||
types: opt_vec::Empty,
|
||||
}
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
// to:
|
||||
//
|
||||
// {
|
||||
@ -714,14 +690,6 @@ pub fn renames_to_fold(renames: @mut ~[(ast::Ident,ast::Name)]) -> @ast_fold {
|
||||
} as @ast_fold
|
||||
}
|
||||
|
||||
// perform a bunch of renames
|
||||
fn apply_pending_renames(folder : @ast_fold, stmt : ast::Stmt) -> @ast::Stmt {
|
||||
folder.fold_stmt(&stmt)
|
||||
.expect_one("renaming of stmt did not produce one stmt")
|
||||
}
|
||||
|
||||
|
||||
|
||||
pub fn new_span(cx: @ExtCtxt, sp: Span) -> Span {
|
||||
/* this discards information in the case of macro-defining macros */
|
||||
Span {
|
||||
@ -739,6 +707,7 @@ pub fn std_macros() -> @str {
|
||||
@r#"mod __std_macros {
|
||||
#[macro_escape];
|
||||
#[doc(hidden)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
macro_rules! ignore (($($x:tt)*) => (()))
|
||||
|
||||
@ -900,6 +869,7 @@ pub fn std_macros() -> @str {
|
||||
mod $c {
|
||||
#[allow(unused_imports)];
|
||||
#[allow(non_uppercase_statics)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
use super::*;
|
||||
|
||||
@ -979,12 +949,6 @@ pub fn inject_std_macros(parse_sess: @mut parse::ParseSess,
|
||||
injector.fold_crate(c)
|
||||
}
|
||||
|
||||
struct NoOpFolder {
|
||||
contents: (),
|
||||
}
|
||||
|
||||
impl ast_fold for NoOpFolder {}
|
||||
|
||||
pub struct MacroExpander {
|
||||
extsbox: @mut SyntaxEnv,
|
||||
cx: @ExtCtxt,
|
||||
|
@ -331,7 +331,12 @@ impl Context {
        let unnamed = self.ecx.meta_word(self.fmtsp, @"address_insignificant");
        let unnamed = self.ecx.attribute(self.fmtsp, unnamed);

        return ~[unnamed];
        // Do not warn format string as dead code
        let dead_code = self.ecx.meta_word(self.fmtsp, @"dead_code");
        let allow_dead_code = self.ecx.meta_list(self.fmtsp,
                                                 @"allow", ~[dead_code]);
        let allow_dead_code = self.ecx.attribute(self.fmtsp, allow_dead_code);
        return ~[unnamed, allow_dead_code];
    }

    /// Translate a `parse::Piece` to a static `rt::Piece`
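This hunk keeps the new lint quiet on compiler-generated items: `format!` interns its format spec in a hidden static, and that static now carries an `allow(dead_code)` attribute next to the existing `address_insignificant` one. Roughly the shape of what the expansion attaches, sketched with a hypothetical name in place of the real generated one:

```rust
// Conceptual sketch only; the static's name and type are placeholders,
// but both attributes come straight from the hunk above.
#[address_insignificant]
#[allow(dead_code)]
static __static_fmtstr: &'static str = "hello, {}!";

fn main() {
    // User code keeps its own dead-code diagnostics; the generated
    // static above no longer trips the lint.
    println!("hello, {}!", "world");
}
```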
@ -9,7 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
use ast;
|
||||
use codemap::{BytePos, Pos, Span};
|
||||
use codemap::Span;
|
||||
use ext::base::ExtCtxt;
|
||||
use ext::base;
|
||||
use ext::build::AstBuilder;
|
||||
@ -357,12 +357,6 @@ fn mk_ident(cx: @ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr {
|
||||
~[e_str])
|
||||
}
|
||||
|
||||
fn mk_bytepos(cx: @ExtCtxt, sp: Span, bpos: BytePos) -> @ast::Expr {
|
||||
let path = id_ext("BytePos");
|
||||
let arg = cx.expr_uint(sp, bpos.to_uint());
|
||||
cx.expr_call_ident(sp, path, ~[arg])
|
||||
}
|
||||
|
||||
fn mk_binop(cx: @ExtCtxt, sp: Span, bop: token::binop) -> @ast::Expr {
|
||||
let name = match bop {
|
||||
PLUS => "PLUS",
|
||||
|
@ -541,20 +541,6 @@ fn fold_struct_def<T:ast_fold>(struct_def: @ast::struct_def, fld: &T)
|
||||
}
|
||||
}
|
||||
|
||||
fn noop_fold_view_item(vi: &view_item_, fld: @ast_fold) -> view_item_ {
|
||||
match *vi {
|
||||
view_item_extern_mod(ident, name, ref meta_items, node_id) => {
|
||||
view_item_extern_mod(ident,
|
||||
name,
|
||||
fld.fold_meta_items(*meta_items),
|
||||
fld.new_id(node_id))
|
||||
}
|
||||
view_item_use(ref view_paths) => {
|
||||
view_item_use(fld.fold_view_paths(*view_paths))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_trait_ref<T:ast_fold>(p: &trait_ref, fld: &T) -> trait_ref {
|
||||
ast::trait_ref {
|
||||
path: fld.fold_path(&p.path),
|
||||
@ -589,14 +575,6 @@ fn fold_mt<T:ast_fold>(mt: &mt, folder: &T) -> mt {
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_field<T:ast_fold>(f: TypeField, folder: &T) -> TypeField {
|
||||
ast::TypeField {
|
||||
ident: folder.fold_ident(f.ident),
|
||||
mt: fold_mt(&f.mt, folder),
|
||||
span: folder.new_span(f.span),
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_opt_bounds<T:ast_fold>(b: &Option<OptVec<TyParamBound>>, folder: &T)
|
||||
-> Option<OptVec<TyParamBound>> {
|
||||
b.as_ref().map(|bounds| {
|
||||
|
@ -273,8 +273,6 @@ pub fn nextch(rdr: @mut StringReader) -> char {
|
||||
} else { return unsafe { transmute(-1u32) }; } // FIXME: #8971: unsound
|
||||
}
|
||||
|
||||
fn dec_digit_val(c: char) -> int { return (c as int) - ('0' as int); }
|
||||
|
||||
fn hex_digit_val(c: char) -> int {
|
||||
if in_range(c, '0', '9') { return (c as int) - ('0' as int); }
|
||||
if in_range(c, 'a', 'f') { return (c as int) - ('a' as int) + 10; }
|
||||
@ -282,13 +280,6 @@ fn hex_digit_val(c: char) -> int {
|
||||
fail!();
|
||||
}
|
||||
|
||||
fn oct_digit_val(c: char) -> int {
|
||||
if in_range(c, '0', '7') { return (c as int) - ('0' as int); }
|
||||
fail!();
|
||||
}
|
||||
|
||||
fn bin_digit_value(c: char) -> int { if c == '0' { return 0; } return 1; }
|
||||
|
||||
pub fn is_whitespace(c: char) -> bool {
|
||||
return c == ' ' || c == '\t' || c == '\r' || c == '\n';
|
||||
}
|
||||
@ -304,10 +295,6 @@ fn is_hex_digit(c: char) -> bool {
|
||||
in_range(c, 'A', 'F');
|
||||
}
|
||||
|
||||
fn is_oct_digit(c: char) -> bool { return in_range(c, '0', '7'); }
|
||||
|
||||
fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; }
|
||||
|
||||
// EFFECT: eats whitespace and comments.
|
||||
// returns a Some(sugared-doc-attr) if one exists, None otherwise.
|
||||
fn consume_whitespace_and_comments(rdr: @mut StringReader)
|
||||
|
@ -135,13 +135,6 @@ pub enum item_or_view_item {
|
||||
iovi_view_item(view_item)
|
||||
}
|
||||
|
||||
#[deriving(Eq)]
|
||||
enum view_item_parse_mode {
|
||||
VIEW_ITEMS_AND_ITEMS_ALLOWED,
|
||||
FOREIGN_ITEMS_ALLOWED,
|
||||
IMPORTS_AND_ITEMS_ALLOWED
|
||||
}
|
||||
|
||||
/* The expr situation is not as complex as I thought it would be.
|
||||
The important thing is to make sure that lookahead doesn't balk
|
||||
at INTERPOLATED tokens */
|
||||
@ -3455,18 +3448,6 @@ impl Parser {
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_optional_purity(&self) -> ast::purity {
|
||||
if self.eat_keyword(keywords::Unsafe) {
|
||||
ast::unsafe_fn
|
||||
} else {
|
||||
ast::impure_fn
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_optional_onceness(&self) -> ast::Onceness {
|
||||
if self.eat_keyword(keywords::Once) { ast::Once } else { ast::Many }
|
||||
}
|
||||
|
||||
// matches optbounds = ( ( : ( boundseq )? )? )
|
||||
// where boundseq = ( bound + boundseq ) | bound
|
||||
// and bound = 'static | ty
|
||||
@ -3531,15 +3512,6 @@ impl Parser {
|
||||
}
|
||||
}
|
||||
|
||||
// parse a generic use site
|
||||
fn parse_generic_values(&self) -> (OptVec<ast::Lifetime>, ~[P<Ty>]) {
|
||||
if !self.eat(&token::LT) {
|
||||
(opt_vec::Empty, ~[])
|
||||
} else {
|
||||
self.parse_generic_values_after_lt()
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_generic_values_after_lt(&self) -> (OptVec<ast::Lifetime>, ~[P<Ty>]) {
|
||||
let lifetimes = self.parse_lifetimes();
|
||||
let result = self.parse_seq_to_gt(
|
||||
@ -4080,13 +4052,6 @@ impl Parser {
|
||||
None)
|
||||
}
|
||||
|
||||
fn token_is_pound_or_doc_comment(&self, tok: token::Token) -> bool {
|
||||
match tok {
|
||||
token::POUND | token::DOC_COMMENT(_) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
// parse a structure field declaration
|
||||
pub fn parse_single_struct_field(&self,
|
||||
vis: visibility,
|
||||
@ -4556,26 +4521,6 @@ impl Parser {
|
||||
(id, item_enum(enum_definition, generics), None)
|
||||
}
|
||||
|
||||
fn parse_fn_ty_sigil(&self) -> Option<Sigil> {
|
||||
match *self.token {
|
||||
token::AT => {
|
||||
self.bump();
|
||||
Some(ManagedSigil)
|
||||
}
|
||||
token::TILDE => {
|
||||
self.bump();
|
||||
Some(OwnedSigil)
|
||||
}
|
||||
token::BINOP(token::AND) => {
|
||||
self.bump();
|
||||
Some(BorrowedSigil)
|
||||
}
|
||||
_ => {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fn_expr_lookahead(&self, tok: &token::Token) -> bool {
|
||||
match *tok {
|
||||
token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
|
||||
@ -4983,51 +4928,6 @@ impl Parser {
|
||||
return vp;
|
||||
}
|
||||
|
||||
fn is_view_item(&self) -> bool {
|
||||
if !self.is_keyword(keywords::Pub) && !self.is_keyword(keywords::Priv) {
|
||||
token::is_keyword(keywords::Use, self.token)
|
||||
|| (token::is_keyword(keywords::Extern, self.token) &&
|
||||
self.look_ahead(1,
|
||||
|t| token::is_keyword(keywords::Mod, t)))
|
||||
} else {
|
||||
self.look_ahead(1, |t| token::is_keyword(keywords::Use, t))
|
||||
|| (self.look_ahead(1,
|
||||
|t| token::is_keyword(keywords::Extern,
|
||||
t)) &&
|
||||
self.look_ahead(2,
|
||||
|t| token::is_keyword(keywords::Mod, t)))
|
||||
}
|
||||
}
|
||||
|
||||
// parse a view item.
|
||||
fn parse_view_item(
|
||||
&self,
|
||||
attrs: ~[Attribute],
|
||||
vis: visibility
|
||||
) -> view_item {
|
||||
let lo = self.span.lo;
|
||||
let node = if self.eat_keyword(keywords::Use) {
|
||||
self.parse_use()
|
||||
} else if self.eat_keyword(keywords::Extern) {
|
||||
self.expect_keyword(keywords::Mod);
|
||||
let ident = self.parse_ident();
|
||||
let path = if *self.token == token::EQ {
|
||||
self.bump();
|
||||
Some(self.parse_str())
|
||||
}
|
||||
else { None };
|
||||
let metadata = self.parse_optional_meta();
|
||||
view_item_extern_mod(ident, path, metadata, ast::DUMMY_NODE_ID)
|
||||
} else {
|
||||
self.bug("expected view item");
|
||||
};
|
||||
self.expect(&token::SEMI);
|
||||
ast::view_item { node: node,
|
||||
attrs: attrs,
|
||||
vis: vis,
|
||||
span: mk_sp(lo, self.last_span.hi) }
|
||||
}
|
||||
|
||||
// Parses a sequence of items. Stops when it finds program
|
||||
// text that can't be parsed as an item
|
||||
// - mod_items uses extern_mod_allowed = true
|
||||
|
@ -64,7 +64,7 @@ impl<T> SmallVector<T> {
|
||||
}
|
||||
}
|
||||
|
||||
fn get<'a>(&'a self, idx: uint) -> &'a T {
|
||||
pub fn get<'a>(&'a self, idx: uint) -> &'a T {
|
||||
match *self {
|
||||
One(ref v) if idx == 0 => v,
|
||||
Many(ref vs) => &vs[idx],
|
||||
|
@ -10,6 +10,7 @@
|
||||
|
||||
#[deny(unreachable_code)];
|
||||
#[allow(unused_variable)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
fn fail_len(v: ~[int]) -> uint {
|
||||
let mut i = 3;
|
||||
|
@ -1,3 +1,5 @@
|
||||
#[allow(dead_code)];
|
||||
|
||||
// Matching against NaN should result in a warning
|
||||
|
||||
use std::f64::NAN;
|
||||
|
@ -9,6 +9,8 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[deny(unreachable_code)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
use std::ptr;
|
||||
pub unsafe fn g() {
|
||||
return;
|
||||
|
@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[deny(warnings)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
fn main() {
|
||||
while true {} //~ ERROR: infinite
|
||||
|
@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[deny(ctypes)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
enum Z { }
|
||||
enum U { A }
|
||||
|
src/test/compile-fail/lint-dead-code-1.rs (new normal file, 69 lines)
@ -0,0 +1,69 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#[allow(unused_variable)];
#[deny(dead_code)];

#[crate_type="lib"];

pub use foo2::Bar2;
mod foo {
    pub struct Bar; //~ ERROR: code is never used
}

mod foo2 {
    pub struct Bar2;
}

pub static pub_static: int = 0;
static priv_static: int = 0; //~ ERROR: code is never used
static used_static: int = 0;
pub static used_static2: int = used_static;

pub fn pub_fn() {
    used_fn();
    let used_struct1 = UsedStruct1 { x: 1 };
    let used_struct2 = UsedStruct2(1);
    let used_struct3 = UsedStruct3;
    let e = foo3;
    SemiUsedStruct::la_la_la();

}
fn priv_fn() { //~ ERROR: code is never used
    let unused_struct = PrivStruct;
}
fn used_fn() {}

pub type typ = ~UsedStruct4;
pub struct PubStruct();
struct PrivStruct; //~ ERROR: code is never used
struct UsedStruct1 { x: int }
struct UsedStruct2(int);
struct UsedStruct3;
struct UsedStruct4;
// this struct is never used directly, but its method is, so we don't want
// to warn it
struct SemiUsedStruct;
impl SemiUsedStruct {
    fn la_la_la() {}
}

pub enum pub_enum { foo1, bar1 }
enum priv_enum { foo2, bar2 } //~ ERROR: code is never used
enum used_enum { foo3, bar3 }

fn foo() { //~ ERROR: code is never used
    bar();
    let unused_enum = foo2;
}

fn bar() { //~ ERROR: code is never used
    foo();
}
src/test/compile-fail/lint-dead-code-2.rs (new normal file, 50 lines)
@ -0,0 +1,50 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#[allow(unused_variable)];
#[deny(dead_code)];

struct Foo;

trait Bar {
    fn bar1(&self);
    fn bar2(&self) {
        self.bar1();
    }
}

impl Bar for Foo {
    fn bar1(&self) {
        live_fn();
    }
}

fn live_fn() {}

fn dead_fn() {} //~ ERROR: code is never used

#[main]
fn dead_fn2() {} //~ ERROR: code is never used

fn used_fn() {}

#[start]
fn start(_: int, _: **u8) -> int {
    used_fn();
    let foo = Foo;
    foo.bar2();
    0
}

// this is not main
fn main() { //~ ERROR: code is never used
    dead_fn();
    dead_fn2();
}
src/test/compile-fail/lint-dead-code-3.rs (new normal file, 50 lines)
@ -0,0 +1,50 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#[allow(unused_variable)];
#[deny(dead_code)];

#[crate_type="lib"];

struct Foo; //~ ERROR: code is never used
impl Foo {
    fn foo(&self) { //~ ERROR: code is never used
        bar()
    }
}

fn bar() { //~ ERROR: code is never used
    fn baz() {} //~ ERROR: code is never used

    Foo.foo();
    baz();
}

// no warning
struct Foo2;
impl Foo2 { fn foo2(&self) { bar2() } }
fn bar2() {
    fn baz2() {}

    Foo2.foo2();
    baz2();
}

pub fn pub_fn() {
    let foo2_struct = Foo2;
    foo2_struct.foo2();
}

// not warned because it's used in the parameter of `free` below
enum c_void {}

extern {
    fn free(p: *c_void);
}
@ -10,6 +10,7 @@
|
||||
|
||||
#[feature(managed_boxes)];
|
||||
#[forbid(heap_memory)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
struct Foo {
|
||||
x: @int //~ ERROR type uses managed
|
||||
|
@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[allow(while_true)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
struct A(int);
|
||||
|
||||
|
@ -13,6 +13,7 @@
|
||||
#[feature(struct_variant)];
|
||||
#[feature(globs)];
|
||||
#[deny(missing_doc)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
//! Some garbage docs for the crate here
|
||||
#[doc="More garbage"];
|
||||
|
@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[forbid(non_camel_case_types)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
struct foo { //~ ERROR type `foo` should have a camel case identifier
|
||||
bar: int,
|
||||
|
@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[forbid(non_uppercase_statics)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
static foo: int = 1; //~ ERROR static constant should have an uppercase identifier
|
||||
|
||||
|
@ -12,6 +12,7 @@
|
||||
// injected intrinsics by the compiler.
|
||||
|
||||
#[deny(attribute_usage)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
#[abi="stdcall"] extern {} //~ ERROR: obsolete attribute
|
||||
|
||||
|
@ -15,6 +15,7 @@
|
||||
#[deny(unstable)];
|
||||
#[deny(deprecated)];
|
||||
#[deny(experimental)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
mod cross_crate {
|
||||
extern mod lint_stability;
|
||||
|
@ -8,6 +8,8 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
#[allow(dead_code)];
|
||||
|
||||
// compile-flags: -D type-limits
|
||||
fn main() { }
|
||||
|
||||
|
@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[allow(unused_unsafe)];
|
||||
#[allow(dead_code)];
|
||||
#[deny(unsafe_block)];
|
||||
#[feature(macro_rules)];
|
||||
|
||||
|
@ -10,6 +10,7 @@
|
||||
|
||||
#[feature(globs)];
|
||||
#[deny(unused_imports)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
mod A {
|
||||
pub fn p() {}
|
||||
|
@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
#[deny(unused_imports)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
// Regression test for issue #6633
|
||||
mod issue6633 {
|
||||
|
@ -10,6 +10,7 @@
|
||||
|
||||
#[feature(globs)];
|
||||
#[deny(unused_imports)];
|
||||
#[allow(dead_code)];
|
||||
|
||||
use cal = bar::c::cc;
|
||||
|
||||
|
@ -12,6 +12,7 @@
|
||||
|
||||
#[allow(dead_assignment)];
|
||||
#[allow(unused_variable)];
|
||||
#[allow(dead_code)];
|
||||
#[deny(unused_mut)];
|
||||
|
||||
fn main() {
|
||||
|
@ -10,6 +10,7 @@
|
||||
|
||||
// Exercise the unused_unsafe attribute in some positive and negative cases
|
||||
|
||||
#[allow(dead_code)];
|
||||
#[deny(unused_unsafe)];
|
||||
|
||||
mod foo {
|
||||
|
@ -8,6 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
#[allow(dead_code)];
|
||||
#[deny(dead_assignment)];
|
||||
|
||||
fn f1(x: &mut int) {
|
||||
|
@ -10,6 +10,7 @@
|
||||
|
||||
// Issue #7526: lowercase static constants in patterns look like bindings
|
||||
|
||||
#[allow(dead_code)];
|
||||
#[deny(non_uppercase_pattern_statics)];
|
||||
|
||||
pub static a : int = 97;
|
||||
|
@ -1,3 +1,5 @@
|
||||
#[allow(dead_code)];
|
||||
|
||||
#[static_assert]
|
||||
static A: bool = false; //~ ERROR static assertion failed
|
||||
|
||||
|
@ -1,3 +1,5 @@
|
||||
#[allow(dead_code)];
|
||||
|
||||
#[static_assert]
|
||||
static E: bool = 1 == 2; //~ ERROR static assertion failed
|
||||
|
||||
|