Mirror of https://github.com/rust-lang/rust.git (synced 2025-05-14 02:49:40 +00:00)

CamelCasify lots of std

commit a605fd0cad
parent ecb646477b
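
This is a mechanical renaming pass: type-level names (enums and their variants, structs, traits, and type aliases) move from snake_case to CamelCase, and converted files gain a #[warn(non_camel_case_types)] or #[deny(non_camel_case_types)] attribute so the lint enforces the new style. A minimal sketch of the pattern in present-day Rust syntax (illustrative names only; the actual diff below uses the 2012-era dialect with ~str, ~[T], and export/import items):

    // Sketch of the renaming this commit applies, shown in modern syntax.
    #![deny(non_camel_case_types)]

    // before: enum mode { system_mode, user_mode, local_mode }
    enum Mode { SystemMode, UserMode, LocalMode }

    // before: type chunk = {...}; struct arena { head: chunk, ... }
    struct Chunk { fill: usize, is_pod: bool }
    struct Arena { head: Chunk }

    fn main() {
        let _mode = Mode::SystemMode;
        let _arena = Arena { head: Chunk { fill: 0, is_pod: true } };
    }
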
@@ -74,7 +74,7 @@ type options = {

 enum mode { system_mode, user_mode, local_mode }

-fn opts() -> ~[getopts::opt] {
+fn opts() -> ~[getopts::Opt] {
 ~[optflag(~"g"), optflag(~"G"), optflag(~"test"),
 optflag(~"h"), optflag(~"help")]
 }
@@ -387,27 +387,27 @@ fn valid_pkg_name(s: ~str) -> bool {
 s.all(is_valid_digit)
 }

-fn parse_source(name: ~str, j: json::json) -> source {
+fn parse_source(name: ~str, j: json::Json) -> source {
 if !valid_pkg_name(name) {
 fail fmt!("'%s' is an invalid source name", name);
 }

 match j {
-json::dict(j) => {
+json::Dict(j) => {
 let mut url = match j.find(~"url") {
-Some(json::string(u)) => *u,
+Some(json::String(u)) => *u,
 _ => fail ~"needed 'url' field in source"
 };
 let method = match j.find(~"method") {
-Some(json::string(u)) => *u,
+Some(json::String(u)) => *u,
 _ => assume_source_method(url)
 };
 let key = match j.find(~"key") {
-Some(json::string(u)) => Some(*u),
+Some(json::String(u)) => Some(*u),
 _ => None
 };
 let keyfp = match j.find(~"keyfp") {
-Some(json::string(u)) => Some(*u),
+Some(json::String(u)) => Some(*u),
 _ => None
 };
 if method == ~"file" {
@@ -429,7 +429,7 @@ fn try_parse_sources(filename: &Path, sources: map::hashmap<~str, source>) {
 if !os::path_exists(filename) { return; }
 let c = io::read_whole_file_str(filename);
 match json::from_str(result::get(c)) {
-Ok(json::dict(j)) => {
+Ok(json::Dict(j)) => {
 for j.each |k, v| {
 sources.insert(k, parse_source(k, v));
 debug!("source: %s", k);
@@ -440,9 +440,9 @@ fn try_parse_sources(filename: &Path, sources: map::hashmap<~str, source>) {
 }
 }

-fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
+fn load_one_source_package(src: source, p: map::hashmap<~str, json::Json>) {
 let name = match p.find(~"name") {
-Some(json::string(n)) => {
+Some(json::String(n)) => {
 if !valid_pkg_name(*n) {
 warn(~"malformed source json: "
 + src.name + ~", '" + *n + ~"'"+
@@ -459,7 +459,7 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
 };

 let uuid = match p.find(~"uuid") {
-Some(json::string(n)) => {
+Some(json::String(n)) => {
 if !is_uuid(*n) {
 warn(~"malformed source json: "
 + src.name + ~", '" + *n + ~"'"+
@@ -475,7 +475,7 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
 };

 let url = match p.find(~"url") {
-Some(json::string(n)) => *n,
+Some(json::String(n)) => *n,
 _ => {
 warn(~"malformed source json: " + src.name + ~" (missing url)");
 return;
@@ -483,7 +483,7 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
 };

 let method = match p.find(~"method") {
-Some(json::string(n)) => *n,
+Some(json::String(n)) => *n,
 _ => {
 warn(~"malformed source json: "
 + src.name + ~" (missing method)");
@@ -492,16 +492,16 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
 };

 let reference = match p.find(~"ref") {
-Some(json::string(n)) => Some(*n),
+Some(json::String(n)) => Some(*n),
 _ => None
 };

 let mut tags = ~[];
 match p.find(~"tags") {
-Some(json::list(js)) => {
+Some(json::List(js)) => {
 for (*js).each |j| {
 match j {
-json::string(j) => vec::grow(tags, 1u, *j),
+json::String(j) => vec::grow(tags, 1u, *j),
 _ => ()
 }
 }
@@ -510,7 +510,7 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
 }

 let description = match p.find(~"description") {
-Some(json::string(n)) => *n,
+Some(json::String(n)) => *n,
 _ => {
 warn(~"malformed source json: " + src.name
 + ~" (missing description)");
@@ -548,8 +548,8 @@ fn load_source_info(c: cargo, src: source) {
 if !os::path_exists(&srcfile) { return; }
 let srcstr = io::read_whole_file_str(&srcfile);
 match json::from_str(result::get(srcstr)) {
-Ok(json::dict(s)) => {
-let o = parse_source(src.name, json::dict(s));
+Ok(json::Dict(s)) => {
+let o = parse_source(src.name, json::Dict(s));

 src.key = o.key;
 src.keyfp = o.keyfp;
@@ -570,10 +570,10 @@ fn load_source_packages(c: cargo, src: source) {
 if !os::path_exists(&pkgfile) { return; }
 let pkgstr = io::read_whole_file_str(&pkgfile);
 match json::from_str(result::get(pkgstr)) {
-Ok(json::list(js)) => {
+Ok(json::List(js)) => {
 for (*js).each |j| {
 match j {
-json::dict(p) => {
+json::Dict(p) => {
 load_one_source_package(src, p);
 }
 _ => {
@@ -1551,7 +1551,7 @@ fn dump_cache(c: cargo) {
 need_dir(&c.root);

 let out = c.root.push("cache.json");
-let _root = json::dict(map::str_hash());
+let _root = json::Dict(map::str_hash());

 if os::path_exists(&out) {
 copy_warn(&out, &c.root.push("cache.json.old"));
@@ -1573,24 +1573,24 @@ fn dump_sources(c: cargo) {
 match io::buffered_file_writer(&out) {
 result::Ok(writer) => {
 let hash = map::str_hash();
-let root = json::dict(hash);
+let root = json::Dict(hash);

 for c.sources.each |k, v| {
 let chash = map::str_hash();
-let child = json::dict(chash);
+let child = json::Dict(chash);

-chash.insert(~"url", json::string(@v.url));
-chash.insert(~"method", json::string(@v.method));
+chash.insert(~"url", json::String(@v.url));
+chash.insert(~"method", json::String(@v.method));

 match copy v.key {
 Some(key) => {
-chash.insert(~"key", json::string(@key));
+chash.insert(~"key", json::String(@key));
 }
 _ => ()
 }
 match copy v.keyfp {
 Some(keyfp) => {
-chash.insert(~"keyfp", json::string(@keyfp));
+chash.insert(~"keyfp", json::String(@keyfp));
 }
 _ => ()
 }
@@ -42,7 +42,7 @@ fn parse_config(args: ~[~str]) -> config {
 Err(f) => fail getopts::fail_str(f)
 };

-fn opt_path(m: getopts::matches, nm: ~str) -> Path {
+fn opt_path(m: getopts::Matches, nm: ~str) -> Path {
 Path(getopts::opt_str(m, nm))
 }

@@ -1,4 +1,5 @@
 // Dynamic arenas.
+#[warn(non_camel_case_types)];

 // Arenas are used to quickly allocate objects that share a
 // lifetime. The arena uses ~[u8] vectors as a backing store to
@@ -22,7 +23,7 @@
 // overhead when initializing plain-old-data and means we don't need
 // to waste time running the destructors of POD.

-export arena, arena_with_size;
+export Arena, arena_with_size;

 import list;
 import list::{list, cons, nil};
@@ -46,15 +47,15 @@ const tydesc_drop_glue_index: size_t = 3 as size_t;
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
 // will always stay at 0.
-type chunk = {data: @[u8], mut fill: uint, is_pod: bool};
+type Chunk = {data: @[u8], mut fill: uint, is_pod: bool};

-struct arena {
+struct Arena {
 // The head is seperated out from the list as a unbenchmarked
 // microoptimization, to avoid needing to case on the list to
 // access the head.
-priv mut head: chunk;
-priv mut pod_head: chunk;
-priv mut chunks: @list<chunk>;
+priv mut head: Chunk;
+priv mut pod_head: Chunk;
+priv mut chunks: @list<Chunk>;
 drop {
 unsafe {
 destroy_chunk(self.head);
@@ -65,19 +66,19 @@ struct arena {
 }
 }

-fn chunk(size: uint, is_pod: bool) -> chunk {
+fn chunk(size: uint, is_pod: bool) -> Chunk {
 let mut v = @[];
 unsafe { at_vec::unsafe::reserve(v, size); }
 { data: v, mut fill: 0u, is_pod: is_pod }
 }

-fn arena_with_size(initial_size: uint) -> arena {
-return arena {mut head: chunk(initial_size, false),
+fn arena_with_size(initial_size: uint) -> Arena {
+return Arena {mut head: chunk(initial_size, false),
 mut pod_head: chunk(initial_size, true),
 mut chunks: @nil};
 }

-fn arena() -> arena {
+fn Arena() -> Arena {
 arena_with_size(32u)
 }

@@ -88,7 +89,7 @@ fn round_up_to(base: uint, align: uint) -> uint {

 // Walk down a chunk, running the destructors for any objects stored
 // in it.
-unsafe fn destroy_chunk(chunk: chunk) {
+unsafe fn destroy_chunk(chunk: Chunk) {
 let mut idx = 0;
 let buf = vec::unsafe::to_ptr_slice(chunk.data);
 let fill = chunk.fill;
@@ -129,7 +130,7 @@ unsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TypeDesc, bool) {
 }

 // The duplication between the POD and non-POD functions is annoying.
-impl &arena {
+impl &Arena {
 // Functions for the POD part of the arena
 fn alloc_pod_grow(n_bytes: uint, align: uint) -> *u8 {
 // Allocate a new chunk.
@@ -238,7 +239,7 @@ impl &arena {

 #[test]
 fn test_arena_destructors() {
-let arena = arena::arena();
+let arena = arena::Arena();
 for uint::range(0, 10) |i| {
 // Arena allocate something with drop glue to make sure it
 // doesn't leak.
@@ -251,7 +252,7 @@ fn test_arena_destructors() {

 #[test] #[should_fail] #[ignore(cfg(windows))]
 fn test_arena_destructors_fail() {
-let arena = arena::arena();
+let arena = arena::Arena();
 // Put some stuff in the arena.
 for uint::range(0, 10) |i| {
 // Arena allocate something with drop glue to make sure it
@@ -1,10 +1,11 @@
+#[deny(non_camel_case_types)];
 import io::Reader;

-trait to_base64 {
+trait ToBase64 {
 fn to_base64() -> ~str;
 }

-impl ~[u8]: to_base64 {
+impl ~[u8]: ToBase64 {
 fn to_base64() -> ~str {
 let chars = str::chars(
 ~"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
@@ -55,17 +56,17 @@ impl ~[u8]: to_base64 {
 }
 }

-impl ~str: to_base64 {
+impl ~str: ToBase64 {
 fn to_base64() -> ~str {
 str::to_bytes(self).to_base64()
 }
 }

-trait from_base64 {
+trait FromBase64 {
 fn from_base64() -> ~[u8];
 }

-impl ~[u8]: from_base64 {
+impl ~[u8]: FromBase64 {
 fn from_base64() -> ~[u8] {
 if self.len() % 4u != 0u { fail ~"invalid base64 length"; }

@@ -127,7 +128,7 @@ impl ~[u8]: from_base64 {
 }
 }

-impl ~str: from_base64 {
+impl ~str: FromBase64 {
 fn from_base64() -> ~[u8] {
 str::to_bytes(self).from_base64()
 }
@ -1,16 +1,21 @@
|
||||
#[deny(non_camel_case_types)];
|
||||
import vec::{to_mut, from_elem};
|
||||
|
||||
export bitv;
|
||||
export Bitv;
|
||||
export union;
|
||||
export Union;
|
||||
export intersect;
|
||||
export Intersect;
|
||||
export assign;
|
||||
export Assign;
|
||||
export difference;
|
||||
export Difference;
|
||||
export clone;
|
||||
export get;
|
||||
export equal;
|
||||
export clear;
|
||||
export set_all;
|
||||
export invert;
|
||||
export difference;
|
||||
export set;
|
||||
export is_true;
|
||||
export is_false;
|
||||
@ -25,7 +30,7 @@ fn small_mask(nbits: uint) -> u32 {
|
||||
(1 << nbits) - 1
|
||||
}
|
||||
|
||||
struct small_bitv {
|
||||
struct SmallBitv {
|
||||
/// only the lowest nbits of this value are used. the rest is undefined.
|
||||
let mut bits: u32;
|
||||
new(bits: u32) { self.bits = bits; }
|
||||
@ -41,19 +46,19 @@ struct small_bitv {
|
||||
}
|
||||
}
|
||||
#[inline(always)]
|
||||
fn union(s: &small_bitv, nbits: uint) -> bool {
|
||||
fn union(s: &SmallBitv, nbits: uint) -> bool {
|
||||
self.bits_op(s.bits, nbits, |u1, u2| u1 | u2)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn intersect(s: &small_bitv, nbits: uint) -> bool {
|
||||
fn intersect(s: &SmallBitv, nbits: uint) -> bool {
|
||||
self.bits_op(s.bits, nbits, |u1, u2| u1 & u2)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn become(s: &small_bitv, nbits: uint) -> bool {
|
||||
fn become(s: &SmallBitv, nbits: uint) -> bool {
|
||||
self.bits_op(s.bits, nbits, |_u1, u2| u2)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn difference(s: &small_bitv, nbits: uint) -> bool {
|
||||
fn difference(s: &SmallBitv, nbits: uint) -> bool {
|
||||
self.bits_op(s.bits, nbits, |u1, u2| u1 ^ u2)
|
||||
}
|
||||
#[inline(always)]
|
||||
@ -70,7 +75,7 @@ struct small_bitv {
|
||||
}
|
||||
}
|
||||
#[inline(always)]
|
||||
fn equals(b: &small_bitv, nbits: uint) -> bool {
|
||||
fn equals(b: &SmallBitv, nbits: uint) -> bool {
|
||||
let mask = small_mask(nbits);
|
||||
mask & self.bits == mask & b.bits
|
||||
}
|
||||
@ -106,7 +111,7 @@ fn big_mask(nbits: uint, elem: uint) -> uint {
|
||||
}
|
||||
}
|
||||
|
||||
struct big_bitv {
|
||||
struct BigBitv {
|
||||
// only mut b/c of clone and lack of other constructor
|
||||
let mut storage: ~[mut uint];
|
||||
new(-storage: ~[mut uint]) {
|
||||
@ -114,7 +119,7 @@ struct big_bitv {
|
||||
}
|
||||
priv {
|
||||
#[inline(always)]
|
||||
fn process(b: &big_bitv, nbits: uint, op: fn(uint, uint) -> uint)
|
||||
fn process(b: &BigBitv, nbits: uint, op: fn(uint, uint) -> uint)
|
||||
-> bool {
|
||||
let len = b.storage.len();
|
||||
assert (self.storage.len() == len);
|
||||
@ -145,19 +150,19 @@ struct big_bitv {
|
||||
#[inline(always)]
|
||||
fn invert() { for self.each_storage() |w| { w = !w } }
|
||||
#[inline(always)]
|
||||
fn union(b: &big_bitv, nbits: uint) -> bool {
|
||||
fn union(b: &BigBitv, nbits: uint) -> bool {
|
||||
self.process(b, nbits, lor)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn intersect(b: &big_bitv, nbits: uint) -> bool {
|
||||
fn intersect(b: &BigBitv, nbits: uint) -> bool {
|
||||
self.process(b, nbits, land)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn become(b: &big_bitv, nbits: uint) -> bool {
|
||||
fn become(b: &BigBitv, nbits: uint) -> bool {
|
||||
self.process(b, nbits, right)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn difference(b: &big_bitv, nbits: uint) -> bool {
|
||||
fn difference(b: &BigBitv, nbits: uint) -> bool {
|
||||
self.invert();
|
||||
let b = self.intersect(b, nbits);
|
||||
self.invert();
|
||||
@ -179,7 +184,7 @@ struct big_bitv {
|
||||
else { self.storage[w] & !flag };
|
||||
}
|
||||
#[inline(always)]
|
||||
fn equals(b: &big_bitv, nbits: uint) -> bool {
|
||||
fn equals(b: &BigBitv, nbits: uint) -> bool {
|
||||
let len = b.storage.len();
|
||||
for uint::iterate(0, len) |i| {
|
||||
let mask = big_mask(nbits, i);
|
||||
@ -190,26 +195,26 @@ struct big_bitv {
|
||||
}
|
||||
}
|
||||
|
||||
enum a_bitv { big(~big_bitv), small(~small_bitv) }
|
||||
enum BitvVariant { Big(~BigBitv), Small(~SmallBitv) }
|
||||
|
||||
enum op {union, intersect, assign, difference}
|
||||
enum Op {Union, Intersect, Assign, Difference}
|
||||
|
||||
// The bitvector type
|
||||
struct bitv {
|
||||
let rep: a_bitv;
|
||||
struct Bitv {
|
||||
let rep: BitvVariant;
|
||||
let nbits: uint;
|
||||
|
||||
new(nbits: uint, init: bool) {
|
||||
self.nbits = nbits;
|
||||
if nbits <= 32 {
|
||||
self.rep = small(~small_bitv(if init {!0} else {0}));
|
||||
self.rep = Small(~SmallBitv(if init {!0} else {0}));
|
||||
}
|
||||
else {
|
||||
let nelems = nbits/uint_bits +
|
||||
if nbits % uint_bits == 0 {0} else {1};
|
||||
let elem = if init {!0} else {0};
|
||||
let s = to_mut(from_elem(nelems, elem));
|
||||
self.rep = big(~big_bitv(s));
|
||||
self.rep = Big(~BigBitv(s));
|
||||
};
|
||||
}
|
||||
|
||||
@ -219,27 +224,27 @@ struct bitv {
|
||||
different sizes";
|
||||
}
|
||||
#[inline(always)]
|
||||
fn do_op(op: op, other: &bitv) -> bool {
|
||||
fn do_op(op: Op, other: &Bitv) -> bool {
|
||||
if self.nbits != other.nbits {
|
||||
self.die();
|
||||
}
|
||||
match self.rep {
|
||||
small(s) => match other.rep {
|
||||
small(s1) => match op {
|
||||
union => s.union(s1, self.nbits),
|
||||
intersect => s.intersect(s1, self.nbits),
|
||||
assign => s.become(s1, self.nbits),
|
||||
difference => s.difference(s1, self.nbits)
|
||||
Small(s) => match other.rep {
|
||||
Small(s1) => match op {
|
||||
Union => s.union(s1, self.nbits),
|
||||
Intersect => s.intersect(s1, self.nbits),
|
||||
Assign => s.become(s1, self.nbits),
|
||||
Difference => s.difference(s1, self.nbits)
|
||||
},
|
||||
big(_) => self.die()
|
||||
Big(_) => self.die()
|
||||
},
|
||||
big(s) => match other.rep {
|
||||
small(_) => self.die(),
|
||||
big(s1) => match op {
|
||||
union => s.union(s1, self.nbits),
|
||||
intersect => s.intersect(s1, self.nbits),
|
||||
assign => s.become(s1, self.nbits),
|
||||
difference => s.difference(s1, self.nbits)
|
||||
Big(s) => match other.rep {
|
||||
Small(_) => self.die(),
|
||||
Big(s1) => match op {
|
||||
Union => s.union(s1, self.nbits),
|
||||
Intersect => s.intersect(s1, self.nbits),
|
||||
Assign => s.become(s1, self.nbits),
|
||||
Difference => s.difference(s1, self.nbits)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -253,7 +258,7 @@ struct bitv {
|
||||
* the same length. Returns 'true' if `self` changed.
|
||||
*/
|
||||
#[inline(always)]
|
||||
fn union(v1: &bitv) -> bool { self.do_op(union, v1) }
|
||||
fn union(v1: &Bitv) -> bool { self.do_op(Union, v1) }
|
||||
|
||||
/**
|
||||
* Calculates the intersection of two bitvectors
|
||||
@ -262,7 +267,7 @@ struct bitv {
|
||||
* the same length. Returns 'true' if `self` changed.
|
||||
*/
|
||||
#[inline(always)]
|
||||
fn intersect(v1: &bitv) -> bool { self.do_op(intersect, v1) }
|
||||
fn intersect(v1: &Bitv) -> bool { self.do_op(Intersect, v1) }
|
||||
|
||||
/**
|
||||
* Assigns the value of `v1` to `self`
|
||||
@ -271,20 +276,20 @@ struct bitv {
|
||||
* changed
|
||||
*/
|
||||
#[inline(always)]
|
||||
fn assign(v: &bitv) -> bool { self.do_op(assign, v) }
|
||||
fn assign(v: &Bitv) -> bool { self.do_op(Assign, v) }
|
||||
|
||||
/// Makes a copy of a bitvector
|
||||
#[inline(always)]
|
||||
fn clone() -> ~bitv {
|
||||
fn clone() -> ~Bitv {
|
||||
~match self.rep {
|
||||
small(b) => {
|
||||
bitv{nbits: self.nbits, rep: small(~small_bitv{bits: b.bits})}
|
||||
Small(b) => {
|
||||
Bitv{nbits: self.nbits, rep: Small(~SmallBitv{bits: b.bits})}
|
||||
}
|
||||
big(b) => {
|
||||
Big(b) => {
|
||||
let st = to_mut(from_elem(self.nbits / uint_bits + 1, 0));
|
||||
let len = st.len();
|
||||
for uint::range(0, len) |i| { st[i] = b.storage[i]; };
|
||||
bitv{nbits: self.nbits, rep: big(~big_bitv{storage: st})}
|
||||
Bitv{nbits: self.nbits, rep: Big(~BigBitv{storage: st})}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -294,8 +299,8 @@ struct bitv {
|
||||
pure fn get(i: uint) -> bool {
|
||||
assert (i < self.nbits);
|
||||
match self.rep {
|
||||
big(b) => b.get(i),
|
||||
small(s) => s.get(i)
|
||||
Big(b) => b.get(i),
|
||||
Small(s) => s.get(i)
|
||||
}
|
||||
}
|
||||
|
||||
@ -308,8 +313,8 @@ struct bitv {
|
||||
fn set(i: uint, x: bool) {
|
||||
assert (i < self.nbits);
|
||||
match self.rep {
|
||||
big(b) => b.set(i, x),
|
||||
small(s) => s.set(i, x)
|
||||
Big(b) => b.set(i, x),
|
||||
Small(s) => s.set(i, x)
|
||||
}
|
||||
}
|
||||
|
||||
@ -320,16 +325,16 @@ struct bitv {
|
||||
* contain identical elements.
|
||||
*/
|
||||
#[inline(always)]
|
||||
fn equal(v1: bitv) -> bool {
|
||||
fn equal(v1: Bitv) -> bool {
|
||||
if self.nbits != v1.nbits { return false; }
|
||||
match self.rep {
|
||||
small(b) => match v1.rep {
|
||||
small(b1) => b.equals(b1, self.nbits),
|
||||
Small(b) => match v1.rep {
|
||||
Small(b1) => b.equals(b1, self.nbits),
|
||||
_ => false
|
||||
},
|
||||
big(s) => match v1.rep {
|
||||
big(s1) => s.equals(s1, self.nbits),
|
||||
small(_) => return false
|
||||
Big(s) => match v1.rep {
|
||||
Big(s1) => s.equals(s1, self.nbits),
|
||||
Small(_) => return false
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -338,8 +343,8 @@ struct bitv {
|
||||
#[inline(always)]
|
||||
fn clear() {
|
||||
match self.rep {
|
||||
small(b) => b.clear(),
|
||||
big(s) => for s.each_storage() |w| { w = 0u }
|
||||
Small(b) => b.clear(),
|
||||
Big(s) => for s.each_storage() |w| { w = 0u }
|
||||
}
|
||||
}
|
||||
|
||||
@ -347,16 +352,16 @@ struct bitv {
|
||||
#[inline(always)]
|
||||
fn set_all() {
|
||||
match self.rep {
|
||||
small(b) => b.set_all(),
|
||||
big(s) => for s.each_storage() |w| { w = !0u } }
|
||||
Small(b) => b.set_all(),
|
||||
Big(s) => for s.each_storage() |w| { w = !0u } }
|
||||
}
|
||||
|
||||
/// Invert all bits
|
||||
#[inline(always)]
|
||||
fn invert() {
|
||||
match self.rep {
|
||||
small(b) => b.invert(),
|
||||
big(s) => for s.each_storage() |w| { w = !w } }
|
||||
Small(b) => b.invert(),
|
||||
Big(s) => for s.each_storage() |w| { w = !w } }
|
||||
}
|
||||
|
||||
/**
|
||||
@ -368,13 +373,13 @@ struct bitv {
|
||||
* Returns `true` if `v0` was changed.
|
||||
*/
|
||||
#[inline(always)]
|
||||
fn difference(v: ~bitv) -> bool { self.do_op(difference, v) }
|
||||
fn difference(v: ~Bitv) -> bool { self.do_op(Difference, v) }
|
||||
|
||||
/// Returns true if all bits are 1
|
||||
#[inline(always)]
|
||||
fn is_true() -> bool {
|
||||
match self.rep {
|
||||
small(b) => b.is_true(self.nbits),
|
||||
Small(b) => b.is_true(self.nbits),
|
||||
_ => {
|
||||
for self.each() |i| { if !i { return false; } }
|
||||
true
|
||||
@ -395,8 +400,8 @@ struct bitv {
|
||||
|
||||
fn is_false() -> bool {
|
||||
match self.rep {
|
||||
small(b) => b.is_false(self.nbits),
|
||||
big(_) => {
|
||||
Small(b) => b.is_false(self.nbits),
|
||||
Big(_) => {
|
||||
for self.each() |i| { if i { return false; } }
|
||||
true
|
||||
}
|
||||
@ -465,7 +470,7 @@ pure fn land(w0: uint, w1: uint) -> uint { return w0 & w1; }
|
||||
|
||||
pure fn right(_w0: uint, w1: uint) -> uint { return w1; }
|
||||
|
||||
impl bitv: ops::index<uint,bool> {
|
||||
impl Bitv: ops::index<uint,bool> {
|
||||
pure fn index(&&i: uint) -> bool {
|
||||
self.get(i)
|
||||
}
|
||||
@ -475,10 +480,10 @@ impl bitv: ops::index<uint,bool> {
|
||||
mod tests {
|
||||
#[test]
|
||||
fn test_to_str() {
|
||||
let zerolen = bitv(0u, false);
|
||||
let zerolen = Bitv(0u, false);
|
||||
assert zerolen.to_str() == ~"";
|
||||
|
||||
let eightbits = bitv(8u, false);
|
||||
let eightbits = Bitv(8u, false);
|
||||
assert eightbits.to_str() == ~"00000000";
|
||||
}
|
||||
|
||||
@ -486,7 +491,7 @@ mod tests {
|
||||
fn test_0_elements() {
|
||||
let mut act;
|
||||
let mut exp;
|
||||
act = bitv(0u, false);
|
||||
act = Bitv(0u, false);
|
||||
exp = vec::from_elem::<uint>(0u, 0u);
|
||||
assert act.eq_vec(exp);
|
||||
}
|
||||
@ -494,15 +499,15 @@ mod tests {
|
||||
#[test]
|
||||
fn test_1_element() {
|
||||
let mut act;
|
||||
act = bitv(1u, false);
|
||||
act = Bitv(1u, false);
|
||||
assert act.eq_vec(~[0u]);
|
||||
act = bitv(1u, true);
|
||||
act = Bitv(1u, true);
|
||||
assert act.eq_vec(~[1u]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_elements() {
|
||||
let b = bitv::bitv(2, false);
|
||||
let b = bitv::Bitv(2, false);
|
||||
b.set(0, true);
|
||||
b.set(1, false);
|
||||
assert b.to_str() == ~"10";
|
||||
@ -513,15 +518,15 @@ mod tests {
|
||||
let mut act;
|
||||
// all 0
|
||||
|
||||
act = bitv(10u, false);
|
||||
act = Bitv(10u, false);
|
||||
assert (act.eq_vec(~[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// all 1
|
||||
|
||||
act = bitv(10u, true);
|
||||
act = Bitv(10u, true);
|
||||
assert (act.eq_vec(~[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(10u, false);
|
||||
act = Bitv(10u, false);
|
||||
act.set(0u, true);
|
||||
act.set(1u, true);
|
||||
act.set(2u, true);
|
||||
@ -530,7 +535,7 @@ mod tests {
|
||||
assert (act.eq_vec(~[1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(10u, false);
|
||||
act = Bitv(10u, false);
|
||||
act.set(5u, true);
|
||||
act.set(6u, true);
|
||||
act.set(7u, true);
|
||||
@ -539,7 +544,7 @@ mod tests {
|
||||
assert (act.eq_vec(~[0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(10u, false);
|
||||
act = Bitv(10u, false);
|
||||
act.set(0u, true);
|
||||
act.set(3u, true);
|
||||
act.set(6u, true);
|
||||
@ -552,21 +557,21 @@ mod tests {
|
||||
let mut act;
|
||||
// all 0
|
||||
|
||||
act = bitv(31u, false);
|
||||
act = Bitv(31u, false);
|
||||
assert (act.eq_vec(
|
||||
~[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
|
||||
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
|
||||
0u, 0u, 0u, 0u, 0u]));
|
||||
// all 1
|
||||
|
||||
act = bitv(31u, true);
|
||||
act = Bitv(31u, true);
|
||||
assert (act.eq_vec(
|
||||
~[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
|
||||
1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
|
||||
1u, 1u, 1u, 1u, 1u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(31u, false);
|
||||
act = Bitv(31u, false);
|
||||
act.set(0u, true);
|
||||
act.set(1u, true);
|
||||
act.set(2u, true);
|
||||
@ -581,7 +586,7 @@ mod tests {
|
||||
0u, 0u, 0u, 0u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(31u, false);
|
||||
act = Bitv(31u, false);
|
||||
act.set(16u, true);
|
||||
act.set(17u, true);
|
||||
act.set(18u, true);
|
||||
@ -596,7 +601,7 @@ mod tests {
|
||||
0u, 0u, 0u, 0u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(31u, false);
|
||||
act = Bitv(31u, false);
|
||||
act.set(24u, true);
|
||||
act.set(25u, true);
|
||||
act.set(26u, true);
|
||||
@ -610,7 +615,7 @@ mod tests {
|
||||
1u, 1u, 1u, 1u, 1u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(31u, false);
|
||||
act = Bitv(31u, false);
|
||||
act.set(3u, true);
|
||||
act.set(17u, true);
|
||||
act.set(30u, true);
|
||||
@ -625,21 +630,21 @@ mod tests {
|
||||
let mut act;
|
||||
// all 0
|
||||
|
||||
act = bitv(32u, false);
|
||||
act = Bitv(32u, false);
|
||||
assert (act.eq_vec(
|
||||
~[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
|
||||
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
|
||||
0u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// all 1
|
||||
|
||||
act = bitv(32u, true);
|
||||
act = Bitv(32u, true);
|
||||
assert (act.eq_vec(
|
||||
~[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
|
||||
1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
|
||||
1u, 1u, 1u, 1u, 1u, 1u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(32u, false);
|
||||
act = Bitv(32u, false);
|
||||
act.set(0u, true);
|
||||
act.set(1u, true);
|
||||
act.set(2u, true);
|
||||
@ -654,7 +659,7 @@ mod tests {
|
||||
0u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(32u, false);
|
||||
act = Bitv(32u, false);
|
||||
act.set(16u, true);
|
||||
act.set(17u, true);
|
||||
act.set(18u, true);
|
||||
@ -669,7 +674,7 @@ mod tests {
|
||||
0u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(32u, false);
|
||||
act = Bitv(32u, false);
|
||||
act.set(24u, true);
|
||||
act.set(25u, true);
|
||||
act.set(26u, true);
|
||||
@ -684,7 +689,7 @@ mod tests {
|
||||
1u, 1u, 1u, 1u, 1u, 1u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(32u, false);
|
||||
act = Bitv(32u, false);
|
||||
act.set(3u, true);
|
||||
act.set(17u, true);
|
||||
act.set(30u, true);
|
||||
@ -700,21 +705,21 @@ mod tests {
|
||||
let mut act;
|
||||
// all 0
|
||||
|
||||
act = bitv(33u, false);
|
||||
act = Bitv(33u, false);
|
||||
assert (act.eq_vec(
|
||||
~[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
|
||||
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
|
||||
0u, 0u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// all 1
|
||||
|
||||
act = bitv(33u, true);
|
||||
act = Bitv(33u, true);
|
||||
assert (act.eq_vec(
|
||||
~[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
|
||||
1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
|
||||
1u, 1u, 1u, 1u, 1u, 1u, 1u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(33u, false);
|
||||
act = Bitv(33u, false);
|
||||
act.set(0u, true);
|
||||
act.set(1u, true);
|
||||
act.set(2u, true);
|
||||
@ -729,7 +734,7 @@ mod tests {
|
||||
0u, 0u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(33u, false);
|
||||
act = Bitv(33u, false);
|
||||
act.set(16u, true);
|
||||
act.set(17u, true);
|
||||
act.set(18u, true);
|
||||
@ -744,7 +749,7 @@ mod tests {
|
||||
0u, 0u, 0u, 0u, 0u, 0u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(33u, false);
|
||||
act = Bitv(33u, false);
|
||||
act.set(24u, true);
|
||||
act.set(25u, true);
|
||||
act.set(26u, true);
|
||||
@ -759,7 +764,7 @@ mod tests {
|
||||
1u, 1u, 1u, 1u, 1u, 1u, 0u]));
|
||||
// mixed
|
||||
|
||||
act = bitv(33u, false);
|
||||
act = Bitv(33u, false);
|
||||
act.set(3u, true);
|
||||
act.set(17u, true);
|
||||
act.set(30u, true);
|
||||
@ -773,24 +778,24 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_equal_differing_sizes() {
|
||||
let v0 = bitv(10u, false);
|
||||
let v1 = bitv(11u, false);
|
||||
let v0 = Bitv(10u, false);
|
||||
let v1 = Bitv(11u, false);
|
||||
assert !v0.equal(v1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_equal_greatly_differing_sizes() {
|
||||
let v0 = bitv(10u, false);
|
||||
let v1 = bitv(110u, false);
|
||||
let v0 = Bitv(10u, false);
|
||||
let v1 = Bitv(110u, false);
|
||||
assert !v0.equal(v1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_equal_sneaky_small() {
|
||||
let a = bitv::bitv(1, false);
|
||||
let a = bitv::Bitv(1, false);
|
||||
a.set(0, true);
|
||||
|
||||
let b = bitv::bitv(1, true);
|
||||
let b = bitv::Bitv(1, true);
|
||||
b.set(0, true);
|
||||
|
||||
assert a.equal(b);
|
||||
@ -798,12 +803,12 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_equal_sneaky_big() {
|
||||
let a = bitv::bitv(100, false);
|
||||
let a = bitv::Bitv(100, false);
|
||||
for uint::range(0, 100) |i| {
|
||||
a.set(i, true);
|
||||
}
|
||||
|
||||
let b = bitv::bitv(100, true);
|
||||
let b = bitv::Bitv(100, true);
|
||||
for uint::range(0, 100) |i| {
|
||||
b.set(i, true);
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
#[deny(non_camel_case_types)];
|
||||
/*!
|
||||
* Library to interface with chunks of memory allocated in C.
|
||||
*
|
||||
@ -19,14 +20,14 @@
|
||||
* obvious reason (they act on a pointer that cannot be checked inside the
|
||||
* method), but the elimination form is somewhat more subtle in its unsafety.
|
||||
* By using a pointer taken from a c_vec::t without keeping a reference to the
|
||||
* c_vec::t itself around, the c_vec could be garbage collected, and the
|
||||
* c_vec::t itself around, the CVec could be garbage collected, and the
|
||||
* memory within could be destroyed. There are legitimate uses for the
|
||||
* pointer elimination form -- for instance, to pass memory back into C -- but
|
||||
* great care must be taken to ensure that a reference to the c_vec::t is
|
||||
* still held if needed.
|
||||
*/
|
||||
|
||||
export c_vec;
|
||||
export CVec;
|
||||
export c_vec, c_vec_with_dtor;
|
||||
export get, set;
|
||||
export len;
|
||||
@ -38,11 +39,11 @@ export ptr;
|
||||
* Wrapped in a enum for opacity; FIXME #818 when it is possible to have
|
||||
* truly opaque types, this should be revisited.
|
||||
*/
|
||||
enum c_vec<T> {
|
||||
c_vec_({ base: *mut T, len: uint, rsrc: @dtor_res})
|
||||
enum CVec<T> {
|
||||
CVecCtor({ base: *mut T, len: uint, rsrc: @DtorRes})
|
||||
}
|
||||
|
||||
struct dtor_res {
|
||||
struct DtorRes {
|
||||
let dtor: Option<fn@()>;
|
||||
new(dtor: Option<fn@()>) { self.dtor = dtor; }
|
||||
drop {
|
||||
@ -58,23 +59,23 @@ struct dtor_res {
|
||||
*/
|
||||
|
||||
/**
|
||||
* Create a `c_vec` from a foreign buffer with a given length.
|
||||
* Create a `CVec` from a foreign buffer with a given length.
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * base - A foreign pointer to a buffer
|
||||
* * len - The number of elements in the buffer
|
||||
*/
|
||||
unsafe fn c_vec<T>(base: *mut T, len: uint) -> c_vec<T> {
|
||||
return c_vec_({
|
||||
unsafe fn c_vec<T>(base: *mut T, len: uint) -> CVec<T> {
|
||||
return CVecCtor({
|
||||
base: base,
|
||||
len: len,
|
||||
rsrc: @dtor_res(option::None)
|
||||
rsrc: @DtorRes(option::None)
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a `c_vec` from a foreign buffer, with a given length,
|
||||
* Create a `CVec` from a foreign buffer, with a given length,
|
||||
* and a function to run upon destruction.
|
||||
*
|
||||
* # Arguments
|
||||
@ -85,11 +86,11 @@ unsafe fn c_vec<T>(base: *mut T, len: uint) -> c_vec<T> {
|
||||
* for freeing the buffer, etc.
|
||||
*/
|
||||
unsafe fn c_vec_with_dtor<T>(base: *mut T, len: uint, dtor: fn@())
|
||||
-> c_vec<T> {
|
||||
return c_vec_({
|
||||
-> CVec<T> {
|
||||
return CVecCtor({
|
||||
base: base,
|
||||
len: len,
|
||||
rsrc: @dtor_res(option::Some(dtor))
|
||||
rsrc: @DtorRes(option::Some(dtor))
|
||||
});
|
||||
}
|
||||
|
||||
@ -102,7 +103,7 @@ unsafe fn c_vec_with_dtor<T>(base: *mut T, len: uint, dtor: fn@())
|
||||
*
|
||||
* Fails if `ofs` is greater or equal to the length of the vector
|
||||
*/
|
||||
fn get<T: copy>(t: c_vec<T>, ofs: uint) -> T {
|
||||
fn get<T: copy>(t: CVec<T>, ofs: uint) -> T {
|
||||
assert ofs < len(t);
|
||||
return unsafe { *ptr::mut_offset((*t).base, ofs) };
|
||||
}
|
||||
@ -112,7 +113,7 @@ fn get<T: copy>(t: c_vec<T>, ofs: uint) -> T {
|
||||
*
|
||||
* Fails if `ofs` is greater or equal to the length of the vector
|
||||
*/
|
||||
fn set<T: copy>(t: c_vec<T>, ofs: uint, v: T) {
|
||||
fn set<T: copy>(t: CVec<T>, ofs: uint, v: T) {
|
||||
assert ofs < len(t);
|
||||
unsafe { *ptr::mut_offset((*t).base, ofs) = v };
|
||||
}
|
||||
@ -122,12 +123,12 @@ fn set<T: copy>(t: c_vec<T>, ofs: uint, v: T) {
|
||||
*/
|
||||
|
||||
/// Returns the length of the vector
|
||||
fn len<T>(t: c_vec<T>) -> uint {
|
||||
fn len<T>(t: CVec<T>) -> uint {
|
||||
return (*t).len;
|
||||
}
|
||||
|
||||
/// Returns a pointer to the first element of the vector
|
||||
unsafe fn ptr<T>(t: c_vec<T>) -> *mut T {
|
||||
unsafe fn ptr<T>(t: CVec<T>) -> *mut T {
|
||||
return (*t).base;
|
||||
}
|
||||
|
||||
@ -135,7 +136,7 @@ unsafe fn ptr<T>(t: c_vec<T>) -> *mut T {
|
||||
mod tests {
|
||||
import libc::*;
|
||||
|
||||
fn malloc(n: size_t) -> c_vec<u8> {
|
||||
fn malloc(n: size_t) -> CVec<u8> {
|
||||
let mem = libc::malloc(n);
|
||||
|
||||
assert mem as int != 0;
|
||||
|
@@ -1,24 +1,25 @@
+#[deny(non_camel_case_types)];
 /// Additional general-purpose comparison functionality.

 const fuzzy_epsilon: float = 1.0e-6;

-trait fuzzy_eq {
+trait FuzzyEq {
 pure fn fuzzy_eq(&&other: self) -> bool;
 }

-impl float: fuzzy_eq {
+impl float: FuzzyEq {
 pure fn fuzzy_eq(&&other: float) -> bool {
 return float::abs(self - other) < fuzzy_epsilon;
 }
 }

-impl f32: fuzzy_eq {
+impl f32: FuzzyEq {
 pure fn fuzzy_eq(&&other: f32) -> bool {
 return f32::abs(self - other) < (fuzzy_epsilon as f32);
 }
 }

-impl f64: fuzzy_eq {
+impl f64: FuzzyEq {
 pure fn fuzzy_eq(&&other: f64) -> bool {
 return f64::abs(self - other) < (fuzzy_epsilon as f64);
 }
@@ -1,3 +1,4 @@
+#[deny(non_camel_case_types)];
 //! Unsafe debugging functions for inspecting values.

 import unsafe::reinterpret_cast;
@ -1,9 +1,11 @@
|
||||
#[deny(non_camel_case_types)];
|
||||
|
||||
//! A deque. Untested as of yet. Likely buggy
|
||||
|
||||
import option::{Some, None};
|
||||
import dvec::DVec;
|
||||
|
||||
trait t<T> {
|
||||
trait Deque<T> {
|
||||
fn size() -> uint;
|
||||
fn add_front(T);
|
||||
fn add_back(T);
|
||||
@ -16,16 +18,16 @@ trait t<T> {
|
||||
|
||||
// FIXME (#2343) eventually, a proper datatype plus an exported impl would
|
||||
// be preferrable.
|
||||
fn create<T: copy>() -> t<T> {
|
||||
type cell<T> = Option<T>;
|
||||
fn create<T: copy>() -> Deque<T> {
|
||||
type Cell<T> = Option<T>;
|
||||
|
||||
let initial_capacity: uint = 32u; // 2^5
|
||||
/**
|
||||
* Grow is only called on full elts, so nelts is also len(elts), unlike
|
||||
* elsewhere.
|
||||
*/
|
||||
fn grow<T: copy>(nelts: uint, lo: uint, -elts: ~[mut cell<T>]) ->
|
||||
~[mut cell<T>] {
|
||||
fn grow<T: copy>(nelts: uint, lo: uint, -elts: ~[mut Cell<T>]) ->
|
||||
~[mut Cell<T>] {
|
||||
assert (nelts == vec::len(elts));
|
||||
let mut rv = ~[mut];
|
||||
|
||||
@ -40,16 +42,16 @@ fn create<T: copy>() -> t<T> {
|
||||
|
||||
return rv;
|
||||
}
|
||||
fn get<T: copy>(elts: DVec<cell<T>>, i: uint) -> T {
|
||||
fn get<T: copy>(elts: DVec<Cell<T>>, i: uint) -> T {
|
||||
match elts.get_elt(i) { Some(t) => t, _ => fail }
|
||||
}
|
||||
|
||||
type repr<T> = {mut nelts: uint,
|
||||
type Repr<T> = {mut nelts: uint,
|
||||
mut lo: uint,
|
||||
mut hi: uint,
|
||||
elts: DVec<cell<T>>};
|
||||
elts: DVec<Cell<T>>};
|
||||
|
||||
impl <T: copy> repr<T>: t<T> {
|
||||
impl <T: copy> Repr<T>: Deque<T> {
|
||||
fn size() -> uint { return self.nelts; }
|
||||
fn add_front(t: T) {
|
||||
let oldlo: uint = self.lo;
|
||||
@ -102,7 +104,7 @@ fn create<T: copy>() -> t<T> {
|
||||
}
|
||||
}
|
||||
|
||||
let repr: repr<T> = {
|
||||
let repr: Repr<T> = {
|
||||
mut nelts: 0u,
|
||||
mut lo: 0u,
|
||||
mut hi: 0u,
|
||||
@ -111,14 +113,14 @@ fn create<T: copy>() -> t<T> {
|
||||
vec::to_mut(
|
||||
vec::from_elem(initial_capacity, None)))
|
||||
};
|
||||
repr as t::<T>
|
||||
repr as Deque::<T>
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[test]
|
||||
fn test_simple() {
|
||||
let d: deque::t<int> = deque::create::<int>();
|
||||
let d: deque::Deque<int> = deque::create::<int>();
|
||||
assert (d.size() == 0u);
|
||||
d.add_front(17);
|
||||
d.add_front(42);
|
||||
@ -162,7 +164,7 @@ mod tests {
|
||||
}
|
||||
|
||||
fn test_boxes(a: @int, b: @int, c: @int, d: @int) {
|
||||
let deq: deque::t<@int> = deque::create::<@int>();
|
||||
let deq: deque::Deque<@int> = deque::create::<@int>();
|
||||
assert (deq.size() == 0u);
|
||||
deq.add_front(a);
|
||||
deq.add_front(b);
|
||||
@ -191,12 +193,12 @@ mod tests {
|
||||
assert (deq.get(3) == d);
|
||||
}
|
||||
|
||||
type eqfn<T> = fn@(T, T) -> bool;
|
||||
type EqFn<T> = fn@(T, T) -> bool;
|
||||
|
||||
fn test_parameterized<T: copy owned>(
|
||||
e: eqfn<T>, a: T, b: T, c: T, d: T) {
|
||||
e: EqFn<T>, a: T, b: T, c: T, d: T) {
|
||||
|
||||
let deq: deque::t<T> = deque::create::<T>();
|
||||
let deq: deque::Deque<T> = deque::create::<T>();
|
||||
assert (deq.size() == 0u);
|
||||
deq.add_front(a);
|
||||
deq.add_front(b);
|
||||
@ -225,85 +227,85 @@ mod tests {
|
||||
assert (e(deq.get(3), d));
|
||||
}
|
||||
|
||||
enum taggy { one(int), two(int, int), three(int, int, int), }
|
||||
enum Taggy { One(int), Two(int, int), Three(int, int, int), }
|
||||
|
||||
enum taggypar<T> {
|
||||
onepar(int), twopar(int, int), threepar(int, int, int),
|
||||
enum Taggypar<T> {
|
||||
Onepar(int), Twopar(int, int), Threepar(int, int, int),
|
||||
}
|
||||
|
||||
type reccy = {x: int, y: int, t: taggy};
|
||||
type RecCy = {x: int, y: int, t: Taggy};
|
||||
|
||||
#[test]
|
||||
fn test() {
|
||||
fn inteq(&&a: int, &&b: int) -> bool { return a == b; }
|
||||
fn intboxeq(&&a: @int, &&b: @int) -> bool { return a == b; }
|
||||
fn taggyeq(a: taggy, b: taggy) -> bool {
|
||||
fn taggyeq(a: Taggy, b: Taggy) -> bool {
|
||||
match a {
|
||||
one(a1) => match b {
|
||||
one(b1) => return a1 == b1,
|
||||
One(a1) => match b {
|
||||
One(b1) => return a1 == b1,
|
||||
_ => return false
|
||||
},
|
||||
two(a1, a2) => match b {
|
||||
two(b1, b2) => return a1 == b1 && a2 == b2,
|
||||
Two(a1, a2) => match b {
|
||||
Two(b1, b2) => return a1 == b1 && a2 == b2,
|
||||
_ => return false
|
||||
},
|
||||
three(a1, a2, a3) => match b {
|
||||
three(b1, b2, b3) => return a1 == b1 && a2 == b2 && a3 == b3,
|
||||
Three(a1, a2, a3) => match b {
|
||||
Three(b1, b2, b3) => return a1 == b1 && a2 == b2 && a3 == b3,
|
||||
_ => return false
|
||||
}
|
||||
}
|
||||
}
|
||||
fn taggypareq<T>(a: taggypar<T>, b: taggypar<T>) -> bool {
|
||||
fn taggypareq<T>(a: Taggypar<T>, b: Taggypar<T>) -> bool {
|
||||
match a {
|
||||
onepar::<T>(a1) => match b {
|
||||
onepar::<T>(b1) => return a1 == b1,
|
||||
Onepar::<T>(a1) => match b {
|
||||
Onepar::<T>(b1) => return a1 == b1,
|
||||
_ => return false
|
||||
},
|
||||
twopar::<T>(a1, a2) => match b {
|
||||
twopar::<T>(b1, b2) => return a1 == b1 && a2 == b2,
|
||||
Twopar::<T>(a1, a2) => match b {
|
||||
Twopar::<T>(b1, b2) => return a1 == b1 && a2 == b2,
|
||||
_ => return false
|
||||
},
|
||||
threepar::<T>(a1, a2, a3) => match b {
|
||||
threepar::<T>(b1, b2, b3) => {
|
||||
Threepar::<T>(a1, a2, a3) => match b {
|
||||
Threepar::<T>(b1, b2, b3) => {
|
||||
return a1 == b1 && a2 == b2 && a3 == b3
|
||||
}
|
||||
_ => return false
|
||||
}
|
||||
}
|
||||
}
|
||||
fn reccyeq(a: reccy, b: reccy) -> bool {
|
||||
fn reccyeq(a: RecCy, b: RecCy) -> bool {
|
||||
return a.x == b.x && a.y == b.y && taggyeq(a.t, b.t);
|
||||
}
|
||||
debug!("*** test boxes");
|
||||
test_boxes(@5, @72, @64, @175);
|
||||
debug!("*** end test boxes");
|
||||
debug!("test parameterized: int");
|
||||
let eq1: eqfn<int> = inteq;
|
||||
let eq1: EqFn<int> = inteq;
|
||||
test_parameterized::<int>(eq1, 5, 72, 64, 175);
|
||||
debug!("*** test parameterized: @int");
|
||||
let eq2: eqfn<@int> = intboxeq;
|
||||
let eq2: EqFn<@int> = intboxeq;
|
||||
test_parameterized::<@int>(eq2, @5, @72, @64, @175);
|
||||
debug!("*** end test parameterized @int");
|
||||
debug!("test parameterized: taggy");
|
||||
let eq3: eqfn<taggy> = taggyeq;
|
||||
test_parameterized::<taggy>(eq3, one(1), two(1, 2), three(1, 2, 3),
|
||||
two(17, 42));
|
||||
let eq3: EqFn<Taggy> = taggyeq;
|
||||
test_parameterized::<Taggy>(eq3, One(1), Two(1, 2), Three(1, 2, 3),
|
||||
Two(17, 42));
|
||||
|
||||
debug!("*** test parameterized: taggypar<int>");
|
||||
let eq4: eqfn<taggypar<int>> = |x,y| taggypareq::<int>(x, y);
|
||||
test_parameterized::<taggypar<int>>(eq4, onepar::<int>(1),
|
||||
twopar::<int>(1, 2),
|
||||
threepar::<int>(1, 2, 3),
|
||||
twopar::<int>(17, 42));
|
||||
let eq4: EqFn<Taggypar<int>> = |x,y| taggypareq::<int>(x, y);
|
||||
test_parameterized::<Taggypar<int>>(eq4, Onepar::<int>(1),
|
||||
Twopar::<int>(1, 2),
|
||||
Threepar::<int>(1, 2, 3),
|
||||
Twopar::<int>(17, 42));
|
||||
debug!("*** end test parameterized: taggypar::<int>");
|
||||
|
||||
debug!("*** test parameterized: reccy");
|
||||
let reccy1: reccy = {x: 1, y: 2, t: one(1)};
|
||||
let reccy2: reccy = {x: 345, y: 2, t: two(1, 2)};
|
||||
let reccy3: reccy = {x: 1, y: 777, t: three(1, 2, 3)};
|
||||
let reccy4: reccy = {x: 19, y: 252, t: two(17, 42)};
|
||||
let eq5: eqfn<reccy> = reccyeq;
|
||||
test_parameterized::<reccy>(eq5, reccy1, reccy2, reccy3, reccy4);
|
||||
let reccy1: RecCy = {x: 1, y: 2, t: One(1)};
|
||||
let reccy2: RecCy = {x: 345, y: 2, t: Two(1, 2)};
|
||||
let reccy3: RecCy = {x: 1, y: 777, t: Three(1, 2, 3)};
|
||||
let reccy4: RecCy = {x: 19, y: 252, t: Two(17, 42)};
|
||||
let eq5: EqFn<RecCy> = reccyeq;
|
||||
test_parameterized::<RecCy>(eq5, reccy1, reccy2, reccy3, reccy4);
|
||||
debug!("*** end test parameterized: reccy");
|
||||
debug!("*** done");
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
|
||||
#[deny(non_camel_case_types)];
|
||||
|
||||
// Simple Extensible Binary Markup Language (ebml) reader and writer on a
|
||||
// cursor model. See the specification here:
|
||||
@ -7,6 +7,7 @@ import core::Option;
|
||||
import option::{Some, None};
|
||||
|
||||
export doc;
|
||||
export Doc;
|
||||
export doc_at;
|
||||
export maybe_get_doc;
|
||||
export get_doc;
|
||||
@ -22,28 +23,29 @@ export doc_as_i8;
|
||||
export doc_as_i16;
|
||||
export doc_as_i32;
|
||||
export doc_as_i64;
|
||||
export writer;
|
||||
export Writer;
|
||||
export serializer;
|
||||
export ebml_deserializer;
|
||||
export EbmlDeserializer;
|
||||
export deserializer;
|
||||
export with_doc_data;
|
||||
export get_doc;
|
||||
export extensions;
|
||||
|
||||
type ebml_tag = {id: uint, size: uint};
|
||||
type EbmlTag = {id: uint, size: uint};
|
||||
|
||||
type ebml_state = {ebml_tag: ebml_tag, tag_pos: uint, data_pos: uint};
|
||||
type EbmlState = {ebml_tag: EbmlTag, tag_pos: uint, data_pos: uint};
|
||||
|
||||
// FIXME (#2739): When we have module renaming, make "reader" and "writer"
|
||||
// separate modules within this file.
|
||||
|
||||
// ebml reading
|
||||
type doc = {data: @~[u8], start: uint, end: uint};
|
||||
type Doc = {data: @~[u8], start: uint, end: uint};
|
||||
|
||||
type tagged_doc = {tag: uint, doc: doc};
|
||||
type TaggedDoc = {tag: uint, doc: Doc};
|
||||
|
||||
impl doc: ops::index<uint,doc> {
|
||||
pure fn index(&&tag: uint) -> doc {
|
||||
impl Doc: ops::index<uint,Doc> {
|
||||
pure fn index(&&tag: uint) -> Doc {
|
||||
unchecked {
|
||||
get_doc(self, tag)
|
||||
}
|
||||
@ -73,11 +75,11 @@ fn vuint_at(data: &[u8], start: uint) -> {val: uint, next: uint} {
|
||||
} else { error!("vint too big"); fail; }
|
||||
}
|
||||
|
||||
fn doc(data: @~[u8]) -> doc {
|
||||
fn doc(data: @~[u8]) -> Doc {
|
||||
return {data: data, start: 0u, end: vec::len::<u8>(*data)};
|
||||
}
|
||||
|
||||
fn doc_at(data: @~[u8], start: uint) -> tagged_doc {
|
||||
fn doc_at(data: @~[u8], start: uint) -> TaggedDoc {
|
||||
let elt_tag = vuint_at(*data, start);
|
||||
let elt_size = vuint_at(*data, elt_tag.next);
|
||||
let end = elt_size.next + elt_size.val;
|
||||
@ -85,24 +87,24 @@ fn doc_at(data: @~[u8], start: uint) -> tagged_doc {
|
||||
doc: {data: data, start: elt_size.next, end: end}};
|
||||
}
|
||||
|
||||
fn maybe_get_doc(d: doc, tg: uint) -> Option<doc> {
|
||||
fn maybe_get_doc(d: Doc, tg: uint) -> Option<Doc> {
|
||||
let mut pos = d.start;
|
||||
while pos < d.end {
|
||||
let elt_tag = vuint_at(*d.data, pos);
|
||||
let elt_size = vuint_at(*d.data, elt_tag.next);
|
||||
pos = elt_size.next + elt_size.val;
|
||||
if elt_tag.val == tg {
|
||||
return Some::<doc>({
|
||||
return Some::<Doc>({
|
||||
data: d.data,
|
||||
start: elt_size.next,
|
||||
end: pos
|
||||
});
|
||||
}
|
||||
}
|
||||
return None::<doc>;
|
||||
return None::<Doc>;
|
||||
}
|
||||
|
||||
fn get_doc(d: doc, tg: uint) -> doc {
|
||||
fn get_doc(d: Doc, tg: uint) -> Doc {
|
||||
match maybe_get_doc(d, tg) {
|
||||
Some(d) => return d,
|
||||
None => {
|
||||
@ -112,7 +114,7 @@ fn get_doc(d: doc, tg: uint) -> doc {
|
||||
}
|
||||
}
|
||||
|
||||
fn docs(d: doc, it: fn(uint, doc) -> bool) {
|
||||
fn docs(d: Doc, it: fn(uint, Doc) -> bool) {
|
||||
let mut pos = d.start;
|
||||
while pos < d.end {
|
||||
let elt_tag = vuint_at(*d.data, pos);
|
||||
@ -124,7 +126,7 @@ fn docs(d: doc, it: fn(uint, doc) -> bool) {
|
||||
}
|
||||
}
|
||||
|
||||
fn tagged_docs(d: doc, tg: uint, it: fn(doc) -> bool) {
|
||||
fn tagged_docs(d: Doc, tg: uint, it: fn(Doc) -> bool) {
|
||||
let mut pos = d.start;
|
||||
while pos < d.end {
|
||||
let elt_tag = vuint_at(*d.data, pos);
|
||||
@ -138,44 +140,44 @@ fn tagged_docs(d: doc, tg: uint, it: fn(doc) -> bool) {
|
||||
}
|
||||
}
|
||||
|
||||
fn doc_data(d: doc) -> ~[u8] { vec::slice::<u8>(*d.data, d.start, d.end) }
|
||||
fn doc_data(d: Doc) -> ~[u8] { vec::slice::<u8>(*d.data, d.start, d.end) }
|
||||
|
||||
fn with_doc_data<T>(d: doc, f: fn(x: &[u8]) -> T) -> T {
|
||||
fn with_doc_data<T>(d: Doc, f: fn(x: &[u8]) -> T) -> T {
|
||||
return f(vec::view(*d.data, d.start, d.end));
|
||||
}
|
||||
|
||||
fn doc_as_str(d: doc) -> ~str { return str::from_bytes(doc_data(d)); }
|
||||
fn doc_as_str(d: Doc) -> ~str { return str::from_bytes(doc_data(d)); }
|
||||
|
||||
fn doc_as_u8(d: doc) -> u8 {
|
||||
fn doc_as_u8(d: Doc) -> u8 {
|
||||
assert d.end == d.start + 1u;
|
||||
return (*d.data)[d.start];
|
||||
}
|
||||
|
||||
fn doc_as_u16(d: doc) -> u16 {
|
||||
fn doc_as_u16(d: Doc) -> u16 {
|
||||
assert d.end == d.start + 2u;
|
||||
return io::u64_from_be_bytes(*d.data, d.start, 2u) as u16;
|
||||
}
|
||||
|
||||
fn doc_as_u32(d: doc) -> u32 {
|
||||
fn doc_as_u32(d: Doc) -> u32 {
|
||||
assert d.end == d.start + 4u;
|
||||
return io::u64_from_be_bytes(*d.data, d.start, 4u) as u32;
|
||||
}
|
||||
|
||||
fn doc_as_u64(d: doc) -> u64 {
|
||||
fn doc_as_u64(d: Doc) -> u64 {
|
||||
assert d.end == d.start + 8u;
|
||||
return io::u64_from_be_bytes(*d.data, d.start, 8u);
|
||||
}
|
||||
|
||||
fn doc_as_i8(d: doc) -> i8 { doc_as_u8(d) as i8 }
|
||||
fn doc_as_i16(d: doc) -> i16 { doc_as_u16(d) as i16 }
|
||||
fn doc_as_i32(d: doc) -> i32 { doc_as_u32(d) as i32 }
|
||||
fn doc_as_i64(d: doc) -> i64 { doc_as_u64(d) as i64 }
|
||||
fn doc_as_i8(d: Doc) -> i8 { doc_as_u8(d) as i8 }
|
||||
fn doc_as_i16(d: Doc) -> i16 { doc_as_u16(d) as i16 }
|
||||
fn doc_as_i32(d: Doc) -> i32 { doc_as_u32(d) as i32 }
|
||||
fn doc_as_i64(d: Doc) -> i64 { doc_as_u64(d) as i64 }
|
||||
|
||||
// ebml writing
|
||||
type writer_ = {writer: io::Writer, mut size_positions: ~[uint]};
|
||||
type Writer_ = {writer: io::Writer, mut size_positions: ~[uint]};
|
||||
|
||||
enum writer {
|
||||
writer_(writer_)
|
||||
enum Writer {
|
||||
Writer_(Writer_)
|
||||
}
|
||||
|
||||
fn write_sized_vuint(w: io::Writer, n: uint, size: uint) {
|
||||
@ -198,13 +200,13 @@ fn write_vuint(w: io::Writer, n: uint) {
|
||||
fail fmt!("vint to write too big: %?", n);
|
||||
}
|
||||
|
||||
fn writer(w: io::Writer) -> writer {
|
||||
fn Writer(w: io::Writer) -> Writer {
|
||||
let size_positions: ~[uint] = ~[];
|
||||
return writer_({writer: w, mut size_positions: size_positions});
|
||||
return Writer_({writer: w, mut size_positions: size_positions});
|
||||
}
|
||||
|
||||
// FIXME (#2741): Provide a function to write the standard ebml header.
|
||||
impl writer {
|
||||
impl Writer {
|
||||
fn start_tag(tag_id: uint) {
|
||||
debug!("Start tag %u", tag_id);
|
||||
|
||||
@ -306,26 +308,26 @@ impl writer {
|
||||
// Totally lame approach.
|
||||
const debug: bool = false;
|
||||
|
||||
enum ebml_serializer_tag {
|
||||
es_uint, es_u64, es_u32, es_u16, es_u8,
|
||||
es_int, es_i64, es_i32, es_i16, es_i8,
|
||||
es_bool,
|
||||
es_str,
|
||||
es_f64, es_f32, es_float,
|
||||
es_enum, es_enum_vid, es_enum_body,
|
||||
es_vec, es_vec_len, es_vec_elt,
|
||||
enum EbmlSerializerTag {
|
||||
EsUint, EsU64, EsU32, EsU16, EsU8,
|
||||
EsInt, EsI64, EsI32, EsI16, EsI8,
|
||||
EsBool,
|
||||
EsStr,
|
||||
EsF64, EsF32, EsFloat,
|
||||
EsEnum, EsEnumVid, EsEnumBody,
|
||||
EsVec, EsVecLen, EsVecElt,
|
||||
|
||||
es_label // Used only when debugging
|
||||
EsLabel // Used only when debugging
|
||||
}
|
||||
|
||||
trait serializer_priv {
|
||||
fn _emit_tagged_uint(t: ebml_serializer_tag, v: uint);
|
||||
trait SerializerPriv {
|
||||
fn _emit_tagged_uint(t: EbmlSerializerTag, v: uint);
|
||||
fn _emit_label(label: &str);
|
||||
}
|
||||
|
||||
impl ebml::writer: serializer_priv {
|
||||
impl ebml::Writer: SerializerPriv {
|
||||
// used internally to emit things like the vector length and so on
|
||||
fn _emit_tagged_uint(t: ebml_serializer_tag, v: uint) {
|
||||
fn _emit_tagged_uint(t: EbmlSerializerTag, v: uint) {
|
||||
assert v <= 0xFFFF_FFFF_u;
|
||||
self.wr_tagged_u32(t as uint, v as u32);
|
||||
}
|
||||
@ -337,53 +339,53 @@ impl ebml::writer: serializer_priv {
|
||||
// efficiency. When debugging, though, we can emit such
|
||||
// labels and then they will be checked by deserializer to
|
||||
// try and check failures more quickly.
|
||||
if debug { self.wr_tagged_str(es_label as uint, label) }
|
||||
if debug { self.wr_tagged_str(EsLabel as uint, label) }
|
||||
}
|
||||
}
|
||||
|
||||
impl ebml::writer: serialization::serializer {
|
||||
impl ebml::Writer: serialization::serializer {
|
||||
fn emit_nil() {}
|
||||
|
||||
fn emit_uint(v: uint) { self.wr_tagged_u64(es_uint as uint, v as u64); }
|
||||
fn emit_u64(v: u64) { self.wr_tagged_u64(es_u64 as uint, v); }
|
||||
fn emit_u32(v: u32) { self.wr_tagged_u32(es_u32 as uint, v); }
|
||||
fn emit_u16(v: u16) { self.wr_tagged_u16(es_u16 as uint, v); }
|
||||
fn emit_u8(v: u8) { self.wr_tagged_u8 (es_u8 as uint, v); }
|
||||
fn emit_uint(v: uint) { self.wr_tagged_u64(EsUint as uint, v as u64); }
|
||||
fn emit_u64(v: u64) { self.wr_tagged_u64(EsU64 as uint, v); }
|
||||
fn emit_u32(v: u32) { self.wr_tagged_u32(EsU32 as uint, v); }
|
||||
fn emit_u16(v: u16) { self.wr_tagged_u16(EsU16 as uint, v); }
|
||||
fn emit_u8(v: u8) { self.wr_tagged_u8 (EsU8 as uint, v); }
|
||||
|
||||
fn emit_int(v: int) { self.wr_tagged_i64(es_int as uint, v as i64); }
|
||||
fn emit_i64(v: i64) { self.wr_tagged_i64(es_i64 as uint, v); }
|
||||
fn emit_i32(v: i32) { self.wr_tagged_i32(es_i32 as uint, v); }
|
||||
fn emit_i16(v: i16) { self.wr_tagged_i16(es_i16 as uint, v); }
fn emit_i8(v: i8) { self.wr_tagged_i8 (es_i8 as uint, v); }
fn emit_int(v: int) { self.wr_tagged_i64(EsInt as uint, v as i64); }
fn emit_i64(v: i64) { self.wr_tagged_i64(EsI64 as uint, v); }
fn emit_i32(v: i32) { self.wr_tagged_i32(EsI32 as uint, v); }
fn emit_i16(v: i16) { self.wr_tagged_i16(EsI16 as uint, v); }
fn emit_i8(v: i8) { self.wr_tagged_i8 (EsI8 as uint, v); }

fn emit_bool(v: bool) { self.wr_tagged_u8(es_bool as uint, v as u8) }
fn emit_bool(v: bool) { self.wr_tagged_u8(EsBool as uint, v as u8) }

// FIXME (#2742): implement these
fn emit_f64(_v: f64) { fail ~"Unimplemented: serializing an f64"; }
fn emit_f32(_v: f32) { fail ~"Unimplemented: serializing an f32"; }
fn emit_float(_v: float) { fail ~"Unimplemented: serializing a float"; }

fn emit_str(v: &str) { self.wr_tagged_str(es_str as uint, v) }
fn emit_str(v: &str) { self.wr_tagged_str(EsStr as uint, v) }

fn emit_enum(name: &str, f: fn()) {
self._emit_label(name);
self.wr_tag(es_enum as uint, f)
self.wr_tag(EsEnum as uint, f)
}
fn emit_enum_variant(_v_name: &str, v_id: uint, _cnt: uint, f: fn()) {
self._emit_tagged_uint(es_enum_vid, v_id);
self.wr_tag(es_enum_body as uint, f)
self._emit_tagged_uint(EsEnumVid, v_id);
self.wr_tag(EsEnumBody as uint, f)
}
fn emit_enum_variant_arg(_idx: uint, f: fn()) { f() }

fn emit_vec(len: uint, f: fn()) {
do self.wr_tag(es_vec as uint) {
self._emit_tagged_uint(es_vec_len, len);
do self.wr_tag(EsVec as uint) {
self._emit_tagged_uint(EsVecLen, len);
f()
}
}

fn emit_vec_elt(_idx: uint, f: fn()) {
self.wr_tag(es_vec_elt as uint, f)
self.wr_tag(EsVecElt as uint, f)
}

fn emit_box(f: fn()) { f() }
@ -397,23 +399,23 @@ impl ebml::writer: serialization::serializer {
fn emit_tup_elt(_idx: uint, f: fn()) { f() }
}

type ebml_deserializer_ = {mut parent: ebml::doc,
type EbmlDeserializer_ = {mut parent: ebml::Doc,
mut pos: uint};

enum ebml_deserializer {
ebml_deserializer_(ebml_deserializer_)
enum EbmlDeserializer {
EbmlDeserializer_(EbmlDeserializer_)
}

fn ebml_deserializer(d: ebml::doc) -> ebml_deserializer {
ebml_deserializer_({mut parent: d, mut pos: d.start})
fn ebml_deserializer(d: ebml::Doc) -> EbmlDeserializer {
EbmlDeserializer_({mut parent: d, mut pos: d.start})
}

priv impl ebml_deserializer {
priv impl EbmlDeserializer {
fn _check_label(lbl: ~str) {
if self.pos < self.parent.end {
let {tag: r_tag, doc: r_doc} =
ebml::doc_at(self.parent.data, self.pos);
if r_tag == (es_label as uint) {
if r_tag == (EsLabel as uint) {
self.pos = r_doc.end;
let str = ebml::doc_as_str(r_doc);
if lbl != str {
@ -423,7 +425,7 @@ priv impl ebml_deserializer {
}
}

fn next_doc(exp_tag: ebml_serializer_tag) -> ebml::doc {
fn next_doc(exp_tag: EbmlSerializerTag) -> ebml::Doc {
debug!(". next_doc(exp_tag=%?)", exp_tag);
if self.pos >= self.parent.end {
fail ~"no more documents in current node!";
@ -445,7 +447,7 @@ priv impl ebml_deserializer {
return r_doc;
}

fn push_doc<T>(d: ebml::doc, f: fn() -> T) -> T{
fn push_doc<T>(d: ebml::Doc, f: fn() -> T) -> T{
let old_parent = self.parent;
let old_pos = self.pos;
self.parent = d;
@ -456,60 +458,60 @@ priv impl ebml_deserializer {
return r;
}

fn _next_uint(exp_tag: ebml_serializer_tag) -> uint {
fn _next_uint(exp_tag: EbmlSerializerTag) -> uint {
let r = ebml::doc_as_u32(self.next_doc(exp_tag));
debug!("_next_uint exp_tag=%? result=%?", exp_tag, r);
return r as uint;
}
}
impl ebml_deserializer: serialization::deserializer {
impl EbmlDeserializer: serialization::deserializer {
fn read_nil() -> () { () }

fn read_u64() -> u64 { ebml::doc_as_u64(self.next_doc(es_u64)) }
fn read_u32() -> u32 { ebml::doc_as_u32(self.next_doc(es_u32)) }
fn read_u16() -> u16 { ebml::doc_as_u16(self.next_doc(es_u16)) }
fn read_u8 () -> u8 { ebml::doc_as_u8 (self.next_doc(es_u8 )) }
fn read_u64() -> u64 { ebml::doc_as_u64(self.next_doc(EsU64)) }
fn read_u32() -> u32 { ebml::doc_as_u32(self.next_doc(EsU32)) }
fn read_u16() -> u16 { ebml::doc_as_u16(self.next_doc(EsU16)) }
fn read_u8 () -> u8 { ebml::doc_as_u8 (self.next_doc(EsU8 )) }
fn read_uint() -> uint {
let v = ebml::doc_as_u64(self.next_doc(es_uint));
let v = ebml::doc_as_u64(self.next_doc(EsUint));
if v > (core::uint::max_value as u64) {
fail fmt!("uint %? too large for this architecture", v);
}
return v as uint;
}

fn read_i64() -> i64 { ebml::doc_as_u64(self.next_doc(es_i64)) as i64 }
fn read_i32() -> i32 { ebml::doc_as_u32(self.next_doc(es_i32)) as i32 }
fn read_i16() -> i16 { ebml::doc_as_u16(self.next_doc(es_i16)) as i16 }
fn read_i8 () -> i8 { ebml::doc_as_u8 (self.next_doc(es_i8 )) as i8 }
fn read_i64() -> i64 { ebml::doc_as_u64(self.next_doc(EsI64)) as i64 }
fn read_i32() -> i32 { ebml::doc_as_u32(self.next_doc(EsI32)) as i32 }
fn read_i16() -> i16 { ebml::doc_as_u16(self.next_doc(EsI16)) as i16 }
fn read_i8 () -> i8 { ebml::doc_as_u8 (self.next_doc(EsI8 )) as i8 }
fn read_int() -> int {
let v = ebml::doc_as_u64(self.next_doc(es_int)) as i64;
let v = ebml::doc_as_u64(self.next_doc(EsInt)) as i64;
if v > (int::max_value as i64) || v < (int::min_value as i64) {
fail fmt!("int %? out of range for this architecture", v);
}
return v as int;
}

fn read_bool() -> bool { ebml::doc_as_u8(self.next_doc(es_bool)) as bool }
fn read_bool() -> bool { ebml::doc_as_u8(self.next_doc(EsBool)) as bool }

fn read_f64() -> f64 { fail ~"read_f64()"; }
fn read_f32() -> f32 { fail ~"read_f32()"; }
fn read_float() -> float { fail ~"read_float()"; }

fn read_str() -> ~str { ebml::doc_as_str(self.next_doc(es_str)) }
fn read_str() -> ~str { ebml::doc_as_str(self.next_doc(EsStr)) }

// Compound types:
fn read_enum<T>(name: ~str, f: fn() -> T) -> T {
debug!("read_enum(%s)", name);
self._check_label(name);
self.push_doc(self.next_doc(es_enum), f)
self.push_doc(self.next_doc(EsEnum), f)
}

fn read_enum_variant<T>(f: fn(uint) -> T) -> T {
debug!("read_enum_variant()");
let idx = self._next_uint(es_enum_vid);
let idx = self._next_uint(EsEnumVid);
debug!(" idx=%u", idx);
do self.push_doc(self.next_doc(es_enum_body)) {
do self.push_doc(self.next_doc(EsEnumBody)) {
f(idx)
}
}
@ -521,8 +523,8 @@ impl ebml_deserializer: serialization::deserializer {

fn read_vec<T>(f: fn(uint) -> T) -> T {
debug!("read_vec()");
do self.push_doc(self.next_doc(es_vec)) {
let len = self._next_uint(es_vec_len);
do self.push_doc(self.next_doc(EsVec)) {
let len = self._next_uint(EsVecLen);
debug!(" len=%u", len);
f(len)
}
@ -530,7 +532,7 @@ impl ebml_deserializer: serialization::deserializer {

fn read_vec_elt<T>(idx: uint, f: fn() -> T) -> T {
debug!("read_vec_elt(idx=%u)", idx);
self.push_doc(self.next_doc(es_vec_elt), f)
self.push_doc(self.next_doc(EsVecElt), f)
}

fn read_box<T>(f: fn() -> T) -> T {
@ -615,7 +617,7 @@ fn test_option_int() {
fn test_v(v: Option<int>) {
debug!("v == %?", v);
let mbuf = io::mem_buffer();
let ebml_w = ebml::writer(io::mem_buffer_writer(mbuf));
let ebml_w = ebml::Writer(io::mem_buffer_writer(mbuf));
serialize_0(ebml_w, v);
let ebml_doc = ebml::doc(@io::mem_buffer_buf(mbuf));
let deser = ebml_deserializer(ebml_doc);
@ -1,3 +1,5 @@
#[deny(non_camel_case_types)];

/*!
* A functional key,value store that works on anything.
*
@ -12,41 +14,41 @@
import option::{Some, None};
import option = option;

export treemap;
export Treemap;
export init;
export insert;
export find;
export traverse;

type treemap<K, V> = @tree_node<K, V>;
type Treemap<K, V> = @TreeNode<K, V>;

enum tree_node<K, V> {
empty,
node(@K, @V, @tree_node<K, V>, @tree_node<K, V>)
enum TreeNode<K, V> {
Empty,
Node(@K, @V, @TreeNode<K, V>, @TreeNode<K, V>)
}

/// Create a treemap
fn init<K, V>() -> treemap<K, V> { @empty }
fn init<K, V>() -> Treemap<K, V> { @Empty }

/// Insert a value into the map
fn insert<K: copy, V: copy>(m: treemap<K, V>, k: K, v: V) -> treemap<K, V> {
fn insert<K: copy, V: copy>(m: Treemap<K, V>, k: K, v: V) -> Treemap<K, V> {
@match m {
@empty => node(@k, @v, @empty, @empty),
@node(@kk, vv, left, right) => {
@Empty => Node(@k, @v, @Empty, @Empty),
@Node(@kk, vv, left, right) => {
if k < kk {
node(@kk, vv, insert(left, k, v), right)
Node(@kk, vv, insert(left, k, v), right)
} else if k == kk {
node(@kk, @v, left, right)
} else { node(@kk, vv, left, insert(right, k, v)) }
Node(@kk, @v, left, right)
} else { Node(@kk, vv, left, insert(right, k, v)) }
}
}
}

/// Find a value based on the key
fn find<K, V: copy>(m: treemap<K, V>, k: K) -> Option<V> {
fn find<K, V: copy>(m: Treemap<K, V>, k: K) -> Option<V> {
match *m {
empty => None,
node(@kk, @v, left, right) => {
Empty => None,
Node(@kk, @v, left, right) => {
if k == kk {
Some(v)
} else if k < kk { find(left, k) } else { find(right, k) }
@ -55,15 +57,15 @@ fn find<K, V: copy>(m: treemap<K, V>, k: K) -> Option<V> {
}

/// Visit all pairs in the map in order.
fn traverse<K, V: copy>(m: treemap<K, V>, f: fn(K, V)) {
fn traverse<K, V: copy>(m: Treemap<K, V>, f: fn(K, V)) {
match *m {
empty => (),
Empty => (),
/*
Previously, this had what looked like redundant
matches to me, so I changed it. but that may be a
de-optimization -- tjc
*/
node(@k, @v, left, right) => {
Node(@k, @v, left, right) => {
// copy v to make aliases work out
let v1 = v;
traverse(left, f);
@ -1,3 +1,5 @@
|
||||
#[deny(non_camel_case_types)];
|
||||
|
||||
/*!
|
||||
* Simple getopt alternative.
|
||||
*
|
||||
@ -68,15 +70,15 @@
|
||||
import core::result::{Err, Ok};
|
||||
import core::option;
|
||||
import core::option::{Some, None};
|
||||
export opt;
|
||||
export Opt;
|
||||
export reqopt;
|
||||
export optopt;
|
||||
export optflag;
|
||||
export optflagopt;
|
||||
export optmulti;
|
||||
export getopts;
|
||||
export matches;
|
||||
export fail_;
|
||||
export Matches;
|
||||
export Fail_;
|
||||
export fail_str;
|
||||
export opt_present;
|
||||
export opts_present;
|
||||
@ -85,72 +87,72 @@ export opts_str;
|
||||
export opt_strs;
|
||||
export opt_maybe_str;
|
||||
export opt_default;
|
||||
export result; //NDM
|
||||
export Result; //NDM
|
||||
|
||||
enum name { long(~str), short(char), }
enum Name { Long(~str), Short(char), }

enum hasarg { yes, no, maybe, }
enum HasArg { Yes, No, Maybe, }

enum occur { req, optional, multi, }
enum Occur { Req, Optional, Multi, }

/// A description of a possible option
type opt = {name: name, hasarg: hasarg, occur: occur};
type Opt = {name: Name, hasarg: HasArg, occur: Occur};
|
||||
fn mkname(nm: &str) -> name {
|
||||
fn mkname(nm: &str) -> Name {
|
||||
let unm = str::from_slice(nm);
|
||||
return if str::len(nm) == 1u {
|
||||
short(str::char_at(unm, 0u))
|
||||
} else { long(unm) };
|
||||
Short(str::char_at(unm, 0u))
|
||||
} else { Long(unm) };
|
||||
}
|
||||
|
||||
/// Create an option that is required and takes an argument
|
||||
fn reqopt(name: &str) -> opt {
|
||||
return {name: mkname(name), hasarg: yes, occur: req};
|
||||
fn reqopt(name: &str) -> Opt {
|
||||
return {name: mkname(name), hasarg: Yes, occur: Req};
|
||||
}
|
||||
|
||||
/// Create an option that is optional and takes an argument
|
||||
fn optopt(name: &str) -> opt {
|
||||
return {name: mkname(name), hasarg: yes, occur: optional};
|
||||
fn optopt(name: &str) -> Opt {
|
||||
return {name: mkname(name), hasarg: Yes, occur: Optional};
|
||||
}
|
||||
|
||||
/// Create an option that is optional and does not take an argument
|
||||
fn optflag(name: &str) -> opt {
|
||||
return {name: mkname(name), hasarg: no, occur: optional};
|
||||
fn optflag(name: &str) -> Opt {
|
||||
return {name: mkname(name), hasarg: No, occur: Optional};
|
||||
}
|
||||
|
||||
/// Create an option that is optional and takes an optional argument
|
||||
fn optflagopt(name: &str) -> opt {
|
||||
return {name: mkname(name), hasarg: maybe, occur: optional};
|
||||
fn optflagopt(name: &str) -> Opt {
|
||||
return {name: mkname(name), hasarg: Maybe, occur: Optional};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an option that is optional, takes an argument, and may occur
|
||||
* multiple times
|
||||
*/
|
||||
fn optmulti(name: &str) -> opt {
|
||||
return {name: mkname(name), hasarg: yes, occur: multi};
|
||||
fn optmulti(name: &str) -> Opt {
|
||||
return {name: mkname(name), hasarg: Yes, occur: Multi};
|
||||
}
|
||||
|
||||
enum optval { val(~str), given, }
|
||||
enum Optval { Val(~str), Given, }
|
||||
|
||||
/**
|
||||
* The result of checking command line arguments. Contains a vector
|
||||
* of matches and a vector of free strings.
|
||||
*/
|
||||
type matches = {opts: ~[opt], vals: ~[~[optval]], free: ~[~str]};
|
||||
type Matches = {opts: ~[Opt], vals: ~[~[Optval]], free: ~[~str]};
|
||||
|
||||
fn is_arg(arg: &str) -> bool {
|
||||
return str::len(arg) > 1u && arg[0] == '-' as u8;
|
||||
}
|
||||
|
||||
fn name_str(nm: &name) -> ~str {
|
||||
fn name_str(nm: &Name) -> ~str {
|
||||
return match *nm {
|
||||
short(ch) => str::from_char(ch),
|
||||
long(s) => s
|
||||
Short(ch) => str::from_char(ch),
|
||||
Long(s) => s
|
||||
};
|
||||
}
|
||||
|
||||
fn find_opt(opts: &[opt], +nm: name) -> Option<uint> {
|
||||
fn find_opt(opts: &[Opt], +nm: Name) -> Option<uint> {
|
||||
vec::position(opts, |opt| opt.name == nm)
|
||||
}
|
||||
|
||||
@ -158,22 +160,22 @@ fn find_opt(opts: &[opt], +nm: name) -> Option<uint> {
|
||||
* The type returned when the command line does not conform to the
|
||||
* expected format. Pass this value to <fail_str> to get an error message.
|
||||
*/
|
||||
enum fail_ {
|
||||
argument_missing(~str),
|
||||
unrecognized_option(~str),
|
||||
option_missing(~str),
|
||||
option_duplicated(~str),
|
||||
unexpected_argument(~str),
|
||||
enum Fail_ {
|
||||
ArgumentMissing(~str),
|
||||
UnrecognizedOption(~str),
|
||||
OptionMissing(~str),
|
||||
OptionDuplicated(~str),
|
||||
UnexpectedArgument(~str),
|
||||
}
|
||||
|
||||
/// Convert a `fail_` enum into an error string
|
||||
fn fail_str(+f: fail_) -> ~str {
|
||||
fn fail_str(+f: Fail_) -> ~str {
|
||||
return match f {
|
||||
argument_missing(nm) => ~"Argument to option '" + nm + ~"' missing.",
|
||||
unrecognized_option(nm) => ~"Unrecognized option: '" + nm + ~"'.",
|
||||
option_missing(nm) => ~"Required option '" + nm + ~"' missing.",
|
||||
option_duplicated(nm) => ~"Option '" + nm + ~"' given more than once.",
|
||||
unexpected_argument(nm) => {
|
||||
ArgumentMissing(nm) => ~"Argument to option '" + nm + ~"' missing.",
|
||||
UnrecognizedOption(nm) => ~"Unrecognized option: '" + nm + ~"'.",
|
||||
OptionMissing(nm) => ~"Required option '" + nm + ~"' missing.",
|
||||
OptionDuplicated(nm) => ~"Option '" + nm + ~"' given more than once.",
|
||||
UnexpectedArgument(nm) => {
|
||||
~"Option " + nm + ~" does not take an argument."
|
||||
}
|
||||
};
|
||||
@ -181,20 +183,20 @@ fn fail_str(+f: fail_) -> ~str {
|
||||
|
||||
/**
|
||||
* The result of parsing a command line with a set of options
|
||||
* (result::t<matches, fail_>)
|
||||
* (result::t<Matches, Fail_>)
|
||||
*/
|
||||
type result = result::Result<matches, fail_>;
|
||||
type Result = result::Result<Matches, Fail_>;
|
||||
|
||||
/**
|
||||
* Parse command line arguments according to the provided options
|
||||
*
|
||||
* On success returns `ok(opt)`. Use functions such as `opt_present`
|
||||
* `opt_str`, etc. to interrogate results. Returns `err(fail_)` on failure.
|
||||
* On success returns `ok(Opt)`. Use functions such as `opt_present`
|
||||
* `opt_str`, etc. to interrogate results. Returns `err(Fail_)` on failure.
|
||||
* Use <fail_str> to get an error message.
|
||||
*/
|
||||
fn getopts(args: &[~str], opts: &[opt]) -> result unsafe {
|
||||
let n_opts = vec::len::<opt>(opts);
|
||||
fn f(_x: uint) -> ~[optval] { return ~[]; }
|
||||
fn getopts(args: &[~str], opts: &[Opt]) -> Result unsafe {
|
||||
let n_opts = vec::len::<Opt>(opts);
|
||||
fn f(_x: uint) -> ~[Optval] { return ~[]; }
|
||||
let vals = vec::to_mut(vec::from_fn(n_opts, f));
|
||||
let mut free: ~[~str] = ~[];
|
||||
let l = vec::len(args);
|
||||
@ -215,10 +217,10 @@ fn getopts(args: &[~str], opts: &[opt]) -> result unsafe {
|
||||
let tail = str::slice(cur, 2u, curlen);
|
||||
let tail_eq = str::splitn_char(tail, '=', 1u);
|
||||
if vec::len(tail_eq) <= 1u {
|
||||
names = ~[long(tail)];
|
||||
names = ~[Long(tail)];
|
||||
} else {
|
||||
names =
|
||||
~[long(tail_eq[0])];
|
||||
~[Long(tail_eq[0])];
|
||||
i_arg =
|
||||
option::Some::<~str>(tail_eq[1]);
|
||||
}
|
||||
@ -228,7 +230,7 @@ fn getopts(args: &[~str], opts: &[opt]) -> result unsafe {
|
||||
names = ~[];
|
||||
while j < curlen {
|
||||
let range = str::char_range_at(cur, j);
|
||||
let opt = short(range.ch);
|
||||
let opt = Short(range.ch);
|
||||
|
||||
/* In a series of potential options (eg. -aheJ), if we see
|
||||
one which takes an argument, we assume all subsequent
|
||||
@ -245,8 +247,8 @@ fn getopts(args: &[~str], opts: &[opt]) -> result unsafe {
|
||||
match opts[option::get(last_valid_opt_id)]
|
||||
.hasarg {
|
||||
|
||||
yes | maybe => true,
|
||||
no => false
|
||||
Yes | Maybe => true,
|
||||
No => false
|
||||
};
|
||||
if arg_follows && j + 1 < curlen {
|
||||
i_arg = option::Some(str::slice(cur, j, curlen));
|
||||
@ -265,30 +267,30 @@ fn getopts(args: &[~str], opts: &[opt]) -> result unsafe {
|
||||
name_pos += 1u;
|
||||
let optid = match find_opt(opts, nm) {
|
||||
Some(id) => id,
|
||||
None => return Err(unrecognized_option(name_str(&nm)))
|
||||
None => return Err(UnrecognizedOption(name_str(&nm)))
|
||||
};
|
||||
match opts[optid].hasarg {
|
||||
no => {
|
||||
No => {
|
||||
if !option::is_none::<~str>(i_arg) {
|
||||
return Err(unexpected_argument(name_str(&nm)));
|
||||
return Err(UnexpectedArgument(name_str(&nm)));
|
||||
}
|
||||
vec::push(vals[optid], given);
|
||||
vec::push(vals[optid], Given);
|
||||
}
|
||||
maybe => {
|
||||
Maybe => {
|
||||
if !option::is_none::<~str>(i_arg) {
|
||||
vec::push(vals[optid], val(option::get(i_arg)));
|
||||
} else if name_pos < vec::len::<name>(names) ||
|
||||
vec::push(vals[optid], Val(option::get(i_arg)));
|
||||
} else if name_pos < vec::len::<Name>(names) ||
|
||||
i + 1u == l || is_arg(args[i + 1u]) {
|
||||
vec::push(vals[optid], given);
|
||||
} else { i += 1u; vec::push(vals[optid], val(args[i])); }
|
||||
vec::push(vals[optid], Given);
|
||||
} else { i += 1u; vec::push(vals[optid], Val(args[i])); }
|
||||
}
|
||||
yes => {
|
||||
Yes => {
|
||||
if !option::is_none::<~str>(i_arg) {
|
||||
vec::push(vals[optid],
|
||||
val(option::get::<~str>(i_arg)));
|
||||
Val(option::get::<~str>(i_arg)));
|
||||
} else if i + 1u == l {
|
||||
return Err(argument_missing(name_str(&nm)));
|
||||
} else { i += 1u; vec::push(vals[optid], val(args[i])); }
|
||||
return Err(ArgumentMissing(name_str(&nm)));
|
||||
} else { i += 1u; vec::push(vals[optid], Val(args[i])); }
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -297,16 +299,16 @@ fn getopts(args: &[~str], opts: &[opt]) -> result unsafe {
|
||||
}
|
||||
i = 0u;
|
||||
while i < n_opts {
|
||||
let n = vec::len::<optval>(vals[i]);
|
||||
let n = vec::len::<Optval>(vals[i]);
|
||||
let occ = opts[i].occur;
|
||||
if occ == req {
|
||||
if occ == Req {
|
||||
if n == 0u {
|
||||
return Err(option_missing(name_str(&(opts[i].name))));
|
||||
return Err(OptionMissing(name_str(&(opts[i].name))));
|
||||
}
|
||||
}
|
||||
if occ != multi {
|
||||
if occ != Multi {
|
||||
if n > 1u {
|
||||
return Err(option_duplicated(name_str(&(opts[i].name))));
|
||||
return Err(OptionDuplicated(name_str(&(opts[i].name))));
|
||||
}
|
||||
}
|
||||
i += 1u;
|
||||
@ -316,7 +318,7 @@ fn getopts(args: &[~str], opts: &[opt]) -> result unsafe {
|
||||
free: free});
|
||||
}
|
||||
|
||||
fn opt_vals(+mm: matches, nm: &str) -> ~[optval] {
|
||||
fn opt_vals(+mm: Matches, nm: &str) -> ~[Optval] {
|
||||
return match find_opt(mm.opts, mkname(nm)) {
|
||||
Some(id) => mm.vals[id],
|
||||
None => {
|
||||
@ -326,15 +328,15 @@ fn opt_vals(+mm: matches, nm: &str) -> ~[optval] {
|
||||
};
|
||||
}
|
||||
|
||||
fn opt_val(+mm: matches, nm: &str) -> optval { return opt_vals(mm, nm)[0]; }
|
||||
fn opt_val(+mm: Matches, nm: &str) -> Optval { return opt_vals(mm, nm)[0]; }
|
||||
|
||||
/// Returns true if an option was matched
|
||||
fn opt_present(+mm: matches, nm: &str) -> bool {
|
||||
return vec::len::<optval>(opt_vals(mm, nm)) > 0u;
|
||||
fn opt_present(+mm: Matches, nm: &str) -> bool {
|
||||
return vec::len::<Optval>(opt_vals(mm, nm)) > 0u;
|
||||
}
|
||||
|
||||
/// Returns true if any of several options were matched
|
||||
fn opts_present(+mm: matches, names: &[~str]) -> bool {
|
||||
fn opts_present(+mm: Matches, names: &[~str]) -> bool {
|
||||
for vec::each(names) |nm| {
|
||||
match find_opt(mm.opts, mkname(nm)) {
|
||||
Some(_) => return true,
|
||||
@ -351,8 +353,8 @@ fn opts_present(+mm: matches, names: &[~str]) -> bool {
|
||||
* Fails if the option was not matched or if the match did not take an
|
||||
* argument
|
||||
*/
|
||||
fn opt_str(+mm: matches, nm: &str) -> ~str {
|
||||
return match opt_val(mm, nm) { val(s) => s, _ => fail };
|
||||
fn opt_str(+mm: Matches, nm: &str) -> ~str {
|
||||
return match opt_val(mm, nm) { Val(s) => s, _ => fail };
|
||||
}
|
||||
|
||||
/**
|
||||
@ -361,10 +363,10 @@ fn opt_str(+mm: matches, nm: &str) -> ~str {
|
||||
* Fails if the no option was provided from the given list, or if the no such
|
||||
* option took an argument
|
||||
*/
|
||||
fn opts_str(+mm: matches, names: &[~str]) -> ~str {
|
||||
fn opts_str(+mm: Matches, names: &[~str]) -> ~str {
|
||||
for vec::each(names) |nm| {
|
||||
match opt_val(mm, nm) {
|
||||
val(s) => return s,
|
||||
Val(s) => return s,
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
@ -378,19 +380,19 @@ fn opts_str(+mm: matches, names: &[~str]) -> ~str {
|
||||
*
|
||||
* Used when an option accepts multiple values.
|
||||
*/
|
||||
fn opt_strs(+mm: matches, nm: &str) -> ~[~str] {
|
||||
fn opt_strs(+mm: Matches, nm: &str) -> ~[~str] {
|
||||
let mut acc: ~[~str] = ~[];
|
||||
for vec::each(opt_vals(mm, nm)) |v| {
|
||||
match v { val(s) => vec::push(acc, s), _ => () }
|
||||
match v { Val(s) => vec::push(acc, s), _ => () }
|
||||
}
|
||||
return acc;
|
||||
}
|
||||
|
||||
/// Returns the string argument supplied to a matching option or none
|
||||
fn opt_maybe_str(+mm: matches, nm: &str) -> Option<~str> {
|
||||
fn opt_maybe_str(+mm: Matches, nm: &str) -> Option<~str> {
|
||||
let vals = opt_vals(mm, nm);
|
||||
if vec::len::<optval>(vals) == 0u { return None::<~str>; }
|
||||
return match vals[0] { val(s) => Some::<~str>(s), _ => None::<~str> };
|
||||
if vec::len::<Optval>(vals) == 0u { return None::<~str>; }
|
||||
return match vals[0] { Val(s) => Some::<~str>(s), _ => None::<~str> };
|
||||
}
|
||||
|
||||
|
||||
@ -401,10 +403,10 @@ fn opt_maybe_str(+mm: matches, nm: &str) -> Option<~str> {
|
||||
* present but no argument was provided, and the argument if the option was
|
||||
* present and an argument was provided.
|
||||
*/
|
||||
fn opt_default(+mm: matches, nm: &str, def: &str) -> Option<~str> {
|
||||
fn opt_default(+mm: Matches, nm: &str, def: &str) -> Option<~str> {
|
||||
let vals = opt_vals(mm, nm);
|
||||
if vec::len::<optval>(vals) == 0u { return None::<~str>; }
|
||||
return match vals[0] { val(s) => Some::<~str>(s),
|
||||
if vec::len::<Optval>(vals) == 0u { return None::<~str>; }
|
||||
return match vals[0] { Val(s) => Some::<~str>(s),
|
||||
_ => Some::<~str>(str::from_slice(def)) }
|
||||
}
|
||||
|
||||
@ -413,21 +415,21 @@ mod tests {
|
||||
import opt = getopts;
|
||||
import result::{Err, Ok};
|
||||
|
||||
enum fail_type {
|
||||
argument_missing_,
|
||||
unrecognized_option_,
|
||||
option_missing_,
|
||||
option_duplicated_,
|
||||
unexpected_argument_,
|
||||
enum FailType {
|
||||
ArgumentMissing_,
|
||||
UnrecognizedOption_,
|
||||
OptionMissing_,
|
||||
OptionDuplicated_,
|
||||
UnexpectedArgument_,
|
||||
}
|
||||
|
||||
fn check_fail_type(+f: fail_, ft: fail_type) {
|
||||
fn check_fail_type(+f: Fail_, ft: FailType) {
|
||||
match f {
|
||||
argument_missing(_) => assert ft == argument_missing_,
|
||||
unrecognized_option(_) => assert ft == unrecognized_option_,
|
||||
option_missing(_) => assert ft == option_missing_,
|
||||
option_duplicated(_) => assert ft == option_duplicated_,
|
||||
unexpected_argument(_) => assert ft == unexpected_argument_
|
||||
ArgumentMissing(_) => assert ft == ArgumentMissing_,
|
||||
UnrecognizedOption(_) => assert ft == UnrecognizedOption_,
|
||||
OptionMissing(_) => assert ft == OptionMissing_,
|
||||
OptionDuplicated(_) => assert ft == OptionDuplicated_,
|
||||
UnexpectedArgument(_) => assert ft == UnexpectedArgument_
|
||||
}
|
||||
}
|
||||
|
||||
@ -453,7 +455,7 @@ mod tests {
|
||||
let opts = ~[reqopt(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_missing_),
|
||||
Err(f) => check_fail_type(f, OptionMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -464,7 +466,7 @@ mod tests {
|
||||
let opts = ~[reqopt(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, argument_missing_),
|
||||
Err(f) => check_fail_type(f, ArgumentMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -475,7 +477,7 @@ mod tests {
|
||||
let opts = ~[reqopt(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_duplicated_),
|
||||
Err(f) => check_fail_type(f, OptionDuplicated_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -500,7 +502,7 @@ mod tests {
|
||||
let opts = ~[reqopt(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_missing_),
|
||||
Err(f) => check_fail_type(f, OptionMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -511,7 +513,7 @@ mod tests {
|
||||
let opts = ~[reqopt(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, argument_missing_),
|
||||
Err(f) => check_fail_type(f, ArgumentMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -522,7 +524,7 @@ mod tests {
|
||||
let opts = ~[reqopt(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_duplicated_),
|
||||
Err(f) => check_fail_type(f, OptionDuplicated_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -560,7 +562,7 @@ mod tests {
|
||||
let opts = ~[optopt(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, argument_missing_),
|
||||
Err(f) => check_fail_type(f, ArgumentMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -571,7 +573,7 @@ mod tests {
|
||||
let opts = ~[optopt(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_duplicated_),
|
||||
Err(f) => check_fail_type(f, OptionDuplicated_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -607,7 +609,7 @@ mod tests {
|
||||
let opts = ~[optopt(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, argument_missing_),
|
||||
Err(f) => check_fail_type(f, ArgumentMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -618,7 +620,7 @@ mod tests {
|
||||
let opts = ~[optopt(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_duplicated_),
|
||||
Err(f) => check_fail_type(f, OptionDuplicated_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -655,7 +657,7 @@ mod tests {
|
||||
match rs {
|
||||
Err(f) => {
|
||||
log(error, fail_str(f));
|
||||
check_fail_type(f, unexpected_argument_);
|
||||
check_fail_type(f, UnexpectedArgument_);
|
||||
}
|
||||
_ => fail
|
||||
}
|
||||
@ -667,7 +669,7 @@ mod tests {
|
||||
let opts = ~[optflag(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_duplicated_),
|
||||
Err(f) => check_fail_type(f, OptionDuplicated_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -715,7 +717,7 @@ mod tests {
|
||||
let opts = ~[optflag(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, option_duplicated_),
|
||||
Err(f) => check_fail_type(f, OptionDuplicated_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -753,7 +755,7 @@ mod tests {
|
||||
let opts = ~[optmulti(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, argument_missing_),
|
||||
Err(f) => check_fail_type(f, ArgumentMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -805,7 +807,7 @@ mod tests {
|
||||
let opts = ~[optmulti(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, argument_missing_),
|
||||
Err(f) => check_fail_type(f, ArgumentMissing_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -832,7 +834,7 @@ mod tests {
|
||||
let opts = ~[optmulti(~"t")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, unrecognized_option_),
|
||||
Err(f) => check_fail_type(f, UnrecognizedOption_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
@ -843,7 +845,7 @@ mod tests {
|
||||
let opts = ~[optmulti(~"test")];
|
||||
let rs = getopts(args, opts);
|
||||
match rs {
|
||||
Err(f) => check_fail_type(f, unrecognized_option_),
|
||||
Err(f) => check_fail_type(f, UnrecognizedOption_),
|
||||
_ => fail
|
||||
}
|
||||
}
|
||||
|
@ -1,3 +1,5 @@
|
||||
#[deny(non_camel_case_types)];
|
||||
|
||||
// Rust JSON serialization library
|
||||
// Copyright (c) 2011 Google Inc.
|
||||
|
||||
@ -10,45 +12,45 @@ import map;
|
||||
import map::hashmap;
|
||||
import map::map;
|
||||
|
||||
export json;
|
||||
export error;
|
||||
export Json;
|
||||
export Error;
|
||||
export to_writer;
|
||||
export to_str;
|
||||
export from_reader;
|
||||
export from_str;
|
||||
export eq;
|
||||
export to_json;
|
||||
export ToJson;
|
||||
|
||||
export num;
|
||||
export string;
|
||||
export boolean;
|
||||
export list;
|
||||
export dict;
|
||||
export null;
|
||||
export Num;
|
||||
export String;
|
||||
export Boolean;
|
||||
export List;
|
||||
export Dict;
|
||||
export Null;
|
||||
|
||||
/// Represents a json value
|
||||
enum json {
num(float),
string(@~str),
boolean(bool),
list(@~[json]),
dict(map::hashmap<~str, json>),
null,
enum Json {
Num(float),
String(@~str),
Boolean(bool),
List(@~[Json]),
Dict(map::hashmap<~str, Json>),
Null,
}
|
||||
type error = {
|
||||
type Error = {
|
||||
line: uint,
|
||||
col: uint,
|
||||
msg: @~str,
|
||||
};
|
||||
|
||||
/// Serializes a json value into a io::writer
|
||||
fn to_writer(wr: io::Writer, j: json) {
|
||||
fn to_writer(wr: io::Writer, j: Json) {
|
||||
match j {
|
||||
num(n) => wr.write_str(float::to_str(n, 6u)),
|
||||
string(s) => wr.write_str(escape_str(*s)),
|
||||
boolean(b) => wr.write_str(if b { ~"true" } else { ~"false" }),
|
||||
list(v) => {
|
||||
Num(n) => wr.write_str(float::to_str(n, 6u)),
|
||||
String(s) => wr.write_str(escape_str(*s)),
|
||||
Boolean(b) => wr.write_str(if b { ~"true" } else { ~"false" }),
|
||||
List(v) => {
|
||||
wr.write_char('[');
|
||||
let mut first = true;
|
||||
for (*v).each |item| {
|
||||
@ -60,7 +62,7 @@ fn to_writer(wr: io::Writer, j: json) {
|
||||
};
|
||||
wr.write_char(']');
|
||||
}
|
||||
dict(d) => {
|
||||
Dict(d) => {
|
||||
if d.size() == 0u {
|
||||
wr.write_str(~"{}");
|
||||
return;
|
||||
@ -79,7 +81,7 @@ fn to_writer(wr: io::Writer, j: json) {
|
||||
};
|
||||
wr.write_str(~" }");
|
||||
}
|
||||
null => wr.write_str(~"null")
|
||||
Null => wr.write_str(~"null")
|
||||
}
|
||||
}
|
||||
|
||||
@ -104,22 +106,22 @@ fn escape_str(s: ~str) -> ~str {
|
||||
}
|
||||
|
||||
/// Serializes a json value into a string
|
||||
fn to_str(j: json) -> ~str {
|
||||
fn to_str(j: Json) -> ~str {
|
||||
io::with_str_writer(|wr| to_writer(wr, j))
|
||||
}
|
||||
|
||||
type parser_ = {
|
||||
type Parser_ = {
|
||||
rdr: io::Reader,
|
||||
mut ch: char,
|
||||
mut line: uint,
|
||||
mut col: uint,
|
||||
};
|
||||
|
||||
enum parser {
|
||||
parser_(parser_)
|
||||
enum Parser {
|
||||
Parser_(Parser_)
|
||||
}
|
||||
|
||||
impl parser {
|
||||
impl Parser {
|
||||
fn eof() -> bool { self.ch == -1 as char }
|
||||
|
||||
fn bump() {
|
||||
@ -138,11 +140,11 @@ impl parser {
|
||||
self.ch
|
||||
}
|
||||
|
||||
fn error<T>(+msg: ~str) -> Result<T, error> {
|
||||
fn error<T>(+msg: ~str) -> Result<T, Error> {
|
||||
Err({ line: self.line, col: self.col, msg: @msg })
|
||||
}
|
||||
|
||||
fn parse() -> Result<json, error> {
|
||||
fn parse() -> Result<Json, Error> {
|
||||
match self.parse_value() {
|
||||
Ok(value) => {
|
||||
// Skip trailing whitespaces.
|
||||
@ -158,18 +160,18 @@ impl parser {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_value() -> Result<json, error> {
|
||||
fn parse_value() -> Result<Json, Error> {
|
||||
self.parse_whitespace();
|
||||
|
||||
if self.eof() { return self.error(~"EOF while parsing value"); }
|
||||
|
||||
match self.ch {
|
||||
'n' => self.parse_ident(~"ull", null),
|
||||
't' => self.parse_ident(~"rue", boolean(true)),
|
||||
'f' => self.parse_ident(~"alse", boolean(false)),
|
||||
'n' => self.parse_ident(~"ull", Null),
|
||||
't' => self.parse_ident(~"rue", Boolean(true)),
|
||||
'f' => self.parse_ident(~"alse", Boolean(false)),
|
||||
'0' to '9' | '-' => self.parse_number(),
|
||||
'"' => match self.parse_str() {
|
||||
Ok(s) => Ok(string(s)),
|
||||
Ok(s) => Ok(String(s)),
|
||||
Err(e) => Err(e)
|
||||
},
|
||||
'[' => self.parse_list(),
|
||||
@ -182,7 +184,7 @@ impl parser {
|
||||
while char::is_whitespace(self.ch) { self.bump(); }
|
||||
}
|
||||
|
||||
fn parse_ident(ident: ~str, value: json) -> Result<json, error> {
|
||||
fn parse_ident(ident: ~str, value: Json) -> Result<Json, Error> {
|
||||
if str::all(ident, |c| c == self.next_char()) {
|
||||
self.bump();
|
||||
Ok(value)
|
||||
@ -191,7 +193,7 @@ impl parser {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_number() -> Result<json, error> {
|
||||
fn parse_number() -> Result<Json, Error> {
|
||||
let mut neg = 1f;
|
||||
|
||||
if self.ch == '-' {
|
||||
@ -218,10 +220,10 @@ impl parser {
|
||||
}
|
||||
}
|
||||
|
||||
Ok(num(neg * res))
|
||||
Ok(Num(neg * res))
|
||||
}
|
||||
|
||||
fn parse_integer() -> Result<float, error> {
|
||||
fn parse_integer() -> Result<float, Error> {
|
||||
let mut res = 0f;
|
||||
|
||||
match self.ch {
|
||||
@ -253,7 +255,7 @@ impl parser {
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
fn parse_decimal(res: float) -> Result<float, error> {
|
||||
fn parse_decimal(res: float) -> Result<float, Error> {
|
||||
self.bump();
|
||||
|
||||
// Make sure a digit follows the decimal place.
|
||||
@ -279,7 +281,7 @@ impl parser {
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
fn parse_exponent(res: float) -> Result<float, error> {
|
||||
fn parse_exponent(res: float) -> Result<float, Error> {
|
||||
self.bump();
|
||||
|
||||
let mut res = res;
|
||||
@ -320,7 +322,7 @@ impl parser {
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
fn parse_str() -> Result<@~str, error> {
|
||||
fn parse_str() -> Result<@~str, Error> {
|
||||
let mut escape = false;
|
||||
let mut res = ~"";
|
||||
|
||||
@ -376,7 +378,7 @@ impl parser {
|
||||
self.error(~"EOF while parsing string")
|
||||
}
|
||||
|
||||
fn parse_list() -> Result<json, error> {
|
||||
fn parse_list() -> Result<Json, Error> {
|
||||
self.bump();
|
||||
self.parse_whitespace();
|
||||
|
||||
@ -384,7 +386,7 @@ impl parser {
|
||||
|
||||
if self.ch == ']' {
|
||||
self.bump();
|
||||
return Ok(list(@values));
|
||||
return Ok(List(@values));
|
||||
}
|
||||
|
||||
loop {
|
||||
@ -400,13 +402,13 @@ impl parser {
|
||||
|
||||
match self.ch {
|
||||
',' => self.bump(),
|
||||
']' => { self.bump(); return Ok(list(@values)); }
|
||||
']' => { self.bump(); return Ok(List(@values)); }
|
||||
_ => return self.error(~"expected `,` or `]`")
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn parse_object() -> Result<json, error> {
|
||||
fn parse_object() -> Result<Json, Error> {
|
||||
self.bump();
|
||||
self.parse_whitespace();
|
||||
|
||||
@ -414,7 +416,7 @@ impl parser {
|
||||
|
||||
if self.ch == '}' {
|
||||
self.bump();
|
||||
return Ok(dict(values));
|
||||
return Ok(Dict(values));
|
||||
}
|
||||
|
||||
while !self.eof() {
|
||||
@ -445,7 +447,7 @@ impl parser {
|
||||
|
||||
match self.ch {
|
||||
',' => self.bump(),
|
||||
'}' => { self.bump(); return Ok(dict(values)); }
|
||||
'}' => { self.bump(); return Ok(Dict(values)); }
|
||||
_ => {
|
||||
if self.eof() { break; }
|
||||
return self.error(~"expected `,` or `}`");
|
||||
@ -458,8 +460,8 @@ impl parser {
|
||||
}
|
||||
|
||||
/// Deserializes a json value from an io::reader
|
||||
fn from_reader(rdr: io::Reader) -> Result<json, error> {
|
||||
let parser = parser_({
|
||||
fn from_reader(rdr: io::Reader) -> Result<Json, Error> {
|
||||
let parser = Parser_({
|
||||
rdr: rdr,
|
||||
mut ch: rdr.read_char(),
|
||||
mut line: 1u,
|
||||
@ -470,18 +472,18 @@ fn from_reader(rdr: io::Reader) -> Result<json, error> {
|
||||
}
|
||||
|
||||
/// Deserializes a json value from a string
|
||||
fn from_str(s: ~str) -> Result<json, error> {
|
||||
fn from_str(s: ~str) -> Result<Json, Error> {
|
||||
io::with_str_reader(s, from_reader)
|
||||
}
|
||||
|
||||
/// Test if two json values are equal
|
||||
fn eq(value0: json, value1: json) -> bool {
|
||||
fn eq(value0: Json, value1: Json) -> bool {
|
||||
match (value0, value1) {
|
||||
(num(f0), num(f1)) => f0 == f1,
|
||||
(string(s0), string(s1)) => s0 == s1,
|
||||
(boolean(b0), boolean(b1)) => b0 == b1,
|
||||
(list(l0), list(l1)) => vec::all2(*l0, *l1, eq),
|
||||
(dict(d0), dict(d1)) => {
|
||||
(Num(f0), Num(f1)) => f0 == f1,
|
||||
(String(s0), String(s1)) => s0 == s1,
|
||||
(Boolean(b0), Boolean(b1)) => b0 == b1,
|
||||
(List(l0), List(l1)) => vec::all2(*l0, *l1, eq),
|
||||
(Dict(d0), Dict(d1)) => {
|
||||
if d0.size() == d1.size() {
|
||||
let mut equal = true;
|
||||
for d0.each |k, v0| {
|
||||
@ -495,138 +497,138 @@ fn eq(value0: json, value1: json) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
(null, null) => true,
|
||||
(Null, Null) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
trait to_json { fn to_json() -> json; }
|
||||
trait ToJson { fn to_json() -> Json; }
|
||||
|
||||
impl json: to_json {
|
||||
fn to_json() -> json { self }
|
||||
impl Json: ToJson {
|
||||
fn to_json() -> Json { self }
|
||||
}
|
||||
|
||||
impl @json: to_json {
|
||||
fn to_json() -> json { *self }
|
||||
impl @Json: ToJson {
|
||||
fn to_json() -> Json { *self }
|
||||
}
|
||||
|
||||
impl int: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl int: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl i8: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl i8: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl i16: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl i16: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl i32: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl i32: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl i64: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl i64: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl uint: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl uint: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl u8: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl u8: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl u16: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl u16: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl u32: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl u32: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl u64: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl u64: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl float: to_json {
|
||||
fn to_json() -> json { num(self) }
|
||||
impl float: ToJson {
|
||||
fn to_json() -> Json { Num(self) }
|
||||
}
|
||||
|
||||
impl f32: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl f32: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl f64: to_json {
|
||||
fn to_json() -> json { num(self as float) }
|
||||
impl f64: ToJson {
|
||||
fn to_json() -> Json { Num(self as float) }
|
||||
}
|
||||
|
||||
impl (): to_json {
|
||||
fn to_json() -> json { null }
|
||||
impl (): ToJson {
|
||||
fn to_json() -> Json { Null }
|
||||
}
|
||||
|
||||
impl bool: to_json {
|
||||
fn to_json() -> json { boolean(self) }
|
||||
impl bool: ToJson {
|
||||
fn to_json() -> Json { Boolean(self) }
|
||||
}
|
||||
|
||||
impl ~str: to_json {
|
||||
fn to_json() -> json { string(@copy self) }
|
||||
impl ~str: ToJson {
|
||||
fn to_json() -> Json { String(@copy self) }
|
||||
}
|
||||
|
||||
impl @~str: to_json {
|
||||
fn to_json() -> json { string(self) }
|
||||
impl @~str: ToJson {
|
||||
fn to_json() -> Json { String(self) }
|
||||
}
|
||||
|
||||
impl <A: to_json, B: to_json> (A, B): to_json {
|
||||
fn to_json() -> json {
|
||||
impl <A: ToJson, B: ToJson> (A, B): ToJson {
|
||||
fn to_json() -> Json {
|
||||
match self {
|
||||
(a, b) => {
|
||||
list(@~[a.to_json(), b.to_json()])
|
||||
List(@~[a.to_json(), b.to_json()])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl <A: to_json, B: to_json, C: to_json> (A, B, C): to_json {
|
||||
impl <A: ToJson, B: ToJson, C: ToJson> (A, B, C): ToJson {
|
||||
|
||||
fn to_json() -> json {
|
||||
fn to_json() -> Json {
|
||||
match self {
|
||||
(a, b, c) => {
|
||||
list(@~[a.to_json(), b.to_json(), c.to_json()])
|
||||
List(@~[a.to_json(), b.to_json(), c.to_json()])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl <A: to_json> ~[A]: to_json {
|
||||
fn to_json() -> json { list(@self.map(|elt| elt.to_json())) }
|
||||
impl <A: ToJson> ~[A]: ToJson {
|
||||
fn to_json() -> Json { List(@self.map(|elt| elt.to_json())) }
|
||||
}
|
||||
|
||||
impl <A: to_json copy> hashmap<~str, A>: to_json {
|
||||
fn to_json() -> json {
|
||||
impl <A: ToJson copy> hashmap<~str, A>: ToJson {
|
||||
fn to_json() -> Json {
|
||||
let d = map::str_hash();
|
||||
for self.each() |key, value| {
|
||||
d.insert(copy key, value.to_json());
|
||||
}
|
||||
dict(d)
|
||||
Dict(d)
|
||||
}
|
||||
}
|
||||
|
||||
impl <A: to_json> Option<A>: to_json {
|
||||
fn to_json() -> json {
|
||||
impl <A: ToJson> Option<A>: ToJson {
|
||||
fn to_json() -> Json {
|
||||
match self {
|
||||
None => null,
|
||||
None => Null,
|
||||
Some(value) => value.to_json()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl json: to_str::ToStr {
|
||||
impl Json: to_str::ToStr {
|
||||
fn to_str() -> ~str { to_str(self) }
|
||||
}
|
||||
|
||||
impl error: to_str::ToStr {
|
||||
impl Error: to_str::ToStr {
|
||||
fn to_str() -> ~str {
|
||||
fmt!("%u:%u: %s", self.line, self.col, *self.msg)
|
||||
}
|
||||
@ -634,7 +636,7 @@ impl error: to_str::ToStr {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
fn mk_dict(items: ~[(~str, json)]) -> json {
|
||||
fn mk_dict(items: ~[(~str, Json)]) -> Json {
|
||||
let d = map::str_hash();
|
||||
|
||||
do vec::iter(items) |item| {
|
||||
@ -642,55 +644,55 @@ mod tests {
|
||||
d.insert(key, value);
|
||||
};
|
||||
|
||||
dict(d)
|
||||
Dict(d)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_null() {
|
||||
assert to_str(null) == ~"null";
|
||||
assert to_str(Null) == ~"null";
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_num() {
|
||||
assert to_str(num(3f)) == ~"3";
|
||||
assert to_str(num(3.1f)) == ~"3.1";
|
||||
assert to_str(num(-1.5f)) == ~"-1.5";
|
||||
assert to_str(num(0.5f)) == ~"0.5";
|
||||
assert to_str(Num(3f)) == ~"3";
|
||||
assert to_str(Num(3.1f)) == ~"3.1";
|
||||
assert to_str(Num(-1.5f)) == ~"-1.5";
|
||||
assert to_str(Num(0.5f)) == ~"0.5";
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_str() {
|
||||
assert to_str(string(@~"")) == ~"\"\"";
|
||||
assert to_str(string(@~"foo")) == ~"\"foo\"";
|
||||
assert to_str(String(@~"")) == ~"\"\"";
|
||||
assert to_str(String(@~"foo")) == ~"\"foo\"";
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_bool() {
|
||||
assert to_str(boolean(true)) == ~"true";
|
||||
assert to_str(boolean(false)) == ~"false";
|
||||
assert to_str(Boolean(true)) == ~"true";
|
||||
assert to_str(Boolean(false)) == ~"false";
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_list() {
|
||||
assert to_str(list(@~[])) == ~"[]";
|
||||
assert to_str(list(@~[boolean(true)])) == ~"[true]";
|
||||
assert to_str(list(@~[
|
||||
boolean(false),
|
||||
null,
|
||||
list(@~[string(@~"foo\nbar"), num(3.5f)])
|
||||
assert to_str(List(@~[])) == ~"[]";
|
||||
assert to_str(List(@~[Boolean(true)])) == ~"[true]";
|
||||
assert to_str(List(@~[
|
||||
Boolean(false),
|
||||
Null,
|
||||
List(@~[String(@~"foo\nbar"), Num(3.5f)])
|
||||
])) == ~"[false, null, [\"foo\\nbar\", 3.5]]";
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_dict() {
|
||||
assert to_str(mk_dict(~[])) == ~"{}";
|
||||
assert to_str(mk_dict(~[(~"a", boolean(true))]))
|
||||
assert to_str(mk_dict(~[(~"a", Boolean(true))]))
|
||||
== ~"{ \"a\": true }";
|
||||
assert to_str(mk_dict(~[
|
||||
(~"a", boolean(true)),
|
||||
(~"b", list(@~[
|
||||
mk_dict(~[(~"c", string(@~"\x0c\r"))]),
|
||||
mk_dict(~[(~"d", string(@~""))])
|
||||
(~"a", Boolean(true)),
|
||||
(~"b", List(@~[
|
||||
mk_dict(~[(~"c", String(@~"\x0c\r"))]),
|
||||
mk_dict(~[(~"d", String(@~""))])
|
||||
]))
|
||||
])) ==
|
||||
~"{ " +
|
||||
@ -735,12 +737,12 @@ mod tests {
|
||||
assert from_str(~"faz") ==
|
||||
Err({line: 1u, col: 3u, msg: @~"invalid syntax"});
|
||||
|
||||
assert from_str(~"null") == Ok(null);
|
||||
assert from_str(~"true") == Ok(boolean(true));
|
||||
assert from_str(~"false") == Ok(boolean(false));
|
||||
assert from_str(~" null ") == Ok(null);
|
||||
assert from_str(~" true ") == Ok(boolean(true));
|
||||
assert from_str(~" false ") == Ok(boolean(false));
|
||||
assert from_str(~"null") == Ok(Null);
|
||||
assert from_str(~"true") == Ok(Boolean(true));
|
||||
assert from_str(~"false") == Ok(Boolean(false));
|
||||
assert from_str(~" null ") == Ok(Null);
|
||||
assert from_str(~" true ") == Ok(Boolean(true));
|
||||
assert from_str(~" false ") == Ok(Boolean(false));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -761,14 +763,14 @@ mod tests {
|
||||
assert from_str(~"1e+") ==
|
||||
Err({line: 1u, col: 4u, msg: @~"invalid number"});
|
||||
|
||||
assert from_str(~"3") == Ok(num(3f));
|
||||
assert from_str(~"3.1") == Ok(num(3.1f));
|
||||
assert from_str(~"-1.2") == Ok(num(-1.2f));
|
||||
assert from_str(~"0.4") == Ok(num(0.4f));
|
||||
assert from_str(~"0.4e5") == Ok(num(0.4e5f));
|
||||
assert from_str(~"0.4e+15") == Ok(num(0.4e15f));
|
||||
assert from_str(~"0.4e-01") == Ok(num(0.4e-01f));
|
||||
assert from_str(~" 3 ") == Ok(num(3f));
|
||||
assert from_str(~"3") == Ok(Num(3f));
|
||||
assert from_str(~"3.1") == Ok(Num(3.1f));
|
||||
assert from_str(~"-1.2") == Ok(Num(-1.2f));
|
||||
assert from_str(~"0.4") == Ok(Num(0.4f));
|
||||
assert from_str(~"0.4e5") == Ok(Num(0.4e5f));
|
||||
assert from_str(~"0.4e+15") == Ok(Num(0.4e15f));
|
||||
assert from_str(~"0.4e-01") == Ok(Num(0.4e-01f));
|
||||
assert from_str(~" 3 ") == Ok(Num(3f));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -778,14 +780,14 @@ mod tests {
|
||||
assert from_str(~"\"lol") ==
|
||||
Err({line: 1u, col: 5u, msg: @~"EOF while parsing string"});
|
||||
|
||||
assert from_str(~"\"\"") == Ok(string(@~""));
|
||||
assert from_str(~"\"foo\"") == Ok(string(@~"foo"));
|
||||
assert from_str(~"\"\\\"\"") == Ok(string(@~"\""));
|
||||
assert from_str(~"\"\\b\"") == Ok(string(@~"\x08"));
|
||||
assert from_str(~"\"\\n\"") == Ok(string(@~"\n"));
|
||||
assert from_str(~"\"\\r\"") == Ok(string(@~"\r"));
|
||||
assert from_str(~"\"\\t\"") == Ok(string(@~"\t"));
|
||||
assert from_str(~" \"foo\" ") == Ok(string(@~"foo"));
|
||||
assert from_str(~"\"\"") == Ok(String(@~""));
|
||||
assert from_str(~"\"foo\"") == Ok(String(@~"foo"));
|
||||
assert from_str(~"\"\\\"\"") == Ok(String(@~"\""));
|
||||
assert from_str(~"\"\\b\"") == Ok(String(@~"\x08"));
|
||||
assert from_str(~"\"\\n\"") == Ok(String(@~"\n"));
|
||||
assert from_str(~"\"\\r\"") == Ok(String(@~"\r"));
|
||||
assert from_str(~"\"\\t\"") == Ok(String(@~"\t"));
|
||||
assert from_str(~" \"foo\" ") == Ok(String(@~"foo"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -801,15 +803,15 @@ mod tests {
|
||||
assert from_str(~"[6 7]") ==
|
||||
Err({line: 1u, col: 4u, msg: @~"expected `,` or `]`"});
|
||||
|
||||
assert from_str(~"[]") == Ok(list(@~[]));
|
||||
assert from_str(~"[ ]") == Ok(list(@~[]));
|
||||
assert from_str(~"[true]") == Ok(list(@~[boolean(true)]));
|
||||
assert from_str(~"[ false ]") == Ok(list(@~[boolean(false)]));
|
||||
assert from_str(~"[null]") == Ok(list(@~[null]));
|
||||
assert from_str(~"[3, 1]") == Ok(list(@~[num(3f), num(1f)]));
|
||||
assert from_str(~"\n[3, 2]\n") == Ok(list(@~[num(3f), num(2f)]));
|
||||
assert from_str(~"[]") == Ok(List(@~[]));
|
||||
assert from_str(~"[ ]") == Ok(List(@~[]));
|
||||
assert from_str(~"[true]") == Ok(List(@~[Boolean(true)]));
|
||||
assert from_str(~"[ false ]") == Ok(List(@~[Boolean(false)]));
|
||||
assert from_str(~"[null]") == Ok(List(@~[Null]));
|
||||
assert from_str(~"[3, 1]") == Ok(List(@~[Num(3f), Num(1f)]));
|
||||
assert from_str(~"\n[3, 2]\n") == Ok(List(@~[Num(3f), Num(2f)]));
|
||||
assert from_str(~"[2, [4, 1]]") ==
|
||||
Ok(list(@~[num(2f), list(@~[num(4f), num(1f)])]));
|
||||
Ok(List(@~[Num(2f), List(@~[Num(4f), Num(1f)])]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -840,20 +842,20 @@ mod tests {
|
||||
|
||||
assert eq(result::get(from_str(~"{}")), mk_dict(~[]));
|
||||
assert eq(result::get(from_str(~"{\"a\": 3}")),
|
||||
mk_dict(~[(~"a", num(3.0f))]));
|
||||
mk_dict(~[(~"a", Num(3.0f))]));
|
||||
|
||||
assert eq(result::get(from_str(~"{ \"a\": null, \"b\" : true }")),
|
||||
mk_dict(~[
|
||||
(~"a", null),
|
||||
(~"b", boolean(true))]));
|
||||
(~"a", Null),
|
||||
(~"b", Boolean(true))]));
|
||||
assert eq(result::get(from_str(~"\n{ \"a\": null, \"b\" : true }\n")),
|
||||
mk_dict(~[
|
||||
(~"a", null),
|
||||
(~"b", boolean(true))]));
|
||||
(~"a", Null),
|
||||
(~"b", Boolean(true))]));
|
||||
assert eq(result::get(from_str(~"{\"a\" : 1.0 ,\"b\": [ true ]}")),
|
||||
mk_dict(~[
|
||||
(~"a", num(1.0)),
|
||||
(~"b", list(@~[boolean(true)]))
|
||||
(~"a", Num(1.0)),
|
||||
(~"b", List(@~[Boolean(true)]))
|
||||
]));
|
||||
assert eq(result::get(from_str(
|
||||
~"{" +
|
||||
@ -865,12 +867,12 @@ mod tests {
|
||||
~"]" +
|
||||
~"}")),
|
||||
mk_dict(~[
|
||||
(~"a", num(1.0f)),
|
||||
(~"b", list(@~[
|
||||
boolean(true),
|
||||
string(@~"foo\nbar"),
|
||||
(~"a", Num(1.0f)),
|
||||
(~"b", List(@~[
|
||||
Boolean(true),
|
||||
String(@~"foo\nbar"),
|
||||
mk_dict(~[
|
||||
(~"c", mk_dict(~[(~"d", null)]))
|
||||
(~"c", mk_dict(~[(~"d", Null)]))
|
||||
])
|
||||
]))
|
||||
]));
|
||||
|
@ -27,7 +27,7 @@ updating the states using rule (2) until there are no changes.

*/

import std::bitv::{bitv};
import std::bitv::{Bitv};

import ast_builder::empty_span;

@ -35,7 +35,7 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
debug!("initializing colive analysis");
let num_states = proto.num_states();
let colive = do (copy proto.states).map_to_vec |state| {
let bv = ~bitv(num_states, false);
let bv = ~Bitv(num_states, false);
for state.reachable |s| {
bv.set(s.id, true);
}
@ -88,4 +88,4 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
debug!("protocol %s is bounded. yay!", proto.name);
proto.bounded = Some(true);
}
}
}
@ -421,7 +421,7 @@ fn host_triple() -> ~str {
};
}

fn build_session_options(matches: getopts::matches,
fn build_session_options(matches: getopts::Matches,
demitter: diagnostic::emitter) -> @session::options {
let crate_type = if opt_present(matches, ~"lib") {
session::lib_crate
@ -605,7 +605,7 @@ fn parse_pretty(sess: session, &&name: ~str) -> pp_mode {
}
}

fn opts() -> ~[getopts::opt] {
fn opts() -> ~[getopts::Opt] {
return ~[optflag(~"h"), optflag(~"help"),
optflag(~"v"), optflag(~"version"),
optflag(~"emit-llvm"), optflagopt(~"pretty"),
@ -63,8 +63,8 @@ export translate_def_id;
// what crate that's in and give us a def_id that makes sense for the current
// build.

fn lookup_hash(d: ebml::doc, eq_fn: fn(x:&[u8]) -> bool, hash: uint) ->
Option<ebml::doc> {
fn lookup_hash(d: ebml::Doc, eq_fn: fn(x:&[u8]) -> bool, hash: uint) ->
Option<ebml::Doc> {
let index = ebml::get_doc(d, tag_index);
let table = ebml::get_doc(index, tag_index_table);
let hash_pos = table.start + hash % 256u * 4u;
@ -81,7 +81,7 @@ fn lookup_hash(d: ebml::doc, eq_fn: fn(x:&[u8]) -> bool, hash: uint) ->
None
}

fn maybe_find_item(item_id: int, items: ebml::doc) -> Option<ebml::doc> {
fn maybe_find_item(item_id: int, items: ebml::Doc) -> Option<ebml::Doc> {
fn eq_item(bytes: &[u8], item_id: int) -> bool {
return io::u64_from_be_bytes(vec::view(bytes, 0u, 4u), 0u, 4u) as int
== item_id;
@ -91,13 +91,13 @@ fn maybe_find_item(item_id: int, items: ebml::doc) -> Option<ebml::doc> {
hash_node_id(item_id))
}

fn find_item(item_id: int, items: ebml::doc) -> ebml::doc {
fn find_item(item_id: int, items: ebml::Doc) -> ebml::Doc {
return option::get(maybe_find_item(item_id, items));
}

// Looks up an item in the given metadata and returns an ebml doc pointing
// to the item data.
fn lookup_item(item_id: int, data: @~[u8]) -> ebml::doc {
fn lookup_item(item_id: int, data: @~[u8]) -> ebml::Doc {
let items = ebml::get_doc(ebml::doc(data), tag_items);
match maybe_find_item(item_id, items) {
None => fail(fmt!("lookup_item: id not found: %d", item_id)),
@ -129,7 +129,7 @@ enum Family {
InheritedField // N
}

fn item_family(item: ebml::doc) -> Family {
fn item_family(item: ebml::Doc) -> Family {
let fam = ebml::get_doc(item, tag_items_data_item_family);
match ebml::doc_as_u8(fam) as char {
'c' => Const,
@ -157,25 +157,25 @@ fn item_family(item: ebml::doc) -> Family {
}
}

fn item_symbol(item: ebml::doc) -> ~str {
fn item_symbol(item: ebml::Doc) -> ~str {
let sym = ebml::get_doc(item, tag_items_data_item_symbol);
return str::from_bytes(ebml::doc_data(sym));
}

fn item_parent_item(d: ebml::doc) -> Option<ast::def_id> {
fn item_parent_item(d: ebml::Doc) -> Option<ast::def_id> {
for ebml::tagged_docs(d, tag_items_data_parent_item) |did| {
return Some(ebml::with_doc_data(did, |d| parse_def_id(d)));
}
None
}

fn item_def_id(d: ebml::doc, cdata: cmd) -> ast::def_id {
fn item_def_id(d: ebml::Doc, cdata: cmd) -> ast::def_id {
let tagdoc = ebml::get_doc(d, tag_def_id);
return translate_def_id(cdata, ebml::with_doc_data(tagdoc,
|d| parse_def_id(d)));
}

fn each_reexport(d: ebml::doc, f: fn(ebml::doc) -> bool) {
fn each_reexport(d: ebml::Doc, f: fn(ebml::Doc) -> bool) {
for ebml::tagged_docs(d, tag_items_data_item_reexport) |reexport_doc| {
if !f(reexport_doc) {
return;
@ -183,7 +183,7 @@ fn each_reexport(d: ebml::doc, f: fn(ebml::doc) -> bool) {
}
}

fn field_mutability(d: ebml::doc) -> ast::class_mutability {
fn field_mutability(d: ebml::Doc) -> ast::class_mutability {
// Use maybe_get_doc in case it's a method
option::map_default(
ebml::maybe_get_doc(d, tag_class_mut),
@ -196,20 +196,20 @@ fn field_mutability(d: ebml::doc) -> ast::class_mutability {
})
}

fn variant_disr_val(d: ebml::doc) -> Option<int> {
fn variant_disr_val(d: ebml::Doc) -> Option<int> {
do option::chain(ebml::maybe_get_doc(d, tag_disr_val)) |val_doc| {
int::parse_buf(ebml::doc_data(val_doc), 10u)
}
}

fn doc_type(doc: ebml::doc, tcx: ty::ctxt, cdata: cmd) -> ty::t {
fn doc_type(doc: ebml::Doc, tcx: ty::ctxt, cdata: cmd) -> ty::t {
let tp = ebml::get_doc(doc, tag_items_data_item_type);
parse_ty_data(tp.data, cdata.cnum, tp.start, tcx, |did| {
translate_def_id(cdata, did)
})
}

fn item_type(item_id: ast::def_id, item: ebml::doc,
fn item_type(item_id: ast::def_id, item: ebml::Doc,
tcx: ty::ctxt, cdata: cmd) -> ty::t {
let t = doc_type(item, tcx, cdata);
if family_names_type(item_family(item)) {
@ -217,7 +217,7 @@ fn item_type(item_id: ast::def_id, item: ebml::doc,
} else { t }
}
fn item_impl_traits(item: ebml::doc, tcx: ty::ctxt, cdata: cmd) -> ~[ty::t] {
fn item_impl_traits(item: ebml::Doc, tcx: ty::ctxt, cdata: cmd) -> ~[ty::t] {
let mut results = ~[];
for ebml::tagged_docs(item, tag_impl_trait) |ity| {
vec::push(results, doc_type(ity, tcx, cdata));
@ -225,7 +225,7 @@ fn item_impl_traits(item: ebml::doc, tcx: ty::ctxt, cdata: cmd) -> ~[ty::t] {
results
}

fn item_ty_param_bounds(item: ebml::doc, tcx: ty::ctxt, cdata: cmd)
fn item_ty_param_bounds(item: ebml::Doc, tcx: ty::ctxt, cdata: cmd)
-> @~[ty::param_bounds] {
let mut bounds = ~[];
for ebml::tagged_docs(item, tag_items_data_item_ty_param_bounds) |p| {
@ -237,21 +237,21 @@ fn item_ty_param_bounds(item: ebml::doc, tcx: ty::ctxt, cdata: cmd)
@bounds
}

fn item_ty_region_param(item: ebml::doc) -> Option<ty::region_variance> {
fn item_ty_region_param(item: ebml::Doc) -> Option<ty::region_variance> {
ebml::maybe_get_doc(item, tag_region_param).map(|doc| {
let d = ebml::ebml_deserializer(doc);
ty::deserialize_region_variance(d)
})
}

fn item_ty_param_count(item: ebml::doc) -> uint {
fn item_ty_param_count(item: ebml::Doc) -> uint {
let mut n = 0u;
ebml::tagged_docs(item, tag_items_data_item_ty_param_bounds,
|_p| { n += 1u; true } );
n
}

fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> ~[ast::def_id] {
fn enum_variant_ids(item: ebml::Doc, cdata: cmd) -> ~[ast::def_id] {
let mut ids: ~[ast::def_id] = ~[];
let v = tag_items_data_item_variant;
for ebml::tagged_docs(item, v) |p| {
@ -261,7 +261,7 @@ fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> ~[ast::def_id] {
return ids;
}

fn item_path(intr: ident_interner, item_doc: ebml::doc) -> ast_map::path {
fn item_path(intr: ident_interner, item_doc: ebml::Doc) -> ast_map::path {
let path_doc = ebml::get_doc(item_doc, tag_path);

let len_doc = ebml::get_doc(path_doc, tag_path_len);
@ -285,12 +285,12 @@ fn item_path(intr: ident_interner, item_doc: ebml::doc) -> ast_map::path {
return result;
}

fn item_name(intr: ident_interner, item: ebml::doc) -> ast::ident {
fn item_name(intr: ident_interner, item: ebml::Doc) -> ast::ident {
let name = ebml::get_doc(item, tag_paths_data_name);
intr.intern(@str::from_bytes(ebml::doc_data(name)))
}

fn item_to_def_like(item: ebml::doc, did: ast::def_id, cnum: ast::crate_num)
fn item_to_def_like(item: ebml::Doc, did: ast::def_id, cnum: ast::crate_num)
-> def_like {
let fam = item_family(item);
match fam {
@ -533,7 +533,7 @@ type decode_inlined_item = fn(
cdata: cstore::crate_metadata,
tcx: ty::ctxt,
path: ast_map::path,
par_doc: ebml::doc) -> Option<ast::inlined_item>;
par_doc: ebml::Doc) -> Option<ast::inlined_item>;

fn maybe_get_item_ast(intr: ident_interner, cdata: cmd, tcx: ty::ctxt,
id: ast::node_id,
@ -602,7 +602,7 @@ type method_info = {

type _impl = {did: ast::def_id, ident: ast::ident, methods: ~[@method_info]};

fn get_self_ty(item: ebml::doc) -> ast::self_ty_ {
fn get_self_ty(item: ebml::Doc) -> ast::self_ty_ {
fn get_mutability(ch: u8) -> ast::mutability {
match ch as char {
'i' => { ast::m_imm }
@ -631,7 +631,7 @@ fn get_self_ty(item: ebml::doc) -> ast::self_ty_ {
}
}

fn item_impl_methods(intr: ident_interner, cdata: cmd, item: ebml::doc,
fn item_impl_methods(intr: ident_interner, cdata: cmd, item: ebml::Doc,
base_tps: uint) -> ~[@method_info] {
let mut rslt = ~[];
for ebml::tagged_docs(item, tag_item_impl_method) |doc| {
@ -780,7 +780,7 @@ fn family_names_type(fam: Family) -> bool {
match fam { Type | Mod | Trait => true, _ => false }
}

fn read_path(d: ebml::doc) -> {path: ~str, pos: uint} {
fn read_path(d: ebml::Doc) -> {path: ~str, pos: uint} {
let desc = ebml::doc_data(d);
let pos = io::u64_from_be_bytes(desc, 0u, 4u) as uint;
let pathbytes = vec::slice::<u8>(desc, 4u, vec::len::<u8>(desc));
@ -788,7 +788,7 @@ fn read_path(d: ebml::doc) -> {path: ~str, pos: uint} {
return {path: path, pos: pos};
}

fn describe_def(items: ebml::doc, id: ast::def_id) -> ~str {
|
||||
fn describe_def(items: ebml::Doc, id: ast::def_id) -> ~str {
|
||||
if id.crate != ast::local_crate { return ~"external"; }
|
||||
let it = match maybe_find_item(id.node, items) {
|
||||
Some(it) => it,
|
||||
@ -823,7 +823,7 @@ fn item_family_to_str(fam: Family) -> ~str {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_meta_items(md: ebml::doc) -> ~[@ast::meta_item] {
|
||||
fn get_meta_items(md: ebml::Doc) -> ~[@ast::meta_item] {
|
||||
let mut items: ~[@ast::meta_item] = ~[];
|
||||
for ebml::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
|
||||
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
|
||||
@ -848,7 +848,7 @@ fn get_meta_items(md: ebml::doc) -> ~[@ast::meta_item] {
|
||||
return items;
|
||||
}
|
||||
|
||||
fn get_attributes(md: ebml::doc) -> ~[ast::attribute] {
|
||||
fn get_attributes(md: ebml::Doc) -> ~[ast::attribute] {
|
||||
let mut attrs: ~[ast::attribute] = ~[];
|
||||
match ebml::maybe_get_doc(md, tag_attributes) {
|
||||
option::Some(attrs_d) => {
|
||||
@ -870,13 +870,13 @@ fn get_attributes(md: ebml::doc) -> ~[ast::attribute] {
|
||||
}
|
||||
|
||||
fn list_meta_items(intr: ident_interner,
|
||||
meta_items: ebml::doc, out: io::Writer) {
|
||||
meta_items: ebml::Doc, out: io::Writer) {
|
||||
for get_meta_items(meta_items).each |mi| {
|
||||
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(mi, intr)));
|
||||
}
|
||||
}
|
||||
|
||||
fn list_crate_attributes(intr: ident_interner, md: ebml::doc, hash: ~str,
|
||||
fn list_crate_attributes(intr: ident_interner, md: ebml::Doc, hash: ~str,
|
||||
out: io::Writer) {
|
||||
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));
|
||||
|
||||
@ -899,7 +899,7 @@ fn get_crate_deps(intr: ident_interner, data: @~[u8]) -> ~[crate_dep] {
|
||||
let cratedoc = ebml::doc(data);
|
||||
let depsdoc = ebml::get_doc(cratedoc, tag_crate_deps);
|
||||
let mut crate_num = 1;
|
||||
fn docstr(doc: ebml::doc, tag_: uint) -> ~str {
|
||||
fn docstr(doc: ebml::Doc, tag_: uint) -> ~str {
|
||||
str::from_bytes(ebml::doc_data(ebml::get_doc(doc, tag_)))
|
||||
}
|
||||
for ebml::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
|
||||
|
@ -5,7 +5,7 @@ import util::ppaux::ty_to_str;
import std::{ebml, map};
import std::map::hashmap;
import io::WriterUtil;
import ebml::writer;
import ebml::Writer;
import syntax::ast::*;
import syntax::print::pprust;
import syntax::{ast_util, visit};
@ -37,7 +37,7 @@ export encode_def_id;
type abbrev_map = map::hashmap<ty::t, tyencode::ty_abbrev>;

type encode_inlined_item = fn@(ecx: @encode_ctxt,
ebml_w: ebml::writer,
ebml_w: ebml::Writer,
path: ast_map::path,
ii: ast::inlined_item);

@ -86,15 +86,15 @@ fn reachable(ecx: @encode_ctxt, id: node_id) -> bool {
ecx.reachable.contains_key(id)
}

fn encode_name(ecx: @encode_ctxt, ebml_w: ebml::writer, name: ident) {
fn encode_name(ecx: @encode_ctxt, ebml_w: ebml::Writer, name: ident) {
ebml_w.wr_tagged_str(tag_paths_data_name, ecx.tcx.sess.str_of(name));
}

fn encode_def_id(ebml_w: ebml::writer, id: def_id) {
fn encode_def_id(ebml_w: ebml::Writer, id: def_id) {
ebml_w.wr_tagged_str(tag_def_id, def_to_str(id));
}

fn encode_region_param(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_region_param(ecx: @encode_ctxt, ebml_w: ebml::Writer,
it: @ast::item) {
let opt_rp = ecx.tcx.region_paramd_items.find(it.id);
for opt_rp.each |rp| {
@ -104,7 +104,7 @@ fn encode_region_param(ecx: @encode_ctxt, ebml_w: ebml::writer,
}
}

fn encode_mutability(ebml_w: ebml::writer, mt: class_mutability) {
fn encode_mutability(ebml_w: ebml::Writer, mt: class_mutability) {
do ebml_w.wr_tag(tag_class_mut) {
let val = match mt {
class_immutable => 'a',
@ -116,7 +116,7 @@ fn encode_mutability(ebml_w: ebml::writer, mt: class_mutability) {

type entry<T> = {val: T, pos: uint};

fn add_to_index(ecx: @encode_ctxt, ebml_w: ebml::writer, path: &[ident],
fn add_to_index(ecx: @encode_ctxt, ebml_w: ebml::Writer, path: &[ident],
&index: ~[entry<~str>], name: ident) {
let mut full_path = ~[];
vec::push_all(full_path, path);
@ -127,7 +127,7 @@ fn add_to_index(ecx: @encode_ctxt, ebml_w: ebml::writer, path: &[ident],
pos: ebml_w.writer.tell()});
}

fn encode_trait_ref(ebml_w: ebml::writer, ecx: @encode_ctxt, t: @trait_ref) {
fn encode_trait_ref(ebml_w: ebml::Writer, ecx: @encode_ctxt, t: @trait_ref) {
ebml_w.start_tag(tag_impl_trait);
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, t.ref_id));
ebml_w.end_tag();
@ -135,7 +135,7 @@ fn encode_trait_ref(ebml_w: ebml::writer, ecx: @encode_ctxt, t: @trait_ref) {

// Item info table encoding
fn encode_family(ebml_w: ebml::writer, c: char) {
fn encode_family(ebml_w: ebml::Writer, c: char) {
ebml_w.start_tag(tag_items_data_item_family);
ebml_w.writer.write(&[c as u8]);
ebml_w.end_tag();
@ -143,7 +143,7 @@ fn encode_family(ebml_w: ebml::writer, c: char) {

fn def_to_str(did: def_id) -> ~str { fmt!("%d:%d", did.crate, did.node) }

fn encode_ty_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
fn encode_ty_type_param_bounds(ebml_w: ebml::Writer, ecx: @encode_ctxt,
params: @~[ty::param_bounds]) {
let ty_str_ctxt = @{diag: ecx.diag,
ds: def_to_str,
@ -157,7 +157,7 @@ fn encode_ty_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
}
}

fn encode_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
fn encode_type_param_bounds(ebml_w: ebml::Writer, ecx: @encode_ctxt,
params: ~[ty_param]) {
let ty_param_bounds =
@params.map(|param| ecx.tcx.ty_param_bounds.get(param.id));
@ -165,13 +165,13 @@ fn encode_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
}

fn encode_variant_id(ebml_w: ebml::writer, vid: def_id) {
fn encode_variant_id(ebml_w: ebml::Writer, vid: def_id) {
ebml_w.start_tag(tag_items_data_item_variant);
ebml_w.writer.write(str::to_bytes(def_to_str(vid)));
ebml_w.end_tag();
}

fn write_type(ecx: @encode_ctxt, ebml_w: ebml::writer, typ: ty::t) {
fn write_type(ecx: @encode_ctxt, ebml_w: ebml::Writer, typ: ty::t) {
let ty_str_ctxt =
@{diag: ecx.diag,
ds: def_to_str,
@ -181,13 +181,13 @@ fn write_type(ecx: @encode_ctxt, ebml_w: ebml::writer, typ: ty::t) {
tyencode::enc_ty(ebml_w.writer, ty_str_ctxt, typ);
}

fn encode_type(ecx: @encode_ctxt, ebml_w: ebml::writer, typ: ty::t) {
fn encode_type(ecx: @encode_ctxt, ebml_w: ebml::Writer, typ: ty::t) {
ebml_w.start_tag(tag_items_data_item_type);
write_type(ecx, ebml_w, typ);
ebml_w.end_tag();
}

fn encode_symbol(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id) {
fn encode_symbol(ecx: @encode_ctxt, ebml_w: ebml::Writer, id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol);
let sym = match ecx.item_symbols.find(id) {
Some(x) => x,
@ -200,25 +200,25 @@ fn encode_symbol(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id) {
ebml_w.end_tag();
}

fn encode_discriminant(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id) {
fn encode_discriminant(ecx: @encode_ctxt, ebml_w: ebml::Writer, id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol);
ebml_w.writer.write(str::to_bytes(ecx.discrim_symbols.get(id)));
ebml_w.end_tag();
}

fn encode_disr_val(_ecx: @encode_ctxt, ebml_w: ebml::writer, disr_val: int) {
fn encode_disr_val(_ecx: @encode_ctxt, ebml_w: ebml::Writer, disr_val: int) {
ebml_w.start_tag(tag_disr_val);
ebml_w.writer.write(str::to_bytes(int::to_str(disr_val,10u)));
ebml_w.end_tag();
}

fn encode_parent_item(ebml_w: ebml::writer, id: def_id) {
fn encode_parent_item(ebml_w: ebml::Writer, id: def_id) {
ebml_w.start_tag(tag_items_data_parent_item);
ebml_w.writer.write(str::to_bytes(def_to_str(id)));
ebml_w.end_tag();
}

fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::Writer,
id: node_id, variants: ~[variant],
path: ast_map::path, index: @mut ~[entry<int>],
ty_params: ~[ty_param]) {
@ -255,9 +255,9 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
}
}

fn encode_path(ecx: @encode_ctxt, ebml_w: ebml::writer, path: ast_map::path,
fn encode_path(ecx: @encode_ctxt, ebml_w: ebml::Writer, path: ast_map::path,
name: ast_map::path_elt) {
fn encode_path_elt(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_path_elt(ecx: @encode_ctxt, ebml_w: ebml::Writer,
elt: ast_map::path_elt) {
let (tag, name) = match elt {
ast_map::path_mod(name) => (tag_path_elt_mod, name),
@ -274,7 +274,7 @@ fn encode_path(ecx: @encode_ctxt, ebml_w: ebml::writer, path: ast_map::path,
}
}

fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::Writer, md: _mod,
id: node_id, path: ast_map::path, name: ident) {
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(id));
@ -332,7 +332,7 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
ebml_w.end_tag();
}

fn encode_visibility(ebml_w: ebml::writer, visibility: visibility) {
fn encode_visibility(ebml_w: ebml::Writer, visibility: visibility) {
encode_family(ebml_w, match visibility {
public => 'g',
private => 'j',
@ -340,7 +340,7 @@ fn encode_visibility(ebml_w: ebml::writer, visibility: visibility) {
});
}

fn encode_self_type(ebml_w: ebml::writer, self_type: ast::self_ty_) {
fn encode_self_type(ebml_w: ebml::Writer, self_type: ast::self_ty_) {
ebml_w.start_tag(tag_item_trait_method_self_ty);

// Encode the base self type.
@ -373,7 +373,7 @@ fn encode_self_type(ebml_w: ebml::writer, self_type: ast::self_ty_) {
}

/* Returns an index of items in this class */
fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::Writer,
id: node_id, path: ast_map::path,
class_tps: ~[ty_param],
fields: ~[@struct_field],
@ -429,7 +429,7 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
}

// This is for encoding info for ctors and dtors
fn encode_info_for_ctor(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_info_for_ctor(ecx: @encode_ctxt, ebml_w: ebml::Writer,
id: node_id, ident: ident, path: ast_map::path,
item: Option<inlined_item>, tps: ~[ty_param]) {
ebml_w.start_tag(tag_items_data_item);
@ -454,7 +454,7 @@ fn encode_info_for_ctor(ecx: @encode_ctxt, ebml_w: ebml::writer,
ebml_w.end_tag();
}

fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: ebml::Writer,
impl_path: ast_map::path, should_inline: bool,
parent_id: node_id,
m: @method, all_tps: ~[ty_param]) {
@ -504,7 +504,7 @@ fn should_inline(attrs: ~[attribute]) -> bool {
}

fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item,
index: @mut ~[entry<int>], path: ast_map::path) {

let tcx = ecx.tcx;
@ -516,7 +516,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
};
if !must_write && !reachable(ecx, item.id) { return; }

fn add_to_index_(item: @item, ebml_w: ebml::writer,
fn add_to_index_(item: @item, ebml_w: ebml::Writer,
index: @mut ~[entry<int>]) {
vec::push(*index, {val: item.id, pos: ebml_w.writer.tell()});
}
@ -795,7 +795,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
}
}

fn encode_info_for_foreign_item(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_info_for_foreign_item(ecx: @encode_ctxt, ebml_w: ebml::Writer,
nitem: @foreign_item,
index: @mut ~[entry<int>],
path: ast_map::path, abi: foreign_abi) {
@ -829,7 +829,7 @@ fn encode_info_for_foreign_item(ecx: @encode_ctxt, ebml_w: ebml::writer,
ebml_w.end_tag();
}

fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::Writer,
crate: @crate) -> ~[entry<int>] {
let index = @mut ~[];
ebml_w.start_tag(tag_items_data);
@ -884,7 +884,7 @@ fn create_index<T: copy>(index: ~[entry<T>], hash_fn: fn@(T) -> uint) ->
return buckets_frozen;
}

fn encode_index<T>(ebml_w: ebml::writer, buckets: ~[@~[entry<T>]],
fn encode_index<T>(ebml_w: ebml::Writer, buckets: ~[@~[entry<T>]],
write_fn: fn(io::Writer, T)) {
let writer = ebml_w.writer;
ebml_w.start_tag(tag_index);
@ -919,7 +919,7 @@ fn write_int(writer: io::Writer, &&n: int) {
writer.write_be_u32(n as u32);
}

fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) {
fn encode_meta_item(ebml_w: ebml::Writer, mi: meta_item) {
match mi.node {
meta_word(name) => {
ebml_w.start_tag(tag_meta_item_word);
@ -956,7 +956,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) {
}
}

fn encode_attributes(ebml_w: ebml::writer, attrs: ~[attribute]) {
fn encode_attributes(ebml_w: ebml::Writer, attrs: ~[attribute]) {
ebml_w.start_tag(tag_attributes);
for attrs.each |attr| {
ebml_w.start_tag(tag_attribute);
@ -1018,7 +1018,7 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] {
return attrs;
}

fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::Writer,
cstore: cstore::cstore) {

fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::cstore)
@ -1064,7 +1064,7 @@ fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::writer,
ebml_w.end_tag();
}

fn encode_crate_dep(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn encode_crate_dep(ecx: @encode_ctxt, ebml_w: ebml::Writer,
dep: decoder::crate_dep) {
ebml_w.start_tag(tag_crate_dep);
ebml_w.start_tag(tag_crate_dep_name);
@ -1079,7 +1079,7 @@ fn encode_crate_dep(ecx: @encode_ctxt, ebml_w: ebml::writer,
ebml_w.end_tag();
}

fn encode_hash(ebml_w: ebml::writer, hash: ~str) {
fn encode_hash(ebml_w: ebml::Writer, hash: ~str) {
ebml_w.start_tag(tag_crate_hash);
ebml_w.writer.write(str::to_bytes(hash));
ebml_w.end_tag();
@ -1113,7 +1113,7 @@ fn encode_metadata(parms: encode_parms, crate: @crate) -> ~[u8] {
});

let buf_w = io::mem_buffer_writer(buf);
let ebml_w = ebml::writer(buf_w);
let ebml_w = ebml::Writer(buf_w);

encode_hash(ebml_w, ecx.link_meta.extras_hash);
@ -8,7 +8,7 @@ import syntax::ast_map;
import syntax::ast_util;
import syntax::codemap::span;
import std::ebml;
import std::ebml::writer;
import std::ebml::Writer;
import std::ebml::get_doc;
import std::map::hashmap;
import std::serialization::serializer;
@ -78,7 +78,7 @@ trait tr {
// Top-level methods.

fn encode_inlined_item(ecx: @e::encode_ctxt,
ebml_w: ebml::writer,
ebml_w: ebml::Writer,
path: ast_map::path,
ii: ast::inlined_item,
maps: maps) {
@ -104,7 +104,7 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
tcx: ty::ctxt,
maps: maps,
path: ast_map::path,
par_doc: ebml::doc) -> Option<ast::inlined_item> {
par_doc: ebml::Doc) -> Option<ast::inlined_item> {
let dcx = @{cdata: cdata, tcx: tcx, maps: maps};
match par_doc.opt_child(c::tag_ast) {
None => None,
@ -222,7 +222,7 @@ impl<D: deserializer> D: def_id_deserializer_helpers {
// We also have to adjust the spans: for now we just insert a dummy span,
// but eventually we should add entries to the local codemap as required.

fn encode_ast(ebml_w: ebml::writer, item: ast::inlined_item) {
fn encode_ast(ebml_w: ebml::Writer, item: ast::inlined_item) {
do ebml_w.wr_tag(c::tag_tree as uint) {
ast::serialize_inlined_item(ebml_w, item)
}
@ -282,7 +282,7 @@ fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item {
}
}

fn decode_ast(par_doc: ebml::doc) -> ast::inlined_item {
fn decode_ast(par_doc: ebml::Doc) -> ast::inlined_item {
let chi_doc = par_doc[c::tag_tree as uint];
let d = ebml::ebml_deserializer(chi_doc);
ast::deserialize_inlined_item(d)
@ -336,11 +336,11 @@ fn renumber_ast(xcx: extended_decode_ctxt, ii: ast::inlined_item)
// ______________________________________________________________________
// Encoding and decoding of ast::def

fn encode_def(ebml_w: ebml::writer, def: ast::def) {
fn encode_def(ebml_w: ebml::Writer, def: ast::def) {
ast::serialize_def(ebml_w, def)
}

fn decode_def(xcx: extended_decode_ctxt, doc: ebml::doc) -> ast::def {
fn decode_def(xcx: extended_decode_ctxt, doc: ebml::Doc) -> ast::def {
let dsr = ebml::ebml_deserializer(doc);
let def = ast::deserialize_def(dsr);
def.tr(xcx)
@ -388,7 +388,7 @@ impl ast::def: tr {
// ______________________________________________________________________
// Encoding and decoding of freevar information

fn encode_freevar_entry(ebml_w: ebml::writer, fv: freevar_entry) {
fn encode_freevar_entry(ebml_w: ebml::Writer, fv: freevar_entry) {
serialize_freevar_entry(ebml_w, fv)
}

@ -396,7 +396,7 @@ trait ebml_deserializer_helper {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry;
}

impl ebml::ebml_deserializer: ebml_deserializer_helper {
impl ebml::EbmlDeserializer: ebml_deserializer_helper {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry {
let fv = deserialize_freevar_entry(self);
fv.tr(xcx)
@ -416,7 +416,7 @@ trait read_method_map_entry_helper {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry;
}

impl ebml::ebml_deserializer: read_method_map_entry_helper {
impl ebml::EbmlDeserializer: read_method_map_entry_helper {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry {
let mme = deserialize_method_map_entry(self);
{derefs: mme.derefs,
@ -445,7 +445,7 @@ impl method_origin: tr {
// Encoding and decoding vtable_res

fn encode_vtable_res(ecx: @e::encode_ctxt,
ebml_w: ebml::writer,
ebml_w: ebml::Writer,
dr: typeck::vtable_res) {
// can't autogenerate this code because automatic serialization of
// ty::t doesn't work, and there is no way (atm) to have
@ -457,7 +457,7 @@ fn encode_vtable_res(ecx: @e::encode_ctxt,
}

fn encode_vtable_origin(ecx: @e::encode_ctxt,
ebml_w: ebml::writer,
ebml_w: ebml::Writer,
vtable_origin: typeck::vtable_origin) {
do ebml_w.emit_enum(~"vtable_origin") {
match vtable_origin {
@ -504,7 +504,7 @@ trait vtable_deserialization_helpers {
fn read_vtable_origin(xcx: extended_decode_ctxt) -> typeck::vtable_origin;
}

impl ebml::ebml_deserializer: vtable_deserialization_helpers {
impl ebml::EbmlDeserializer: vtable_deserialization_helpers {
fn read_vtable_res(xcx: extended_decode_ctxt) -> typeck::vtable_res {
@self.read_to_vec(|| self.read_vtable_origin(xcx) )
}
@ -579,7 +579,7 @@ trait ebml_writer_helpers {
fn emit_tpbt(ecx: @e::encode_ctxt, tpbt: ty::ty_param_bounds_and_ty);
}

impl ebml::writer: ebml_writer_helpers {
impl ebml::Writer: ebml_writer_helpers {
fn emit_ty(ecx: @e::encode_ctxt, ty: ty::t) {
e::write_type(ecx, self, ty)
}
@ -618,7 +618,7 @@ trait write_tag_and_id {
fn id(id: ast::node_id);
}

impl ebml::writer: write_tag_and_id {
impl ebml::Writer: write_tag_and_id {
fn tag(tag_id: c::astencode_tag, f: fn()) {
do self.wr_tag(tag_id as uint) { f() }
}
@ -630,7 +630,7 @@ impl ebml::writer: write_tag_and_id {

fn encode_side_tables_for_ii(ecx: @e::encode_ctxt,
maps: maps,
ebml_w: ebml::writer,
ebml_w: ebml::Writer,
ii: ast::inlined_item) {
do ebml_w.wr_tag(c::tag_table as uint) {
ast_util::visit_ids_for_inlined_item(
@ -646,7 +646,7 @@ fn encode_side_tables_for_ii(ecx: @e::encode_ctxt,

fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
maps: maps,
ebml_w: ebml::writer,
ebml_w: ebml::Writer,
id: ast::node_id) {
let tcx = ecx.tcx;

@ -771,12 +771,12 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,

trait doc_decoder_helpers {
fn as_int() -> int;
fn opt_child(tag: c::astencode_tag) -> Option<ebml::doc>;
fn opt_child(tag: c::astencode_tag) -> Option<ebml::Doc>;
}

impl ebml::doc: doc_decoder_helpers {
impl ebml::Doc: doc_decoder_helpers {
fn as_int() -> int { ebml::doc_as_u64(self) as int }
fn opt_child(tag: c::astencode_tag) -> Option<ebml::doc> {
fn opt_child(tag: c::astencode_tag) -> Option<ebml::Doc> {
ebml::maybe_get_doc(self, tag as uint)
}
}
@ -789,7 +789,7 @@ trait ebml_deserializer_decoder_helpers {
-> ty::ty_param_bounds_and_ty;
}

impl ebml::ebml_deserializer: ebml_deserializer_decoder_helpers {
impl ebml::EbmlDeserializer: ebml_deserializer_decoder_helpers {

fn read_ty(xcx: extended_decode_ctxt) -> ty::t {
// Note: regions types embed local node ids. In principle, we
@ -831,7 +831,7 @@ impl ebml::ebml_deserializer: ebml_deserializer_decoder_helpers {
}

fn decode_side_tables(xcx: extended_decode_ctxt,
ast_doc: ebml::doc) {
ast_doc: ebml::Doc) {
let dcx = xcx.dcx;
let tbl_doc = ast_doc[c::tag_table as uint];
for ebml::docs(tbl_doc) |tag, entry_doc| {
@ -901,14 +901,14 @@ fn decode_side_tables(xcx: extended_decode_ctxt,
// Testing of astencode_gen

#[cfg(test)]
fn encode_item_ast(ebml_w: ebml::writer, item: @ast::item) {
fn encode_item_ast(ebml_w: ebml::Writer, item: @ast::item) {
do ebml_w.wr_tag(c::tag_tree as uint) {
ast::serialize_item(ebml_w, *item);
}
}

#[cfg(test)]
fn decode_item_ast(par_doc: ebml::doc) -> @ast::item {
fn decode_item_ast(par_doc: ebml::Doc) -> @ast::item {
let chi_doc = par_doc[c::tag_tree as uint];
let d = ebml::ebml_deserializer(chi_doc);
@ast::deserialize_item(d)
@ -937,7 +937,7 @@ fn mk_ctxt() -> fake_ext_ctxt {
#[cfg(test)]
fn roundtrip(in_item: @ast::item) {
let mbuf = io::mem_buffer();
let ebml_w = ebml::writer(io::mem_buffer_writer(mbuf));
let ebml_w = ebml::Writer(io::mem_buffer_writer(mbuf));
encode_item_ast(ebml_w, in_item);
let ebml_doc = ebml::doc(@io::mem_buffer_buf(mbuf));
let out_item = decode_item_ast(ebml_doc);
@ -41,7 +41,7 @@ fn opt_output_style() -> ~str { ~"output-style" }
fn opt_pandoc_cmd() -> ~str { ~"pandoc-cmd" }
fn opt_help() -> ~str { ~"h" }

fn opts() -> ~[(getopts::opt, ~str)] {
fn opts() -> ~[(getopts::Opt, ~str)] {
~[
(getopts::optopt(opt_output_dir()),
~"--output-dir <val> put documents here"),
@ -119,7 +119,7 @@ fn parse_config_(

fn config_from_opts(
input_crate: &Path,
matches: getopts::matches,
matches: getopts::Matches,
program_output: program_output
) -> Result<config, ~str> {
@ -11,7 +11,7 @@ import std::map;
import std::map::map;
import std::map::hashmap;
import std::deque;
import std::deque::t;
import std::deque::Deque;
import std::par;
import io::WriterUtil;
import comm::*;
@ -1,6 +1,6 @@
use std;
import std::arena;
import methods = std::arena::arena;
import methods = std::arena::Arena;

enum tree/& { nil, node(&tree, &tree, int), }

@ -13,7 +13,7 @@ fn item_check(t: &tree) -> int {
}
}

fn bottom_up_tree(arena: &r/arena::arena,
fn bottom_up_tree(arena: &r/arena::Arena,
item: int,
depth: int) -> &r/tree {
if depth > 0 {
@ -43,7 +43,7 @@ fn main(args: ~[~str]) {
max_depth = n;
}

let stretch_arena = arena::arena();
let stretch_arena = arena::Arena();
let stretch_depth = max_depth + 1;
let stretch_tree = bottom_up_tree(&stretch_arena, 0, stretch_depth);

@ -51,7 +51,7 @@ fn main(args: ~[~str]) {
stretch_depth,
item_check(stretch_tree)));

let long_lived_arena = arena::arena();
let long_lived_arena = arena::Arena();
let long_lived_tree = bottom_up_tree(&long_lived_arena, 0, max_depth);
let mut depth = min_depth;
while depth <= max_depth {
@ -50,7 +50,7 @@ fn solve_grid(g: grid_t) {
fn next_color(g: grid, row: u8, col: u8, start_color: u8) -> bool {
if start_color < 10u8 {
// colors not yet used
let avail = bitv::bitv(10u, false);
let avail = bitv::Bitv(10u, false);
for u8::range(start_color, 10u8) |color| {
avail.set(color as uint, true);
}
@ -71,8 +71,8 @@ fn solve_grid(g: grid_t) {
}

// find colors available in neighbourhood of (row, col)
fn drop_colors(g: grid, avail: bitv::bitv, row: u8, col: u8) {
fn drop_color(g: grid, colors: bitv::bitv, row: u8, col: u8) {
fn drop_colors(g: grid, avail: bitv::Bitv, row: u8, col: u8) {
fn drop_color(g: grid, colors: bitv::Bitv, row: u8, col: u8) {
let color = g[row][col];
if color != 0u8 { colors.set(color as uint, false); }
}
@ -6,7 +6,7 @@ import std::bitv;
type fn_info = {vars: hashmap<uint, var_info>};
type var_info = {a: uint, b: uint};

fn bitv_to_str(enclosing: fn_info, v: ~bitv::bitv) -> str {
fn bitv_to_str(enclosing: fn_info, v: ~bitv::Bitv) -> str {
let s = "";

// error is that the value type in the hash map is var_info, not a box
@ -9,8 +9,8 @@ import std::serialization::{serialize_uint, deserialize_uint};

fn test_ser_and_deser<A>(a1: A,
expected: ~str,
ebml_ser_fn: fn(ebml::writer, A),
ebml_deser_fn: fn(ebml::ebml_deserializer) -> A,
ebml_ser_fn: fn(ebml::Writer, A),
ebml_deser_fn: fn(ebml::EbmlDeserializer) -> A,
io_ser_fn: fn(io::Writer, A)) {

// check the pretty printer:
@ -21,7 +21,7 @@ fn test_ser_and_deser<A>(a1: A,

// check the EBML serializer:
let buf = io::mem_buffer();
let w = ebml::writer(buf as io::Writer);
let w = ebml::Writer(buf as io::Writer);
ebml_ser_fn(w, a1);
let d = ebml::doc(@io::mem_buffer_buf(buf));
let a2 = ebml_deser_fn(ebml::ebml_deserializer(d));
@ -2,12 +2,12 @@ use std;
import std::bitv::*;

fn bitv_test() -> bool {
let v1 = ~bitv(31, false);
let v2 = ~bitv(31, true);
let v1 = ~Bitv(31, false);
let v2 = ~Bitv(31, true);
v1.union(v2);
true
}

fn main() {
do iter::repeat(10000) || {bitv_test()};
}
}
@ -1,16 +1,16 @@
// xfail-test
use std;
import std::arena;
import std::arena::arena;
import std::arena::Arena;

enum hold { s(str) }

fn init(ar: &a.arena::arena, str: str) -> &a.hold {
fn init(ar: &a.arena::Arena, str: str) -> &a.hold {
new(*ar) s(str)
}

fn main(args: ~[str]) {
let ar = arena::arena();
let ar = arena::Arena();
let leak = init(&ar, args[0]);
match *leak {
s(astr) {
@ -1,6 +1,6 @@
use std;
import std::deque;
import std::deque::t;
import std::deque::Deque;

fn main() {
let Q = deque::create();
@ -9,11 +9,11 @@ enum object
int_value(i64),
}

fn lookup(table: std::map::hashmap<~str, std::json::json>, key: ~str, default: ~str) -> ~str
fn lookup(table: std::map::hashmap<~str, std::json::Json>, key: ~str, default: ~str) -> ~str
{
match table.find(key)
{
option::Some(std::json::string(s)) =>
option::Some(std::json::String(s)) =>
{
*s
}
@ -29,11 +29,11 @@ fn lookup(table: std::map::hashmap<~str, std::json::json>, key: ~str, default: ~
}
}

fn add_interface(store: int, managed_ip: ~str, data: std::json::json) -> (~str, object)
fn add_interface(store: int, managed_ip: ~str, data: std::json::Json) -> (~str, object)
{
match data
{
std::json::dict(interface) =>
std::json::Dict(interface) =>
{
let name = lookup(interface, ~"ifDescr", ~"");
let label = fmt!("%s-%s", managed_ip, name);
@ -48,11 +48,11 @@ fn add_interface(store: int, managed_ip: ~str, data: std::json::json) -> (~str,
}
}

fn add_interfaces(store: int, managed_ip: ~str, device: std::map::hashmap<~str, std::json::json>) -> ~[(~str, object)]
fn add_interfaces(store: int, managed_ip: ~str, device: std::map::hashmap<~str, std::json::Json>) -> ~[(~str, object)]
{
match device[~"interfaces"]
{
std::json::list(interfaces) =>
std::json::List(interfaces) =>
{
do vec::map(*interfaces) |interface| {
add_interface(store, managed_ip, interface)
@ -1,8 +1,8 @@
use std;
import std::arena::arena;
import std::arena;

fn main() {
let p = &arena();
let p = &arena::Arena();
let x = p.alloc(|| 4u);
io::print(fmt!("%u", *x));
assert *x == 4u;
@ -1,13 +1,13 @@
use std;
import libc, sys, unsafe;
import std::arena::arena;
import std::arena::Arena;

type bcx = {
fcx: &fcx
};

type fcx = {
arena: &arena,
arena: &Arena,
ccx: &ccx
};

@ -25,7 +25,7 @@ fn g(fcx : &fcx) {
}

fn f(ccx : &ccx) {
let a = arena();
let a = Arena();
let fcx = &{ arena: &a, ccx: ccx };
return g(fcx);
}