Auto merge of #93839 - Mark-Simulacrum:delete-json-rust-deserialization, r=nnethercote

Simplify rustc_serialize by dropping support for decoding into JSON

This PR currently bundles two (somewhat separate) tasks.

First, it removes the JSON Decoder trait impl, which permitted going from JSON to Rust structs. For now, we keep supporting JSON deserialization, but only to `Json` (an equivalent of serde_json::Value). The primary hard-to-remove user there is custom targets -- which need some form of JSON deserialization -- but they already have a custom ad-hoc pass for moving from Json to a Rust struct.

A [comment](e7aca89598/compiler/rustc_target/src/spec/mod.rs (L1653)) there suggests that it would be impractical to move them to a Decodable-based impl, at least without backwards compatibility concerns. I suspect that if we were widely breaking compat there, it would make sense to use serde_json at this point which would produce better error messages; the types in rustc_target are relatively isolated so we would not particularly suffer from using serde_derive.

The second part of the PR (all but the first commit) is to simplify the Decoder API by removing the non-primitive `read_*` functions. These primarily add indirection (through a closure), which doesn't directly cause a performance issue (the unique closure types essentially guarantee monomorphization), but does increase the amount of work rustc and LLVM need to do. This could be split out to a separate PR, but is included here in part to help motivate the first part.

Future work might consist of:

* Specializing enum discriminant encoding to avoid leb128 for small enums (since we know the variant count, we can directly use read/write u8 in almost all cases)
* Adding new methods to support faster deserialization (e.g., access to the underlying byte stream)
   * Currently these are somewhat ad-hoc supported by specializations for e.g. `Vec<u8>`, but other types which could benefit don't today.
* Removing the Decoder trait entirely in favor of a concrete type -- today, we only really have one impl of it modulo wrappers used for specialization-based dispatch.

Highly recommend review with whitespace changes off, as the removal of closures frequently causes things to be de-indented.
This commit is contained in:
bors 2022-02-22 07:54:22 +00:00
commit 58a721af9f
15 changed files with 205 additions and 1056 deletions

View File

@ -5,17 +5,12 @@ use rustc_span::source_map::{FilePathMapping, SourceMap};
use crate::emitter::{ColorConfig, HumanReadableErrorType};
use crate::Handler;
use rustc_serialize::json::decode;
use rustc_serialize::json;
use rustc_span::{BytePos, Span};
use std::str;
#[derive(Decodable, Debug, PartialEq, Eq)]
struct TestData {
spans: Vec<SpanTestData>,
}
#[derive(Decodable, Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq)]
struct SpanTestData {
pub byte_start: u32,
pub byte_end: u32,
@ -41,8 +36,6 @@ impl<T: Write> Write for Shared<T> {
/// Test the span yields correct positions in JSON.
fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
let expected_output = TestData { spans: vec![expected_output] };
rustc_span::create_default_session_globals_then(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
sm.new_source_file(Path::new("test.rs").to_owned().into(), code.to_owned());
@ -64,9 +57,19 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap();
let actual_output: TestData = decode(actual_output);
assert_eq!(expected_output, actual_output)
let actual_output = json::from_str(&actual_output).unwrap();
let spans = actual_output["spans"].as_array().unwrap();
assert_eq!(spans.len(), 1);
let obj = &spans[0];
let actual_output = SpanTestData {
byte_start: obj["byte_start"].as_u64().unwrap() as u32,
byte_end: obj["byte_end"].as_u64().unwrap() as u32,
line_start: obj["line_start"].as_u64().unwrap() as u32,
line_end: obj["line_end"].as_u64().unwrap() as u32,
column_start: obj["column_start"].as_u64().unwrap() as u32,
column_end: obj["column_end"].as_u64().unwrap() as u32,
};
assert_eq!(expected_output, actual_output);
})
}

View File

@ -42,51 +42,26 @@ fn decodable_body(
}
let ty_name = s.ast().ident.to_string();
let decode_body = match s.variants() {
[vi] => {
let construct = vi.construct(|field, index| decode_field(field, index, true));
quote! {
::rustc_serialize::Decoder::read_struct(
__decoder,
|__decoder| { #construct },
)
}
}
[vi] => vi.construct(|field, _index| decode_field(field)),
variants => {
let match_inner: TokenStream = variants
.iter()
.enumerate()
.map(|(idx, vi)| {
let construct = vi.construct(|field, index| decode_field(field, index, false));
let construct = vi.construct(|field, _index| decode_field(field));
quote! { #idx => { #construct } }
})
.collect();
let names: TokenStream = variants
.iter()
.map(|vi| {
let variant_name = vi.ast().ident.to_string();
quote!(#variant_name,)
})
.collect();
let message = format!(
"invalid enum variant tag while decoding `{}`, expected 0..{}",
ty_name,
variants.len()
);
quote! {
::rustc_serialize::Decoder::read_enum(
__decoder,
|__decoder| {
::rustc_serialize::Decoder::read_enum_variant(
__decoder,
&[#names],
|__decoder, __variant_idx| {
match __variant_idx {
#match_inner
_ => panic!(#message),
}
})
}
)
match ::rustc_serialize::Decoder::read_usize(__decoder) {
#match_inner
_ => panic!(#message),
}
}
}
};
@ -101,7 +76,7 @@ fn decodable_body(
)
}
fn decode_field(field: &syn::Field, index: usize, is_struct: bool) -> proc_macro2::TokenStream {
fn decode_field(field: &syn::Field) -> proc_macro2::TokenStream {
let field_span = field.ident.as_ref().map_or(field.ty.span(), |ident| ident.span());
let decode_inner_method = if let syn::Type::Reference(_) = field.ty {
@ -109,22 +84,10 @@ fn decode_field(field: &syn::Field, index: usize, is_struct: bool) -> proc_macro
} else {
quote! { ::rustc_serialize::Decodable::decode }
};
let (decode_method, opt_field_name) = if is_struct {
let field_name = field.ident.as_ref().map_or_else(|| index.to_string(), |i| i.to_string());
(proc_macro2::Ident::new("read_struct_field", field_span), quote! { #field_name, })
} else {
(proc_macro2::Ident::new("read_enum_variant_arg", field_span), quote! {})
};
let __decoder = quote! { __decoder };
// Use the span of the field for the method call, so
// that backtraces will point to the field.
let decode_call = quote_spanned! {field_span=>
::rustc_serialize::Decoder::#decode_method(
#__decoder, #opt_field_name #decode_inner_method)
};
quote! { #decode_call }
quote_spanned! {field_span=> #decode_inner_method(#__decoder) }
}
pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {

View File

@ -122,29 +122,24 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
let mut edge_list_data = Vec::with_capacity(edge_count);
for _index in 0..node_count {
d.read_struct(|d| {
let dep_node: DepNode<K> = d.read_struct_field("node", Decodable::decode);
let _i: SerializedDepNodeIndex = nodes.push(dep_node);
debug_assert_eq!(_i.index(), _index);
let dep_node: DepNode<K> = Decodable::decode(d);
let _i: SerializedDepNodeIndex = nodes.push(dep_node);
debug_assert_eq!(_i.index(), _index);
let fingerprint: Fingerprint =
d.read_struct_field("fingerprint", Decodable::decode);
let _i: SerializedDepNodeIndex = fingerprints.push(fingerprint);
debug_assert_eq!(_i.index(), _index);
let fingerprint: Fingerprint = Decodable::decode(d);
let _i: SerializedDepNodeIndex = fingerprints.push(fingerprint);
debug_assert_eq!(_i.index(), _index);
d.read_struct_field("edges", |d| {
d.read_seq(|d, len| {
let start = edge_list_data.len().try_into().unwrap();
for _ in 0..len {
let edge = d.read_seq_elt(Decodable::decode);
edge_list_data.push(edge);
}
let end = edge_list_data.len().try_into().unwrap();
let _i: SerializedDepNodeIndex = edge_list_indices.push((start, end));
debug_assert_eq!(_i.index(), _index);
})
})
});
// Deserialize edges -- sequence of DepNodeIndex
let len = d.read_usize();
let start = edge_list_data.len().try_into().unwrap();
for _ in 0..len {
let edge = Decodable::decode(d);
edge_list_data.push(edge);
}
let end = edge_list_data.len().try_into().unwrap();
let _i: SerializedDepNodeIndex = edge_list_indices.push((start, end));
debug_assert_eq!(_i.index(), _index);
}
let index: FxHashMap<_, _> =

View File

@ -18,7 +18,8 @@ impl<S: Encoder, A: Array<Item: Encodable<S>>> Encodable<S> for SmallVec<A> {
impl<D: Decoder, A: Array<Item: Decodable<D>>> Decodable<D> for SmallVec<A> {
    /// Decodes a length-prefixed sequence: a `usize` element count followed
    /// by that many elements, decoded in order.
    fn decode(d: &mut D) -> SmallVec<A> {
        let len = d.read_usize();
        (0..len).map(|_| Decodable::decode(d)).collect()
    }
}
@ -35,7 +36,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for LinkedList<T> {
impl<D: Decoder, T: Decodable<D>> Decodable<D> for LinkedList<T> {
    /// Decodes a length-prefixed sequence: a `usize` element count followed
    /// by that many elements, decoded in order.
    fn decode(d: &mut D) -> LinkedList<T> {
        let len = d.read_usize();
        (0..len).map(|_| Decodable::decode(d)).collect()
    }
}
@ -52,7 +54,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for VecDeque<T> {
impl<D: Decoder, T: Decodable<D>> Decodable<D> for VecDeque<T> {
    /// Decodes a length-prefixed sequence: a `usize` element count followed
    /// by that many elements, decoded in order.
    fn decode(d: &mut D) -> VecDeque<T> {
        let len = d.read_usize();
        (0..len).map(|_| Decodable::decode(d)).collect()
    }
}
@ -78,15 +81,14 @@ where
V: Decodable<D>,
{
/// Decodes a length-prefixed map: a `usize` entry count followed by
/// alternating key/value pairs, decoded in encounter order.
fn decode(d: &mut D) -> BTreeMap<K, V> {
    let len = d.read_usize();
    let mut map = BTreeMap::new();
    for _ in 0..len {
        // Key is encoded immediately before its value.
        let key = Decodable::decode(d);
        let val = Decodable::decode(d);
        map.insert(key, val);
    }
    map
}
}
@ -109,13 +111,12 @@ where
T: Decodable<D> + PartialEq + Ord,
{
/// Decodes a length-prefixed set: a `usize` element count followed by
/// that many elements, inserted in encounter order.
fn decode(d: &mut D) -> BTreeSet<T> {
    let len = d.read_usize();
    let mut set = BTreeSet::new();
    for _ in 0..len {
        set.insert(Decodable::decode(d));
    }
    set
}
}
@ -143,16 +144,15 @@ where
S: BuildHasher + Default,
{
/// Decodes a length-prefixed map: a `usize` entry count followed by
/// alternating key/value pairs. The capacity is reserved up front from
/// the decoded length, using the hasher's `Default` state.
fn decode(d: &mut D) -> HashMap<K, V, S> {
    let len = d.read_usize();
    let state = Default::default();
    let mut map = HashMap::with_capacity_and_hasher(len, state);
    for _ in 0..len {
        // Key is encoded immediately before its value.
        let key = Decodable::decode(d);
        let val = Decodable::decode(d);
        map.insert(key, val);
    }
    map
}
}
@ -187,14 +187,13 @@ where
S: BuildHasher + Default,
{
/// Decodes a length-prefixed set: a `usize` element count followed by
/// that many elements. The capacity is reserved up front from the decoded
/// length, using the hasher's `Default` state.
fn decode(d: &mut D) -> HashSet<T, S> {
    let len = d.read_usize();
    let state = Default::default();
    let mut set = HashSet::with_capacity_and_hasher(len, state);
    for _ in 0..len {
        set.insert(Decodable::decode(d));
    }
    set
}
}
@ -222,16 +221,15 @@ where
S: BuildHasher + Default,
{
/// Decodes a length-prefixed map: a `usize` entry count followed by
/// alternating key/value pairs, preserving insertion order. The capacity is
/// reserved up front from the decoded length, using the hasher's `Default`
/// state.
fn decode(d: &mut D) -> indexmap::IndexMap<K, V, S> {
    let len = d.read_usize();
    let state = Default::default();
    let mut map = indexmap::IndexMap::with_capacity_and_hasher(len, state);
    for _ in 0..len {
        // Key is encoded immediately before its value.
        let key = Decodable::decode(d);
        let val = Decodable::decode(d);
        map.insert(key, val);
    }
    map
}
}
@ -256,14 +254,13 @@ where
S: BuildHasher + Default,
{
/// Decodes a length-prefixed set: a `usize` element count followed by
/// that many elements, preserving insertion order. The capacity is reserved
/// up front from the decoded length, using the hasher's `Default` state.
fn decode(d: &mut D) -> indexmap::IndexSet<T, S> {
    let len = d.read_usize();
    let state = Default::default();
    let mut set = indexmap::IndexSet::with_capacity_and_hasher(len, state);
    for _ in 0..len {
        set.insert(Decodable::decode(d));
    }
    set
}
}

View File

@ -45,12 +45,9 @@
//!
//! # Rust Type-based Encoding and Decoding
//!
//! Rust provides a mechanism for low boilerplate encoding & decoding of values to and from JSON via
//! the serialization API.
//! To be able to encode a piece of data, it must implement the `serialize::Encodable` trait.
//! To be able to decode a piece of data, it must implement the `serialize::Decodable` trait.
//! The Rust compiler provides an annotation to automatically generate the code for these traits:
//! `#[derive(Decodable, Encodable)]`
//! To be able to encode a piece of data, it must implement the
//! `serialize::Encodable` trait. The `rustc_macros` crate provides an
//! annotation to automatically generate the code for this trait: `#[derive(Encodable)]`.
//!
//! The JSON API provides an enum `json::Json` and a trait `ToJson` to encode objects.
//! The `ToJson` trait provides a `to_json` method to convert an object into a `json::Json` value.
@ -68,11 +65,11 @@
//!
//! ```rust
//! # #![feature(rustc_private)]
//! use rustc_macros::{Decodable, Encodable};
//! use rustc_macros::{Encodable};
//! use rustc_serialize::json;
//!
//! // Automatically generate `Decodable` and `Encodable` trait implementations
//! #[derive(Decodable, Encodable)]
//! // Automatically generate `Encodable` trait implementations
//! #[derive(Encodable)]
//! pub struct TestStruct {
//! data_int: u8,
//! data_str: String,
@ -87,9 +84,6 @@
//!
//! // Serialize using `json::encode`
//! let encoded = json::encode(&object).unwrap();
//!
//! // Deserialize using `json::decode`
//! let decoded: TestStruct = json::decode(&encoded[..]);
//! ```
//!
//! ## Using the `ToJson` trait
@ -139,12 +133,9 @@
//!
//! ```rust
//! # #![feature(rustc_private)]
//! use rustc_macros::Decodable;
//! use std::collections::BTreeMap;
//! use rustc_serialize::json::{self, Json, ToJson};
//! use rustc_serialize::json::{Json, ToJson};
//!
//! // Only generate `Decodable` trait implementation
//! #[derive(Decodable)]
//! pub struct TestStruct {
//! data_int: u8,
//! data_str: String,
@ -171,19 +162,14 @@
//! };
//! let json_obj: Json = input_data.to_json();
//! let json_str: String = json_obj.to_string();
//!
//! // Deserialize like before
//! let decoded: TestStruct = json::decode(&json_str);
//! ```
use self::DecoderError::*;
use self::ErrorCode::*;
use self::InternalStackElement::*;
use self::JsonEvent::*;
use self::ParserError::*;
use self::ParserState::*;
use std::borrow::Cow;
use std::collections::{BTreeMap, HashMap};
use std::mem::swap;
use std::num::FpCategory as Fp;
@ -253,21 +239,6 @@ pub enum ParserError {
// Builder and Parser have the same errors.
pub type BuilderError = ParserError;
#[derive(Clone, PartialEq, Debug)]
pub enum DecoderError {
ParseError(ParserError),
ExpectedError(string::String, string::String),
MissingFieldError(string::String),
UnknownVariantError(string::String),
ApplicationError(string::String),
}
macro_rules! bad {
($e:expr) => {{
panic!("json decode error: {:?}", $e);
}};
}
#[derive(Copy, Clone, Debug)]
pub enum EncoderError {
FmtError(fmt::Error),
@ -297,17 +268,6 @@ pub fn error_str(error: ErrorCode) -> &'static str {
}
}
/// Shortcut function to decode a JSON `&str` into an object
pub fn decode<T: crate::Decodable<Decoder>>(s: &str) -> T {
let json = match from_str(s) {
Ok(x) => x,
Err(e) => bad!(ParseError(e)),
};
let mut decoder = Decoder::new(json);
crate::Decodable::decode(&mut decoder)
}
/// Shortcut function to encode a `T` into a JSON `String`
pub fn encode<T: for<'r> crate::Encodable<Encoder<'r>>>(
object: &T,
@ -352,7 +312,6 @@ impl From<fmt::Error> for EncoderError {
}
pub type EncodeResult = Result<(), EncoderError>;
pub type DecodeResult<T> = Result<T, DecoderError>;
fn escape_str(wr: &mut dyn fmt::Write, v: &str) -> EncodeResult {
wr.write_str("\"")?;
@ -2162,272 +2121,6 @@ pub fn from_str(s: &str) -> Result<Json, BuilderError> {
builder.build()
}
/// A structure to decode JSON to values in rust.
pub struct Decoder {
stack: Vec<Json>,
}
impl Decoder {
/// Creates a new decoder instance for decoding the specified JSON value.
pub fn new(json: Json) -> Decoder {
Decoder { stack: vec![json] }
}
fn pop(&mut self) -> Json {
self.stack.pop().unwrap()
}
}
macro_rules! expect {
($e:expr, Null) => {{
match $e {
Json::Null => (),
other => bad!(ExpectedError("Null".to_owned(), other.to_string())),
}
}};
($e:expr, $t:ident) => {{
match $e {
Json::$t(v) => v,
other => bad!(ExpectedError(stringify!($t).to_owned(), other.to_string())),
}
}};
}
macro_rules! read_primitive {
($name:ident, $ty:ty) => {
fn $name(&mut self) -> $ty {
match self.pop() {
Json::I64(f) => f as $ty,
Json::U64(f) => f as $ty,
Json::F64(f) => bad!(ExpectedError("Integer".to_owned(), f.to_string())),
// re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc)
// is going to have a string here, as per JSON spec.
Json::String(s) => match s.parse().ok() {
Some(f) => f,
None => bad!(ExpectedError("Number".to_owned(), s)),
},
value => bad!(ExpectedError("Number".to_owned(), value.to_string())),
}
}
};
}
impl crate::Decoder for Decoder {
fn read_unit(&mut self) -> () {
expect!(self.pop(), Null)
}
read_primitive! { read_usize, usize }
read_primitive! { read_u8, u8 }
read_primitive! { read_u16, u16 }
read_primitive! { read_u32, u32 }
read_primitive! { read_u64, u64 }
read_primitive! { read_u128, u128 }
read_primitive! { read_isize, isize }
read_primitive! { read_i8, i8 }
read_primitive! { read_i16, i16 }
read_primitive! { read_i32, i32 }
read_primitive! { read_i64, i64 }
read_primitive! { read_i128, i128 }
fn read_f32(&mut self) -> f32 {
self.read_f64() as f32
}
fn read_f64(&mut self) -> f64 {
match self.pop() {
Json::I64(f) => f as f64,
Json::U64(f) => f as f64,
Json::F64(f) => f,
Json::String(s) => {
// re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc)
// is going to have a string here, as per JSON spec.
match s.parse().ok() {
Some(f) => f,
None => bad!(ExpectedError("Number".to_owned(), s)),
}
}
Json::Null => f64::NAN,
value => bad!(ExpectedError("Number".to_owned(), value.to_string())),
}
}
fn read_bool(&mut self) -> bool {
expect!(self.pop(), Boolean)
}
fn read_char(&mut self) -> char {
let s = self.read_str();
let mut it = s.chars();
if let (Some(c), None) = (it.next(), it.next()) {
// exactly one character
return c;
}
bad!(ExpectedError("single character string".to_owned(), s.to_string()));
}
fn read_str(&mut self) -> Cow<'_, str> {
Cow::Owned(expect!(self.pop(), String))
}
fn read_raw_bytes_into(&mut self, s: &mut [u8]) {
for c in s.iter_mut() {
*c = self.read_u8();
}
}
fn read_enum<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
f(self)
}
fn read_enum_variant<T, F>(&mut self, names: &[&str], mut f: F) -> T
where
F: FnMut(&mut Decoder, usize) -> T,
{
let name = match self.pop() {
Json::String(s) => s,
Json::Object(mut o) => {
let n = match o.remove("variant") {
Some(Json::String(s)) => s,
Some(val) => bad!(ExpectedError("String".to_owned(), val.to_string())),
None => bad!(MissingFieldError("variant".to_owned())),
};
match o.remove("fields") {
Some(Json::Array(l)) => {
self.stack.extend(l.into_iter().rev());
}
Some(val) => bad!(ExpectedError("Array".to_owned(), val.to_string())),
None => bad!(MissingFieldError("fields".to_owned())),
}
n
}
json => bad!(ExpectedError("String or Object".to_owned(), json.to_string())),
};
let Some(idx) = names.iter().position(|n| *n == &name[..]) else {
bad!(UnknownVariantError(name));
};
f(self, idx)
}
fn read_enum_variant_arg<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
f(self)
}
fn read_struct<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
let value = f(self);
self.pop();
value
}
fn read_struct_field<T, F>(&mut self, name: &str, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
let mut obj = expect!(self.pop(), Object);
let value = match obj.remove(name) {
None => {
// Add a Null and try to parse it as an Option<_>
// to get None as a default value.
self.stack.push(Json::Null);
f(self)
}
Some(json) => {
self.stack.push(json);
f(self)
}
};
self.stack.push(Json::Object(obj));
value
}
fn read_tuple<T, F>(&mut self, tuple_len: usize, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
self.read_seq(move |d, len| {
if len == tuple_len {
f(d)
} else {
bad!(ExpectedError(format!("Tuple{}", tuple_len), format!("Tuple{}", len)));
}
})
}
fn read_tuple_arg<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
self.read_seq_elt(f)
}
fn read_option<T, F>(&mut self, mut f: F) -> T
where
F: FnMut(&mut Decoder, bool) -> T,
{
match self.pop() {
Json::Null => f(self, false),
value => {
self.stack.push(value);
f(self, true)
}
}
}
fn read_seq<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder, usize) -> T,
{
let array = expect!(self.pop(), Array);
let len = array.len();
self.stack.extend(array.into_iter().rev());
f(self, len)
}
fn read_seq_elt<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
f(self)
}
fn read_map<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder, usize) -> T,
{
let obj = expect!(self.pop(), Object);
let len = obj.len();
for (key, value) in obj {
self.stack.push(value);
self.stack.push(Json::String(key));
}
f(self, len)
}
fn read_map_elt_key<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
f(self)
}
fn read_map_elt_val<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Decoder) -> T,
{
f(self)
}
}
/// A trait for converting values to JSON
pub trait ToJson {
/// Converts the value of `self` to an instance of JSON

View File

@ -200,118 +200,6 @@ pub trait Decoder {
fn read_char(&mut self) -> char;
fn read_str(&mut self) -> Cow<'_, str>;
fn read_raw_bytes_into(&mut self, s: &mut [u8]);
// Compound types:
#[inline]
fn read_enum<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
#[inline]
fn read_enum_variant<T, F>(&mut self, _names: &[&str], mut f: F) -> T
where
F: FnMut(&mut Self, usize) -> T,
{
let disr = self.read_usize();
f(self, disr)
}
#[inline]
fn read_enum_variant_arg<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
#[inline]
fn read_struct<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
#[inline]
fn read_struct_field<T, F>(&mut self, _f_name: &str, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
#[inline]
fn read_tuple<T, F>(&mut self, _len: usize, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
#[inline]
fn read_tuple_arg<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
// Specialized types:
fn read_option<T, F>(&mut self, mut f: F) -> T
where
F: FnMut(&mut Self, bool) -> T,
{
self.read_enum(move |this| {
this.read_enum_variant(&["None", "Some"], move |this, idx| match idx {
0 => f(this, false),
1 => f(this, true),
_ => panic!("read_option: expected 0 for None or 1 for Some"),
})
})
}
fn read_seq<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self, usize) -> T,
{
let len = self.read_usize();
f(self, len)
}
#[inline]
fn read_seq_elt<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
fn read_map<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self, usize) -> T,
{
let len = self.read_usize();
f(self, len)
}
#[inline]
fn read_map_elt_key<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
#[inline]
fn read_map_elt_val<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
f(self)
}
}
/// Trait for types that can be serialized
@ -493,22 +381,18 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for Vec<T> {
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Vec<T> {
default fn decode(d: &mut D) -> Vec<T> {
d.read_seq(|d, len| {
// SAFETY: we set the capacity in advance, only write elements, and
// only set the length at the end once the writing has succeeded.
let mut vec = Vec::with_capacity(len);
unsafe {
let ptr: *mut T = vec.as_mut_ptr();
for i in 0..len {
std::ptr::write(
ptr.offset(i as isize),
d.read_seq_elt(|d| Decodable::decode(d)),
);
}
vec.set_len(len);
let len = d.read_usize();
// SAFETY: we set the capacity in advance, only write elements, and
// only set the length at the end once the writing has succeeded.
let mut vec = Vec::with_capacity(len);
unsafe {
let ptr: *mut T = vec.as_mut_ptr();
for i in 0..len {
std::ptr::write(ptr.offset(i as isize), Decodable::decode(d));
}
vec
})
vec.set_len(len);
}
vec
}
}
@ -521,14 +405,13 @@ impl<S: Encoder, T: Encodable<S>, const N: usize> Encodable<S> for [T; N] {
impl<D: Decoder, const N: usize> Decodable<D> for [u8; N] {
    /// Decodes a length-prefixed byte sequence into a fixed-size array.
    ///
    /// Panics if the encoded length does not match `N` exactly.
    fn decode(d: &mut D) -> [u8; N] {
        let len = d.read_usize();
        assert!(len == N);
        let mut v = [0u8; N];
        for i in 0..len {
            v[i] = Decodable::decode(d);
        }
        v
    }
}
@ -563,7 +446,11 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for Option<T> {
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Option<T> {
    /// Decodes an `Option<T>` encoded as a `usize` discriminant
    /// (0 = `None`, 1 = `Some`) followed by the payload for `Some`.
    fn decode(d: &mut D) -> Option<T> {
        match d.read_usize() {
            0 => None,
            1 => Some(Decodable::decode(d)),
            _ => panic!("Encountered invalid discriminant while decoding `Option`."),
        }
    }
}
@ -582,13 +469,11 @@ impl<S: Encoder, T1: Encodable<S>, T2: Encodable<S>> Encodable<S> for Result<T1,
impl<D: Decoder, T1: Decodable<D>, T2: Decodable<D>> Decodable<D> for Result<T1, T2> {
    /// Decodes a `Result` encoded as a `usize` discriminant
    /// (0 = `Ok`, 1 = `Err`) followed by the corresponding payload.
    fn decode(d: &mut D) -> Result<T1, T2> {
        match d.read_usize() {
            0 => Ok(T1::decode(d)),
            1 => Err(T2::decode(d)),
            _ => panic!("Encountered invalid discriminant while decoding `Result`."),
        }
    }
}
@ -613,24 +498,16 @@ macro_rules! tuple {
() => ();
( $($name:ident,)+ ) => (
impl<D: Decoder, $($name: Decodable<D>),+> Decodable<D> for ($($name,)+) {
#[allow(non_snake_case)]
fn decode(d: &mut D) -> ($($name,)+) {
let len: usize = count!($($name)+);
d.read_tuple(len, |d| {
let ret = ($(d.read_tuple_arg(|d| -> $name {
Decodable::decode(d)
}),)+);
ret
})
($({ let element: $name = Decodable::decode(d); element },)+)
}
}
impl<S: Encoder, $($name: Encodable<S>),+> Encodable<S> for ($($name,)+) {
#[allow(non_snake_case)]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
let ($(ref $name,)+) = *self;
let mut n = 0;
$(let $name = $name; n += 1;)+
s.emit_tuple(n, |s| {
let len: usize = count!($($name)+);
s.emit_tuple(len, |s| {
let mut i = 0;
$(s.emit_tuple_arg({ i+=1; i-1 }, |s| $name.encode(s))?;)+
Ok(())

View File

@ -4,61 +4,35 @@ use json::ErrorCode::*;
use json::Json::*;
use json::JsonEvent::*;
use json::ParserError::*;
use json::{from_str, Decoder, Encoder, EncoderError, Json, JsonEvent, Parser, StackElement};
use rustc_macros::{Decodable, Encodable};
use json::{from_str, Encoder, EncoderError, Json, JsonEvent, Parser, StackElement};
use rustc_macros::Encodable;
use rustc_serialize::json;
use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::Encodable;
use std::collections::BTreeMap;
use std::io::prelude::*;
use std::string;
use Animal::*;
#[derive(Decodable, Eq, PartialEq, Debug)]
#[derive(Eq, PartialEq, Debug)]
struct OptionData {
opt: Option<usize>,
}
#[test]
fn test_decode_option_none() {
let s = "{}";
let obj: OptionData = json::decode(s);
assert_eq!(obj, OptionData { opt: None });
}
#[test]
fn test_decode_option_some() {
let s = "{ \"opt\": 10 }";
let obj: OptionData = json::decode(s);
assert_eq!(obj, OptionData { opt: Some(10) });
}
#[test]
#[should_panic(expected = r#"ExpectedError("Number", "[]")"#)]
fn test_decode_option_malformed1() {
check_err::<OptionData>(r#"{ "opt": [] }"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("Number", "false")"#)]
fn test_decode_option_malformed2() {
check_err::<OptionData>(r#"{ "opt": false }"#);
}
#[derive(PartialEq, Encodable, Decodable, Debug)]
#[derive(PartialEq, Encodable, Debug)]
enum Animal {
Dog,
Frog(string::String, isize),
}
#[derive(PartialEq, Encodable, Decodable, Debug)]
#[derive(PartialEq, Encodable, Debug)]
struct Inner {
a: (),
b: usize,
c: Vec<string::String>,
}
#[derive(PartialEq, Encodable, Decodable, Debug)]
#[derive(PartialEq, Encodable, Debug)]
struct Outer {
inner: Vec<Inner>,
}
@ -323,18 +297,6 @@ fn test_read_identifiers() {
assert_eq!(from_str(" false "), Ok(Boolean(false)));
}
#[test]
fn test_decode_identifiers() {
let v: () = json::decode("null");
assert_eq!(v, ());
let v: bool = json::decode("true");
assert_eq!(v, true);
let v: bool = json::decode("false");
assert_eq!(v, false);
}
#[test]
fn test_read_number() {
assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
@ -363,45 +325,6 @@ fn test_read_number() {
assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
}
#[test]
#[should_panic(expected = r#"ExpectedError("Integer", "765.25")"#)]
fn test_decode_numbers() {
let v: f64 = json::decode("3");
assert_eq!(v, 3.0);
let v: f64 = json::decode("3.1");
assert_eq!(v, 3.1);
let v: f64 = json::decode("-1.2");
assert_eq!(v, -1.2);
let v: f64 = json::decode("0.4");
assert_eq!(v, 0.4);
let v: f64 = json::decode("0.4e5");
assert_eq!(v, 0.4e5);
let v: f64 = json::decode("0.4e15");
assert_eq!(v, 0.4e15);
let v: f64 = json::decode("0.4e-01");
assert_eq!(v, 0.4e-01);
let v: u64 = json::decode("0");
assert_eq!(v, 0);
let v: u64 = json::decode("18446744073709551615");
assert_eq!(v, u64::MAX);
let v: i64 = json::decode("-9223372036854775808");
assert_eq!(v, i64::MIN);
let v: i64 = json::decode("9223372036854775807");
assert_eq!(v, i64::MAX);
json::decode::<i64>("765.25");
}
#[test]
fn test_read_str() {
assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
@ -419,26 +342,6 @@ fn test_read_str() {
assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
}
#[test]
fn test_decode_str() {
let s = [
("\"\"", ""),
("\"foo\"", "foo"),
("\"\\\"\"", "\""),
("\"\\b\"", "\x08"),
("\"\\n\"", "\n"),
("\"\\r\"", "\r"),
("\"\\t\"", "\t"),
("\"\\u12ab\"", "\u{12ab}"),
("\"\\uAB12\"", "\u{AB12}"),
];
for (i, o) in s {
let v: string::String = json::decode(i);
assert_eq!(v, o);
}
}
#[test]
fn test_read_array() {
assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
@ -457,45 +360,6 @@ fn test_read_array() {
assert_eq!(from_str("[2, [4, 1]]"), Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
}
#[test]
fn test_decode_array() {
let v: Vec<()> = json::decode("[]");
assert_eq!(v, []);
let v: Vec<()> = json::decode("[null]");
assert_eq!(v, [()]);
let v: Vec<bool> = json::decode("[true]");
assert_eq!(v, [true]);
let v: Vec<isize> = json::decode("[3, 1]");
assert_eq!(v, [3, 1]);
let v: Vec<Vec<usize>> = json::decode("[[3], [1, 2]]");
assert_eq!(v, [vec![3], vec![1, 2]]);
}
#[test]
fn test_decode_tuple() {
let t: (usize, usize, usize) = json::decode("[1, 2, 3]");
assert_eq!(t, (1, 2, 3));
let t: (usize, string::String) = json::decode("[1, \"two\"]");
assert_eq!(t, (1, "two".to_string()));
}
#[test]
#[should_panic]
fn test_decode_tuple_malformed_types() {
json::decode::<(usize, string::String)>("[1, 2]");
}
#[test]
#[should_panic]
fn test_decode_tuple_malformed_length() {
json::decode::<(usize, usize)>("[1, 2, 3]");
}
#[test]
fn test_read_object() {
assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
@ -552,143 +416,11 @@ fn test_read_object() {
);
}
#[test]
// Decodes a nested structure (`Outer` holding a Vec of `Inner`) from a
// multi-line JSON document; `null` decodes into the unit field `a`.
fn test_decode_struct() {
let s = "{
\"inner\": [
{ \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
]
}";
let v: Outer = json::decode(s);
assert_eq!(
v,
Outer { inner: vec![Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }] }
);
}
// Fixture for NaN handling: JSON has no NaN literal, so `null` stands in for
// NaN in float positions.
#[derive(Decodable)]
struct FloatStruct {
f: f64,
a: Vec<f64>,
}
#[test]
fn test_decode_struct_with_nan() {
    // `null` in an f64 position decodes to NaN, both for a plain field and
    // inside a Vec; real numbers still decode normally.
    let decoded: FloatStruct = json::decode("{\"f\":null,\"a\":[null,123]}");
    assert!(decoded.f.is_nan());
    assert!(decoded.a[0].is_nan());
    assert_eq!(decoded.a[1], 123f64);
}
#[test]
fn test_decode_option() {
    // JSON `null` maps to `None`.
    let absent: Option<string::String> = json::decode("null");
    assert_eq!(absent, None);
    // A bare string maps to `Some(..)`.
    let present: Option<string::String> = json::decode("\"jodhpurs\"");
    assert_eq!(present, Some("jodhpurs".to_string()));
}
#[test]
fn test_decode_enum() {
    // A unit variant is encoded as a bare string.
    let dog: Animal = json::decode("\"Dog\"");
    assert_eq!(dog, Dog);
    // A data-carrying variant is encoded as a {variant, fields} object.
    let frog: Animal = json::decode("{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}");
    assert_eq!(frog, Frog("Henry".to_string(), 349));
}
#[test]
fn test_decode_map() {
    // Maps decode from JSON objects; enum values inside use the
    // {variant, fields} encoding. (Single literal — same bytes as the old
    // `\`-continuation form, which strips the newline and leading spaces.)
    let encoded = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\"fields\":[\"Henry\", 349]}}";
    let mut map: BTreeMap<string::String, Animal> = json::decode(encoded);
    assert_eq!(map.remove(&"a".to_string()), Some(Dog));
    assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
}
#[test]
// A syntax error in multi-line input must report its position as
// (line, column) — here line 3, column 8 — not as a flat byte offset.
fn test_multiline_errors() {
assert_eq!(from_str("{\n \"foo\":\n \"bar\""), Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
}
// Fixtures for the `test_decode_errors_*` cases below: a struct and an enum
// whose fields exercise each kind of JSON type mismatch.
#[derive(Decodable)]
#[allow(dead_code)]
struct DecodeStruct {
x: f64,
y: bool,
z: string::String,
w: Vec<DecodeStruct>,
}
#[derive(Decodable)]
enum DecodeEnum {
A(f64),
B(string::String),
}
// Parses `to_parse` and decodes it as `T`; decoding is expected to panic with
// a decoder error, which callers assert via `#[should_panic(expected = ..)]`.
fn check_err<T: Decodable<Decoder>>(to_parse: &str) {
let json = from_str(to_parse).unwrap();
let _: T = Decodable::decode(&mut Decoder::new(json));
}
// Each test below feeds `check_err` JSON that is well-formed but has the
// wrong shape for `DecodeStruct` / `DecodeEnum`, and asserts (via
// `should_panic`) the exact decoder error message.
#[test]
#[should_panic(expected = r#"ExpectedError("Object", "[]")"#)]
fn test_decode_errors_struct1() {
check_err::<DecodeStruct>("[]");
}
#[test]
#[should_panic(expected = r#"ExpectedError("Number", "true")"#)]
fn test_decode_errors_struct2() {
check_err::<DecodeStruct>(r#"{"x": true, "y": true, "z": "", "w": []}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("Boolean", "[]")"#)]
fn test_decode_errors_struct3() {
check_err::<DecodeStruct>(r#"{"x": 1, "y": [], "z": "", "w": []}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("String", "{}")"#)]
fn test_decode_errors_struct4() {
check_err::<DecodeStruct>(r#"{"x": 1, "y": true, "z": {}, "w": []}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("Array", "null")"#)]
fn test_decode_errors_struct5() {
check_err::<DecodeStruct>(r#"{"x": 1, "y": true, "z": "", "w": null}"#);
}
// A missing field is reported the same way as an explicit `null` field here.
#[test]
#[should_panic(expected = r#"ExpectedError("Array", "null")"#)]
fn test_decode_errors_struct6() {
check_err::<DecodeStruct>(r#"{"x": 1, "y": true, "z": ""}"#);
}
#[test]
#[should_panic(expected = r#"MissingFieldError("variant")"#)]
fn test_decode_errors_enum1() {
check_err::<DecodeEnum>(r#"{}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("String", "1")"#)]
fn test_decode_errors_enum2() {
check_err::<DecodeEnum>(r#"{"variant": 1}"#);
}
#[test]
#[should_panic(expected = r#"MissingFieldError("fields")"#)]
fn test_decode_errors_enum3() {
check_err::<DecodeEnum>(r#"{"variant": "A"}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("Array", "null")"#)]
fn test_decode_errors_enum4() {
check_err::<DecodeEnum>(r#"{"variant": "A", "fields": null}"#);
}
#[test]
#[should_panic(expected = r#"UnknownVariantError("C")"#)]
fn test_decode_errors_enum5() {
check_err::<DecodeEnum>(r#"{"variant": "C", "fields": []}"#);
}
#[test]
fn test_find() {
let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
@ -938,7 +670,7 @@ fn test_prettyencoder_indent_level_param() {
#[test]
fn test_hashmap_with_enum_key() {
use std::collections::HashMap;
#[derive(Encodable, Eq, Hash, PartialEq, Decodable, Debug)]
#[derive(Encodable, Eq, Hash, PartialEq, Debug)]
enum Enum {
Foo,
#[allow(dead_code)]
@ -948,33 +680,6 @@ fn test_hashmap_with_enum_key() {
map.insert(Enum::Foo, 0);
let result = json::encode(&map).unwrap();
assert_eq!(&result[..], r#"{"Foo":0}"#);
let decoded: HashMap<Enum, _> = json::decode(&result);
assert_eq!(map, decoded);
}
#[test]
fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
    use std::collections::HashMap;
    // Numeric map keys arrive as double-quoted JSON strings; decoding must
    // parse them back into the numeric key type.
    let json_str = "{\"1\":true}";
    let json_obj =
        from_str(json_str).unwrap_or_else(|_| panic!("Unable to parse json_str: {:?}", json_str));
    let mut decoder = Decoder::new(json_obj);
    let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder);
}
#[test]
#[should_panic(expected = r#"ExpectedError("Number", "a")"#)]
fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
    use std::collections::HashMap;
    // A non-numeric key cannot decode into `usize`, so decoding must panic
    // with the expected error above.
    let json_str = "{\"a\":true}";
    let json_obj =
        from_str(json_str).unwrap_or_else(|_| panic!("Unable to parse json_str: {:?}", json_str));
    let mut decoder = Decoder::new(json_obj);
    let _: HashMap<usize, bool> = Decodable::decode(&mut decoder);
}
fn assert_stream_equal(src: &str, expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) {

View File

@ -299,10 +299,7 @@ impl<E: Encoder> Encodable<E> for DefId {
impl<D: Decoder> Decodable<D> for DefId {
default fn decode(d: &mut D) -> DefId {
d.read_struct(|d| DefId {
krate: d.read_struct_field("krate", Decodable::decode),
index: d.read_struct_field("index", Decodable::decode),
})
DefId { krate: Decodable::decode(d), index: Decodable::decode(d) }
}
}

View File

@ -979,12 +979,10 @@ impl<E: Encoder> Encodable<E> for Span {
}
impl<D: Decoder> Decodable<D> for Span {
default fn decode(s: &mut D) -> Span {
s.read_struct(|d| {
let lo = d.read_struct_field("lo", Decodable::decode);
let hi = d.read_struct_field("hi", Decodable::decode);
let lo = Decodable::decode(s);
let hi = Decodable::decode(s);
Span::new(lo, hi, SyntaxContext::root(), None)
})
Span::new(lo, hi, SyntaxContext::root(), None)
}
}
@ -1440,65 +1438,59 @@ impl<S: Encoder> Encodable<S> for SourceFile {
impl<D: Decoder> Decodable<D> for SourceFile {
fn decode(d: &mut D) -> SourceFile {
d.read_struct(|d| {
let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d));
let src_hash: SourceFileHash =
d.read_struct_field("src_hash", |d| Decodable::decode(d));
let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d));
let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d));
let lines: Vec<BytePos> = d.read_struct_field("lines", |d| {
let num_lines: u32 = Decodable::decode(d);
let mut lines = Vec::with_capacity(num_lines as usize);
let name: FileName = Decodable::decode(d);
let src_hash: SourceFileHash = Decodable::decode(d);
let start_pos: BytePos = Decodable::decode(d);
let end_pos: BytePos = Decodable::decode(d);
let lines: Vec<BytePos> = {
let num_lines: u32 = Decodable::decode(d);
let mut lines = Vec::with_capacity(num_lines as usize);
if num_lines > 0 {
// Read the number of bytes used per diff.
let bytes_per_diff: u8 = Decodable::decode(d);
if num_lines > 0 {
// Read the number of bytes used per diff.
let bytes_per_diff: u8 = Decodable::decode(d);
// Read the first element.
let mut line_start: BytePos = Decodable::decode(d);
lines.push(line_start);
for _ in 1..num_lines {
let diff = match bytes_per_diff {
1 => d.read_u8() as u32,
2 => d.read_u16() as u32,
4 => d.read_u32(),
_ => unreachable!(),
};
line_start = line_start + BytePos(diff);
// Read the first element.
let mut line_start: BytePos = Decodable::decode(d);
lines.push(line_start);
for _ in 1..num_lines {
let diff = match bytes_per_diff {
1 => d.read_u8() as u32,
2 => d.read_u16() as u32,
4 => d.read_u32(),
_ => unreachable!(),
};
line_start = line_start + BytePos(diff);
lines.push(line_start);
}
}
lines
});
let multibyte_chars: Vec<MultiByteChar> =
d.read_struct_field("multibyte_chars", |d| Decodable::decode(d));
let non_narrow_chars: Vec<NonNarrowChar> =
d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d));
let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d));
let normalized_pos: Vec<NormalizedPos> =
d.read_struct_field("normalized_pos", |d| Decodable::decode(d));
let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d));
SourceFile {
name,
start_pos,
end_pos,
src: None,
src_hash,
// Unused - the metadata decoder will construct
// a new SourceFile, filling in `external_src` properly
external_src: Lock::new(ExternalSource::Unneeded),
lines,
multibyte_chars,
non_narrow_chars,
normalized_pos,
name_hash,
cnum,
}
})
lines
};
let multibyte_chars: Vec<MultiByteChar> = Decodable::decode(d);
let non_narrow_chars: Vec<NonNarrowChar> = Decodable::decode(d);
let name_hash: u128 = Decodable::decode(d);
let normalized_pos: Vec<NormalizedPos> = Decodable::decode(d);
let cnum: CrateNum = Decodable::decode(d);
SourceFile {
name,
start_pos,
end_pos,
src: None,
src_hash,
// Unused - the metadata decoder will construct
// a new SourceFile, filling in `external_src` properly
external_src: Lock::new(ExternalSource::Unneeded),
lines,
multibyte_chars,
non_narrow_chars,
normalized_pos,
name_hash,
cnum,
}
}
}

View File

@ -7,7 +7,7 @@ extern crate rustc_macros;
extern crate rustc_serialize;
use rustc_macros::{Decodable, Encodable};
use rustc_serialize::json;
use rustc_serialize::opaque;
use rustc_serialize::{Decodable, Encodable};
#[derive(Encodable, Decodable)]
@ -17,7 +17,9 @@ struct A {
fn main() {
let obj = A { foo: Box::new([true, false]) };
let s = json::encode(&obj).unwrap();
let obj2: A = json::decode(&s);
let mut encoder = opaque::Encoder::new(vec![]);
obj.encode(&mut encoder).unwrap();
let mut decoder = opaque::Decoder::new(&encoder.data, 0);
let obj2 = A::decode(&mut decoder);
assert_eq!(obj.foo, obj2.foo);
}

View File

@ -9,7 +9,7 @@ extern crate rustc_macros;
extern crate rustc_serialize;
use rustc_macros::{Decodable, Encodable};
use rustc_serialize::json;
use rustc_serialize::opaque;
use rustc_serialize::{Decodable, Encodable};
use std::cell::{Cell, RefCell};
@ -26,8 +26,10 @@ struct B {
fn main() {
let obj = B { foo: Cell::new(true), bar: RefCell::new(A { baz: 2 }) };
let s = json::encode(&obj).unwrap();
let obj2: B = json::decode(&s);
let mut encoder = opaque::Encoder::new(vec![]);
obj.encode(&mut encoder).unwrap();
let mut decoder = opaque::Decoder::new(&encoder.data, 0);
let obj2 = B::decode(&mut decoder);
assert_eq!(obj.foo.get(), obj2.foo.get());
assert_eq!(obj.bar.borrow().baz, obj2.bar.borrow().baz);
}

View File

@ -8,7 +8,7 @@ extern crate rustc_macros;
extern crate rustc_serialize;
use rustc_macros::{Decodable, Encodable};
use rustc_serialize::json;
use rustc_serialize::opaque;
use rustc_serialize::{Decodable, Encodable};
#[derive(Encodable, Decodable, PartialEq, Debug)]
@ -16,11 +16,9 @@ struct UnitLikeStruct;
pub fn main() {
let obj = UnitLikeStruct;
let json_str: String = json::encode(&obj).unwrap();
let json_object = json::from_str(&json_str);
let mut decoder = json::Decoder::new(json_object.unwrap());
let mut decoded_obj: UnitLikeStruct = Decodable::decode(&mut decoder);
assert_eq!(obj, decoded_obj);
let mut encoder = opaque::Encoder::new(vec![]);
obj.encode(&mut encoder).unwrap();
let mut decoder = opaque::Decoder::new(&encoder.data, 0);
let obj2 = UnitLikeStruct::decode(&mut decoder);
assert_eq!(obj, obj2);
}

View File

@ -1,39 +0,0 @@
// run-pass
#![allow(dead_code)]
#![feature(rustc_private)]
extern crate rustc_serialize;
use rustc_serialize::{json, Decodable, Encodable};
use std::fmt::Display;
// An entity that can round-trip through the JSON encoder/decoder and be
// looked up by key.
pub trait Entity: Decodable<json::Decoder> + for<'a> Encodable<json::Encoder<'a>> + Sized {
// Key type: cloneable, JSON-serializable, printable, and totally ordered.
type Key: Clone
+ Decodable<json::Decoder>
+ for<'a> Encodable<json::Encoder<'a>>
+ ToString
+ Display
+ Eq
+ Ord
+ Sized;
// This entity's own key.
fn id(&self) -> Self::Key;
// Looks up an entity by key; `None` when no entity has this id.
fn find_by_id(id: Self::Key) -> Option<Self>;
}
// A typed reference to an entity stored elsewhere, carrying only its key.
pub struct DbRef<E: Entity> {
pub id: E::Key,
}
impl<E> DbRef<E>
where
E: Entity,
{
// Resolves the reference via `Entity::find_by_id`, consuming the ref.
fn get(self) -> Option<E> {
E::find_by_id(self.id)
}
}
fn main() {}

View File

@ -1,19 +0,0 @@
// run-pass
#![allow(dead_code)]
#![feature(rustc_private)]
extern crate rustc_serialize;
use rustc_serialize::{json, Decodable};
// Marker trait whose supertrait bound requires JSON-decodability; this file
// only needs it to compile, `main` never calls `exec`.
trait JD: Decodable<json::Decoder> {}
// Decodes a document as `T` through the `JD` bound, then panics.
// NOTE(review): `from_str("")` on empty input presumably returns Err, so the
// `unwrap` would panic before `decode` runs — confirm intent if ever called.
fn exec<T: JD>() {
let doc = json::from_str("").unwrap();
let mut decoder = json::Decoder::new(doc);
let _v: T = Decodable::decode(&mut decoder);
panic!()
}
pub fn main() {}

View File

@ -1,17 +0,0 @@
// run-pass
// Issue #4036: Test for an issue that arose around fixing up type inference
// byproducts in vtable records.
// pretty-expanded FIXME #23616
#![feature(rustc_private)]
extern crate rustc_serialize;
use rustc_serialize::{json, Decodable};
pub fn main() {
    // Regression test for issue #4036: decoding a concrete collection type
    // must not trip over vtable-record type-inference byproducts.
    let parsed = json::from_str("[1]").unwrap();
    let mut dec = json::Decoder::new(parsed);
    let _decoded: Vec<isize> = Decodable::decode(&mut dec);
}