Auto merge of #85993 - bjorn3:serde_json, r=wesleywiser

Remove all JSON handling from rustc_serialize

JSON is now handled using serde_json. Where appropriate I have replaced JSON usage with binary serialization (rmeta files) or manual string formatting (emcc linker arg generation).

This allowed removing and simplifying a lot of code, which hopefully results in faster serialization/deserialization and faster compiles of rustc itself.

Where sensible we now use serde. Metadata and incr cache serialization keeps using a heavily modified (compared to crates.io) rustc-serialize version that in the future could probably be extended with zero-copy deserialization or other perf tricks that serde can't support, because serde has to work across more than one serialization format.

Note that I had to remove `-Zast-json` and `-Zast-json-noexpand` as the relevant AST types don't implement `serde::Serialize`.

Fixes #40177

See also https://github.com/rust-lang/compiler-team/issues/418
This commit is contained in:
bors 2022-06-03 17:55:02 +00:00
commit 7e9b92cb43
48 changed files with 598 additions and 4230 deletions

View File

@ -3686,6 +3686,7 @@ dependencies = [
"rustc_span",
"rustc_symbol_mangling",
"rustc_target",
"serde_json",
"smallvec",
"snap",
"tempfile",
@ -3774,6 +3775,7 @@ dependencies = [
"rustc_span",
"rustc_target",
"rustc_typeck",
"serde_json",
"tracing",
"winapi",
]
@ -3809,6 +3811,8 @@ dependencies = [
"rustc_macros",
"rustc_serialize",
"rustc_span",
"serde",
"serde_json",
"termcolor",
"termize",
"tracing",
@ -4024,6 +4028,7 @@ dependencies = [
"rustc_serialize",
"rustc_span",
"rustc_target",
"serde",
]
[[package]]
@ -4445,6 +4450,7 @@ dependencies = [
"rustc_macros",
"rustc_serialize",
"rustc_span",
"serde_json",
"tracing",
]

View File

@ -41,9 +41,6 @@ use std::convert::TryFrom;
use std::fmt;
use std::mem;
#[cfg(test)]
mod tests;
/// A "Label" is an identifier of some point in sources,
/// e.g. in the following code:
///
@ -2476,8 +2473,8 @@ rustc_index::newtype_index! {
}
impl<S: Encoder> rustc_serialize::Encodable<S> for AttrId {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}

View File

@ -1,11 +0,0 @@
use super::*;
// Are ASTs encodable?
#[test]
fn check_asts_encodable() {
fn assert_encodable<
T: for<'a> rustc_serialize::Encodable<rustc_serialize::json::Encoder<'a>>,
>() {
}
assert_encodable::<Crate>();
}

View File

@ -16,6 +16,7 @@ jobserver = "0.1.22"
tempfile = "3.2"
thorin-dwp = "0.2"
pathdiff = "0.2.0"
serde_json = "1.0.59"
snap = "1"
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
regex = "1.4"

View File

@ -14,7 +14,6 @@ use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::middle::exported_symbols::{ExportedSymbol, SymbolExportInfo, SymbolExportKind};
use rustc_middle::ty::TyCtxt;
use rustc_serialize::{json, Encoder};
use rustc_session::config::{self, CrateType, DebugInfo, LinkerPluginLto, Lto, OptLevel, Strip};
use rustc_session::Session;
use rustc_span::symbol::Symbol;
@ -1152,21 +1151,12 @@ impl<'a> Linker for EmLinker<'a> {
self.cmd.arg("-s");
let mut arg = OsString::from("EXPORTED_FUNCTIONS=");
let mut encoded = String::new();
{
let mut encoder = json::Encoder::new(&mut encoded);
let res = encoder.emit_seq(symbols.len(), |encoder| {
for (i, sym) in symbols.iter().enumerate() {
encoder.emit_seq_elt(i, |encoder| encoder.emit_str(&("_".to_owned() + sym)))?;
}
Ok(())
});
if let Err(e) = res {
self.sess.fatal(&format!("failed to encode exported symbols: {}", e));
}
}
let encoded = serde_json::to_string(
&symbols.iter().map(|sym| "_".to_owned() + sym).collect::<Vec<_>>(),
)
.unwrap();
debug!("{}", encoded);
arg.push(encoded);
self.cmd.arg(arg);

View File

@ -9,6 +9,7 @@ crate-type = ["dylib"]
[dependencies]
libc = "0.2"
tracing = { version = "0.1.28" }
serde_json = "1.0.59"
rustc_log = { path = "../rustc_log" }
rustc_middle = { path = "../rustc_middle" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }

View File

@ -30,7 +30,6 @@ use rustc_log::stdout_isatty;
use rustc_metadata::locator;
use rustc_save_analysis as save;
use rustc_save_analysis::DumpHandler;
use rustc_serialize::json::ToJson;
use rustc_session::config::{nightly_options, CG_OPTIONS, DB_OPTIONS};
use rustc_session::config::{ErrorOutputType, Input, OutputType, PrintRequest, TrimmedDefPaths};
use rustc_session::cstore::MetadataLoader;
@ -40,6 +39,7 @@ use rustc_session::{config, DiagnosticOutput, Session};
use rustc_session::{early_error, early_error_no_abort, early_warn};
use rustc_span::source_map::{FileLoader, FileName};
use rustc_span::symbol::sym;
use rustc_target::json::ToJson;
use std::borrow::Cow;
use std::cmp::max;
@ -343,10 +343,7 @@ fn run_compiler(
return early_exit();
}
if sess.opts.debugging_opts.parse_only
|| sess.opts.debugging_opts.show_span.is_some()
|| sess.opts.debugging_opts.ast_json_noexpand
{
if sess.opts.debugging_opts.parse_only || sess.opts.debugging_opts.show_span.is_some() {
return early_exit();
}
@ -375,7 +372,7 @@ fn run_compiler(
queries.global_ctxt()?;
if sess.opts.debugging_opts.no_analysis || sess.opts.debugging_opts.ast_json {
if sess.opts.debugging_opts.no_analysis {
return early_exit();
}
@ -665,7 +662,9 @@ fn print_crate_info(
}
Sysroot => println!("{}", sess.sysroot.display()),
TargetLibdir => println!("{}", sess.target_tlib_path.dir.display()),
TargetSpec => println!("{}", sess.target.to_json().pretty()),
TargetSpec => {
println!("{}", serde_json::to_string_pretty(&sess.target.to_json()).unwrap());
}
FileNames | CrateName => {
let input = input.unwrap_or_else(|| {
early_error(ErrorOutputType::default(), "no input file provided")

View File

@ -19,6 +19,8 @@ atty = "0.2"
termcolor = "1.0"
annotate-snippets = "0.8.0"
termize = "0.1.1"
serde = { version = "1.0.125", features = ["derive"] }
serde_json = "1.0.59"
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3", features = ["handleapi", "synchapi", "winbase"] }

View File

@ -28,7 +28,7 @@ use std::path::Path;
use std::sync::{Arc, Mutex};
use std::vec;
use rustc_serialize::json::{as_json, as_pretty_json};
use serde::Serialize;
#[cfg(test)]
mod tests;
@ -126,9 +126,9 @@ impl Emitter for JsonEmitter {
fn emit_diagnostic(&mut self, diag: &crate::Diagnostic) {
let data = Diagnostic::from_errors_diagnostic(diag, self);
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&data).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string(&data).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@ -139,9 +139,9 @@ impl Emitter for JsonEmitter {
fn emit_artifact_notification(&mut self, path: &Path, artifact_type: &str) {
let data = ArtifactNotification { artifact: path, emit: artifact_type };
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&data).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string(&data).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@ -161,9 +161,9 @@ impl Emitter for JsonEmitter {
.collect();
let report = FutureIncompatReport { future_incompat_report: data };
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&report))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&report).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&report))
writeln!(&mut self.dst, "{}", serde_json::to_string(&report).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@ -175,9 +175,9 @@ impl Emitter for JsonEmitter {
let lint_level = lint_level.as_str();
let data = UnusedExterns { lint_level, unused_extern_names: unused_externs };
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&data).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string(&data).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@ -204,7 +204,7 @@ impl Emitter for JsonEmitter {
// The following data types are provided just for serialisation.
#[derive(Encodable)]
#[derive(Serialize)]
struct Diagnostic {
/// The primary error message.
message: String,
@ -218,7 +218,7 @@ struct Diagnostic {
rendered: Option<String>,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticSpan {
file_name: String,
byte_start: u32,
@ -245,7 +245,7 @@ struct DiagnosticSpan {
expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticSpanLine {
text: String,
@ -255,7 +255,7 @@ struct DiagnosticSpanLine {
highlight_end: usize,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticSpanMacroExpansion {
/// span where macro was applied to generate this code; note that
/// this may itself derive from a macro (if
@ -269,7 +269,7 @@ struct DiagnosticSpanMacroExpansion {
def_site_span: DiagnosticSpan,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticCode {
/// The code itself.
code: String,
@ -277,7 +277,7 @@ struct DiagnosticCode {
explanation: Option<&'static str>,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct ArtifactNotification<'a> {
/// The path of the artifact.
artifact: &'a Path,
@ -285,12 +285,12 @@ struct ArtifactNotification<'a> {
emit: &'a str,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct FutureBreakageItem {
diagnostic: Diagnostic,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct FutureIncompatReport {
future_incompat_report: Vec<FutureBreakageItem>,
}
@ -299,7 +299,7 @@ struct FutureIncompatReport {
// doctest component (as well as cargo).
// We could unify this struct the one in rustdoc but they have different
// ownership semantics, so doing so would create wasteful allocations.
#[derive(Encodable)]
#[derive(Serialize)]
struct UnusedExterns<'a, 'b, 'c> {
/// The severity level of the unused dependencies lint
lint_level: &'a str,

View File

@ -5,12 +5,18 @@ use rustc_span::source_map::{FilePathMapping, SourceMap};
use crate::emitter::{ColorConfig, HumanReadableErrorType};
use crate::Handler;
use rustc_serialize::json;
use rustc_span::{BytePos, Span};
use std::str;
#[derive(Debug, PartialEq, Eq)]
use serde::Deserialize;
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct TestData {
spans: Vec<SpanTestData>,
}
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct SpanTestData {
pub byte_start: u32,
pub byte_end: u32,
@ -61,19 +67,11 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap();
let actual_output = json::from_str(&actual_output).unwrap();
let spans = actual_output["spans"].as_array().unwrap();
let actual_output: TestData = serde_json::from_str(actual_output).unwrap();
let spans = actual_output.spans;
assert_eq!(spans.len(), 1);
let obj = &spans[0];
let actual_output = SpanTestData {
byte_start: obj["byte_start"].as_u64().unwrap() as u32,
byte_end: obj["byte_end"].as_u64().unwrap() as u32,
line_start: obj["line_start"].as_u64().unwrap() as u32,
line_end: obj["line_end"].as_u64().unwrap() as u32,
column_start: obj["column_start"].as_u64().unwrap() as u32,
column_end: obj["column_end"].as_u64().unwrap() as u32,
};
assert_eq!(expected_output, actual_output);
assert_eq!(expected_output, spans[0])
})
}

View File

@ -27,7 +27,6 @@ use rustc_passes::{self, hir_stats, layout_test};
use rustc_plugin_impl as plugin;
use rustc_query_impl::{OnDiskCache, Queries as TcxQueries};
use rustc_resolve::{Resolver, ResolverArenas};
use rustc_serialize::json;
use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType};
use rustc_session::cstore::{MetadataLoader, MetadataLoaderDyn};
use rustc_session::output::{filename_for_input, filename_for_metadata};
@ -59,10 +58,6 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
}
})?;
if sess.opts.debugging_opts.ast_json_noexpand {
println!("{}", json::as_json(&krate));
}
if sess.opts.debugging_opts.input_stats {
eprintln!("Lines of code: {}", sess.source_map().count_lines());
eprintln!("Pre-expansion node count: {}", count_nodes(&krate));
@ -423,10 +418,6 @@ pub fn configure_and_expand(
hir_stats::print_ast_stats(&krate, "POST EXPANSION AST STATS");
}
if sess.opts.debugging_opts.ast_json {
println!("{}", json::as_json(&krate));
}
resolver.resolve_crate(&krate);
// Needs to go *after* expansion to be able to check the results of macro expansion.

View File

@ -644,8 +644,6 @@ fn test_debugging_options_tracking_hash() {
// Make sure that changing an [UNTRACKED] option leaves the hash unchanged.
// This list is in alphabetical order.
untracked!(assert_incr_state, Some(String::from("loaded")));
untracked!(ast_json, true);
untracked!(ast_json_noexpand, true);
untracked!(borrowck, String::from("other"));
untracked!(deduplicate_diagnostics, false);
untracked!(dep_tasks, true);

View File

@ -4,6 +4,7 @@ version = "0.0.0"
edition = "2021"
[dependencies]
serde = { version = "1.0.125", features = ["derive"] }
rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_error_messages = { path = "../rustc_error_messages" }

View File

@ -14,6 +14,8 @@ use rustc_span::edition::Edition;
use rustc_span::{sym, symbol::Ident, Span, Symbol};
use rustc_target::spec::abi::Abi;
use serde::{Deserialize, Serialize};
pub mod builtin;
#[macro_export]
@ -34,7 +36,7 @@ macro_rules! pluralize {
/// All suggestions are marked with an `Applicability`. Tools use the applicability of a suggestion
/// to determine whether it should be automatically applied or if the user should be consulted
/// before applying the suggestion.
#[derive(Copy, Clone, Debug, PartialEq, Hash, Encodable, Decodable)]
#[derive(Copy, Clone, Debug, PartialEq, Hash, Encodable, Decodable, Serialize, Deserialize)]
pub enum Applicability {
/// The suggestion is definitely what the user intended, or maintains the exact meaning of the code.
/// This suggestion should be automatically applied.

View File

@ -140,79 +140,56 @@ fn encodable_body(
let encode_body = match s.variants() {
[_] => {
let mut field_idx = 0usize;
let encode_inner = s.each_variant(|vi| {
vi.bindings()
.iter()
.map(|binding| {
let bind_ident = &binding.binding;
let field_name = binding
.ast()
.ident
.as_ref()
.map_or_else(|| field_idx.to_string(), |i| i.to_string());
let first = field_idx == 0;
let result = quote! {
match ::rustc_serialize::Encoder::emit_struct_field(
match ::rustc_serialize::Encodable::<#encoder_ty>::encode(
#bind_ident,
__encoder,
#field_name,
#first,
|__encoder|
::rustc_serialize::Encodable::<#encoder_ty>::encode(#bind_ident, __encoder),
) {
::std::result::Result::Ok(()) => (),
::std::result::Result::Err(__err)
=> return ::std::result::Result::Err(__err),
}
};
field_idx += 1;
result
})
.collect::<TokenStream>()
});
let no_fields = field_idx == 0;
quote! {
::rustc_serialize::Encoder::emit_struct(__encoder, #no_fields, |__encoder| {
::std::result::Result::Ok(match *self { #encode_inner })
})
::std::result::Result::Ok(match *self { #encode_inner })
}
}
_ => {
let mut variant_idx = 0usize;
let encode_inner = s.each_variant(|vi| {
let variant_name = vi.ast().ident.to_string();
let mut field_idx = 0usize;
let encode_fields: TokenStream = vi
.bindings()
.iter()
.map(|binding| {
let bind_ident = &binding.binding;
let first = field_idx == 0;
let result = quote! {
match ::rustc_serialize::Encoder::emit_enum_variant_arg(
match ::rustc_serialize::Encodable::<#encoder_ty>::encode(
#bind_ident,
__encoder,
#first,
|__encoder|
::rustc_serialize::Encodable::<#encoder_ty>::encode(#bind_ident, __encoder),
) {
::std::result::Result::Ok(()) => (),
::std::result::Result::Err(__err)
=> return ::std::result::Result::Err(__err),
}
};
field_idx += 1;
result
})
.collect();
let result = if field_idx != 0 {
let result = if !vi.bindings().is_empty() {
quote! {
::rustc_serialize::Encoder::emit_enum_variant(
__encoder,
#variant_name,
#variant_idx,
#field_idx,
|__encoder| { ::std::result::Result::Ok({ #encode_fields }) }
)
}
@ -220,7 +197,6 @@ fn encodable_body(
quote! {
::rustc_serialize::Encoder::emit_fieldless_enum_variant::<#variant_idx>(
__encoder,
#variant_name,
)
}
};
@ -228,11 +204,9 @@ fn encodable_body(
result
});
quote! {
::rustc_serialize::Encoder::emit_enum(__encoder, |__encoder| {
match *self {
#encode_inner
}
})
match *self {
#encode_inner
}
}
}
};

View File

@ -95,11 +95,6 @@ macro_rules! encoder_methods {
impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
type Error = <opaque::Encoder as Encoder>::Error;
#[inline]
fn emit_unit(&mut self) -> Result<(), Self::Error> {
Ok(())
}
encoder_methods! {
emit_usize(usize);
emit_u128(u128);

View File

@ -315,7 +315,7 @@ impl<Tag> Scalar<Tag> {
ScalarSizeMismatch { target_size: target_size.bytes(), data_size: size.bytes() }
})?),
Scalar::Ptr(ptr, sz) => {
if target_size.bytes() != sz.into() {
if target_size.bytes() != u64::from(sz) {
return Err(ScalarSizeMismatch {
target_size: target_size.bytes(),
data_size: sz.into(),

View File

@ -56,8 +56,8 @@ impl PredecessorCache {
impl<S: serialize::Encoder> serialize::Encodable<S> for PredecessorCache {
#[inline]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}

View File

@ -56,8 +56,8 @@ impl SwitchSourceCache {
impl<S: serialize::Encoder> serialize::Encodable<S> for SwitchSourceCache {
#[inline]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}

View File

@ -367,8 +367,8 @@ impl PostorderCache {
impl<S: serialize::Encoder> serialize::Encodable<S> for PostorderCache {
#[inline]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}

View File

@ -993,11 +993,6 @@ where
{
type Error = E::Error;
#[inline]
fn emit_unit(&mut self) -> Result<(), Self::Error> {
Ok(())
}
encoder_methods! {
emit_usize(usize);
emit_u128(u128);

View File

@ -25,12 +25,11 @@ impl<D: Decoder, A: Array<Item: Decodable<D>>> Decodable<D> for SmallVec<A> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for LinkedList<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@ -43,12 +42,11 @@ impl<D: Decoder, T: Decodable<D>> Decodable<D> for LinkedList<T> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for VecDeque<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@ -65,13 +63,12 @@ where
V: Encodable<S>,
{
fn encode(&self, e: &mut S) -> Result<(), S::Error> {
e.emit_map(self.len(), |e| {
for (i, (key, val)) in self.iter().enumerate() {
e.emit_map_elt_key(i, |e| key.encode(e))?;
e.emit_map_elt_val(|e| val.encode(e))?;
}
Ok(())
})
e.emit_usize(self.len())?;
for (key, val) in self.iter() {
key.encode(e)?;
val.encode(e)?;
}
Ok(())
}
}
@ -97,12 +94,11 @@ where
T: Encodable<S> + PartialEq + Ord,
{
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@ -127,13 +123,12 @@ where
S: BuildHasher,
{
fn encode(&self, e: &mut E) -> Result<(), E::Error> {
e.emit_map(self.len(), |e| {
for (i, (key, val)) in self.iter().enumerate() {
e.emit_map_elt_key(i, |e| key.encode(e))?;
e.emit_map_elt_val(|e| val.encode(e))?;
}
Ok(())
})
e.emit_usize(self.len())?;
for (key, val) in self.iter() {
key.encode(e)?;
val.encode(e)?;
}
Ok(())
}
}
@ -162,12 +157,11 @@ where
S: BuildHasher,
{
fn encode(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@ -194,13 +188,12 @@ where
S: BuildHasher,
{
fn encode(&self, e: &mut E) -> Result<(), E::Error> {
e.emit_map(self.len(), |e| {
for (i, (key, val)) in self.iter().enumerate() {
e.emit_map_elt_key(i, |e| key.encode(e))?;
e.emit_map_elt_val(|e| val.encode(e))?;
}
Ok(())
})
e.emit_usize(self.len())?;
for (key, val) in self.iter() {
key.encode(e)?;
val.encode(e)?;
}
Ok(())
}
}
@ -229,12 +222,11 @@ where
S: BuildHasher,
{
fn encode(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,147 +0,0 @@
// Benchmarks and tests that require private items
extern crate test;
use super::{from_str, Parser, Stack, StackElement};
use std::string;
use test::Bencher;
#[test]
fn test_stack() {
let mut stack = Stack::new();
assert!(stack.is_empty());
assert!(stack.is_empty());
assert!(!stack.last_is_index());
stack.push_index(0);
stack.bump_index();
assert!(stack.len() == 1);
assert!(stack.is_equal_to(&[StackElement::Index(1)]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.ends_with(&[StackElement::Index(1)]));
assert!(stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
stack.push_key("foo".to_string());
assert!(stack.len() == 2);
assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.ends_with(&[StackElement::Key("foo")]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
assert!(stack.get(1) == StackElement::Key("foo"));
stack.push_key("bar".to_string());
assert!(stack.len() == 3);
assert!(stack.is_equal_to(&[
StackElement::Index(1),
StackElement::Key("foo"),
StackElement::Key("bar")
]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[
StackElement::Index(1),
StackElement::Key("foo"),
StackElement::Key("bar")
]));
assert!(stack.ends_with(&[StackElement::Key("bar")]));
assert!(stack.ends_with(&[StackElement::Key("foo"), StackElement::Key("bar")]));
assert!(stack.ends_with(&[
StackElement::Index(1),
StackElement::Key("foo"),
StackElement::Key("bar")
]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
assert!(stack.get(1) == StackElement::Key("foo"));
assert!(stack.get(2) == StackElement::Key("bar"));
stack.pop();
assert!(stack.len() == 2);
assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.ends_with(&[StackElement::Key("foo")]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
assert!(stack.get(1) == StackElement::Key("foo"));
}
#[bench]
fn bench_streaming_small(b: &mut Bencher) {
b.iter(|| {
let mut parser = Parser::new(
r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#
.chars(),
);
loop {
match parser.next() {
None => return,
_ => {}
}
}
});
}
#[bench]
fn bench_small(b: &mut Bencher) {
b.iter(|| {
let _ = from_str(
r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#,
);
});
}
fn big_json() -> string::String {
let mut src = "[\n".to_string();
for _ in 0..500 {
src.push_str(
r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \
[1,2,3]},"#,
);
}
src.push_str("{}]");
return src;
}
#[bench]
fn bench_streaming_large(b: &mut Bencher) {
let src = big_json();
b.iter(|| {
let mut parser = Parser::new(src.chars());
loop {
match parser.next() {
None => return,
_ => {}
}
}
});
}
#[bench]
fn bench_large(b: &mut Bencher) {
let src = big_json();
b.iter(|| {
let _ = from_str(&src);
});
}

View File

@ -25,7 +25,5 @@ pub use self::serialize::{Decodable, Decoder, Encodable, Encoder};
mod collection_impls;
mod serialize;
pub mod json;
pub mod leb128;
pub mod opaque;

View File

@ -64,11 +64,6 @@ const STR_SENTINEL: u8 = 0xC1;
impl serialize::Encoder for Encoder {
type Error = !;
#[inline]
fn emit_unit(&mut self) -> EncodeResult {
Ok(())
}
#[inline]
fn emit_usize(&mut self, v: usize) -> EncodeResult {
write_leb128!(self, v, usize, write_usize_leb128)
@ -419,11 +414,6 @@ macro_rules! file_encoder_write_leb128 {
impl serialize::Encoder for FileEncoder {
type Error = io::Error;
#[inline]
fn emit_unit(&mut self) -> FileEncodeResult {
Ok(())
}
#[inline]
fn emit_usize(&mut self, v: usize) -> FileEncodeResult {
file_encoder_write_leb128!(self, v, usize, write_usize_leb128)

View File

@ -15,7 +15,6 @@ pub trait Encoder {
type Error;
// Primitive types:
fn emit_unit(&mut self) -> Result<(), Self::Error>;
fn emit_usize(&mut self, v: usize) -> Result<(), Self::Error>;
fn emit_u128(&mut self, v: u128) -> Result<(), Self::Error>;
fn emit_u64(&mut self, v: u64) -> Result<(), Self::Error>;
@ -35,22 +34,8 @@ pub trait Encoder {
fn emit_str(&mut self, v: &str) -> Result<(), Self::Error>;
fn emit_raw_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error>;
// Compound types:
#[inline]
fn emit_enum<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
fn emit_enum_variant<F>(
&mut self,
_v_name: &str,
v_id: usize,
_len: usize,
f: F,
) -> Result<(), Self::Error>
// Convenience for the derive macro:
fn emit_enum_variant<F>(&mut self, v_id: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
@ -65,112 +50,9 @@ pub trait Encoder {
// optimization that would otherwise be necessary here, likely due to the
// multiple levels of inlining and const-prop that are needed.
#[inline]
fn emit_fieldless_enum_variant<const ID: usize>(
&mut self,
_v_name: &str,
) -> Result<(), Self::Error> {
fn emit_fieldless_enum_variant<const ID: usize>(&mut self) -> Result<(), Self::Error> {
self.emit_usize(ID)
}
#[inline]
fn emit_enum_variant_arg<F>(&mut self, _first: bool, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_struct<F>(&mut self, _no_fields: bool, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_struct_field<F>(&mut self, _f_name: &str, _first: bool, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_tuple<F>(&mut self, _len: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_tuple_arg<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
// Specialized types:
fn emit_option<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_enum(f)
}
#[inline]
fn emit_option_none(&mut self) -> Result<(), Self::Error> {
self.emit_enum_variant("None", 0, 0, |_| Ok(()))
}
fn emit_option_some<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_enum_variant("Some", 1, 1, f)
}
fn emit_seq<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_usize(len)?;
f(self)
}
#[inline]
fn emit_seq_elt<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
fn emit_map<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_usize(len)?;
f(self)
}
#[inline]
fn emit_map_elt_key<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_map_elt_val<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
}
// Note: all the methods in this trait are infallible, which may be surprising.
@ -320,8 +202,8 @@ impl<D: Decoder> Decodable<D> for String {
}
impl<S: Encoder> Encodable<S> for () {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
@ -330,8 +212,8 @@ impl<D: Decoder> Decodable<D> for () {
}
impl<S: Encoder, T> Encodable<S> for PhantomData<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
@ -362,12 +244,11 @@ impl<D: Decoder, T: Decodable<D>> Decodable<D> for Rc<T> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for [T] {
default fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?
}
Ok(())
}
}
@ -450,10 +331,10 @@ impl<'a, D: Decoder> Decodable<D> for Cow<'a, str> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for Option<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_option(|s| match *self {
None => s.emit_option_none(),
Some(ref v) => s.emit_option_some(|s| v.encode(s)),
})
match *self {
None => s.emit_enum_variant(0, |_| Ok(())),
Some(ref v) => s.emit_enum_variant(1, |s| v.encode(s)),
}
}
}
@ -469,14 +350,10 @@ impl<D: Decoder, T: Decodable<D>> Decodable<D> for Option<T> {
impl<S: Encoder, T1: Encodable<S>, T2: Encodable<S>> Encodable<S> for Result<T1, T2> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_enum(|s| match *self {
Ok(ref v) => {
s.emit_enum_variant("Ok", 0, 1, |s| s.emit_enum_variant_arg(true, |s| v.encode(s)))
}
Err(ref v) => {
s.emit_enum_variant("Err", 1, 1, |s| s.emit_enum_variant_arg(true, |s| v.encode(s)))
}
})
match *self {
Ok(ref v) => s.emit_enum_variant(0, |s| v.encode(s)),
Err(ref v) => s.emit_enum_variant(1, |s| v.encode(s)),
}
}
}
@ -494,18 +371,6 @@ macro_rules! peel {
($name:ident, $($other:ident,)*) => (tuple! { $($other,)* })
}
/// Evaluates to the number of tokens passed to it.
///
/// Logarithmic counting: every one or two recursive expansions, the number of
/// tokens to count is divided by two, instead of being reduced by one.
/// Therefore, the recursion depth is the binary logarithm of the number of
/// tokens to count, and the expanded tree is likewise very small.
macro_rules! count {
($one:tt) => (1usize);
($($pairs:tt $_p:tt)*) => (count!($($pairs)*) << 1usize);
($odd:tt $($rest:tt)*) => (count!($($rest)*) | 1usize);
}
macro_rules! tuple {
() => ();
( $($name:ident,)+ ) => (
@ -518,12 +383,8 @@ macro_rules! tuple {
#[allow(non_snake_case)]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
let ($(ref $name,)+) = *self;
let len: usize = count!($($name)+);
s.emit_tuple(len, |s| {
let mut i = 0;
$(s.emit_tuple_arg({ i+=1; i-1 }, |s| $name.encode(s))?;)+
Ok(())
})
$($name.encode(s)?;)+
Ok(())
}
}
peel! { $($name,)+ }

View File

@ -1,978 +0,0 @@
#![allow(rustc::internal)]
use json::ErrorCode::*;
use json::Json::*;
use json::JsonEvent::*;
use json::ParserError::*;
use json::{from_str, Encoder, EncoderError, Json, JsonEvent, Parser, StackElement};
use rustc_macros::Encodable;
use rustc_serialize::json;
use rustc_serialize::Encodable;
use std::collections::BTreeMap;
use std::io::prelude::*;
use std::string;
use Animal::*;
// Test fixture: a struct with an optional field, compared by value in
// round-trip assertions.
#[derive(Eq, PartialEq, Debug)]
struct OptionData {
    opt: Option<usize>,
}
// Test fixture: an enum with a unit variant and a tuple variant, exercising
// both enum encodings (bare string vs. {"variant", "fields"} object — see
// `test_write_enum`).
#[derive(PartialEq, Encodable, Debug)]
enum Animal {
    Dog,
    Frog(string::String, isize),
}
// Test fixture: nested struct exercising unit, integer and string-vector
// field encodings.
#[derive(PartialEq, Encodable, Debug)]
struct Inner {
    a: (),
    b: usize,
    c: Vec<string::String>,
}
// Test fixture: wraps a list of `Inner` values to exercise struct-in-vec
// encoding.
#[derive(PartialEq, Encodable, Debug)]
struct Outer {
    inner: Vec<Inner>,
}
/// Builds a `Json::Object` from a slice of key/value pairs, cloning each
/// entry into a fresh `BTreeMap`.
fn mk_object(items: &[(string::String, Json)]) -> Json {
    let mut d = BTreeMap::new();
    // Destructure directly in the loop head; a single-arm `match` on the
    // tuple reference adds nothing.
    for (key, value) in items {
        d.insert(key.clone(), value.clone());
    }
    Object(d)
}
// Parsing via the `FromStr` trait (`str::parse`) must agree with the
// turbofish form.
#[test]
fn test_from_str_trait() {
    let s = "null";
    // `assert_eq!` reports both values on failure, unlike a bare `assert!`.
    assert_eq!(s.parse::<Json>().unwrap(), s.parse().unwrap());
}
// Both the compact and the pretty printer render `Null` as the bare literal.
#[test]
fn test_write_null() {
    assert_eq!(Null.to_string(), "null");
    assert_eq!(Null.pretty().to_string(), "null");
}
#[test]
fn test_write_i64() {
assert_eq!(U64(0).to_string(), "0");
assert_eq!(U64(0).pretty().to_string(), "0");
assert_eq!(U64(1234).to_string(), "1234");
assert_eq!(U64(1234).pretty().to_string(), "1234");
assert_eq!(I64(-5678).to_string(), "-5678");
assert_eq!(I64(-5678).pretty().to_string(), "-5678");
assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
}
#[test]
fn test_write_f64() {
assert_eq!(F64(3.0).to_string(), "3.0");
assert_eq!(F64(3.0).pretty().to_string(), "3.0");
assert_eq!(F64(3.1).to_string(), "3.1");
assert_eq!(F64(3.1).pretty().to_string(), "3.1");
assert_eq!(F64(-1.5).to_string(), "-1.5");
assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
assert_eq!(F64(0.5).to_string(), "0.5");
assert_eq!(F64(0.5).pretty().to_string(), "0.5");
assert_eq!(F64(f64::NAN).to_string(), "null");
assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
assert_eq!(F64(f64::INFINITY).to_string(), "null");
assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
}
#[test]
fn test_write_str() {
assert_eq!(String("".to_string()).to_string(), "\"\"");
assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
}
#[test]
fn test_write_bool() {
assert_eq!(Boolean(true).to_string(), "true");
assert_eq!(Boolean(true).pretty().to_string(), "true");
assert_eq!(Boolean(false).to_string(), "false");
assert_eq!(Boolean(false).pretty().to_string(), "false");
}
#[test]
fn test_write_array() {
assert_eq!(Array(vec![]).to_string(), "[]");
assert_eq!(Array(vec![]).pretty().to_string(), "[]");
assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
assert_eq!(
Array(vec![Boolean(true)]).pretty().to_string(),
"\
[\n \
true\n\
]"
);
let long_test_array =
Array(vec![Boolean(false), Null, Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
assert_eq!(long_test_array.to_string(), "[false,null,[\"foo\\nbar\",3.5]]");
assert_eq!(
long_test_array.pretty().to_string(),
"\
[\n \
false,\n \
null,\n \
[\n \
\"foo\\nbar\",\n \
3.5\n \
]\n\
]"
);
}
#[test]
fn test_write_object() {
assert_eq!(mk_object(&[]).to_string(), "{}");
assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
assert_eq!(mk_object(&[("a".to_string(), Boolean(true))]).to_string(), "{\"a\":true}");
assert_eq!(
mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
"\
{\n \
\"a\": true\n\
}"
);
let complex_obj = mk_object(&[(
"b".to_string(),
Array(vec![
mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
mk_object(&[("d".to_string(), String("".to_string()))]),
]),
)]);
assert_eq!(
complex_obj.to_string(),
"{\
\"b\":[\
{\"c\":\"\\f\\r\"},\
{\"d\":\"\"}\
]\
}"
);
assert_eq!(
complex_obj.pretty().to_string(),
"\
{\n \
\"b\": [\n \
{\n \
\"c\": \"\\f\\r\"\n \
},\n \
{\n \
\"d\": \"\"\n \
}\n \
]\n\
}"
);
let a = mk_object(&[
("a".to_string(), Boolean(true)),
(
"b".to_string(),
Array(vec![
mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
mk_object(&[("d".to_string(), String("".to_string()))]),
]),
),
]);
    // We can't compare the strings directly because the object fields may be
    // printed in a different order.
assert_eq!(a.clone(), a.to_string().parse().unwrap());
assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
}
#[test]
fn test_write_enum() {
let animal = Dog;
assert_eq!(json::as_json(&animal).to_string(), "\"Dog\"");
assert_eq!(json::as_pretty_json(&animal).to_string(), "\"Dog\"");
let animal = Frog("Henry".to_string(), 349);
assert_eq!(
json::as_json(&animal).to_string(),
"{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
);
assert_eq!(
json::as_pretty_json(&animal).to_string(),
"{\n \
\"variant\": \"Frog\",\n \
\"fields\": [\n \
\"Henry\",\n \
349\n \
]\n\
}"
);
}
macro_rules! check_encoder_for_simple {
($value:expr, $expected:expr) => {{
let s = json::as_json(&$value).to_string();
assert_eq!(s, $expected);
let s = json::as_pretty_json(&$value).to_string();
assert_eq!(s, $expected);
}};
}
#[test]
fn test_write_some() {
check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
}
#[test]
fn test_write_none() {
check_encoder_for_simple!(None::<string::String>, "null");
}
#[test]
fn test_write_char() {
check_encoder_for_simple!('a', "\"a\"");
check_encoder_for_simple!('\t', "\"\\t\"");
check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
}
#[test]
fn test_trailing_characters() {
assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2)));
assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
}
#[test]
fn test_read_identifiers() {
assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3)));
assert_eq!(from_str("null"), Ok(Null));
assert_eq!(from_str("true"), Ok(Boolean(true)));
assert_eq!(from_str("false"), Ok(Boolean(false)));
assert_eq!(from_str(" null "), Ok(Null));
assert_eq!(from_str(" true "), Ok(Boolean(true)));
assert_eq!(from_str(" false "), Ok(Boolean(false)));
}
#[test]
fn test_read_number() {
assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2)));
assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2)));
assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3)));
assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3)));
assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
assert_eq!(from_str("3"), Ok(U64(3)));
assert_eq!(from_str("3.1"), Ok(F64(3.1)));
assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
assert_eq!(from_str("0.4"), Ok(F64(0.4)));
assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
assert_eq!(from_str(" 3 "), Ok(U64(3)));
assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
}
#[test]
fn test_read_str() {
assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
assert_eq!(from_str("\"\""), Ok(String("".to_string())));
assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
}
#[test]
fn test_read_array() {
assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[]"), Ok(Array(vec![])));
assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
assert_eq!(from_str("[3, 1]"), Ok(Array(vec![U64(3), U64(1)])));
assert_eq!(from_str("\n[3, 2]\n"), Ok(Array(vec![U64(3), U64(2)])));
assert_eq!(from_str("[2, [4, 1]]"), Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
}
#[test]
fn test_read_object() {
assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2)));
assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6)));
assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6)));
assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8)));
assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
assert_eq!(from_str("{\"a\": 3}").unwrap(), mk_object(&[("a".to_string(), U64(3))]));
assert_eq!(
from_str("{ \"a\": null, \"b\" : true }").unwrap(),
mk_object(&[("a".to_string(), Null), ("b".to_string(), Boolean(true))])
);
assert_eq!(
from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
mk_object(&[("a".to_string(), Null), ("b".to_string(), Boolean(true))])
);
assert_eq!(
from_str("{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
mk_object(&[("a".to_string(), F64(1.0)), ("b".to_string(), Array(vec![Boolean(true)]))])
);
assert_eq!(
from_str(
"{\
\"a\": 1.0, \
\"b\": [\
true,\
\"foo\\nbar\", \
{ \"c\": {\"d\": null} } \
]\
}"
)
.unwrap(),
mk_object(&[
("a".to_string(), F64(1.0)),
(
"b".to_string(),
Array(vec![
Boolean(true),
String("foo\nbar".to_string()),
mk_object(&[("c".to_string(), mk_object(&[("d".to_string(), Null)]))])
])
)
])
);
}
// Error positions report line/column of the failure (here line 3, col 8),
// not a byte offset into the whole input, when the input spans lines.
#[test]
fn test_multiline_errors() {
    assert_eq!(from_str("{\n \"foo\":\n \"bar\""), Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
}
// `Json::find` locates a top-level key and returns a reference to its value.
#[test]
fn test_find() {
    let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
    let found_str = json_value.find("dog");
    // `assert_eq!` on the `Option` gives a useful message on failure, and
    // avoids the panic-prone chained `unwrap()`s of `assert!(a == b)`.
    assert_eq!(found_str.and_then(|j| j.as_string()), Some("cat"));
}
// `Json::find_path` walks a chain of nested object keys.
#[test]
fn test_find_path() {
    let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
    let found_str = json_value.find_path(&["dog", "cat", "mouse"]);
    // Compare the `Option` directly instead of `assert!` over chained unwraps.
    assert_eq!(found_str.and_then(|j| j.as_string()), Some("cheese"));
}
// `Json::search` finds a key at any nesting depth.
#[test]
fn test_search() {
    let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
    let found_str = json_value.search("mouse").and_then(|j| j.as_string());
    // `assert_eq!` reports both sides on failure, unlike `assert!(a == b)`.
    assert_eq!(found_str, Some("cheese"));
}
// Indexing with a string key yields the nested value; indexing the resulting
// array with integers yields its elements.
#[test]
fn test_index() {
    let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
    // `let x = &…` is the idiomatic spelling of the legacy `let ref x = …`.
    let array = &json_value["animals"];
    assert_eq!(array[0].as_string().unwrap(), "dog");
    assert_eq!(array[1].as_string().unwrap(), "cat");
    assert_eq!(array[2].as_string().unwrap(), "mouse");
}
#[test]
fn test_is_object() {
let json_value = from_str("{}").unwrap();
assert!(json_value.is_object());
}
#[test]
fn test_as_object() {
let json_value = from_str("{}").unwrap();
let json_object = json_value.as_object();
assert!(json_object.is_some());
}
#[test]
fn test_is_array() {
let json_value = from_str("[1, 2, 3]").unwrap();
assert!(json_value.is_array());
}
#[test]
fn test_as_array() {
let json_value = from_str("[1, 2, 3]").unwrap();
let json_array = json_value.as_array();
let expected_length = 3;
assert!(json_array.is_some() && json_array.unwrap().len() == expected_length);
}
#[test]
fn test_is_string() {
let json_value = from_str("\"dog\"").unwrap();
assert!(json_value.is_string());
}
#[test]
fn test_as_string() {
let json_value = from_str("\"dog\"").unwrap();
let json_str = json_value.as_string();
let expected_str = "dog";
assert_eq!(json_str, Some(expected_str));
}
#[test]
fn test_is_number() {
let json_value = from_str("12").unwrap();
assert!(json_value.is_number());
}
#[test]
fn test_is_i64() {
let json_value = from_str("-12").unwrap();
assert!(json_value.is_i64());
let json_value = from_str("12").unwrap();
assert!(!json_value.is_i64());
let json_value = from_str("12.0").unwrap();
assert!(!json_value.is_i64());
}
#[test]
fn test_is_u64() {
let json_value = from_str("12").unwrap();
assert!(json_value.is_u64());
let json_value = from_str("-12").unwrap();
assert!(!json_value.is_u64());
let json_value = from_str("12.0").unwrap();
assert!(!json_value.is_u64());
}
#[test]
fn test_is_f64() {
let json_value = from_str("12").unwrap();
assert!(!json_value.is_f64());
let json_value = from_str("-12").unwrap();
assert!(!json_value.is_f64());
let json_value = from_str("12.0").unwrap();
assert!(json_value.is_f64());
let json_value = from_str("-12.0").unwrap();
assert!(json_value.is_f64());
}
#[test]
fn test_as_i64() {
let json_value = from_str("-12").unwrap();
let json_num = json_value.as_i64();
assert_eq!(json_num, Some(-12));
}
#[test]
fn test_as_u64() {
let json_value = from_str("12").unwrap();
let json_num = json_value.as_u64();
assert_eq!(json_num, Some(12));
}
#[test]
fn test_as_f64() {
let json_value = from_str("12.0").unwrap();
let json_num = json_value.as_f64();
assert_eq!(json_num, Some(12f64));
}
#[test]
fn test_is_boolean() {
let json_value = from_str("false").unwrap();
assert!(json_value.is_boolean());
}
#[test]
fn test_as_boolean() {
let json_value = from_str("false").unwrap();
let json_bool = json_value.as_boolean();
let expected_bool = false;
assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool);
}
#[test]
fn test_is_null() {
let json_value = from_str("null").unwrap();
assert!(json_value.is_null());
}
#[test]
fn test_as_null() {
let json_value = from_str("null").unwrap();
let json_null = json_value.as_null();
let expected_null = ();
assert!(json_null.is_some() && json_null.unwrap() == expected_null);
}
// A `HashMap` with a numeric key must serialize to valid JSON: object keys
// are strings, so the number is stringified. Uses the compact encoder —
// the original body duplicated the pretty-printer test below verbatim,
// which left the compact path uncovered despite this test's name.
#[test]
fn test_encode_hashmap_with_numeric_key() {
    use std::collections::HashMap;
    use std::str::from_utf8;
    let mut hm: HashMap<usize, bool> = HashMap::new();
    hm.insert(1, true);
    let mut mem_buf = Vec::new();
    write!(&mut mem_buf, "{}", json::as_json(&hm)).unwrap();
    let json_str = from_utf8(&mem_buf[..]).unwrap();
    // Fail loudly if the output does not round-trip through the parser.
    if from_str(json_str).is_err() {
        panic!("Unable to parse json_str: {:?}", json_str);
    }
}
// Pretty-printed output for a numeric-keyed map must round-trip through the
// parser (object keys are stringified numbers).
#[test]
fn test_prettyencode_hashmap_with_numeric_key() {
    use std::collections::HashMap;
    use std::str::from_utf8;
    let mut hm: HashMap<usize, bool> = HashMap::new();
    hm.insert(1, true);
    let mut mem_buf = Vec::new();
    write!(&mut mem_buf, "{}", json::as_pretty_json(&hm)).unwrap();
    let json_str = from_utf8(&mem_buf[..]).unwrap();
    match from_str(json_str) {
        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
        _ => {} // it parsed and we are good to go
    }
}
// The `indent` parameter of the pretty encoder controls the number of spaces
// per nesting level; verify the indentation depth of every output line for
// indent widths 0 through 3.
#[test]
fn test_prettyencoder_indent_level_param() {
    use std::collections::BTreeMap;
    use std::str::from_utf8;
    let mut tree = BTreeMap::new();
    tree.insert("hello".to_string(), String("guten tag".to_string()));
    tree.insert("goodbye".to_string(), String("sayonara".to_string()));
    let json = Array(
        // The layout below should look a lot like
        // the pretty-printed JSON (indent * x)
        vec![
            // 0x
            String("greetings".to_string()), // 1x
            Object(tree),                    // 1x + 2x + 2x + 1x
        ], // 0x
        // End JSON array (7 lines)
    );
    // Helper function for counting indents
    fn indents(source: &str) -> usize {
        let trimmed = source.trim_start_matches(' ');
        source.len() - trimmed.len()
    }
    // Test up to 4 spaces of indents (more?)
    for i in 0..4 {
        let mut writer = Vec::new();
        write!(&mut writer, "{}", json::as_pretty_json(&json).indent(i)).unwrap();
        let printed = from_utf8(&writer[..]).unwrap();
        // Check for indents at each line
        let lines: Vec<&str> = printed.lines().collect();
        assert_eq!(lines.len(), 7); // JSON should be 7 lines
        assert_eq!(indents(lines[0]), 0 * i); // [
        assert_eq!(indents(lines[1]), 1 * i); // "greetings",
        assert_eq!(indents(lines[2]), 1 * i); // {
        assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag",
        assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara"
        assert_eq!(indents(lines[5]), 1 * i); // },
        assert_eq!(indents(lines[6]), 0 * i); // ]
        // Finally, test that the pretty-printed JSON is valid.
        // `.expect` on the `Result` directly; `.ok().expect(…)` discards the
        // underlying error value (clippy `ok_expect`).
        from_str(printed).expect("Pretty-printed JSON is invalid!");
    }
}
// A unit-variant enum key encodes as its variant name, so the map becomes
// {"Foo":0} (see `test_write_enum` for the bare-string unit-variant form).
#[test]
fn test_hashmap_with_enum_key() {
    use std::collections::HashMap;
    #[derive(Encodable, Eq, Hash, PartialEq, Debug)]
    enum Enum {
        Foo,
        #[allow(dead_code)]
        Bar,
    }
    let mut map = HashMap::new();
    map.insert(Enum::Foo, 0);
    let result = json::encode(&map).unwrap();
    assert_eq!(&result[..], r#"{"Foo":0}"#);
}
/// Drives the streaming `Parser` over `src` and checks that each emitted
/// event, together with the parser's stack at that point, matches the
/// corresponding entry of `expected`.
fn assert_stream_equal(src: &str, expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) {
    let mut parser = Parser::new(src.chars());
    let mut i = 0;
    // `while let` replaces the manual loop/match/break dance. A `for` loop is
    // not usable here: `parser.stack()` must be borrowed between `next()`
    // calls, which a `for` loop's iterator borrow would forbid.
    while let Some(evt) = parser.next() {
        let (ref expected_evt, ref expected_stack) = expected[i];
        if !parser.stack().is_equal_to(expected_stack) {
            panic!("Parser stack is not equal to {:?}", expected_stack);
        }
        assert_eq!(&evt, expected_evt);
        i += 1;
    }
}
#[test]
fn test_streaming_parser() {
assert_stream_equal(
r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
vec![
(ObjectStart, vec![]),
(StringValue("bar".to_string()), vec![StackElement::Key("foo")]),
(ArrayStart, vec![StackElement::Key("array")]),
(U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]),
(U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]),
(U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]),
(U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]),
(U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]),
(U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]),
(ArrayEnd, vec![StackElement::Key("array")]),
(ArrayStart, vec![StackElement::Key("idents")]),
(NullValue, vec![StackElement::Key("idents"), StackElement::Index(0)]),
(BooleanValue(true), vec![StackElement::Key("idents"), StackElement::Index(1)]),
(BooleanValue(false), vec![StackElement::Key("idents"), StackElement::Index(2)]),
(ArrayEnd, vec![StackElement::Key("idents")]),
(ObjectEnd, vec![]),
],
);
}
/// Drives the streaming parser to exhaustion over `src` and returns the last
/// event it produced; for malformed input that is the terminating `Error`
/// event, and for empty input the `NullValue` seed.
fn last_event(src: &str) -> JsonEvent {
    let mut parser = Parser::new(src.chars());
    let mut evt = NullValue;
    // `while let` expresses "keep the most recent event" without the
    // loop/match/return contortion of the original.
    while let Some(e) = parser.next() {
        evt = e;
    }
    evt
}
#[test]
fn test_read_object_streaming() {
assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2)));
assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6)));
assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6)));
assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8)));
assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));
assert_stream_equal("{}", vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]);
assert_stream_equal(
"{\"a\": 3}",
vec![
(ObjectStart, vec![]),
(U64Value(3), vec![StackElement::Key("a")]),
(ObjectEnd, vec![]),
],
);
assert_stream_equal(
"{ \"a\": null, \"b\" : true }",
vec![
(ObjectStart, vec![]),
(NullValue, vec![StackElement::Key("a")]),
(BooleanValue(true), vec![StackElement::Key("b")]),
(ObjectEnd, vec![]),
],
);
assert_stream_equal(
"{\"a\" : 1.0 ,\"b\": [ true ]}",
vec![
(ObjectStart, vec![]),
(F64Value(1.0), vec![StackElement::Key("a")]),
(ArrayStart, vec![StackElement::Key("b")]),
(BooleanValue(true), vec![StackElement::Key("b"), StackElement::Index(0)]),
(ArrayEnd, vec![StackElement::Key("b")]),
(ObjectEnd, vec![]),
],
);
assert_stream_equal(
r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#,
vec![
(ObjectStart, vec![]),
(F64Value(1.0), vec![StackElement::Key("a")]),
(ArrayStart, vec![StackElement::Key("b")]),
(BooleanValue(true), vec![StackElement::Key("b"), StackElement::Index(0)]),
(
StringValue("foo\nbar".to_string()),
vec![StackElement::Key("b"), StackElement::Index(1)],
),
(ObjectStart, vec![StackElement::Key("b"), StackElement::Index(2)]),
(
ObjectStart,
vec![StackElement::Key("b"), StackElement::Index(2), StackElement::Key("c")],
),
(
NullValue,
vec![
StackElement::Key("b"),
StackElement::Index(2),
StackElement::Key("c"),
StackElement::Key("d"),
],
),
(
ObjectEnd,
vec![StackElement::Key("b"), StackElement::Index(2), StackElement::Key("c")],
),
(ObjectEnd, vec![StackElement::Key("b"), StackElement::Index(2)]),
(ArrayEnd, vec![StackElement::Key("b")]),
(ObjectEnd, vec![]),
],
);
}
#[test]
fn test_read_array_streaming() {
assert_stream_equal("[]", vec![(ArrayStart, vec![]), (ArrayEnd, vec![])]);
assert_stream_equal("[ ]", vec![(ArrayStart, vec![]), (ArrayEnd, vec![])]);
assert_stream_equal(
"[true]",
vec![
(ArrayStart, vec![]),
(BooleanValue(true), vec![StackElement::Index(0)]),
(ArrayEnd, vec![]),
],
);
assert_stream_equal(
"[ false ]",
vec![
(ArrayStart, vec![]),
(BooleanValue(false), vec![StackElement::Index(0)]),
(ArrayEnd, vec![]),
],
);
assert_stream_equal(
"[null]",
vec![(ArrayStart, vec![]), (NullValue, vec![StackElement::Index(0)]), (ArrayEnd, vec![])],
);
assert_stream_equal(
"[3, 1]",
vec![
(ArrayStart, vec![]),
(U64Value(3), vec![StackElement::Index(0)]),
(U64Value(1), vec![StackElement::Index(1)]),
(ArrayEnd, vec![]),
],
);
assert_stream_equal(
"\n[3, 2]\n",
vec![
(ArrayStart, vec![]),
(U64Value(3), vec![StackElement::Index(0)]),
(U64Value(2), vec![StackElement::Index(1)]),
(ArrayEnd, vec![]),
],
);
assert_stream_equal(
"[2, [4, 1]]",
vec![
(ArrayStart, vec![]),
(U64Value(2), vec![StackElement::Index(0)]),
(ArrayStart, vec![StackElement::Index(1)]),
(U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]),
(U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]),
(ArrayEnd, vec![StackElement::Index(1)]),
(ArrayEnd, vec![]),
],
);
assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2)));
assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
}
#[test]
fn test_trailing_characters_streaming() {
assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2)));
assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
}
#[test]
fn test_read_identifiers_streaming() {
assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3)));
}
#[test]
fn test_to_json() {
use json::ToJson;
use std::collections::{BTreeMap, HashMap};
let array2 = Array(vec![U64(1), U64(2)]);
let array3 = Array(vec![U64(1), U64(2), U64(3)]);
let object = {
let mut tree_map = BTreeMap::new();
tree_map.insert("a".to_string(), U64(1));
tree_map.insert("b".to_string(), U64(2));
Object(tree_map)
};
assert_eq!(array2.to_json(), array2);
assert_eq!(object.to_json(), object);
assert_eq!(3_isize.to_json(), I64(3));
assert_eq!(4_i8.to_json(), I64(4));
assert_eq!(5_i16.to_json(), I64(5));
assert_eq!(6_i32.to_json(), I64(6));
assert_eq!(7_i64.to_json(), I64(7));
assert_eq!(8_usize.to_json(), U64(8));
assert_eq!(9_u8.to_json(), U64(9));
assert_eq!(10_u16.to_json(), U64(10));
assert_eq!(11_u32.to_json(), U64(11));
assert_eq!(12_u64.to_json(), U64(12));
assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
assert_eq!(().to_json(), Null);
assert_eq!(f32::INFINITY.to_json(), Null);
assert_eq!(f64::NAN.to_json(), Null);
assert_eq!(true.to_json(), Boolean(true));
assert_eq!(false.to_json(), Boolean(false));
assert_eq!("abc".to_json(), String("abc".to_string()));
assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
assert_eq!((1_usize, 2_usize).to_json(), array2);
assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
assert_eq!([1_usize, 2_usize].to_json(), array2);
assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
let mut tree_map = BTreeMap::new();
tree_map.insert("a".to_string(), 1 as usize);
tree_map.insert("b".to_string(), 2);
assert_eq!(tree_map.to_json(), object);
let mut hash_map = HashMap::new();
hash_map.insert("a".to_string(), 1 as usize);
hash_map.insert("b".to_string(), 2);
assert_eq!(hash_map.to_json(), object);
assert_eq!(Some(15).to_json(), I64(15));
assert_eq!(Some(15 as usize).to_json(), U64(15));
assert_eq!(None::<isize>.to_json(), Null);
}
// Map keys that are neither strings nor numbers cannot be represented as
// JSON object keys; encoding must fail with `BadHashmapKey` rather than
// emit invalid JSON.
#[test]
fn test_encode_hashmap_with_arbitrary_key() {
    use std::collections::HashMap;
    #[derive(PartialEq, Eq, Hash, Encodable)]
    struct ArbitraryType(usize);
    let mut hm: HashMap<ArbitraryType, bool> = HashMap::new();
    hm.insert(ArbitraryType(1), true);
    let mut mem_buf = string::String::new();
    let mut encoder = Encoder::new(&mut mem_buf);
    let result = hm.encode(&mut encoder);
    match result.unwrap_err() {
        EncoderError::BadHashmapKey => (),
        _ => panic!("expected bad hash map key"),
    }
}

View File

@ -1207,10 +1207,6 @@ options! {
assert_incr_state: Option<String> = (None, parse_opt_string, [UNTRACKED],
"assert that the incremental cache is in given state: \
either `loaded` or `not-loaded`."),
ast_json: bool = (false, parse_bool, [UNTRACKED],
"print the AST as JSON and halt (default: no)"),
ast_json_noexpand: bool = (false, parse_bool, [UNTRACKED],
"print the pre-expansion AST as JSON and halt (default: no)"),
binary_dep_depinfo: bool = (false, parse_bool, [TRACKED],
"include artifacts (sysroot, crate dependencies) used during compilation in dep-info \
(default: no)"),

View File

@ -307,11 +307,8 @@ impl DefId {
impl<E: Encoder> Encodable<E> for DefId {
default fn encode(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_struct(false, |s| {
s.emit_struct_field("krate", true, |s| self.krate.encode(s))?;
s.emit_struct_field("index", false, |s| self.index.encode(s))
})
self.krate.encode(s)?;
self.index.encode(s)
}
}

View File

@ -196,24 +196,23 @@ impl Hash for RealFileName {
// an added assert statement
impl<S: Encoder> Encodable<S> for RealFileName {
fn encode(&self, encoder: &mut S) -> Result<(), S::Error> {
encoder.emit_enum(|encoder| match *self {
RealFileName::LocalPath(ref local_path) => {
encoder.emit_enum_variant("LocalPath", 0, 1, |encoder| {
encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?;
Ok(())
match *self {
RealFileName::LocalPath(ref local_path) => encoder.emit_enum_variant(0, |encoder| {
Ok({
local_path.encode(encoder)?;
})
}
}),
RealFileName::Remapped { ref local_path, ref virtual_name } => encoder
.emit_enum_variant("Remapped", 1, 2, |encoder| {
.emit_enum_variant(1, |encoder| {
// For privacy and build reproducibility, we must not embed host-dependent paths in artifacts
// if they have been remapped by --remap-path-prefix
assert!(local_path.is_none());
encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?;
encoder.emit_enum_variant_arg(false, |encoder| virtual_name.encode(encoder))?;
local_path.encode(encoder)?;
virtual_name.encode(encoder)?;
Ok(())
}),
})
}
}
}
@ -950,10 +949,8 @@ impl Default for Span {
impl<E: Encoder> Encodable<E> for Span {
default fn encode(&self, s: &mut E) -> Result<(), E::Error> {
let span = self.data();
s.emit_struct(false, |s| {
s.emit_struct_field("lo", true, |s| span.lo.encode(s))?;
s.emit_struct_field("hi", false, |s| span.hi.encode(s))
})
span.lo.encode(s)?;
span.hi.encode(s)
}
}
impl<D: Decoder> Decodable<D> for Span {
@ -1302,79 +1299,77 @@ pub struct SourceFile {
impl<S: Encoder> Encodable<S> for SourceFile {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_struct(false, |s| {
s.emit_struct_field("name", true, |s| self.name.encode(s))?;
s.emit_struct_field("src_hash", false, |s| self.src_hash.encode(s))?;
s.emit_struct_field("start_pos", false, |s| self.start_pos.encode(s))?;
s.emit_struct_field("end_pos", false, |s| self.end_pos.encode(s))?;
s.emit_struct_field("lines", false, |s| {
// We are always in `Lines` form by the time we reach here.
assert!(self.lines.borrow().is_lines());
self.lines(|lines| {
// Store the length.
s.emit_u32(lines.len() as u32)?;
self.name.encode(s)?;
self.src_hash.encode(s)?;
self.start_pos.encode(s)?;
self.end_pos.encode(s)?;
// Compute and store the difference list.
if lines.len() != 0 {
let max_line_length = if lines.len() == 1 {
0
} else {
lines
.array_windows()
.map(|&[fst, snd]| snd - fst)
.map(|bp| bp.to_usize())
.max()
.unwrap()
};
// We are always in `Lines` form by the time we reach here.
assert!(self.lines.borrow().is_lines());
self.lines(|lines| {
// Store the length.
s.emit_u32(lines.len() as u32)?;
let bytes_per_diff: usize = match max_line_length {
0..=0xFF => 1,
0x100..=0xFFFF => 2,
_ => 4,
};
// Compute and store the difference list.
if lines.len() != 0 {
let max_line_length = if lines.len() == 1 {
0
} else {
lines
.array_windows()
.map(|&[fst, snd]| snd - fst)
.map(|bp| bp.to_usize())
.max()
.unwrap()
};
// Encode the number of bytes used per diff.
s.emit_u8(bytes_per_diff as u8)?;
let bytes_per_diff: usize = match max_line_length {
0..=0xFF => 1,
0x100..=0xFFFF => 2,
_ => 4,
};
// Encode the first element.
lines[0].encode(s)?;
// Encode the number of bytes used per diff.
s.emit_u8(bytes_per_diff as u8)?;
// Encode the difference list.
let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
let num_diffs = lines.len() - 1;
let mut raw_diffs;
match bytes_per_diff {
1 => {
raw_diffs = Vec::with_capacity(num_diffs);
for diff in diff_iter {
raw_diffs.push(diff.0 as u8);
}
}
2 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
}
}
4 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u32).to_le_bytes());
}
}
_ => unreachable!(),
// Encode the first element.
lines[0].encode(s)?;
// Encode the difference list.
let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
let num_diffs = lines.len() - 1;
let mut raw_diffs;
match bytes_per_diff {
1 => {
raw_diffs = Vec::with_capacity(num_diffs);
for diff in diff_iter {
raw_diffs.push(diff.0 as u8);
}
s.emit_raw_bytes(&raw_diffs)?;
}
Ok(())
})
})?;
s.emit_struct_field("multibyte_chars", false, |s| self.multibyte_chars.encode(s))?;
s.emit_struct_field("non_narrow_chars", false, |s| self.non_narrow_chars.encode(s))?;
s.emit_struct_field("name_hash", false, |s| self.name_hash.encode(s))?;
s.emit_struct_field("normalized_pos", false, |s| self.normalized_pos.encode(s))?;
s.emit_struct_field("cnum", false, |s| self.cnum.encode(s))
})
2 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
}
}
4 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u32).to_le_bytes());
}
}
_ => unreachable!(),
}
s.emit_raw_bytes(&raw_diffs)?;
}
Ok(())
})?;
self.multibyte_chars.encode(s)?;
self.non_narrow_chars.encode(s)?;
self.name_hash.encode(s)?;
self.normalized_pos.encode(s)?;
self.cnum.encode(s)
}
}

View File

@ -6,6 +6,7 @@ edition = "2021"
[dependencies]
bitflags = "1.2.1"
tracing = "0.1"
serde_json = "1.0.59"
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }

View File

@ -1,6 +1,7 @@
pub use Integer::*;
pub use Primitive::*;
use crate::json::{Json, ToJson};
use crate::spec::Target;
use std::convert::{TryFrom, TryInto};
@ -13,7 +14,6 @@ use std::str::FromStr;
use rustc_data_structures::intern::Interned;
use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable_Generic;
use rustc_serialize::json::{Json, ToJson};
pub mod call;
@ -166,7 +166,8 @@ impl TargetDataLayout {
));
}
if dl.pointer_size.bits() != target.pointer_width.into() {
let target_pointer_width: u64 = target.pointer_width.into();
if dl.pointer_size.bits() != target_pointer_width {
return Err(format!(
"inconsistent target specification: \"data-layout\" claims \
pointers are {}-bit, while \"target-pointer-width\" is `{}`",
@ -574,7 +575,7 @@ impl Align {
}
/// A pair of alignments, ABI-mandated and preferred.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable_Generic)]
pub struct AbiAndPrefAlign {
pub abi: Align,

View File

@ -0,0 +1,91 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
pub use serde_json::Value as Json;
use serde_json::{Map, Number};
/// Conversion of a value into a [`Json`] (`serde_json::Value`) tree.
///
/// This is a lightweight replacement for the `ToJson` trait that used to live
/// in `rustc_serialize::json`; target-spec types implement it so they can be
/// serialized with `serde_json` when emitting target descriptions.
pub trait ToJson {
/// Builds and returns the JSON representation of `self`.
fn to_json(&self) -> Json;
}
impl ToJson for Json {
fn to_json(&self) -> Json {
self.clone()
}
}
// Generates `ToJson` impls for the listed integer types. Each generated impl
// wraps the (copied) value in a `serde_json::Number` via `Number::from`, which
// has lossless `From` conversions for all of these integer widths.
macro_rules! to_json_impl_num {
($($t:ty), +) => (
$(impl ToJson for $t {
fn to_json(&self) -> Json {
Json::Number(Number::from(*self))
}
})+
)
}
// Instantiate the impls for every integer type the target specs need.
to_json_impl_num! { isize, i8, i16, i32, i64, usize, u8, u16, u32, u64 }
/// Booleans map directly onto the JSON `true`/`false` literals.
impl ToJson for bool {
    fn to_json(&self) -> Json {
        let flag = *self;
        Json::Bool(flag)
    }
}
/// A string slice becomes a JSON string; this allocates an owned copy.
impl ToJson for str {
    fn to_json(&self) -> Json {
        Json::String(String::from(self))
    }
}
/// An owned string becomes a JSON string; the contents are cloned so the
/// original remains usable.
impl ToJson for String {
    fn to_json(&self) -> Json {
        Json::String(self.clone())
    }
}
/// A clone-on-write string becomes a JSON string, copying out of either the
/// borrowed or the owned variant.
impl<'a> ToJson for Cow<'a, str> {
    fn to_json(&self) -> Json {
        Json::String(self.as_ref().to_owned())
    }
}
impl<A: ToJson> ToJson for [A] {
fn to_json(&self) -> Json {
Json::Array(self.iter().map(|elt| elt.to_json()).collect())
}
}
/// A vector becomes a JSON array with one converted entry per element,
/// preserving the original order.
impl<A: ToJson> ToJson for Vec<A> {
    fn to_json(&self) -> Json {
        let mut items = Vec::with_capacity(self.len());
        for elt in self {
            items.push(elt.to_json());
        }
        Json::Array(items)
    }
}
impl<'a, A: ToJson> ToJson for Cow<'a, [A]>
where
[A]: ToOwned,
{
fn to_json(&self) -> Json {
Json::Array(self.iter().map(|elt| elt.to_json()).collect())
}
}
/// An ordered map becomes a JSON object: keys are stringified with
/// `ToString` and values converted with `ToJson`. `BTreeMap` iteration is
/// key-ordered, so the resulting object has deterministic insertion order.
impl<T: ToString, A: ToJson> ToJson for BTreeMap<T, A> {
    fn to_json(&self) -> Json {
        Json::Object(self.iter().map(|(key, value)| (key.to_string(), value.to_json())).collect())
    }
}
/// `None` maps to JSON `null`; `Some(v)` maps to the conversion of `v`.
impl<A: ToJson> ToJson for Option<A> {
    fn to_json(&self) -> Json {
        self.as_ref().map_or(Json::Null, |value| value.to_json())
    }
}

View File

@ -28,6 +28,7 @@ extern crate tracing;
pub mod abi;
pub mod asm;
pub mod json;
pub mod spec;
#[cfg(test)]

View File

@ -40,8 +40,8 @@
//! but not gcc's. As a result rustc cannot link with C++ static libraries (#36710)
//! when linking in self-contained mode.
use crate::json::{Json, ToJson};
use crate::spec::LinkOutputKind;
use rustc_serialize::json::{Json, ToJson};
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::str::FromStr;

View File

@ -35,11 +35,12 @@
//! to the list specified by the target, rather than replace.
use crate::abi::Endian;
use crate::json::{Json, ToJson};
use crate::spec::abi::{lookup as lookup_abi, Abi};
use crate::spec::crt_objects::{CrtObjects, CrtObjectsFallback};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_serialize::json::{Json, ToJson};
use rustc_span::symbol::{sym, Symbol};
use serde_json::Value;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::convert::TryFrom;
@ -211,7 +212,7 @@ impl ToJson for PanicStrategy {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Hash, Encodable, Decodable)]
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
pub enum RelroLevel {
Full,
Partial,
@ -255,7 +256,7 @@ impl ToJson for RelroLevel {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Hash, Encodable, Decodable)]
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
pub enum MergeFunctions {
Disabled,
Trampolines,
@ -548,7 +549,7 @@ impl StackProbeType {
let object = json.as_object().ok_or_else(|| "expected a JSON object")?;
let kind = object
.get("kind")
.and_then(|o| o.as_string())
.and_then(|o| o.as_str())
.ok_or_else(|| "expected `kind` to be a string")?;
match kind {
"none" => Ok(StackProbeType::None),
@ -592,11 +593,11 @@ impl ToJson for StackProbeType {
StackProbeType::Call => {
[(String::from("kind"), "call".to_json())].into_iter().collect()
}
StackProbeType::InlineOrCall { min_llvm_version_for_inline } => [
StackProbeType::InlineOrCall { min_llvm_version_for_inline: (maj, min, patch) } => [
(String::from("kind"), "inline-or-call".to_json()),
(
String::from("min-llvm-version-for-inline"),
min_llvm_version_for_inline.to_json(),
Json::Array(vec![maj.to_json(), min.to_json(), patch.to_json()]),
),
]
.into_iter()
@ -1682,7 +1683,7 @@ impl Target {
}
/// Loads a target descriptor from a JSON object.
pub fn from_json(mut obj: Json) -> Result<(Target, TargetWarnings), String> {
pub fn from_json(obj: Json) -> Result<(Target, TargetWarnings), String> {
// While ugly, this code must remain this way to retain
// compatibility with existing JSON fields and the internal
// expected naming of the Target and TargetOptions structs.
@ -1690,9 +1691,14 @@ impl Target {
// are round-tripped through this code to catch cases where
// the JSON parser is not updated to match the structs.
let mut obj = match obj {
Value::Object(obj) => obj,
_ => return Err("Expected JSON object for target")?,
};
let mut get_req_field = |name: &str| {
obj.remove_key(name)
.and_then(|j| Json::as_string(&j).map(str::to_string))
obj.remove(name)
.and_then(|j| j.as_str().map(str::to_string))
.ok_or_else(|| format!("Field {} in target specification is required", name))
};
@ -1711,31 +1717,31 @@ impl Target {
macro_rules! key {
($key_name:ident) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_string(&j).map(str::to_string).map(Cow::from)) {
if let Some(s) = obj.remove(&name).and_then(|s| s.as_str().map(str::to_string).map(Cow::from)) {
base.$key_name = s;
}
} );
($key_name:ident = $json_name:expr) => ( {
let name = $json_name;
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_string(&j).map(str::to_string).map(Cow::from)) {
if let Some(s) = obj.remove(name).and_then(|s| s.as_str().map(str::to_string).map(Cow::from)) {
base.$key_name = s;
}
} );
($key_name:ident, bool) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_boolean(&j)) {
if let Some(s) = obj.remove(&name).and_then(|b| b.as_bool()) {
base.$key_name = s;
}
} );
($key_name:ident, u64) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_u64(&j)) {
if let Some(s) = obj.remove(&name).and_then(|j| Json::as_u64(&j)) {
base.$key_name = s;
}
} );
($key_name:ident, Option<u32>) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_u64(&j)) {
if let Some(s) = obj.remove(&name).and_then(|b| b.as_u64()) {
if s < 1 || s > 5 {
return Err("Not a valid DWARF version number".into());
}
@ -1744,13 +1750,13 @@ impl Target {
} );
($key_name:ident, Option<u64>) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_u64(&j)) {
if let Some(s) = obj.remove(&name).and_then(|b| b.as_u64()) {
base.$key_name = Some(s);
}
} );
($key_name:ident, MergeFunctions) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<MergeFunctions>() {
Ok(mergefunc) => base.$key_name = mergefunc,
_ => return Some(Err(format!("'{}' is not a valid value for \
@ -1763,7 +1769,7 @@ impl Target {
} );
($key_name:ident, RelocModel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<RelocModel>() {
Ok(relocation_model) => base.$key_name = relocation_model,
_ => return Some(Err(format!("'{}' is not a valid relocation model. \
@ -1775,7 +1781,7 @@ impl Target {
} );
($key_name:ident, CodeModel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<CodeModel>() {
Ok(code_model) => base.$key_name = Some(code_model),
_ => return Some(Err(format!("'{}' is not a valid code model. \
@ -1787,7 +1793,7 @@ impl Target {
} );
($key_name:ident, TlsModel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<TlsModel>() {
Ok(tls_model) => base.$key_name = tls_model,
_ => return Some(Err(format!("'{}' is not a valid TLS model. \
@ -1799,7 +1805,7 @@ impl Target {
} );
($key_name:ident, PanicStrategy) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s {
"unwind" => base.$key_name = PanicStrategy::Unwind,
"abort" => base.$key_name = PanicStrategy::Abort,
@ -1812,7 +1818,7 @@ impl Target {
} );
($key_name:ident, RelroLevel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<RelroLevel>() {
Ok(level) => base.$key_name = level,
_ => return Some(Err(format!("'{}' is not a valid value for \
@ -1824,7 +1830,7 @@ impl Target {
} );
($key_name:ident, SplitDebuginfo) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<SplitDebuginfo>() {
Ok(level) => base.$key_name = level,
_ => return Some(Err(format!("'{}' is not a valid value for \
@ -1836,10 +1842,10 @@ impl Target {
} );
($key_name:ident, list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(j) = obj.remove_key(&name){
if let Some(v) = Json::as_array(&j) {
if let Some(j) = obj.remove(&name) {
if let Some(v) = j.as_array() {
base.$key_name = v.iter()
.map(|a| a.as_string().unwrap().to_string().into())
.map(|a| a.as_str().unwrap().to_string().into())
.collect();
} else {
incorrect_type.push(name)
@ -1848,10 +1854,10 @@ impl Target {
} );
($key_name:ident, opt_list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(j) = obj.remove_key(&name) {
if let Some(v) = Json::as_array(&j) {
if let Some(j) = obj.remove(&name) {
if let Some(v) = j.as_array() {
base.$key_name = Some(v.iter()
.map(|a| a.as_string().unwrap().to_string().into())
.map(|a| a.as_str().unwrap().to_string().into())
.collect());
} else {
incorrect_type.push(name)
@ -1860,15 +1866,15 @@ impl Target {
} );
($key_name:ident, optional) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(o) = obj.remove_key(&name[..]) {
if let Some(o) = obj.remove(&name) {
base.$key_name = o
.as_string()
.as_str()
.map(|s| s.to_string().into());
}
} );
($key_name:ident, LldFlavor) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
if let Some(flavor) = LldFlavor::from_str(&s) {
base.$key_name = flavor;
} else {
@ -1882,7 +1888,7 @@ impl Target {
} );
($key_name:ident, LinkerFlavor) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match LinkerFlavor::from_str(s) {
Some(linker_flavor) => base.$key_name = linker_flavor,
_ => return Some(Err(format!("'{}' is not a valid value for linker-flavor. \
@ -1893,7 +1899,7 @@ impl Target {
} );
($key_name:ident, StackProbeType) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| match StackProbeType::from_json(&o) {
obj.remove(&name).and_then(|o| match StackProbeType::from_json(&o) {
Ok(v) => {
base.$key_name = v;
Some(Ok(()))
@ -1905,10 +1911,10 @@ impl Target {
} );
($key_name:ident, SanitizerSet) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(o) = obj.remove_key(&name[..]) {
if let Some(o) = obj.remove(&name) {
if let Some(a) = o.as_array() {
for s in a {
base.$key_name |= match s.as_string() {
base.$key_name |= match s.as_str() {
Some("address") => SanitizerSet::ADDRESS,
Some("cfi") => SanitizerSet::CFI,
Some("leak") => SanitizerSet::LEAK,
@ -1929,7 +1935,7 @@ impl Target {
($key_name:ident, crt_objects_fallback) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<CrtObjectsFallback>() {
Ok(fallback) => base.$key_name = Some(fallback),
_ => return Some(Err(format!("'{}' is not a valid CRT objects fallback. \
@ -1940,7 +1946,7 @@ impl Target {
} );
($key_name:ident, link_objects) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(val) = obj.remove_key(&name[..]) {
if let Some(val) = obj.remove(&name) {
let obj = val.as_object().ok_or_else(|| format!("{}: expected a \
JSON object with fields per CRT object kind.", name))?;
let mut args = CrtObjects::new();
@ -1955,7 +1961,7 @@ impl Target {
format!("{}.{}: expected a JSON array", name, k)
)?.iter().enumerate()
.map(|(i,s)| {
let s = s.as_string().ok_or_else(||
let s = s.as_str().ok_or_else(||
format!("{}.{}[{}]: expected a JSON string", name, k, i))?;
Ok(s.to_string().into())
})
@ -1968,7 +1974,7 @@ impl Target {
} );
($key_name:ident, link_args) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(val) = obj.remove_key(&name[..]) {
if let Some(val) = obj.remove(&name) {
let obj = val.as_object().ok_or_else(|| format!("{}: expected a \
JSON object with fields per linker-flavor.", name))?;
let mut args = LinkArgs::new();
@ -1982,7 +1988,7 @@ impl Target {
format!("{}.{}: expected a JSON array", name, k)
)?.iter().enumerate()
.map(|(i,s)| {
let s = s.as_string().ok_or_else(||
let s = s.as_str().ok_or_else(||
format!("{}.{}[{}]: expected a JSON string", name, k, i))?;
Ok(s.to_string().into())
})
@ -1995,10 +2001,10 @@ impl Target {
} );
($key_name:ident, env) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(o) = obj.remove_key(&name[..]) {
if let Some(o) = obj.remove(&name) {
if let Some(a) = o.as_array() {
for o in a {
if let Some(s) = o.as_string() {
if let Some(s) = o.as_str() {
let p = s.split('=').collect::<Vec<_>>();
if p.len() == 2 {
let k = p[0].to_string();
@ -2014,7 +2020,7 @@ impl Target {
} );
($key_name:ident, Option<Abi>) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match lookup_abi(s) {
Some(abi) => base.$key_name = Some(abi),
_ => return Some(Err(format!("'{}' is not a valid value for abi", s))),
@ -2023,28 +2029,28 @@ impl Target {
})).unwrap_or(Ok(()))
} );
($key_name:ident, TargetFamilies) => ( {
if let Some(value) = obj.remove_key("target-family") {
if let Some(v) = Json::as_array(&value) {
if let Some(value) = obj.remove("target-family") {
if let Some(v) = value.as_array() {
base.$key_name = v.iter()
.map(|a| a.as_string().unwrap().to_string().into())
.map(|a| a.as_str().unwrap().to_string().into())
.collect();
} else if let Some(v) = Json::as_string(&value) {
} else if let Some(v) = value.as_str() {
base.$key_name = vec![v.to_string().into()].into();
}
}
} );
}
if let Some(j) = obj.remove_key("target-endian") {
if let Some(s) = Json::as_string(&j) {
if let Some(j) = obj.remove("target-endian") {
if let Some(s) = j.as_str() {
base.endian = s.parse()?;
} else {
incorrect_type.push("target-endian".into())
}
}
if let Some(fp) = obj.remove_key("frame-pointer") {
if let Some(s) = Json::as_string(&fp) {
if let Some(fp) = obj.remove("frame-pointer") {
if let Some(s) = fp.as_str() {
base.frame_pointer = s
.parse()
.map_err(|()| format!("'{}' is not a valid value for frame-pointer", s))?;
@ -2156,8 +2162,8 @@ impl Target {
// This can cause unfortunate ICEs later down the line.
return Err("may not set is_builtin for targets not built-in".into());
}
// Each field should have been read using `Json::remove_key` so any keys remaining are unused.
let remaining_keys = obj.as_object().ok_or("Expected JSON object for target")?.keys();
// Each field should have been read using `Json::remove` so any keys remaining are unused.
let remaining_keys = obj.keys();
Ok((
base,
TargetWarnings { unused_fields: remaining_keys.cloned().collect(), incorrect_type },
@ -2189,13 +2195,12 @@ impl Target {
target_triple: &TargetTriple,
sysroot: &Path,
) -> Result<(Target, TargetWarnings), String> {
use rustc_serialize::json;
use std::env;
use std::fs;
fn load_file(path: &Path) -> Result<(Target, TargetWarnings), String> {
let contents = fs::read_to_string(path).map_err(|e| e.to_string())?;
let obj = json::from_str(&contents).map_err(|e| e.to_string())?;
let obj = serde_json::from_str(&contents).map_err(|e| e.to_string())?;
Target::from_json(obj)
}
@ -2248,7 +2253,7 @@ impl Target {
impl ToJson for Target {
fn to_json(&self) -> Json {
let mut d = BTreeMap::new();
let mut d = serde_json::Map::new();
let default: TargetOptions = Default::default();
macro_rules! target_val {

View File

@ -1,10 +1,8 @@
use crate::spec::Target;
use rustc_serialize::json::Json;
use std::str::FromStr;
#[test]
fn report_unused_fields() {
let json = Json::from_str(
let json = serde_json::from_str(
r#"
{
"arch": "powerpc64",
@ -23,7 +21,7 @@ fn report_unused_fields() {
#[test]
fn report_incorrect_json_type() {
let json = Json::from_str(
let json = serde_json::from_str(
r#"
{
"arch": "powerpc64",
@ -42,7 +40,7 @@ fn report_incorrect_json_type() {
#[test]
fn no_warnings_for_valid_target() {
let json = Json::from_str(
let json = serde_json::from_str(
r#"
{
"arch": "powerpc64",

View File

@ -716,118 +716,116 @@ where
I::AllocId: Encodable<E>,
{
fn encode(&self, e: &mut E) -> Result<(), <E as rustc_serialize::Encoder>::Error> {
rustc_serialize::Encoder::emit_enum(e, |e| {
let disc = discriminant(self);
match self {
Bool => e.emit_enum_variant("Bool", disc, 0, |_| Ok(())),
Char => e.emit_enum_variant("Char", disc, 0, |_| Ok(())),
Int(i) => e.emit_enum_variant("Int", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| i.encode(e))?;
Ok(())
}),
Uint(u) => e.emit_enum_variant("Uint", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| u.encode(e))?;
Ok(())
}),
Float(f) => e.emit_enum_variant("Float", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| f.encode(e))?;
Ok(())
}),
Adt(adt, substs) => e.emit_enum_variant("Adt", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| adt.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
Foreign(def_id) => e.emit_enum_variant("Foreign", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
Ok(())
}),
Str => e.emit_enum_variant("Str", disc, 0, |_| Ok(())),
Array(t, c) => e.emit_enum_variant("Array", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| t.encode(e))?;
e.emit_enum_variant_arg(false, |e| c.encode(e))?;
Ok(())
}),
Slice(t) => e.emit_enum_variant("Slice", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| t.encode(e))?;
Ok(())
}),
RawPtr(tam) => e.emit_enum_variant("RawPtr", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| tam.encode(e))?;
Ok(())
}),
Ref(r, t, m) => e.emit_enum_variant("Ref", disc, 3, |e| {
e.emit_enum_variant_arg(true, |e| r.encode(e))?;
e.emit_enum_variant_arg(false, |e| t.encode(e))?;
e.emit_enum_variant_arg(false, |e| m.encode(e))?;
Ok(())
}),
FnDef(def_id, substs) => e.emit_enum_variant("FnDef", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
FnPtr(polyfnsig) => e.emit_enum_variant("FnPtr", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| polyfnsig.encode(e))?;
Ok(())
}),
Dynamic(l, r) => e.emit_enum_variant("Dynamic", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| l.encode(e))?;
e.emit_enum_variant_arg(false, |e| r.encode(e))?;
Ok(())
}),
Closure(def_id, substs) => e.emit_enum_variant("Closure", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
Generator(def_id, substs, m) => e.emit_enum_variant("Generator", disc, 3, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
e.emit_enum_variant_arg(false, |e| m.encode(e))?;
Ok(())
}),
GeneratorWitness(b) => e.emit_enum_variant("GeneratorWitness", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| b.encode(e))?;
Ok(())
}),
Never => e.emit_enum_variant("Never", disc, 0, |_| Ok(())),
Tuple(substs) => e.emit_enum_variant("Tuple", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| substs.encode(e))?;
Ok(())
}),
Projection(p) => e.emit_enum_variant("Projection", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| p.encode(e))?;
Ok(())
}),
Opaque(def_id, substs) => e.emit_enum_variant("Opaque", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
Param(p) => e.emit_enum_variant("Param", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| p.encode(e))?;
Ok(())
}),
Bound(d, b) => e.emit_enum_variant("Bound", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| d.encode(e))?;
e.emit_enum_variant_arg(false, |e| b.encode(e))?;
Ok(())
}),
Placeholder(p) => e.emit_enum_variant("Placeholder", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| p.encode(e))?;
Ok(())
}),
Infer(i) => e.emit_enum_variant("Infer", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| i.encode(e))?;
Ok(())
}),
Error(d) => e.emit_enum_variant("Error", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| d.encode(e))?;
Ok(())
}),
}
})
let disc = discriminant(self);
match self {
Bool => e.emit_enum_variant(disc, |_| Ok(())),
Char => e.emit_enum_variant(disc, |_| Ok(())),
Int(i) => e.emit_enum_variant(disc, |e| {
i.encode(e)?;
Ok(())
}),
Uint(u) => e.emit_enum_variant(disc, |e| {
u.encode(e)?;
Ok(())
}),
Float(f) => e.emit_enum_variant(disc, |e| {
f.encode(e)?;
Ok(())
}),
Adt(adt, substs) => e.emit_enum_variant(disc, |e| {
adt.encode(e)?;
substs.encode(e)?;
Ok(())
}),
Foreign(def_id) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
Ok(())
}),
Str => e.emit_enum_variant(disc, |_| Ok(())),
Array(t, c) => e.emit_enum_variant(disc, |e| {
t.encode(e)?;
c.encode(e)?;
Ok(())
}),
Slice(t) => e.emit_enum_variant(disc, |e| {
t.encode(e)?;
Ok(())
}),
RawPtr(tam) => e.emit_enum_variant(disc, |e| {
tam.encode(e)?;
Ok(())
}),
Ref(r, t, m) => e.emit_enum_variant(disc, |e| {
r.encode(e)?;
t.encode(e)?;
m.encode(e)?;
Ok(())
}),
FnDef(def_id, substs) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
Ok(())
}),
FnPtr(polyfnsig) => e.emit_enum_variant(disc, |e| {
polyfnsig.encode(e)?;
Ok(())
}),
Dynamic(l, r) => e.emit_enum_variant(disc, |e| {
l.encode(e)?;
r.encode(e)?;
Ok(())
}),
Closure(def_id, substs) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
Ok(())
}),
Generator(def_id, substs, m) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
m.encode(e)?;
Ok(())
}),
GeneratorWitness(b) => e.emit_enum_variant(disc, |e| {
b.encode(e)?;
Ok(())
}),
Never => e.emit_enum_variant(disc, |_| Ok(())),
Tuple(substs) => e.emit_enum_variant(disc, |e| {
substs.encode(e)?;
Ok(())
}),
Projection(p) => e.emit_enum_variant(disc, |e| {
p.encode(e)?;
Ok(())
}),
Opaque(def_id, substs) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
Ok(())
}),
Param(p) => e.emit_enum_variant(disc, |e| {
p.encode(e)?;
Ok(())
}),
Bound(d, b) => e.emit_enum_variant(disc, |e| {
d.encode(e)?;
b.encode(e)?;
Ok(())
}),
Placeholder(p) => e.emit_enum_variant(disc, |e| {
p.encode(e)?;
Ok(())
}),
Infer(i) => e.emit_enum_variant(disc, |e| {
i.encode(e)?;
Ok(())
}),
Error(d) => e.emit_enum_variant(disc, |e| {
d.encode(e)?;
Ok(())
}),
}
}
}

View File

@ -3,8 +3,8 @@
#![allow(unused_imports)]
#![feature(rustc_private)]
extern crate rustc_serialize;
use rustc_serialize::json::Object;
extern crate libc;
use libc::c_void;
pub fn main() {
println!("Hello world!");

View File

@ -3,42 +3,80 @@
#![allow(unused_must_use)]
#![allow(dead_code)]
#![allow(unused_imports)]
#![feature(rustc_private)]
extern crate rustc_macros;
extern crate rustc_serialize;
use std::fmt;
use std::io::prelude::*;
use std::io::Cursor;
use std::slice;
use std::marker::PhantomData;
trait Encoder {
type Error;
}
trait Encodable<S: Encoder> {
fn encode(&self, s: &mut S) -> Result<(), S::Error>;
}
struct JsonEncoder<'a>(PhantomData<&'a mut ()>);
impl Encoder for JsonEncoder<'_> {
type Error = ();
}
struct AsJson<'a, T> {
inner: &'a T,
}
impl<'a, T: for<'r> Encodable<JsonEncoder<'r>>> fmt::Display for AsJson<'a, T> {
/// Encodes a json value into a string
fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
Ok(())
}
}
fn as_json<T>(t: &T) -> AsJson<'_, T> {
AsJson { inner: t }
}
struct OpaqueEncoder(Vec<u8>);
impl Encoder for OpaqueEncoder {
type Error = ();
}
use rustc_macros::Encodable;
use rustc_serialize::json;
use rustc_serialize::opaque;
use rustc_serialize::{Encodable, Encoder};
#[derive(Encodable)]
struct Foo {
baz: bool,
}
#[derive(Encodable)]
impl<S: Encoder> Encodable<S> for Foo {
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
struct Bar {
froboz: usize,
}
impl<S: Encoder> Encodable<S> for Bar {
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
enum WireProtocol {
JSON,
Opaque,
// ...
}
fn encode_json<T: for<'a> Encodable<json::Encoder<'a>>>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", json::as_json(val));
fn encode_json<T: for<'a> Encodable<JsonEncoder<'a>>>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", as_json(val));
}
fn encode_opaque<T: Encodable<opaque::Encoder>>(val: &T, wr: Vec<u8>) {
let mut encoder = opaque::Encoder::new(wr);
fn encode_opaque<T: Encodable<OpaqueEncoder>>(val: &T, wr: Vec<u8>) {
let mut encoder = OpaqueEncoder(wr);
val.encode(&mut encoder);
}

View File

@ -3,21 +3,48 @@
#![allow(unused_imports)]
#![allow(unused_must_use)]
// pretty-expanded FIXME #23616
#![feature(rustc_private)]
extern crate rustc_serialize;
use rustc_serialize::json;
use rustc_serialize::{Encodable, Encoder};
use std::fmt;
use std::marker::PhantomData;
struct Foo<T: for<'a> Encodable<json::Encoder<'a>>> {
trait Encoder {
type Error;
}
trait Encodable<S: Encoder> {
fn encode(&self, s: &mut S) -> Result<(), S::Error>;
}
impl<S: Encoder> Encodable<S> for i32 {
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
struct JsonEncoder<'a>(PhantomData<&'a mut ()>);
impl Encoder for JsonEncoder<'_> {
type Error = ();
}
fn encode_json<T: for<'r> Encodable<JsonEncoder<'r>>>(
object: &T,
) -> Result<String, ()> {
let s = String::new();
{
let mut encoder = JsonEncoder(PhantomData);
object.encode(&mut encoder)?;
}
Ok(s)
}
struct Foo<T: for<'a> Encodable<JsonEncoder<'a>>> {
v: T,
}
impl<T: for<'a> Encodable<json::Encoder<'a>>> Drop for Foo<T> {
impl<T: for<'a> Encodable<JsonEncoder<'a>>> Drop for Foo<T> {
fn drop(&mut self) {
json::encode(&self.v);
encode_json(&self.v);
}
}

View File

@ -2,27 +2,38 @@
#![allow(non_camel_case_types)]
#![allow(dead_code)]
#![feature(rustc_private)]
extern crate rustc_serialize;
use std::collections::HashMap;
use rustc_serialize::json::{self, Json};
use std::collections::{BTreeMap, HashMap};
use std::option;
#[derive(Clone, Debug)]
enum Json {
I64(i64),
U64(u64),
F64(f64),
String(String),
Boolean(bool),
Array(Array),
Object(Object),
Null,
}
type Array = Vec<Json>;
type Object = BTreeMap<String, Json>;
enum object {
bool_value(bool),
int_value(i64),
}
fn lookup(table: json::Object, key: String, default: String) -> String
fn lookup(table: Object, key: String, default: String) -> String
{
match table.get(&key) {
option::Option::Some(&Json::String(ref s)) => {
s.to_string()
}
option::Option::Some(value) => {
println!("{} was expected to be a string but is a {}", key, value);
println!("{} was expected to be a string but is a {:?}", key, value);
default
}
option::Option::None => {
@ -31,7 +42,7 @@ fn lookup(table: json::Object, key: String, default: String) -> String
}
}
fn add_interface(_store: isize, managed_ip: String, data: json::Json) -> (String, object)
fn add_interface(_store: isize, managed_ip: String, data: Json) -> (String, object)
{
match &data {
&Json::Object(ref interface) => {
@ -43,13 +54,13 @@ fn add_interface(_store: isize, managed_ip: String, data: json::Json) -> (String
(label, object::bool_value(false))
}
_ => {
println!("Expected dict for {} interfaces, found {}", managed_ip, data);
println!("Expected dict for {} interfaces, found {:?}", managed_ip, data);
("gnos:missing-interface".to_string(), object::bool_value(true))
}
}
}
fn add_interfaces(store: isize, managed_ip: String, device: HashMap<String, json::Json>)
fn add_interfaces(store: isize, managed_ip: String, device: HashMap<String, Json>)
-> Vec<(String, object)> {
match device["interfaces"] {
Json::Array(ref interfaces) =>
@ -60,7 +71,7 @@ fn add_interfaces(store: isize, managed_ip: String, device: HashMap<String, json
}
_ =>
{
println!("Expected list for {} interfaces, found {}", managed_ip,
println!("Expected list for {} interfaces, found {:?}", managed_ip,
device["interfaces"]);
Vec::new()
}

View File

@ -1,56 +0,0 @@
// Test that AST json serialization doesn't ICE (#63728).
// revisions: expand noexpand
//[expand] compile-flags: -Zast-json
//[noexpand] compile-flags: -Zast-json-noexpand
// check-pass
// dont-check-compiler-stdout - don't check for any AST change.
enum V {
A(i32),
B { f: [i64; 3 + 4] }
}
trait X {
type Output;
fn read(&self) -> Self::Output;
fn write(&mut self, _: Self::Output);
}
macro_rules! call_println {
($y:ident) => { println!("{}", $y) }
}
fn main() {
let x: (i32) = 35;
let y = x as i64<> + 5;
call_println!(y);
struct A;
}
// Regressions tests for issues #78398 and #78510 (captured tokens in associated and foreign items)
struct S;
macro_rules! mac_extern {
($i:item) => {
extern "C" { $i }
}
}
macro_rules! mac_assoc {
($i:item) => {
impl S { $i }
trait Bar { $i }
}
}
mac_extern! {
fn foo();
}
mac_assoc! {
fn foo() {}
}

View File

@ -1,10 +0,0 @@
// Check that AST json printing works.
#![crate_type = "lib"]
// check-pass
// compile-flags: -Zast-json-noexpand
// normalize-stdout-test ":\d+" -> ":0"
// Only include a single item to reduce how often the test output needs
// updating.
extern crate core;

View File

@ -1 +0,0 @@
{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"variant":"Ast","fields":[{"id":0,"kind":{"variant":"Lit","fields":[{"token":{"kind":"Str","symbol":"lib","suffix":null},"kind":{"variant":"Str","fields":["lib","Cooked"]},"span":{"lo":0,"hi":0}}]},"span":{"lo":0,"hi":0},"attrs":{"0":null},"tokens":{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}}]}]},"tokens":null},{"0":[[{"variant":"Token","fields":[{"kind":"Pound","span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Not","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Delimited","fields":[{"open":{"lo":0,"hi":0},"close":{"lo":0,"hi":0}},"Bracket",{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate_type",false]},"span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":"Eq","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"Alone"]]}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"spans":{"inner_span":{"lo":0,"hi":0},"inject_use_span":{"lo":0,"hi":0}},"id":0,"is_placeholder":false}

View File

@ -1,10 +0,0 @@
// Check that AST json printing works.
#![crate_type = "lib"]
// check-pass
// compile-flags: -Zast-json
// normalize-stdout-test ":\d+" -> ":0"
// Only include a single item to reduce how often the test output needs
// updating.
extern crate core;

View File

@ -1 +0,0 @@
{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"variant":"Ast","fields":[{"id":0,"kind":{"variant":"Lit","fields":[{"token":{"kind":"Str","symbol":"lib","suffix":null},"kind":{"variant":"Str","fields":["lib","Cooked"]},"span":{"lo":0,"hi":0}}]},"span":{"lo":0,"hi":0},"attrs":{"0":null},"tokens":{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}}]}]},"tokens":null},{"0":[[{"variant":"Token","fields":[{"kind":"Pound","span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Not","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Delimited","fields":[{"open":{"lo":0,"hi":0},"close":{"lo":0,"hi":0}},"Bracket",{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate_type",false]},"span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":"Eq","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"Alone"]]}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null},null]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"
prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"rust_2015","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null},null]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"spans":{"inner_span":{"lo":0,"hi":0},"inject_use_span":{"lo":0,"hi":0}},"id":0,"is_placeholder":false}