mirror of https://github.com/rust-lang/rust.git
synced 2024-11-25 08:13:41 +00:00

Auto merge of #131573 - tgross35:rollup-r7l182a, r=tgross35 (commit fb20e4d3b9)

Rollup of 7 pull requests. Successful merges:

- #130078 (rustdoc-json: change item ID's repr from a string to an int)
- #131065 (Port sort-research-rs test suite to Rust stdlib tests)
- #131109 (Stabilize `debug_more_non_exhaustive`)
- #131287 (stabilize const_result)
- #131463 (Stabilise `const_char_encode_utf8`.)
- #131543 (coverage: Remove code related to LLVM 17)
- #131552 (RustWrapper: adapt for rename of Intrinsic::getDeclaration)

r? `@ghost`
`@rustbot` modify labels: rollup
@@ -14,29 +14,20 @@ use crate::coverageinfo::ffi::CounterMappingRegion;
 use crate::coverageinfo::map_data::{FunctionCoverage, FunctionCoverageCollector};
 use crate::{coverageinfo, llvm};
 
-/// Generates and exports the Coverage Map.
+/// Generates and exports the coverage map, which is embedded in special
+/// linker sections in the final binary.
 ///
-/// Rust Coverage Map generation supports LLVM Coverage Mapping Format versions
-/// 6 and 7 (encoded as 5 and 6 respectively), as described at
-/// [LLVM Code Coverage Mapping Format](https://github.com/rust-lang/llvm-project/blob/rustc/18.0-2024-02-13/llvm/docs/CoverageMappingFormat.rst).
-///
-/// These versions are supported by the LLVM coverage tools (`llvm-profdata` and `llvm-cov`)
-/// distributed in the `llvm-tools-preview` rustup component.
-///
-/// Consequently, Rust's bundled version of Clang also generates Coverage Maps compliant with
-/// the same version. Clang's implementation of Coverage Map generation was referenced when
-/// implementing this Rust version, and though the format documentation is very explicit and
-/// detailed, some undocumented details in Clang's implementation (that may or may not be important)
-/// were also replicated for Rust's Coverage Map.
+/// Those sections are then read and understood by LLVM's `llvm-cov` tool,
+/// which is distributed in the `llvm-tools` rustup component.
 pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) {
     let tcx = cx.tcx;
 
     // Ensure that LLVM is using a version of the coverage mapping format that
     // agrees with our Rust-side code. Expected versions (encoded as n-1) are:
-    // - `CovMapVersion::Version6` (5) used by LLVM 13-17
-    // - `CovMapVersion::Version7` (6) used by LLVM 18
+    // - `CovMapVersion::Version7` (6) used by LLVM 18-19
     let covmap_version = {
         let llvm_covmap_version = coverageinfo::mapping_version();
-        let expected_versions = 5..=6;
+        let expected_versions = 6..=6;
         assert!(
             expected_versions.contains(&llvm_covmap_version),
             "Coverage mapping version exposed by `llvm-wrapper` is out of sync; \
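
The "encoded as n-1" convention above is easy to misread; here is a minimal sketch, using a stand-in enum rather than the compiler's actual `CovMapVersion` type, of how format version 7 lines up with the accepted range:

    // Stand-in enum for illustration only: the value embedded in the binary
    // is the coverage mapping format version minus one.
    enum CovMapVersion {
        Version7 = 6, // coverage mapping format version 7 (LLVM 18/19)
    }

    fn main() {
        let llvm_covmap_version = CovMapVersion::Version7 as u32;
        let expected_versions = 6..=6; // same check as in `finalize` above
        assert!(expected_versions.contains(&llvm_covmap_version));
    }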
@@ -183,7 +183,7 @@ extern "C" void LLVMRustCoverageWriteMappingToBuffer(
            RustMappingRegions, NumMappingRegions)) {
     MappingRegions.emplace_back(
         fromRust(Region.Count), fromRust(Region.FalseCount),
-#if LLVM_VERSION_GE(18, 0) && LLVM_VERSION_LT(19, 0)
+#if LLVM_VERSION_LT(19, 0)
         coverage::CounterMappingRegion::MCDCParameters{},
 #endif
         Region.FileID, Region.ExpandedFileID, // File IDs, then region info.
@@ -1533,27 +1533,40 @@ extern "C" LLVMValueRef LLVMRustBuildCall(LLVMBuilderRef B, LLVMTypeRef Ty,
 
 extern "C" LLVMValueRef
 LLVMRustGetInstrProfIncrementIntrinsic(LLVMModuleRef M) {
+#if LLVM_VERSION_GE(20, 0)
+  return wrap(llvm::Intrinsic::getOrInsertDeclaration(
+      unwrap(M), llvm::Intrinsic::instrprof_increment));
+#else
   return wrap(llvm::Intrinsic::getDeclaration(
       unwrap(M), llvm::Intrinsic::instrprof_increment));
+#endif
 }
 
 extern "C" LLVMValueRef
 LLVMRustGetInstrProfMCDCParametersIntrinsic(LLVMModuleRef M) {
-#if LLVM_VERSION_GE(19, 0)
-  return wrap(llvm::Intrinsic::getDeclaration(
-      unwrap(M), llvm::Intrinsic::instrprof_mcdc_parameters));
-#else
+#if LLVM_VERSION_LT(19, 0)
   report_fatal_error("LLVM 19.0 is required for mcdc intrinsic functions");
 #endif
+#if LLVM_VERSION_GE(20, 0)
+  return wrap(llvm::Intrinsic::getOrInsertDeclaration(
+      unwrap(M), llvm::Intrinsic::instrprof_mcdc_parameters));
+#else
+  return wrap(llvm::Intrinsic::getDeclaration(
+      unwrap(M), llvm::Intrinsic::instrprof_mcdc_parameters));
+#endif
 }
 
 extern "C" LLVMValueRef
 LLVMRustGetInstrProfMCDCTVBitmapUpdateIntrinsic(LLVMModuleRef M) {
-#if LLVM_VERSION_GE(19, 0)
-  return wrap(llvm::Intrinsic::getDeclaration(
-      unwrap(M), llvm::Intrinsic::instrprof_mcdc_tvbitmap_update));
-#else
+#if LLVM_VERSION_LT(19, 0)
   report_fatal_error("LLVM 19.0 is required for mcdc intrinsic functions");
 #endif
+#if LLVM_VERSION_GE(20, 0)
+  return wrap(llvm::Intrinsic::getOrInsertDeclaration(
+      unwrap(M), llvm::Intrinsic::instrprof_mcdc_tvbitmap_update));
+#else
+  return wrap(llvm::Intrinsic::getDeclaration(
+      unwrap(M), llvm::Intrinsic::instrprof_mcdc_tvbitmap_update));
+#endif
 }
 
@@ -19,20 +19,6 @@ use core::cmp::Ordering::{self, Less};
 use core::mem::{self, MaybeUninit};
 #[cfg(not(no_global_oom_handling))]
 use core::ptr;
-#[cfg(not(no_global_oom_handling))]
-use core::slice::sort;
-
-use crate::alloc::Allocator;
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::Global;
-#[cfg(not(no_global_oom_handling))]
-use crate::borrow::ToOwned;
-use crate::boxed::Box;
-use crate::vec::Vec;
-
-#[cfg(test)]
-mod tests;
 
 #[unstable(feature = "array_chunks", issue = "74985")]
 pub use core::slice::ArrayChunks;
 #[unstable(feature = "array_chunks", issue = "74985")]
@@ -43,6 +29,8 @@ pub use core::slice::ArrayWindows;
 pub use core::slice::EscapeAscii;
 #[stable(feature = "slice_get_slice", since = "1.28.0")]
 pub use core::slice::SliceIndex;
+#[cfg(not(no_global_oom_handling))]
+use core::slice::sort;
 #[stable(feature = "slice_group_by", since = "1.77.0")]
 pub use core::slice::{ChunkBy, ChunkByMut};
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -83,6 +71,14 @@ pub use hack::into_vec;
 #[cfg(test)]
 pub use hack::to_vec;
 
+use crate::alloc::Allocator;
+#[cfg(not(no_global_oom_handling))]
+use crate::alloc::Global;
+#[cfg(not(no_global_oom_handling))]
+use crate::borrow::ToOwned;
+use crate::boxed::Box;
+use crate::vec::Vec;
+
 // HACK(japaric): With cfg(test) `impl [T]` is not available, these three
 // functions are actually methods that are in `impl [T]` but not in
 // `core::slice::SliceExt` - we need to supply these functions for the
@@ -1,369 +0,0 @@
-use core::cell::Cell;
-use core::cmp::Ordering::{self, Equal, Greater, Less};
-use core::convert::identity;
-use core::sync::atomic::AtomicUsize;
-use core::sync::atomic::Ordering::Relaxed;
-use core::{fmt, mem};
-use std::panic;
-
-use rand::distributions::Standard;
-use rand::prelude::*;
-use rand::{Rng, RngCore};
-
-use crate::borrow::ToOwned;
-use crate::rc::Rc;
-use crate::string::ToString;
-use crate::test_helpers::test_rng;
-use crate::vec::Vec;
-
-macro_rules! do_test {
-    ($input:ident, $func:ident) => {
-        let len = $input.len();
-
-        // Work out the total number of comparisons required to sort
-        // this array...
-        let mut count = 0usize;
-        $input.to_owned().$func(|a, b| {
-            count += 1;
-            a.cmp(b)
-        });
-
-        // ... and then panic on each and every single one.
-        for panic_countdown in 0..count {
-            // Refresh the counters.
-            VERSIONS.store(0, Relaxed);
-            for i in 0..len {
-                DROP_COUNTS[i].store(0, Relaxed);
-            }
-
-            let v = $input.to_owned();
-            let _ = panic::catch_unwind(move || {
-                let mut v = v;
-                let mut panic_countdown = panic_countdown;
-                v.$func(|a, b| {
-                    if panic_countdown == 0 {
-                        SILENCE_PANIC.with(|s| s.set(true));
-                        panic!();
-                    }
-                    panic_countdown -= 1;
-                    a.cmp(b)
-                })
-            });
-
-            // Check that the number of things dropped is exactly
-            // what we expect (i.e., the contents of `v`).
-            for (i, c) in DROP_COUNTS.iter().enumerate().take(len) {
-                let count = c.load(Relaxed);
-                assert!(count == 1, "found drop count == {} for i == {}, len == {}", count, i, len);
-            }
-
-            // Check that the most recent versions of values were dropped.
-            assert_eq!(VERSIONS.load(Relaxed), 0);
-        }
-    };
-}
-
-const MAX_LEN: usize = 80;
-
-static DROP_COUNTS: [AtomicUsize; MAX_LEN] = [
-    // FIXME(RFC 1109): AtomicUsize is not Copy.
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-    AtomicUsize::new(0),
-];
-
-static VERSIONS: AtomicUsize = AtomicUsize::new(0);
-
-#[derive(Clone, Eq)]
-struct DropCounter {
-    x: u32,
-    id: usize,
-    version: Cell<usize>,
-}
-
-impl PartialEq for DropCounter {
-    fn eq(&self, other: &Self) -> bool {
-        self.partial_cmp(other) == Some(Ordering::Equal)
-    }
-}
-
-impl PartialOrd for DropCounter {
-    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        self.version.set(self.version.get() + 1);
-        other.version.set(other.version.get() + 1);
-        VERSIONS.fetch_add(2, Relaxed);
-        self.x.partial_cmp(&other.x)
-    }
-}
-
-impl Ord for DropCounter {
-    fn cmp(&self, other: &Self) -> Ordering {
-        self.partial_cmp(other).unwrap()
-    }
-}
-
-impl Drop for DropCounter {
-    fn drop(&mut self) {
-        DROP_COUNTS[self.id].fetch_add(1, Relaxed);
-        VERSIONS.fetch_sub(self.version.get(), Relaxed);
-    }
-}
-
-std::thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
-
-#[test]
-#[cfg_attr(target_os = "emscripten", ignore)] // no threads
-#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
-fn panic_safe() {
-    panic::update_hook(move |prev, info| {
-        if !SILENCE_PANIC.with(|s| s.get()) {
-            prev(info);
-        }
-    });
-
-    let mut rng = test_rng();
-
-    // Miri is too slow (but still need to `chain` to make the types match)
-    let lens = if cfg!(miri) { (1..10).chain(0..0) } else { (1..20).chain(70..MAX_LEN) };
-    let moduli: &[u32] = if cfg!(miri) { &[5] } else { &[5, 20, 50] };
-
-    for len in lens {
-        for &modulus in moduli {
-            for &has_runs in &[false, true] {
-                let mut input = (0..len)
-                    .map(|id| DropCounter {
-                        x: rng.next_u32() % modulus,
-                        id: id,
-                        version: Cell::new(0),
-                    })
-                    .collect::<Vec<_>>();
-
-                if has_runs {
-                    for c in &mut input {
-                        c.x = c.id as u32;
-                    }
-
-                    for _ in 0..5 {
-                        let a = rng.gen::<usize>() % len;
-                        let b = rng.gen::<usize>() % len;
-                        if a < b {
-                            input[a..b].reverse();
-                        } else {
-                            input.swap(a, b);
-                        }
-                    }
-                }
-
-                do_test!(input, sort_by);
-                do_test!(input, sort_unstable_by);
-            }
-        }
-    }
-
-    // Set default panic hook again.
-    drop(panic::take_hook());
-}
-
-#[test]
-#[cfg_attr(miri, ignore)] // Miri is too slow
-#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
-fn test_sort() {
-    let mut rng = test_rng();
-
-    for len in (2..25).chain(500..510) {
-        for &modulus in &[5, 10, 100, 1000] {
-            for _ in 0..10 {
-                let orig: Vec<_> = (&mut rng)
-                    .sample_iter::<i32, _>(&Standard)
-                    .map(|x| x % modulus)
-                    .take(len)
-                    .collect();
-
-                // Sort in default order.
-                let mut v = orig.clone();
-                v.sort();
-                assert!(v.windows(2).all(|w| w[0] <= w[1]));
-
-                // Sort in ascending order.
-                let mut v = orig.clone();
-                v.sort_by(|a, b| a.cmp(b));
-                assert!(v.windows(2).all(|w| w[0] <= w[1]));
-
-                // Sort in descending order.
-                let mut v = orig.clone();
-                v.sort_by(|a, b| b.cmp(a));
-                assert!(v.windows(2).all(|w| w[0] >= w[1]));
-
-                // Sort in lexicographic order.
-                let mut v1 = orig.clone();
-                let mut v2 = orig.clone();
-                v1.sort_by_key(|x| x.to_string());
-                v2.sort_by_cached_key(|x| x.to_string());
-                assert!(v1.windows(2).all(|w| w[0].to_string() <= w[1].to_string()));
-                assert!(v1 == v2);
-
-                // Sort with many pre-sorted runs.
-                let mut v = orig.clone();
-                v.sort();
-                v.reverse();
-                for _ in 0..5 {
-                    let a = rng.gen::<usize>() % len;
-                    let b = rng.gen::<usize>() % len;
-                    if a < b {
-                        v[a..b].reverse();
-                    } else {
-                        v.swap(a, b);
-                    }
-                }
-                v.sort();
-                assert!(v.windows(2).all(|w| w[0] <= w[1]));
-            }
-        }
-    }
-
-    const ORD_VIOLATION_MAX_LEN: usize = 500;
-    let mut v = [0; ORD_VIOLATION_MAX_LEN];
-    for i in 0..ORD_VIOLATION_MAX_LEN {
-        v[i] = i as i32;
-    }
-
-    // Sort using a completely random comparison function. This will reorder the elements *somehow*,
-    // it may panic but the original elements must still be present.
-    let _ = panic::catch_unwind(move || {
-        v.sort_by(|_, _| *[Less, Equal, Greater].choose(&mut rng).unwrap());
-    });
-
-    v.sort();
-    for i in 0..ORD_VIOLATION_MAX_LEN {
-        assert_eq!(v[i], i as i32);
-    }
-
-    // Should not panic.
-    [0i32; 0].sort();
-    [(); 10].sort();
-    [(); 100].sort();
-
-    let mut v = [0xDEADBEEFu64];
-    v.sort();
-    assert!(v == [0xDEADBEEF]);
-}
-
-#[test]
-fn test_sort_stability() {
-    // Miri is too slow
-    let large_range = if cfg!(miri) { 0..0 } else { 500..510 };
-    let rounds = if cfg!(miri) { 1 } else { 10 };
-
-    let mut rng = test_rng();
-    for len in (2..25).chain(large_range) {
-        for _ in 0..rounds {
-            let mut counts = [0; 10];
-
-            // create a vector like [(6, 1), (5, 1), (6, 2), ...],
-            // where the first item of each tuple is random, but
-            // the second item represents which occurrence of that
-            // number this element is, i.e., the second elements
-            // will occur in sorted order.
-            let orig: Vec<_> = (0..len)
-                .map(|_| {
-                    let n = rng.gen::<usize>() % 10;
-                    counts[n] += 1;
-                    (n, counts[n])
-                })
-                .collect();
-
-            let mut v = orig.clone();
-            // Only sort on the first element, so an unstable sort
-            // may mix up the counts.
-            v.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
-
-            // This comparison includes the count (the second item
-            // of the tuple), so elements with equal first items
-            // will need to be ordered with increasing
-            // counts... i.e., exactly asserting that this sort is
-            // stable.
-            assert!(v.windows(2).all(|w| w[0] <= w[1]));
-
-            let mut v = orig.clone();
-            v.sort_by_cached_key(|&(x, _)| x);
-            assert!(v.windows(2).all(|w| w[0] <= w[1]));
-        }
-    }
-}
@@ -40,6 +40,7 @@
 #![feature(local_waker)]
 #![feature(vec_pop_if)]
 #![feature(unique_rc_arc)]
+#![feature(macro_metavar_expr_concat)]
 #![allow(internal_features)]
 #![deny(fuzzy_provenance_casts)]
 #![deny(unsafe_op_in_unsafe_fn)]
@@ -59,6 +60,7 @@ mod heap;
 mod linked_list;
 mod rc;
 mod slice;
+mod sort;
 mod str;
 mod string;
 mod task;
82  library/alloc/tests/sort/ffi_types.rs  Normal file
@@ -0,0 +1,82 @@
+use std::cmp::Ordering;
+
+// Very large stack value.
+#[repr(C)]
+#[derive(PartialEq, Eq, Debug, Clone)]
+pub struct FFIOneKibiByte {
+    values: [i64; 128],
+}
+
+impl FFIOneKibiByte {
+    pub fn new(val: i32) -> Self {
+        let mut values = [0i64; 128];
+        let mut val_i64 = val as i64;
+
+        for elem in &mut values {
+            *elem = val_i64;
+            val_i64 = std::hint::black_box(val_i64 + 1);
+        }
+        Self { values }
+    }
+
+    fn as_i64(&self) -> i64 {
+        self.values[11] + self.values[55] + self.values[77]
+    }
+}
+
+impl PartialOrd for FFIOneKibiByte {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for FFIOneKibiByte {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.as_i64().cmp(&other.as_i64())
+    }
+}
+
+// 16 byte stack value, with more expensive comparison.
+#[repr(C)]
+#[derive(PartialEq, Debug, Clone, Copy)]
+pub struct F128 {
+    x: f64,
+    y: f64,
+}
+
+impl F128 {
+    pub fn new(val: i32) -> Self {
+        let val_f = (val as f64) + (i32::MAX as f64) + 10.0;
+
+        let x = val_f + 0.1;
+        let y = val_f.log(4.1);
+
+        assert!(y < x);
+        assert!(x.is_normal() && y.is_normal());
+
+        Self { x, y }
+    }
+}
+
+// This is kind of hacky, but we know we only have normal comparable floats in there.
+impl Eq for F128 {}
+
+impl PartialOrd for F128 {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+// Goal is similar code-gen between Rust and C++
+// - Rust https://godbolt.org/z/3YM3xenPP
+// - C++ https://godbolt.org/z/178M6j1zz
+impl Ord for F128 {
+    fn cmp(&self, other: &Self) -> Ordering {
+        // Simulate expensive comparison function.
+        let this_div = self.x / self.y;
+        let other_div = other.x / other.y;
+
+        // SAFETY: We checked in the ctor that both are normal.
+        unsafe { this_div.partial_cmp(&other_div).unwrap_unchecked() }
+    }
+}
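
These types exist to make the comparison itself expensive, so the sort tests and benchmarks exercise comparison-heavy workloads rather than just element moves. A minimal sketch, assuming the `F128` type from the listing above is in scope:

    fn main() {
        // Descending inputs so the sort has real work to do.
        let mut v: Vec<F128> = (0..100).rev().map(F128::new).collect();
        v.sort(); // every comparison pays for two f64 divisions
        assert!(v.windows(2).all(|w| w[0] <= w[1]));
    }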
192  library/alloc/tests/sort/known_good_stable_sort.rs  Normal file
@@ -0,0 +1,192 @@
+// This module implements a known good stable sort implementation that helps provide better error
+// messages when the correctness tests fail, we can't use the stdlib sort functions because we are
+// testing them for correctness.
+//
+// Based on https://github.com/voultapher/tiny-sort-rs.
+
+use alloc::alloc::{Layout, alloc, dealloc};
+use std::{mem, ptr};
+
+/// Sort `v` preserving initial order of equal elements.
+///
+/// - Guaranteed O(N * log(N)) worst case perf
+/// - No adaptiveness
+/// - Branch miss-prediction not affected by outcome of comparison function
+/// - Uses `v.len()` auxiliary memory.
+///
+/// If `T: Ord` does not implement a total order the resulting order is
+/// unspecified. All original elements will remain in `v` and any possible modifications via
+/// interior mutability will be observable. Same is true if `T: Ord` panics.
+///
+/// Panics if allocating the auxiliary memory fails.
+#[inline(always)]
+pub fn sort<T: Ord>(v: &mut [T]) {
+    stable_sort(v, |a, b| a.lt(b))
+}
+
+#[inline(always)]
+fn stable_sort<T, F: FnMut(&T, &T) -> bool>(v: &mut [T], mut is_less: F) {
+    if mem::size_of::<T>() == 0 {
+        return;
+    }
+
+    let len = v.len();
+
+    // Inline the check for len < 2. This happens a lot, instrumenting the Rust compiler suggests
+    // len < 2 accounts for 94% of its calls to `slice::sort`.
+    if len < 2 {
+        return;
+    }
+
+    // SAFETY: We checked that len is > 0 and that T is not a ZST.
+    unsafe {
+        mergesort_main(v, &mut is_less);
+    }
+}
+
+/// The core logic should not be inlined.
+///
+/// SAFETY: The caller has to ensure that len is > 0 and that T is not a ZST.
+#[inline(never)]
+unsafe fn mergesort_main<T, F: FnMut(&T, &T) -> bool>(v: &mut [T], is_less: &mut F) {
+    // While it would be nice to have a merge implementation that only requires N / 2 auxiliary
+    // memory. Doing so would make the merge implementation significantly more complex and
+
+    // SAFETY: See function safety description.
+    let buf = unsafe { BufGuard::new(v.len()) };
+
+    // SAFETY: `scratch` has space for `v.len()` writes. And does not alias `v`.
+    unsafe {
+        mergesort_core(v, buf.buf_ptr.as_ptr(), is_less);
+    }
+}
+
+/// Tiny recursive top-down merge sort optimized for binary size. It has no adaptiveness whatsoever,
+/// no run detection, etc.
+///
+/// Buffer as pointed to by `scratch` must have space for `v.len()` writes. And must not alias `v`.
+#[inline(always)]
+unsafe fn mergesort_core<T, F: FnMut(&T, &T) -> bool>(
+    v: &mut [T],
+    scratch_ptr: *mut T,
+    is_less: &mut F,
+) {
+    let len = v.len();
+
+    if len > 2 {
+        // SAFETY: `mid` is guaranteed in-bounds. And caller has to ensure that `scratch_ptr` can
+        // hold `v.len()` values.
+        unsafe {
+            let mid = len / 2;
+            // Sort the left half recursively.
+            mergesort_core(v.get_unchecked_mut(..mid), scratch_ptr, is_less);
+            // Sort the right half recursively.
+            mergesort_core(v.get_unchecked_mut(mid..), scratch_ptr, is_less);
+            // Combine the two halves.
+            merge(v, scratch_ptr, is_less, mid);
+        }
+    } else if len == 2 {
+        if is_less(&v[1], &v[0]) {
+            v.swap(0, 1);
+        }
+    }
+}
+
+/// Branchless merge function.
+///
+/// SAFETY: The caller must ensure that `scratch_ptr` is valid for `v.len()` writes. And that mid is
+/// in-bounds.
+#[inline(always)]
+unsafe fn merge<T, F>(v: &mut [T], scratch_ptr: *mut T, is_less: &mut F, mid: usize)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    let len = v.len();
+    debug_assert!(mid > 0 && mid < len);
+
+    // Indexes to track the positions while merging.
+    let mut l = 0;
+    let mut r = mid;
+
+    // SAFETY: No matter what the result of is_less is we check that l and r remain in-bounds and if
+    // is_less panics the original elements remain in `v`.
+    unsafe {
+        let arr_ptr = v.as_ptr();
+
+        for i in 0..len {
+            let left_ptr = arr_ptr.add(l);
+            let right_ptr = arr_ptr.add(r);
+
+            let is_lt = !is_less(&*right_ptr, &*left_ptr);
+            let copy_ptr = if is_lt { left_ptr } else { right_ptr };
+            ptr::copy_nonoverlapping(copy_ptr, scratch_ptr.add(i), 1);
+
+            l += is_lt as usize;
+            r += !is_lt as usize;
+
+            // As long as neither side is exhausted merge left and right elements.
+            if ((l == mid) as u8 + (r == len) as u8) != 0 {
+                break;
+            }
+        }
+
+        // The left or right side is exhausted, drain the right side in one go.
+        let copy_ptr = if l == mid { arr_ptr.add(r) } else { arr_ptr.add(l) };
+        let i = l + (r - mid);
+        ptr::copy_nonoverlapping(copy_ptr, scratch_ptr.add(i), len - i);
+
+        // Now that scratch_ptr holds the full merged content, write it back on-top of v.
+        ptr::copy_nonoverlapping(scratch_ptr, v.as_mut_ptr(), len);
+    }
+}
+
+// SAFETY: The caller has to ensure that Option is Some, UB otherwise.
+unsafe fn unwrap_unchecked<T>(opt_val: Option<T>) -> T {
+    match opt_val {
+        Some(val) => val,
+        None => {
+            // SAFETY: See function safety description.
+            unsafe {
+                core::hint::unreachable_unchecked();
+            }
+        }
+    }
+}
+
+// Extremely basic versions of Vec.
+// Their use is super limited and by having the code here, it allows reuse between the sort
+// implementations.
+struct BufGuard<T> {
+    buf_ptr: ptr::NonNull<T>,
+    capacity: usize,
+}
+
+impl<T> BufGuard<T> {
+    // SAFETY: The caller has to ensure that len is not 0 and that T is not a ZST.
+    unsafe fn new(len: usize) -> Self {
+        debug_assert!(len > 0 && mem::size_of::<T>() > 0);
+
+        // SAFETY: See function safety description.
+        let layout = unsafe { unwrap_unchecked(Layout::array::<T>(len).ok()) };
+
+        // SAFETY: We checked that T is not a ZST.
+        let buf_ptr = unsafe { alloc(layout) as *mut T };
+
+        if buf_ptr.is_null() {
+            panic!("allocation failure");
+        }
+
+        Self { buf_ptr: ptr::NonNull::new(buf_ptr).unwrap(), capacity: len }
+    }
+}
+
+impl<T> Drop for BufGuard<T> {
+    fn drop(&mut self) {
+        // SAFETY: We checked that T is not a ZST.
+        unsafe {
+            dealloc(self.buf_ptr.as_ptr() as *mut u8, Layout::array::<T>(self.capacity).unwrap());
+        }
+    }
+}
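
For orientation, a minimal sketch (not part of the diff) of how a correctness check can pit the sort under test against this reference implementation, assuming the module is in scope as `known_good_stable_sort`:

    fn check_against_reference(mut input: Vec<i32>) {
        let mut expected = input.clone();
        known_good_stable_sort::sort(&mut expected); // trusted oracle

        input.sort(); // implementation under test
        assert_eq!(input, expected);
    }

    fn main() {
        check_against_reference(vec![3, 1, 4, 1, 5, 9, 2, 6]);
        check_against_reference(Vec::new()); // trivial inputs matter too
    }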
17  library/alloc/tests/sort/mod.rs  Normal file
@@ -0,0 +1,17 @@
+pub trait Sort {
+    fn name() -> String;
+
+    fn sort<T>(v: &mut [T])
+    where
+        T: Ord;
+
+    fn sort_by<T, F>(v: &mut [T], compare: F)
+    where
+        F: FnMut(&T, &T) -> std::cmp::Ordering;
+}
+
+mod ffi_types;
+mod known_good_stable_sort;
+mod patterns;
+mod tests;
+mod zipf;
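
The `Sort` trait lets the (suppressed) `tests.rs` run one battery of tests against several sort implementations. A hedged sketch of what an implementor for the stdlib stable sort might look like; the real impls live in the suppressed file, so the name here is illustrative:

    struct StdStable; // hypothetical implementor, for illustration only

    impl Sort for StdStable {
        fn name() -> String {
            "rust_std_stable".to_string()
        }

        fn sort<T: Ord>(v: &mut [T]) {
            v.sort();
        }

        fn sort_by<T, F: FnMut(&T, &T) -> std::cmp::Ordering>(v: &mut [T], compare: F) {
            v.sort_by(compare);
        }
    }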
211  library/alloc/tests/sort/patterns.rs  Normal file
@@ -0,0 +1,211 @@
+use std::env;
+use std::hash::Hash;
+use std::str::FromStr;
+use std::sync::OnceLock;
+
+use rand::prelude::*;
+use rand_xorshift::XorShiftRng;
+
+use crate::sort::zipf::ZipfDistribution;
+
+/// Provides a set of patterns useful for testing and benchmarking sorting algorithms.
+/// Currently limited to i32 values.
+
+// --- Public ---
+
+pub fn random(len: usize) -> Vec<i32> {
+    //     .
+    // : . : :
+    // :.:::.::
+
+    random_vec(len)
+}
+
+pub fn random_uniform<R>(len: usize, range: R) -> Vec<i32>
+where
+    R: Into<rand::distributions::Uniform<i32>> + Hash,
+{
+    // :.:.:.::
+
+    let mut rng: XorShiftRng = rand::SeedableRng::seed_from_u64(get_or_init_rand_seed());
+
+    // Abstracting over ranges in Rust :(
+    let dist: rand::distributions::Uniform<i32> = range.into();
+    (0..len).map(|_| dist.sample(&mut rng)).collect()
+}
+
+pub fn random_zipf(len: usize, exponent: f64) -> Vec<i32> {
+    // https://en.wikipedia.org/wiki/Zipf's_law
+
+    let mut rng: XorShiftRng = rand::SeedableRng::seed_from_u64(get_or_init_rand_seed());
+
+    let dist = ZipfDistribution::new(len, exponent).unwrap();
+    (0..len).map(|_| dist.sample(&mut rng) as i32).collect()
+}
+
+pub fn random_sorted(len: usize, sorted_percent: f64) -> Vec<i32> {
+    //     .:
+    //   .:::. :
+    // .::::::.::
+    // [----][--]
+    //  ^      ^
+    //  |      |
+    // sorted  |
+    //      unsorted
+
+    // Simulate pre-existing sorted slice, where len - sorted_percent are the new unsorted values
+    // and part of the overall distribution.
+    let mut v = random_vec(len);
+    let sorted_len = ((len as f64) * (sorted_percent / 100.0)).round() as usize;
+
+    v[0..sorted_len].sort_unstable();
+
+    v
+}
+
+pub fn all_equal(len: usize) -> Vec<i32> {
+    // ......
+    // ::::::
+
+    (0..len).map(|_| 66).collect::<Vec<_>>()
+}
+
+pub fn ascending(len: usize) -> Vec<i32> {
+    //     .:
+    //   .:::
+    // .:::::
+
+    (0..len as i32).collect::<Vec<_>>()
+}
+
+pub fn descending(len: usize) -> Vec<i32> {
+    // :.
+    // :::.
+    // :::::.
+
+    (0..len as i32).rev().collect::<Vec<_>>()
+}
+
+pub fn saw_mixed(len: usize, saw_count: usize) -> Vec<i32> {
+    // :.  :.    .::.    .:
+    // :::.:::..::::::..:::
+
+    if len == 0 {
+        return Vec::new();
+    }
+
+    let mut vals = random_vec(len);
+    let chunks_size = len / saw_count.max(1);
+    let saw_directions = random_uniform((len / chunks_size) + 1, 0..=1);
+
+    for (i, chunk) in vals.chunks_mut(chunks_size).enumerate() {
+        if saw_directions[i] == 0 {
+            chunk.sort_unstable();
+        } else if saw_directions[i] == 1 {
+            chunk.sort_unstable_by_key(|&e| std::cmp::Reverse(e));
+        } else {
+            unreachable!();
+        }
+    }
+
+    vals
+}
+
+pub fn saw_mixed_range(len: usize, range: std::ops::Range<usize>) -> Vec<i32> {
+    //     :.
+    // :.  :::.    .::.      .:
+    // :::.:::::..::::::..:.:::
+
+    // ascending and descending randomly picked, with length in `range`.
+
+    if len == 0 {
+        return Vec::new();
+    }
+
+    let mut vals = random_vec(len);
+
+    let max_chunks = len / range.start;
+    let saw_directions = random_uniform(max_chunks + 1, 0..=1);
+    let chunk_sizes = random_uniform(max_chunks + 1, (range.start as i32)..(range.end as i32));
+
+    let mut i = 0;
+    let mut l = 0;
+    while l < len {
+        let chunk_size = chunk_sizes[i] as usize;
+        let chunk_end = std::cmp::min(l + chunk_size, len);
+        let chunk = &mut vals[l..chunk_end];
+
+        if saw_directions[i] == 0 {
+            chunk.sort_unstable();
+        } else if saw_directions[i] == 1 {
+            chunk.sort_unstable_by_key(|&e| std::cmp::Reverse(e));
+        } else {
+            unreachable!();
+        }
+
+        i += 1;
+        l += chunk_size;
+    }
+
+    vals
+}
+
+pub fn pipe_organ(len: usize) -> Vec<i32> {
+    //   .:.
+    // .:::::.
+
+    let mut vals = random_vec(len);
+
+    let first_half = &mut vals[0..(len / 2)];
+    first_half.sort_unstable();
+
+    let second_half = &mut vals[(len / 2)..len];
+    second_half.sort_unstable_by_key(|&e| std::cmp::Reverse(e));
+
+    vals
+}
+
+pub fn get_or_init_rand_seed() -> u64 {
+    *SEED_VALUE.get_or_init(|| {
+        env::var("OVERRIDE_SEED")
+            .ok()
+            .map(|seed| u64::from_str(&seed).unwrap())
+            .unwrap_or_else(rand_root_seed)
+    })
+}
+
+// --- Private ---
+
+static SEED_VALUE: OnceLock<u64> = OnceLock::new();
+
+#[cfg(not(miri))]
+fn rand_root_seed() -> u64 {
+    // Other test code hashes `panic::Location::caller()` and constructs a seed from that, in these
+    // tests we want to have a fuzzer-like exploration of the test space, if we used the same caller
+    // based construction we would always test the same.
+    //
+    // Instead we use the seconds since UNIX epoch / 10, given CI log output this value should be
+    // reasonably easy to re-construct.
+
+    use std::time::{SystemTime, UNIX_EPOCH};
+
+    let epoch_seconds = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs();
+
+    epoch_seconds / 10
+}
+
+#[cfg(miri)]
+fn rand_root_seed() -> u64 {
+    // Miri is usually run with isolation, which gives us repeatability but also permutations based
+    // on other code that runs before.
+    use core::hash::{BuildHasher, Hash, Hasher};
+    let mut hasher = std::hash::RandomState::new().build_hasher();
+    core::panic::Location::caller().hash(&mut hasher);
+    hasher.finish()
+}
+
+fn random_vec(len: usize) -> Vec<i32> {
+    let mut rng: XorShiftRng = rand::SeedableRng::seed_from_u64(get_or_init_rand_seed());
+    (0..len).map(|_| rng.gen::<i32>()).collect()
+}
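
To see how these generators combine, a minimal sketch (assuming the `patterns` module above is in scope) that drives the stdlib sort over several input shapes:

    fn main() {
        let inputs = [
            patterns::random(1_000),
            patterns::random_zipf(1_000, 1.0),
            patterns::saw_mixed(1_000, 10),
            patterns::pipe_organ(1_000),
        ];

        for mut v in inputs {
            v.sort();
            assert!(v.windows(2).all(|w| w[0] <= w[1]));
        }
    }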
1233  library/alloc/tests/sort/tests.rs  Normal file
(File diff suppressed because it is too large.)

208  library/alloc/tests/sort/zipf.rs  Normal file
@@ -0,0 +1,208 @@
+// This module implements a Zipfian distribution generator.
+//
+// Based on https://github.com/jonhoo/rust-zipf.
+
+use rand::Rng;
+
+/// Random number generator that generates Zipf-distributed random numbers using rejection
+/// inversion.
+#[derive(Clone, Copy)]
+pub struct ZipfDistribution {
+    /// Number of elements
+    num_elements: f64,
+    /// Exponent parameter of the distribution
+    exponent: f64,
+    /// `hIntegral(1.5) - 1`
+    h_integral_x1: f64,
+    /// `hIntegral(num_elements + 0.5)`
+    h_integral_num_elements: f64,
+    /// `2 - hIntegralInverse(hIntegral(2.5) - h(2))`
+    s: f64,
+}
+
+impl ZipfDistribution {
+    /// Creates a new [Zipf-distributed](https://en.wikipedia.org/wiki/Zipf's_law)
+    /// random number generator.
+    ///
+    /// Note that both the number of elements and the exponent must be greater than 0.
+    pub fn new(num_elements: usize, exponent: f64) -> Result<Self, ()> {
+        if num_elements == 0 {
+            return Err(());
+        }
+        if exponent <= 0f64 {
+            return Err(());
+        }
+
+        let z = ZipfDistribution {
+            num_elements: num_elements as f64,
+            exponent,
+            h_integral_x1: ZipfDistribution::h_integral(1.5, exponent) - 1f64,
+            h_integral_num_elements: ZipfDistribution::h_integral(
+                num_elements as f64 + 0.5,
+                exponent,
+            ),
+            s: 2f64
+                - ZipfDistribution::h_integral_inv(
+                    ZipfDistribution::h_integral(2.5, exponent)
+                        - ZipfDistribution::h(2f64, exponent),
+                    exponent,
+                ),
+        };
+
+        // populate cache
+
+        Ok(z)
+    }
+}
+
+impl ZipfDistribution {
+    fn next<R: Rng + ?Sized>(&self, rng: &mut R) -> usize {
+        // The paper describes an algorithm for exponents larger than 1 (Algorithm ZRI).
+        //
+        // The original method uses
+        //   H(x) = (v + x)^(1 - q) / (1 - q)
+        // as the integral of the hat function.
+        //
+        // This function is undefined for q = 1, which is the reason for the limitation of the
+        // exponent.
+        //
+        // If instead the integral function
+        //   H(x) = ((v + x)^(1 - q) - 1) / (1 - q)
+        // is used, for which a meaningful limit exists for q = 1, the method works for all
+        // positive exponents.
+        //
+        // The following implementation uses v = 0 and generates integral number in the range [1,
+        // num_elements]. This is different to the original method where v is defined to
+        // be positive and numbers are taken from [0, i_max]. This explains why the implementation
+        // looks slightly different.
+
+        let hnum = self.h_integral_num_elements;
+
+        loop {
+            use std::cmp;
+            let u: f64 = hnum + rng.gen::<f64>() * (self.h_integral_x1 - hnum);
+            // u is uniformly distributed in (h_integral_x1, h_integral_num_elements]
+
+            let x: f64 = ZipfDistribution::h_integral_inv(u, self.exponent);
+
+            // Limit k to the range [1, num_elements] if it would be outside
+            // due to numerical inaccuracies.
+            let k64 = x.max(1.0).min(self.num_elements);
+            // float -> integer rounds towards zero, so we add 0.5
+            // to prevent bias towards k == 1
+            let k = cmp::max(1, (k64 + 0.5) as usize);
+
+            // Here, the distribution of k is given by:
+            //
+            //   P(k = 1) = C * (hIntegral(1.5) - h_integral_x1) = C
+            //   P(k = m) = C * (hIntegral(m + 1/2) - hIntegral(m - 1/2)) for m >= 2
+            //
+            // where C = 1 / (h_integral_num_elements - h_integral_x1)
+            if k64 - x <= self.s
+                || u >= ZipfDistribution::h_integral(k64 + 0.5, self.exponent)
+                    - ZipfDistribution::h(k64, self.exponent)
+            {
+                // Case k = 1:
+                //
+                // The right inequality is always true, because replacing k by 1 gives
+                // u >= hIntegral(1.5) - h(1) = h_integral_x1 and u is taken from
+                // (h_integral_x1, h_integral_num_elements].
+                //
+                // Therefore, the acceptance rate for k = 1 is P(accepted | k = 1) = 1
+                // and the probability that 1 is returned as random value is
+                // P(k = 1 and accepted) = P(accepted | k = 1) * P(k = 1) = C = C / 1^exponent
+                //
+                // Case k >= 2:
+                //
+                // The left inequality (k - x <= s) is just a short cut
+                // to avoid the more expensive evaluation of the right inequality
+                // (u >= hIntegral(k + 0.5) - h(k)) in many cases.
+                //
+                // If the left inequality is true, the right inequality is also true:
+                // Theorem 2 in the paper is valid for all positive exponents, because
+                // the requirements h'(x) = -exponent/x^(exponent + 1) < 0 and
+                // (-1/hInverse'(x))'' = (1+1/exponent) * x^(1/exponent-1) >= 0
+                // are both fulfilled.
+                // Therefore, f(x) = x - hIntegralInverse(hIntegral(x + 0.5) - h(x))
+                // is a non-decreasing function. If k - x <= s holds,
+                // k - x <= s + f(k) - f(2) is obviously also true which is equivalent to
+                // -x <= -hIntegralInverse(hIntegral(k + 0.5) - h(k)),
+                // -hIntegralInverse(u) <= -hIntegralInverse(hIntegral(k + 0.5) - h(k)),
+                // and finally u >= hIntegral(k + 0.5) - h(k).
+                //
+                // Hence, the right inequality determines the acceptance rate:
+                // P(accepted | k = m) = h(m) / (hIntegrated(m+1/2) - hIntegrated(m-1/2))
+                // The probability that m is returned is given by
+                // P(k = m and accepted) = P(accepted | k = m) * P(k = m)
+                //                       = C * h(m) = C / m^exponent.
+                //
+                // In both cases the probabilities are proportional to the probability mass
+                // function of the Zipf distribution.
+
+                return k;
+            }
+        }
+    }
+}
+
+impl rand::distributions::Distribution<usize> for ZipfDistribution {
+    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> usize {
+        self.next(rng)
+    }
+}
+
+use std::fmt;
+impl fmt::Debug for ZipfDistribution {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+        f.debug_struct("ZipfDistribution")
+            .field("e", &self.exponent)
+            .field("n", &self.num_elements)
+            .finish()
+    }
+}
+
+impl ZipfDistribution {
+    /// Computes `H(x)`, defined as
+    ///
+    /// - `(x^(1 - exponent) - 1) / (1 - exponent)`, if `exponent != 1`
+    /// - `log(x)`, if `exponent == 1`
+    ///
+    /// `H(x)` is an integral function of `h(x)`, the derivative of `H(x)` is `h(x)`.
+    fn h_integral(x: f64, exponent: f64) -> f64 {
+        let log_x = x.ln();
+        helper2((1f64 - exponent) * log_x) * log_x
+    }
+
+    /// Computes `h(x) = 1 / x^exponent`
+    fn h(x: f64, exponent: f64) -> f64 {
+        (-exponent * x.ln()).exp()
+    }
+
+    /// The inverse function of `H(x)`.
+    /// Returns the `y` for which `H(y) = x`.
+    fn h_integral_inv(x: f64, exponent: f64) -> f64 {
+        let mut t: f64 = x * (1f64 - exponent);
+        if t < -1f64 {
+            // Limit value to the range [-1, +inf).
+            // t could be smaller than -1 in some rare cases due to numerical errors.
+            t = -1f64;
+        }
+        (helper1(t) * x).exp()
+    }
+}
+
+/// Helper function that calculates `log(1 + x) / x`.
+/// A Taylor series expansion is used, if x is close to 0.
+fn helper1(x: f64) -> f64 {
+    if x.abs() > 1e-8 { x.ln_1p() / x } else { 1f64 - x * (0.5 - x * (1.0 / 3.0 - 0.25 * x)) }
+}
+
+/// Helper function to calculate `(exp(x) - 1) / x`.
+/// A Taylor series expansion is used, if x is close to 0.
+fn helper2(x: f64) -> f64 {
+    if x.abs() > 1e-8 {
+        x.exp_m1() / x
+    } else {
+        1f64 + x * 0.5 * (1f64 + x * 1.0 / 3.0 * (1f64 + 0.25 * x))
+    }
+}
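
A minimal sketch of drawing samples from this generator, mirroring how `patterns::random_zipf` above uses it (assumes `ZipfDistribution` and the `rand` crate are in scope):

    use rand::prelude::*;

    fn main() {
        let dist = ZipfDistribution::new(1000, 1.0).unwrap();
        let mut rng = rand::thread_rng();

        // Low ranks dominate: rank 1 is drawn roughly twice as often as rank 2.
        let samples: Vec<usize> = (0..10).map(|_| dist.sample(&mut rng)).collect();
        assert!(samples.iter().all(|&s| (1..=1000).contains(&s)));
    }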
@@ -674,8 +674,9 @@ impl char {
     /// 'ß'.encode_utf8(&mut b);
     /// ```
     #[stable(feature = "unicode_encode_char", since = "1.15.0")]
-    #[rustc_const_unstable(feature = "const_char_encode_utf8", issue = "130512")]
+    #[rustc_const_stable(feature = "const_char_encode_utf8", since = "CURRENT_RUSTC_VERSION")]
     #[inline]
+    #[cfg_attr(bootstrap, rustc_allow_const_fn_unstable(const_mut_refs))]
     pub const fn encode_utf8(self, dst: &mut [u8]) -> &mut str {
         // SAFETY: `char` is not a surrogate, so this is valid UTF-8.
         unsafe { from_utf8_unchecked_mut(encode_utf8_raw(self as u32, dst)) }
@@ -1770,9 +1771,11 @@ const fn len_utf16(code: u32) -> usize {
 /// Panics if the buffer is not large enough.
 /// A buffer of length four is large enough to encode any `char`.
 #[unstable(feature = "char_internals", reason = "exposed only for libstd", issue = "none")]
-#[rustc_const_unstable(feature = "const_char_encode_utf8", issue = "130512")]
+#[rustc_const_stable(feature = "const_char_encode_utf8", since = "CURRENT_RUSTC_VERSION")]
 #[doc(hidden)]
 #[inline]
+#[rustc_allow_const_fn_unstable(const_eval_select)]
+#[cfg_attr(bootstrap, rustc_allow_const_fn_unstable(const_mut_refs))]
 pub const fn encode_utf8_raw(code: u32, dst: &mut [u8]) -> &mut [u8] {
     const fn panic_at_const(_code: u32, _len: usize, _dst_len: usize) {
         // Note that we cannot format in constant expressions.
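
With `const_char_encode_utf8` stabilized, `encode_utf8` is callable in const contexts on stable; a minimal sketch of what that enables:

    // UTF-8-encode a char entirely at compile time.
    const SNOWMAN_UTF8: [u8; 3] = {
        let mut buf = [0u8; 3];
        '☃'.encode_utf8(&mut buf);
        buf
    };

    fn main() {
        assert_eq!(&SNOWMAN_UTF8, "☃".as_bytes());
    }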
@@ -366,8 +366,6 @@ impl<'a, 'b: 'a> DebugTuple<'a, 'b> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(debug_more_non_exhaustive)]
-    ///
     /// use std::fmt;
     ///
     /// struct Foo(i32, String);
@@ -385,7 +383,7 @@ impl<'a, 'b: 'a> DebugTuple<'a, 'b> {
     ///     "Foo(10, ..)",
     /// );
     /// ```
-    #[unstable(feature = "debug_more_non_exhaustive", issue = "127942")]
+    #[stable(feature = "debug_more_non_exhaustive", since = "CURRENT_RUSTC_VERSION")]
     pub fn finish_non_exhaustive(&mut self) -> fmt::Result {
         self.result = self.result.and_then(|_| {
             if self.fields > 0 {
@@ -606,8 +604,6 @@ impl<'a, 'b: 'a> DebugSet<'a, 'b> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(debug_more_non_exhaustive)]
-    ///
     /// use std::fmt;
     ///
     /// struct Foo(Vec<i32>);
@@ -630,7 +626,7 @@ impl<'a, 'b: 'a> DebugSet<'a, 'b> {
     ///     "{1, 2, ..}",
     /// );
     /// ```
-    #[unstable(feature = "debug_more_non_exhaustive", issue = "127942")]
+    #[stable(feature = "debug_more_non_exhaustive", since = "CURRENT_RUSTC_VERSION")]
     pub fn finish_non_exhaustive(&mut self) -> fmt::Result {
         self.inner.result = self.inner.result.and_then(|_| {
             if self.inner.has_fields {
@@ -800,8 +796,6 @@ impl<'a, 'b: 'a> DebugList<'a, 'b> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(debug_more_non_exhaustive)]
-    ///
     /// use std::fmt;
     ///
     /// struct Foo(Vec<i32>);
@@ -824,7 +818,7 @@ impl<'a, 'b: 'a> DebugList<'a, 'b> {
     ///     "[1, 2, ..]",
     /// );
     /// ```
-    #[unstable(feature = "debug_more_non_exhaustive", issue = "127942")]
+    #[stable(feature = "debug_more_non_exhaustive", since = "CURRENT_RUSTC_VERSION")]
     pub fn finish_non_exhaustive(&mut self) -> fmt::Result {
         self.inner.result.and_then(|_| {
             if self.inner.has_fields {
@@ -1126,8 +1120,6 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(debug_more_non_exhaustive)]
-    ///
     /// use std::fmt;
     ///
     /// struct Foo(Vec<(String, i32)>);
@@ -1154,7 +1146,7 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> {
     ///     r#"{"A": 10, "B": 11, ..}"#,
     /// );
     /// ```
-    #[unstable(feature = "debug_more_non_exhaustive", issue = "127942")]
+    #[stable(feature = "debug_more_non_exhaustive", since = "CURRENT_RUSTC_VERSION")]
     pub fn finish_non_exhaustive(&mut self) -> fmt::Result {
         self.result = self.result.and_then(|_| {
             assert!(!self.has_key, "attempted to finish a map with a partial entry");
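
Now that `finish_non_exhaustive` is stable on the tuple, set, list, and map builders, callers no longer need the feature gate; a minimal sketch mirroring the `DebugTuple` doc example above:

    use std::fmt;

    struct Foo(i32, String);

    impl fmt::Debug for Foo {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Renders as `Foo(10, ..)`, hiding the remaining fields.
            f.debug_tuple("Foo").field(&self.0).finish_non_exhaustive()
        }
    }

    fn main() {
        assert_eq!(format!("{:?}", Foo(10, "secret".to_string())), "Foo(10, ..)");
    }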
@@ -118,7 +118,6 @@
 #![feature(const_bigint_helper_methods)]
 #![feature(const_black_box)]
 #![feature(const_char_encode_utf16)]
-#![feature(const_char_encode_utf8)]
 #![feature(const_eval_select)]
 #![feature(const_exact_div)]
 #![feature(const_fmt_arguments_new)]
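
The gate removed here tracks `char::encode_utf8` becoming callable in const
contexts (`const_char_encode_utf8`, stabilized by #131463 in this rollup). A
small sketch of what that enables; `utf8_len` is a hypothetical helper, and
this assumes a toolchain where the stabilization (including `&mut` locals in
const fn) has landed:

// Compute the UTF-8 encoded length of a char at compile time.
const fn utf8_len(c: char) -> usize {
    let mut buf = [0u8; 4];
    c.encode_utf8(&mut buf).len()
}

const SNOWMAN_LEN: usize = utf8_len('☃');

fn main() {
    assert_eq!(SNOWMAN_LEN, 3);
}
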
@@ -734,7 +734,8 @@ impl<T, E> Result<T, E> {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[rustc_const_unstable(feature = "const_result", issue = "82814")]
+    #[rustc_const_stable(feature = "const_result", since = "CURRENT_RUSTC_VERSION")]
+    #[cfg_attr(bootstrap, rustc_allow_const_fn_unstable(const_mut_refs))]
     pub const fn as_mut(&mut self) -> Result<&mut T, &mut E> {
         match *self {
             Ok(ref mut x) => Ok(x),
@@ -1536,7 +1537,8 @@ impl<T, E> Result<&T, E> {
     /// ```
     #[inline]
     #[stable(feature = "result_copied", since = "1.59.0")]
-    #[rustc_const_unstable(feature = "const_result", issue = "82814")]
+    #[rustc_const_stable(feature = "const_result", since = "CURRENT_RUSTC_VERSION")]
+    #[rustc_allow_const_fn_unstable(const_precise_live_drops)]
     pub const fn copied(self) -> Result<T, E>
     where
         T: Copy,
@@ -1586,7 +1588,9 @@ impl<T, E> Result<&mut T, E> {
     /// ```
     #[inline]
     #[stable(feature = "result_copied", since = "1.59.0")]
-    #[rustc_const_unstable(feature = "const_result", issue = "82814")]
+    #[rustc_const_stable(feature = "const_result", since = "CURRENT_RUSTC_VERSION")]
+    #[cfg_attr(bootstrap, rustc_allow_const_fn_unstable(const_mut_refs))]
+    #[rustc_allow_const_fn_unstable(const_precise_live_drops)]
     pub const fn copied(self) -> Result<T, E>
     where
         T: Copy,
@@ -1639,7 +1643,8 @@ impl<T, E> Result<Option<T>, E> {
     /// ```
     #[inline]
     #[stable(feature = "transpose_result", since = "1.33.0")]
-    #[rustc_const_unstable(feature = "const_result", issue = "82814")]
+    #[rustc_const_stable(feature = "const_result", since = "CURRENT_RUSTC_VERSION")]
+    #[rustc_allow_const_fn_unstable(const_precise_live_drops)]
     pub const fn transpose(self) -> Option<Result<T, E>> {
         match self {
             Ok(Some(x)) => Some(Ok(x)),
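
These attribute swaps make `Result::as_mut`, `Result::copied`, and
`Result::transpose` usable in const contexts on stable (the `const_result`
stabilization, #131287). A minimal sketch, assuming a toolchain where this
has landed; `bump` is a hypothetical helper:

const fn bump(mut r: Result<i32, i32>) -> Result<i32, i32> {
    // `as_mut` is a stable const fn after this change.
    if let Ok(v) = r.as_mut() {
        *v += 1;
    }
    r
}

const BUMPED: Result<i32, i32> = bump(Ok(1));
const TRANSPOSED: Option<Result<i32, ()>> = Ok::<Option<i32>, ()>(Some(5)).transpose();

fn main() {
    assert_eq!(BUMPED, Ok(2));
    assert_eq!(TRANSPOSED, Some(Ok(5)));
}
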
@@ -29,14 +29,12 @@
 #![feature(const_pin)]
 #![feature(const_pointer_is_aligned)]
 #![feature(const_ptr_write)]
-#![feature(const_result)]
 #![feature(const_three_way_compare)]
 #![feature(const_trait_impl)]
 #![feature(core_intrinsics)]
 #![feature(core_io_borrowed_buf)]
 #![feature(core_private_bignum)]
 #![feature(core_private_diy_float)]
-#![feature(debug_more_non_exhaustive)]
 #![feature(dec2flt)]
 #![feature(duration_constants)]
 #![feature(duration_constructors)]
@@ -1800,57 +1800,6 @@ fn brute_force_rotate_test_1() {
     }
 }
-
-#[test]
-#[cfg(not(target_arch = "wasm32"))]
-fn sort_unstable() {
-    use rand::Rng;
-
-    // Miri is too slow (but still need to `chain` to make the types match)
-    let lens = if cfg!(miri) { (2..20).chain(0..0) } else { (2..25).chain(500..510) };
-    let rounds = if cfg!(miri) { 1 } else { 100 };
-
-    let mut v = [0; 600];
-    let mut tmp = [0; 600];
-    let mut rng = crate::test_rng();
-
-    for len in lens {
-        let v = &mut v[0..len];
-        let tmp = &mut tmp[0..len];
-
-        for &modulus in &[5, 10, 100, 1000] {
-            for _ in 0..rounds {
-                for i in 0..len {
-                    v[i] = rng.gen::<i32>() % modulus;
-                }
-
-                // Sort in default order.
-                tmp.copy_from_slice(v);
-                tmp.sort_unstable();
-                assert!(tmp.windows(2).all(|w| w[0] <= w[1]));
-
-                // Sort in ascending order.
-                tmp.copy_from_slice(v);
-                tmp.sort_unstable_by(|a, b| a.cmp(b));
-                assert!(tmp.windows(2).all(|w| w[0] <= w[1]));
-
-                // Sort in descending order.
-                tmp.copy_from_slice(v);
-                tmp.sort_unstable_by(|a, b| b.cmp(a));
-                assert!(tmp.windows(2).all(|w| w[0] >= w[1]));
-            }
-        }
-    }
-
-    // Should not panic.
-    [0i32; 0].sort_unstable();
-    [(); 10].sort_unstable();
-    [(); 100].sort_unstable();
-
-    let mut v = [0xDEADBEEFu64];
-    v.sort_unstable();
-    assert!(v == [0xDEADBEEF]);
-}
-
 #[test]
 #[cfg(not(target_arch = "wasm32"))]
 #[cfg_attr(miri, ignore)] // Miri is too slow
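
The deleted `sort_unstable` test moves to the sort-research-rs based suite
ported in #131065. For reference, the invariant it asserted after every sort
call is the pairwise-window check, sketched standalone below
(`is_sorted_ascending` is a hypothetical helper):

fn is_sorted_ascending(v: &[i32]) -> bool {
    // Every adjacent pair must be ordered; this is exactly what the
    // removed test asserted via `windows(2)`.
    v.windows(2).all(|w| w[0] <= w[1])
}

fn main() {
    let mut v = [3, 1, 2];
    v.sort_unstable();
    assert!(is_sorted_ascending(&v));
}
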
@@ -4,20 +4,17 @@
 
 #![allow(rustc::default_hash_types)]
 
-use std::fmt;
-
 use rustc_ast::ast;
 use rustc_attr::DeprecatedSince;
 use rustc_hir::def::{CtorKind, DefKind};
 use rustc_hir::def_id::DefId;
 use rustc_metadata::rendered_const;
-use rustc_middle::bug;
-use rustc_middle::ty::{self, TyCtxt};
-use rustc_span::symbol::sym;
-use rustc_span::{Pos, Symbol};
+use rustc_middle::{bug, ty};
+use rustc_span::{Pos, Symbol, sym};
 use rustc_target::spec::abi::Abi as RustcAbi;
 use rustdoc_json_types::*;
 
+use super::FullItemId;
 use crate::clean::{self, ItemId};
 use crate::formats::FormatRenderer;
 use crate::formats::item_type::ItemType;
@@ -40,7 +37,7 @@ impl JsonRenderer<'_> {
                     Some(UrlFragment::UserWritten(_)) | None => *page_id,
                 };
 
-                (String::from(&**link), id_from_item_default(id.into(), self.tcx))
+                (String::from(&**link), self.id_from_item_default(id.into()))
             })
             .collect();
         let docs = item.opt_doc_value();
@@ -48,7 +45,7 @@ impl JsonRenderer<'_> {
         let span = item.span(self.tcx);
         let visibility = item.visibility(self.tcx);
         let clean::Item { name, item_id, .. } = item;
-        let id = id_from_item(&item, self.tcx);
+        let id = self.id_from_item(&item);
         let inner = match item.kind {
             clean::KeywordItem => return None,
             clean::StrippedItem(ref inner) => {
@@ -59,12 +56,12 @@ impl JsonRenderer<'_> {
                     clean::ModuleItem(_)
                         if self.imported_items.contains(&item_id.expect_def_id()) =>
                     {
-                        from_clean_item(item, self.tcx)
+                        from_clean_item(item, self)
                     }
                     _ => return None,
                 }
             }
-            _ => from_clean_item(item, self.tcx),
+            _ => from_clean_item(item, self),
         };
         Some(Item {
             id,
@@ -105,37 +102,116 @@ impl JsonRenderer<'_> {
             Some(ty::Visibility::Public) => Visibility::Public,
             Some(ty::Visibility::Restricted(did)) if did.is_crate_root() => Visibility::Crate,
             Some(ty::Visibility::Restricted(did)) => Visibility::Restricted {
-                parent: id_from_item_default(did.into(), self.tcx),
+                parent: self.id_from_item_default(did.into()),
                 path: self.tcx.def_path(did).to_string_no_crate_verbose(),
             },
         }
     }
-}
 
-pub(crate) trait FromWithTcx<T> {
-    fn from_tcx(f: T, tcx: TyCtxt<'_>) -> Self;
-}
-
-pub(crate) trait IntoWithTcx<T> {
-    fn into_tcx(self, tcx: TyCtxt<'_>) -> T;
-}
-
-impl<T, U> IntoWithTcx<U> for T
-where
-    U: FromWithTcx<T>,
-{
-    fn into_tcx(self, tcx: TyCtxt<'_>) -> U {
-        U::from_tcx(self, tcx)
-    }
-}
-
-impl<I, T, U> FromWithTcx<I> for Vec<U>
+    pub(crate) fn id_from_item_default(&self, item_id: ItemId) -> Id {
+        self.id_from_item_inner(item_id, None, None)
+    }
+
+    pub(crate) fn id_from_item_inner(
+        &self,
+        item_id: ItemId,
+        name: Option<Symbol>,
+        extra: Option<Id>,
+    ) -> Id {
+        let make_part = |def_id: DefId, name: Option<Symbol>, extra: Option<Id>| {
+            let name = match name {
+                Some(name) => Some(name),
+                None => {
+                    // We need this workaround because primitive types' DefId actually refers to
+                    // their parent module, which isn't present in the output JSON items. So
+                    // instead, we directly get the primitive symbol
+                    if matches!(self.tcx.def_kind(def_id), DefKind::Mod)
+                        && let Some(prim) = self
+                            .tcx
+                            .get_attrs(def_id, sym::rustc_doc_primitive)
+                            .find_map(|attr| attr.value_str())
+                    {
+                        Some(prim)
+                    } else {
+                        self.tcx.opt_item_name(def_id)
+                    }
+                }
+            };
+
+            FullItemId { def_id, name, extra }
+        };
+
+        let key = match item_id {
+            ItemId::DefId(did) => (make_part(did, name, extra), None),
+            ItemId::Blanket { for_, impl_id } => {
+                (make_part(impl_id, None, None), Some(make_part(for_, name, extra)))
+            }
+            ItemId::Auto { for_, trait_ } => {
+                (make_part(trait_, None, None), Some(make_part(for_, name, extra)))
+            }
+        };
+
+        let mut interner = self.id_interner.borrow_mut();
+        let len = interner.len();
+        *interner
+            .entry(key)
+            .or_insert_with(|| Id(len.try_into().expect("too many items in a crate")))
+    }
+
+    pub(crate) fn id_from_item(&self, item: &clean::Item) -> Id {
+        match item.kind {
+            clean::ItemKind::ImportItem(ref import) => {
+                let extra =
+                    import.source.did.map(ItemId::from).map(|i| self.id_from_item_default(i));
+                self.id_from_item_inner(item.item_id, item.name, extra)
+            }
+            _ => self.id_from_item_inner(item.item_id, item.name, None),
+        }
+    }
+
+    fn ids(&self, items: impl IntoIterator<Item = clean::Item>) -> Vec<Id> {
+        items
+            .into_iter()
+            .filter(|x| !x.is_stripped() && !x.is_keyword())
+            .map(|i| self.id_from_item(&i))
+            .collect()
+    }
+
+    fn ids_keeping_stripped(
+        &self,
+        items: impl IntoIterator<Item = clean::Item>,
+    ) -> Vec<Option<Id>> {
+        items
+            .into_iter()
+            .map(|i| (!i.is_stripped() && !i.is_keyword()).then(|| self.id_from_item(&i)))
+            .collect()
+    }
+}
+
+pub(crate) trait FromClean<T> {
+    fn from_clean(f: T, renderer: &JsonRenderer<'_>) -> Self;
+}
+
+pub(crate) trait IntoJson<T> {
+    fn into_json(self, renderer: &JsonRenderer<'_>) -> T;
+}
+
+impl<T, U> IntoJson<U> for T
+where
+    U: FromClean<T>,
+{
+    fn into_json(self, renderer: &JsonRenderer<'_>) -> U {
+        U::from_clean(self, renderer)
+    }
+}
+
+impl<I, T, U> FromClean<I> for Vec<U>
 where
     I: IntoIterator<Item = T>,
-    U: FromWithTcx<T>,
+    U: FromClean<T>,
 {
-    fn from_tcx(f: I, tcx: TyCtxt<'_>) -> Vec<U> {
-        f.into_iter().map(|x| x.into_tcx(tcx)).collect()
+    fn from_clean(f: I, renderer: &JsonRenderer<'_>) -> Vec<U> {
+        f.into_iter().map(|x| x.into_json(renderer)).collect()
     }
 }
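
The new `id_from_item_inner` above implements #130078's switch from
string-formatted IDs to dense integers by interning: the first distinct
`(FullItemId, Option<FullItemId>)` key is assigned ID 0, the next 1, and so
on, so equal keys always yield equal IDs. A self-contained sketch of the same
interning pattern, with a plain `String` key standing in for the
rustc-specific `FullItemId`:

use std::cell::RefCell;
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Id(u32);

struct Interner {
    map: RefCell<HashMap<String, Id>>,
}

impl Interner {
    fn intern(&self, key: String) -> Id {
        let mut map = self.map.borrow_mut();
        let next = map.len();
        // Identical keys map to the same small integer; a new key gets the
        // next unused one.
        *map.entry(key).or_insert_with(|| Id(next.try_into().expect("too many items")))
    }
}

fn main() {
    let interner = Interner { map: RefCell::new(HashMap::new()) };
    let a = interner.intern("core::fmt::Debug".to_string());
    let b = interner.intern("core::fmt::Display".to_string());
    assert_ne!(a, b);
    assert_eq!(a, interner.intern("core::fmt::Debug".to_string()));
}
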
@@ -150,37 +226,38 @@ pub(crate) fn from_deprecation(deprecation: rustc_attr::Deprecation) -> Deprecat
     Deprecation { since, note: note.map(|s| s.to_string()) }
 }
 
-impl FromWithTcx<clean::GenericArgs> for GenericArgs {
-    fn from_tcx(args: clean::GenericArgs, tcx: TyCtxt<'_>) -> Self {
+impl FromClean<clean::GenericArgs> for GenericArgs {
+    fn from_clean(args: clean::GenericArgs, renderer: &JsonRenderer<'_>) -> Self {
         use clean::GenericArgs::*;
         match args {
             AngleBracketed { args, constraints } => GenericArgs::AngleBracketed {
-                args: args.into_vec().into_tcx(tcx),
-                constraints: constraints.into_tcx(tcx),
+                args: args.into_vec().into_json(renderer),
+                constraints: constraints.into_json(renderer),
             },
             Parenthesized { inputs, output } => GenericArgs::Parenthesized {
-                inputs: inputs.into_vec().into_tcx(tcx),
-                output: output.map(|a| (*a).into_tcx(tcx)),
+                inputs: inputs.into_vec().into_json(renderer),
+                output: output.map(|a| (*a).into_json(renderer)),
             },
         }
     }
 }
 
-impl FromWithTcx<clean::GenericArg> for GenericArg {
-    fn from_tcx(arg: clean::GenericArg, tcx: TyCtxt<'_>) -> Self {
+impl FromClean<clean::GenericArg> for GenericArg {
+    fn from_clean(arg: clean::GenericArg, renderer: &JsonRenderer<'_>) -> Self {
         use clean::GenericArg::*;
         match arg {
             Lifetime(l) => GenericArg::Lifetime(convert_lifetime(l)),
-            Type(t) => GenericArg::Type(t.into_tcx(tcx)),
-            Const(box c) => GenericArg::Const(c.into_tcx(tcx)),
+            Type(t) => GenericArg::Type(t.into_json(renderer)),
+            Const(box c) => GenericArg::Const(c.into_json(renderer)),
             Infer => GenericArg::Infer,
         }
     }
 }
 
-impl FromWithTcx<clean::Constant> for Constant {
+impl FromClean<clean::Constant> for Constant {
     // FIXME(generic_const_items): Add support for generic const items.
-    fn from_tcx(constant: clean::Constant, tcx: TyCtxt<'_>) -> Self {
+    fn from_clean(constant: clean::Constant, renderer: &JsonRenderer<'_>) -> Self {
+        let tcx = renderer.tcx;
         let expr = constant.expr(tcx);
         let value = constant.value(tcx);
         let is_literal = constant.is_literal(tcx);
@@ -188,9 +265,10 @@ impl FromWithTcx<clean::Constant> for Constant {
     }
 }
 
-impl FromWithTcx<clean::ConstantKind> for Constant {
+impl FromClean<clean::ConstantKind> for Constant {
     // FIXME(generic_const_items): Add support for generic const items.
-    fn from_tcx(constant: clean::ConstantKind, tcx: TyCtxt<'_>) -> Self {
+    fn from_clean(constant: clean::ConstantKind, renderer: &JsonRenderer<'_>) -> Self {
+        let tcx = renderer.tcx;
         let expr = constant.expr(tcx);
         let value = constant.value(tcx);
         let is_literal = constant.is_literal(tcx);
@@ -198,147 +276,62 @@ impl FromWithTcx<clean::ConstantKind> for Constant {
     }
 }
 
-impl FromWithTcx<clean::AssocItemConstraint> for AssocItemConstraint {
-    fn from_tcx(constraint: clean::AssocItemConstraint, tcx: TyCtxt<'_>) -> Self {
+impl FromClean<clean::AssocItemConstraint> for AssocItemConstraint {
+    fn from_clean(constraint: clean::AssocItemConstraint, renderer: &JsonRenderer<'_>) -> Self {
         AssocItemConstraint {
             name: constraint.assoc.name.to_string(),
-            args: constraint.assoc.args.into_tcx(tcx),
-            binding: constraint.kind.into_tcx(tcx),
+            args: constraint.assoc.args.into_json(renderer),
+            binding: constraint.kind.into_json(renderer),
         }
     }
 }
 
-impl FromWithTcx<clean::AssocItemConstraintKind> for AssocItemConstraintKind {
-    fn from_tcx(kind: clean::AssocItemConstraintKind, tcx: TyCtxt<'_>) -> Self {
+impl FromClean<clean::AssocItemConstraintKind> for AssocItemConstraintKind {
+    fn from_clean(kind: clean::AssocItemConstraintKind, renderer: &JsonRenderer<'_>) -> Self {
         use clean::AssocItemConstraintKind::*;
         match kind {
-            Equality { term } => AssocItemConstraintKind::Equality(term.into_tcx(tcx)),
-            Bound { bounds } => AssocItemConstraintKind::Constraint(bounds.into_tcx(tcx)),
+            Equality { term } => AssocItemConstraintKind::Equality(term.into_json(renderer)),
+            Bound { bounds } => AssocItemConstraintKind::Constraint(bounds.into_json(renderer)),
         }
     }
 }
 
-#[inline]
-pub(crate) fn id_from_item_default(item_id: ItemId, tcx: TyCtxt<'_>) -> Id {
-    id_from_item_inner(item_id, tcx, None, None)
-}
-
-/// It generates an ID as follows:
-///
-/// `CRATE_ID:ITEM_ID[:NAME_ID][-EXTRA]`:
-/// * If there is no `name`, `NAME_ID` is not generated.
-/// * If there is no `extra`, `EXTRA` is not generated.
-///
-/// * `name` is the item's name if available (it's not for impl blocks for example).
-/// * `extra` is used for reexports: it contains the ID of the reexported item. It is used to allow
-///   to have items with the same name but different types to both appear in the generated JSON.
-pub(crate) fn id_from_item_inner(
-    item_id: ItemId,
-    tcx: TyCtxt<'_>,
-    name: Option<Symbol>,
-    extra: Option<&Id>,
-) -> Id {
-    struct DisplayDefId<'a, 'b>(DefId, TyCtxt<'a>, Option<&'b Id>, Option<Symbol>);
-
-    impl<'a, 'b> fmt::Display for DisplayDefId<'a, 'b> {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-            let DisplayDefId(def_id, tcx, extra, name) = self;
-            // We need this workaround because primitive types' DefId actually refers to
-            // their parent module, which isn't present in the output JSON items. So
-            // instead, we directly get the primitive symbol and convert it to u32 to
-            // generate the ID.
-            let s;
-            let extra = if let Some(e) = extra {
-                s = format!("-{}", e.0);
-                &s
-            } else {
-                ""
-            };
-            let name = match name {
-                Some(name) => format!(":{}", name.as_u32()),
-                None => {
-                    // We need this workaround because primitive types' DefId actually refers to
-                    // their parent module, which isn't present in the output JSON items. So
-                    // instead, we directly get the primitive symbol and convert it to u32 to
-                    // generate the ID.
-                    if matches!(tcx.def_kind(def_id), DefKind::Mod)
-                        && let Some(prim) = tcx
-                            .get_attrs(*def_id, sym::rustc_doc_primitive)
-                            .find_map(|attr| attr.value_str())
-                    {
-                        format!(":{}", prim.as_u32())
-                    } else {
-                        tcx.opt_item_name(*def_id)
-                            .map(|n| format!(":{}", n.as_u32()))
-                            .unwrap_or_default()
-                    }
-                }
-            };
-            write!(f, "{}:{}{name}{extra}", def_id.krate.as_u32(), u32::from(def_id.index))
-        }
-    }
-
-    match item_id {
-        ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did, tcx, extra, name))),
-        ItemId::Blanket { for_, impl_id } => Id(format!(
-            "b:{}-{}",
-            DisplayDefId(impl_id, tcx, None, None),
-            DisplayDefId(for_, tcx, extra, name)
-        )),
-        ItemId::Auto { for_, trait_ } => Id(format!(
-            "a:{}-{}",
-            DisplayDefId(trait_, tcx, None, None),
-            DisplayDefId(for_, tcx, extra, name)
-        )),
-    }
-}
-
-pub(crate) fn id_from_item(item: &clean::Item, tcx: TyCtxt<'_>) -> Id {
-    match item.kind {
-        clean::ItemKind::ImportItem(ref import) => {
-            let extra =
-                import.source.did.map(ItemId::from).map(|i| id_from_item_inner(i, tcx, None, None));
-            id_from_item_inner(item.item_id, tcx, item.name, extra.as_ref())
-        }
-        _ => id_from_item_inner(item.item_id, tcx, item.name, None),
-    }
-}
-
-fn from_clean_item(item: clean::Item, tcx: TyCtxt<'_>) -> ItemEnum {
+fn from_clean_item(item: clean::Item, renderer: &JsonRenderer<'_>) -> ItemEnum {
     use clean::ItemKind::*;
     let name = item.name;
     let is_crate = item.is_crate();
-    let header = item.fn_header(tcx);
+    let header = item.fn_header(renderer.tcx);
 
     match item.inner.kind {
         ModuleItem(m) => {
-            ItemEnum::Module(Module { is_crate, items: ids(m.items, tcx), is_stripped: false })
+            ItemEnum::Module(Module { is_crate, items: renderer.ids(m.items), is_stripped: false })
         }
-        ImportItem(i) => ItemEnum::Use(i.into_tcx(tcx)),
-        StructItem(s) => ItemEnum::Struct(s.into_tcx(tcx)),
-        UnionItem(u) => ItemEnum::Union(u.into_tcx(tcx)),
-        StructFieldItem(f) => ItemEnum::StructField(f.into_tcx(tcx)),
-        EnumItem(e) => ItemEnum::Enum(e.into_tcx(tcx)),
-        VariantItem(v) => ItemEnum::Variant(v.into_tcx(tcx)),
-        FunctionItem(f) => ItemEnum::Function(from_function(f, true, header.unwrap(), tcx)),
+        ImportItem(i) => ItemEnum::Use(i.into_json(renderer)),
+        StructItem(s) => ItemEnum::Struct(s.into_json(renderer)),
+        UnionItem(u) => ItemEnum::Union(u.into_json(renderer)),
+        StructFieldItem(f) => ItemEnum::StructField(f.into_json(renderer)),
+        EnumItem(e) => ItemEnum::Enum(e.into_json(renderer)),
+        VariantItem(v) => ItemEnum::Variant(v.into_json(renderer)),
+        FunctionItem(f) => ItemEnum::Function(from_function(f, true, header.unwrap(), renderer)),
         ForeignFunctionItem(f, _) => {
-            ItemEnum::Function(from_function(f, false, header.unwrap(), tcx))
+            ItemEnum::Function(from_function(f, false, header.unwrap(), renderer))
         }
-        TraitItem(t) => ItemEnum::Trait((*t).into_tcx(tcx)),
-        TraitAliasItem(t) => ItemEnum::TraitAlias(t.into_tcx(tcx)),
-        MethodItem(m, _) => ItemEnum::Function(from_function(m, true, header.unwrap(), tcx)),
-        TyMethodItem(m) => ItemEnum::Function(from_function(m, false, header.unwrap(), tcx)),
-        ImplItem(i) => ItemEnum::Impl((*i).into_tcx(tcx)),
-        StaticItem(s) => ItemEnum::Static(s.into_tcx(tcx)),
-        ForeignStaticItem(s, _) => ItemEnum::Static(s.into_tcx(tcx)),
+        TraitItem(t) => ItemEnum::Trait((*t).into_json(renderer)),
+        TraitAliasItem(t) => ItemEnum::TraitAlias(t.into_json(renderer)),
+        MethodItem(m, _) => ItemEnum::Function(from_function(m, true, header.unwrap(), renderer)),
+        TyMethodItem(m) => ItemEnum::Function(from_function(m, false, header.unwrap(), renderer)),
+        ImplItem(i) => ItemEnum::Impl((*i).into_json(renderer)),
+        StaticItem(s) => ItemEnum::Static(s.into_json(renderer)),
+        ForeignStaticItem(s, _) => ItemEnum::Static(s.into_json(renderer)),
         ForeignTypeItem => ItemEnum::ExternType,
-        TypeAliasItem(t) => ItemEnum::TypeAlias(t.into_tcx(tcx)),
+        TypeAliasItem(t) => ItemEnum::TypeAlias(t.into_json(renderer)),
         // FIXME(generic_const_items): Add support for generic free consts
-        ConstantItem(ci) => {
-            ItemEnum::Constant { type_: ci.type_.into_tcx(tcx), const_: ci.kind.into_tcx(tcx) }
-        }
+        ConstantItem(ci) => ItemEnum::Constant {
+            type_: ci.type_.into_json(renderer),
+            const_: ci.kind.into_json(renderer),
+        },
         MacroItem(m) => ItemEnum::Macro(m.source),
-        ProcMacroItem(m) => ItemEnum::ProcMacro(m.into_tcx(tcx)),
+        ProcMacroItem(m) => ItemEnum::ProcMacro(m.into_json(renderer)),
         PrimitiveItem(p) => {
             ItemEnum::Primitive(Primitive {
                 name: p.as_sym().to_string(),
|
|||||||
}
|
}
|
||||||
// FIXME(generic_const_items): Add support for generic associated consts.
|
// FIXME(generic_const_items): Add support for generic associated consts.
|
||||||
TyAssocConstItem(_generics, ty) => {
|
TyAssocConstItem(_generics, ty) => {
|
||||||
ItemEnum::AssocConst { type_: (*ty).into_tcx(tcx), value: None }
|
ItemEnum::AssocConst { type_: (*ty).into_json(renderer), value: None }
|
||||||
}
|
}
|
||||||
// FIXME(generic_const_items): Add support for generic associated consts.
|
// FIXME(generic_const_items): Add support for generic associated consts.
|
||||||
AssocConstItem(ci) => {
|
AssocConstItem(ci) => ItemEnum::AssocConst {
|
||||||
ItemEnum::AssocConst { type_: ci.type_.into_tcx(tcx), value: Some(ci.kind.expr(tcx)) }
|
type_: ci.type_.into_json(renderer),
|
||||||
}
|
value: Some(ci.kind.expr(renderer.tcx)),
|
||||||
TyAssocTypeItem(g, b) => {
|
},
|
||||||
ItemEnum::AssocType { generics: g.into_tcx(tcx), bounds: b.into_tcx(tcx), type_: None }
|
TyAssocTypeItem(g, b) => ItemEnum::AssocType {
|
||||||
}
|
generics: g.into_json(renderer),
|
||||||
|
bounds: b.into_json(renderer),
|
||||||
|
type_: None,
|
||||||
|
},
|
||||||
AssocTypeItem(t, b) => ItemEnum::AssocType {
|
AssocTypeItem(t, b) => ItemEnum::AssocType {
|
||||||
generics: t.generics.into_tcx(tcx),
|
generics: t.generics.into_json(renderer),
|
||||||
bounds: b.into_tcx(tcx),
|
bounds: b.into_json(renderer),
|
||||||
type_: Some(t.item_type.unwrap_or(t.type_).into_tcx(tcx)),
|
type_: Some(t.item_type.unwrap_or(t.type_).into_json(renderer)),
|
||||||
},
|
},
|
||||||
// `convert_item` early returns `None` for stripped items and keywords.
|
// `convert_item` early returns `None` for stripped items and keywords.
|
||||||
KeywordItem => unreachable!(),
|
KeywordItem => unreachable!(),
|
||||||
@ -367,7 +363,7 @@ fn from_clean_item(item: clean::Item, tcx: TyCtxt<'_>) -> ItemEnum {
|
|||||||
match *inner {
|
match *inner {
|
||||||
ModuleItem(m) => ItemEnum::Module(Module {
|
ModuleItem(m) => ItemEnum::Module(Module {
|
||||||
is_crate,
|
is_crate,
|
||||||
items: ids(m.items, tcx),
|
items: renderer.ids(m.items),
|
||||||
is_stripped: true,
|
is_stripped: true,
|
||||||
}),
|
}),
|
||||||
// `convert_item` early returns `None` for stripped items we're not including
|
// `convert_item` early returns `None` for stripped items we're not including
|
||||||
@ -381,36 +377,36 @@ fn from_clean_item(item: clean::Item, tcx: TyCtxt<'_>) -> ItemEnum {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Struct> for Struct {
|
impl FromClean<clean::Struct> for Struct {
|
||||||
fn from_tcx(struct_: clean::Struct, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(struct_: clean::Struct, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
let has_stripped_fields = struct_.has_stripped_entries();
|
let has_stripped_fields = struct_.has_stripped_entries();
|
||||||
let clean::Struct { ctor_kind, generics, fields } = struct_;
|
let clean::Struct { ctor_kind, generics, fields } = struct_;
|
||||||
|
|
||||||
let kind = match ctor_kind {
|
let kind = match ctor_kind {
|
||||||
Some(CtorKind::Fn) => StructKind::Tuple(ids_keeping_stripped(fields, tcx)),
|
Some(CtorKind::Fn) => StructKind::Tuple(renderer.ids_keeping_stripped(fields)),
|
||||||
Some(CtorKind::Const) => {
|
Some(CtorKind::Const) => {
|
||||||
assert!(fields.is_empty());
|
assert!(fields.is_empty());
|
||||||
StructKind::Unit
|
StructKind::Unit
|
||||||
}
|
}
|
||||||
None => StructKind::Plain { fields: ids(fields, tcx), has_stripped_fields },
|
None => StructKind::Plain { fields: renderer.ids(fields), has_stripped_fields },
|
||||||
};
|
};
|
||||||
|
|
||||||
Struct {
|
Struct {
|
||||||
kind,
|
kind,
|
||||||
generics: generics.into_tcx(tcx),
|
generics: generics.into_json(renderer),
|
||||||
impls: Vec::new(), // Added in JsonRenderer::item
|
impls: Vec::new(), // Added in JsonRenderer::item
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Union> for Union {
|
impl FromClean<clean::Union> for Union {
|
||||||
fn from_tcx(union_: clean::Union, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(union_: clean::Union, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
let has_stripped_fields = union_.has_stripped_entries();
|
let has_stripped_fields = union_.has_stripped_entries();
|
||||||
let clean::Union { generics, fields } = union_;
|
let clean::Union { generics, fields } = union_;
|
||||||
Union {
|
Union {
|
||||||
generics: generics.into_tcx(tcx),
|
generics: generics.into_json(renderer),
|
||||||
has_stripped_fields,
|
has_stripped_fields,
|
||||||
fields: ids(fields, tcx),
|
fields: renderer.ids(fields),
|
||||||
impls: Vec::new(), // Added in JsonRenderer::item
|
impls: Vec::new(), // Added in JsonRenderer::item
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -444,51 +440,51 @@ fn convert_lifetime(l: clean::Lifetime) -> String {
|
|||||||
l.0.to_string()
|
l.0.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Generics> for Generics {
|
impl FromClean<clean::Generics> for Generics {
|
||||||
fn from_tcx(generics: clean::Generics, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(generics: clean::Generics, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
Generics {
|
Generics {
|
||||||
params: generics.params.into_tcx(tcx),
|
params: generics.params.into_json(renderer),
|
||||||
where_predicates: generics.where_predicates.into_tcx(tcx),
|
where_predicates: generics.where_predicates.into_json(renderer),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::GenericParamDef> for GenericParamDef {
|
impl FromClean<clean::GenericParamDef> for GenericParamDef {
|
||||||
fn from_tcx(generic_param: clean::GenericParamDef, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(generic_param: clean::GenericParamDef, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
GenericParamDef {
|
GenericParamDef {
|
||||||
name: generic_param.name.to_string(),
|
name: generic_param.name.to_string(),
|
||||||
kind: generic_param.kind.into_tcx(tcx),
|
kind: generic_param.kind.into_json(renderer),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::GenericParamDefKind> for GenericParamDefKind {
|
impl FromClean<clean::GenericParamDefKind> for GenericParamDefKind {
|
||||||
fn from_tcx(kind: clean::GenericParamDefKind, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(kind: clean::GenericParamDefKind, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
use clean::GenericParamDefKind::*;
|
use clean::GenericParamDefKind::*;
|
||||||
match kind {
|
match kind {
|
||||||
Lifetime { outlives } => GenericParamDefKind::Lifetime {
|
Lifetime { outlives } => GenericParamDefKind::Lifetime {
|
||||||
outlives: outlives.into_iter().map(convert_lifetime).collect(),
|
outlives: outlives.into_iter().map(convert_lifetime).collect(),
|
||||||
},
|
},
|
||||||
Type { bounds, default, synthetic } => GenericParamDefKind::Type {
|
Type { bounds, default, synthetic } => GenericParamDefKind::Type {
|
||||||
bounds: bounds.into_tcx(tcx),
|
bounds: bounds.into_json(renderer),
|
||||||
default: default.map(|x| (*x).into_tcx(tcx)),
|
default: default.map(|x| (*x).into_json(renderer)),
|
||||||
is_synthetic: synthetic,
|
is_synthetic: synthetic,
|
||||||
},
|
},
|
||||||
Const { ty, default, synthetic: _ } => GenericParamDefKind::Const {
|
Const { ty, default, synthetic: _ } => GenericParamDefKind::Const {
|
||||||
type_: (*ty).into_tcx(tcx),
|
type_: (*ty).into_json(renderer),
|
||||||
default: default.map(|x| *x),
|
default: default.map(|x| *x),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::WherePredicate> for WherePredicate {
|
impl FromClean<clean::WherePredicate> for WherePredicate {
|
||||||
fn from_tcx(predicate: clean::WherePredicate, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(predicate: clean::WherePredicate, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
use clean::WherePredicate::*;
|
use clean::WherePredicate::*;
|
||||||
match predicate {
|
match predicate {
|
||||||
BoundPredicate { ty, bounds, bound_params } => WherePredicate::BoundPredicate {
|
BoundPredicate { ty, bounds, bound_params } => WherePredicate::BoundPredicate {
|
||||||
type_: ty.into_tcx(tcx),
|
type_: ty.into_json(renderer),
|
||||||
bounds: bounds.into_tcx(tcx),
|
bounds: bounds.into_json(renderer),
|
||||||
generic_params: bound_params
|
generic_params: bound_params
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|x| {
|
.map(|x| {
|
||||||
@ -503,15 +499,15 @@ impl FromWithTcx<clean::WherePredicate> for WherePredicate {
|
|||||||
GenericParamDefKind::Type {
|
GenericParamDefKind::Type {
|
||||||
bounds: bounds
|
bounds: bounds
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|bound| bound.into_tcx(tcx))
|
.map(|bound| bound.into_json(renderer))
|
||||||
.collect(),
|
.collect(),
|
||||||
default: default.map(|ty| (*ty).into_tcx(tcx)),
|
default: default.map(|ty| (*ty).into_json(renderer)),
|
||||||
is_synthetic: synthetic,
|
is_synthetic: synthetic,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
clean::GenericParamDefKind::Const { ty, default, synthetic: _ } => {
|
clean::GenericParamDefKind::Const { ty, default, synthetic: _ } => {
|
||||||
GenericParamDefKind::Const {
|
GenericParamDefKind::Const {
|
||||||
type_: (*ty).into_tcx(tcx),
|
type_: (*ty).into_json(renderer),
|
||||||
default: default.map(|d| *d),
|
default: default.map(|d| *d),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -530,21 +526,22 @@ impl FromWithTcx<clean::WherePredicate> for WherePredicate {
|
|||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
},
|
},
|
||||||
EqPredicate { lhs, rhs } => {
|
EqPredicate { lhs, rhs } => WherePredicate::EqPredicate {
|
||||||
WherePredicate::EqPredicate { lhs: lhs.into_tcx(tcx), rhs: rhs.into_tcx(tcx) }
|
lhs: lhs.into_json(renderer),
|
||||||
}
|
rhs: rhs.into_json(renderer),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::GenericBound> for GenericBound {
|
impl FromClean<clean::GenericBound> for GenericBound {
|
||||||
fn from_tcx(bound: clean::GenericBound, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(bound: clean::GenericBound, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
use clean::GenericBound::*;
|
use clean::GenericBound::*;
|
||||||
match bound {
|
match bound {
|
||||||
TraitBound(clean::PolyTrait { trait_, generic_params }, modifier) => {
|
TraitBound(clean::PolyTrait { trait_, generic_params }, modifier) => {
|
||||||
GenericBound::TraitBound {
|
GenericBound::TraitBound {
|
||||||
trait_: trait_.into_tcx(tcx),
|
trait_: trait_.into_json(renderer),
|
||||||
generic_params: generic_params.into_tcx(tcx),
|
generic_params: generic_params.into_json(renderer),
|
||||||
modifier: from_trait_bound_modifier(modifier),
|
modifier: from_trait_bound_modifier(modifier),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -572,73 +569,75 @@ pub(crate) fn from_trait_bound_modifier(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Type> for Type {
|
impl FromClean<clean::Type> for Type {
|
||||||
fn from_tcx(ty: clean::Type, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(ty: clean::Type, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
use clean::Type::{
|
use clean::Type::{
|
||||||
Array, BareFunction, BorrowedRef, Generic, ImplTrait, Infer, Primitive, QPath,
|
Array, BareFunction, BorrowedRef, Generic, ImplTrait, Infer, Primitive, QPath,
|
||||||
RawPointer, SelfTy, Slice, Tuple,
|
RawPointer, SelfTy, Slice, Tuple,
|
||||||
};
|
};
|
||||||
|
|
||||||
match ty {
|
match ty {
|
||||||
clean::Type::Path { path } => Type::ResolvedPath(path.into_tcx(tcx)),
|
clean::Type::Path { path } => Type::ResolvedPath(path.into_json(renderer)),
|
||||||
clean::Type::DynTrait(bounds, lt) => Type::DynTrait(DynTrait {
|
clean::Type::DynTrait(bounds, lt) => Type::DynTrait(DynTrait {
|
||||||
lifetime: lt.map(convert_lifetime),
|
lifetime: lt.map(convert_lifetime),
|
||||||
traits: bounds.into_tcx(tcx),
|
traits: bounds.into_json(renderer),
|
||||||
}),
|
}),
|
||||||
Generic(s) => Type::Generic(s.to_string()),
|
Generic(s) => Type::Generic(s.to_string()),
|
||||||
// FIXME: add dedicated variant to json Type?
|
// FIXME: add dedicated variant to json Type?
|
||||||
SelfTy => Type::Generic("Self".to_owned()),
|
SelfTy => Type::Generic("Self".to_owned()),
|
||||||
Primitive(p) => Type::Primitive(p.as_sym().to_string()),
|
Primitive(p) => Type::Primitive(p.as_sym().to_string()),
|
||||||
BareFunction(f) => Type::FunctionPointer(Box::new((*f).into_tcx(tcx))),
|
BareFunction(f) => Type::FunctionPointer(Box::new((*f).into_json(renderer))),
|
||||||
Tuple(t) => Type::Tuple(t.into_tcx(tcx)),
|
Tuple(t) => Type::Tuple(t.into_json(renderer)),
|
||||||
Slice(t) => Type::Slice(Box::new((*t).into_tcx(tcx))),
|
Slice(t) => Type::Slice(Box::new((*t).into_json(renderer))),
|
||||||
Array(t, s) => Type::Array { type_: Box::new((*t).into_tcx(tcx)), len: s.to_string() },
|
Array(t, s) => {
|
||||||
|
Type::Array { type_: Box::new((*t).into_json(renderer)), len: s.to_string() }
|
||||||
|
}
|
||||||
clean::Type::Pat(t, p) => Type::Pat {
|
clean::Type::Pat(t, p) => Type::Pat {
|
||||||
type_: Box::new((*t).into_tcx(tcx)),
|
type_: Box::new((*t).into_json(renderer)),
|
||||||
__pat_unstable_do_not_use: p.to_string(),
|
__pat_unstable_do_not_use: p.to_string(),
|
||||||
},
|
},
|
||||||
ImplTrait(g) => Type::ImplTrait(g.into_tcx(tcx)),
|
ImplTrait(g) => Type::ImplTrait(g.into_json(renderer)),
|
||||||
Infer => Type::Infer,
|
Infer => Type::Infer,
|
||||||
RawPointer(mutability, type_) => Type::RawPointer {
|
RawPointer(mutability, type_) => Type::RawPointer {
|
||||||
is_mutable: mutability == ast::Mutability::Mut,
|
is_mutable: mutability == ast::Mutability::Mut,
|
||||||
type_: Box::new((*type_).into_tcx(tcx)),
|
type_: Box::new((*type_).into_json(renderer)),
|
||||||
},
|
},
|
||||||
BorrowedRef { lifetime, mutability, type_ } => Type::BorrowedRef {
|
BorrowedRef { lifetime, mutability, type_ } => Type::BorrowedRef {
|
||||||
lifetime: lifetime.map(convert_lifetime),
|
lifetime: lifetime.map(convert_lifetime),
|
||||||
is_mutable: mutability == ast::Mutability::Mut,
|
is_mutable: mutability == ast::Mutability::Mut,
|
||||||
type_: Box::new((*type_).into_tcx(tcx)),
|
type_: Box::new((*type_).into_json(renderer)),
|
||||||
},
|
},
|
||||||
QPath(box clean::QPathData { assoc, self_type, trait_, .. }) => Type::QualifiedPath {
|
QPath(box clean::QPathData { assoc, self_type, trait_, .. }) => Type::QualifiedPath {
|
||||||
name: assoc.name.to_string(),
|
name: assoc.name.to_string(),
|
||||||
args: Box::new(assoc.args.into_tcx(tcx)),
|
args: Box::new(assoc.args.into_json(renderer)),
|
||||||
self_type: Box::new(self_type.into_tcx(tcx)),
|
self_type: Box::new(self_type.into_json(renderer)),
|
||||||
trait_: trait_.map(|trait_| trait_.into_tcx(tcx)),
|
trait_: trait_.map(|trait_| trait_.into_json(renderer)),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Path> for Path {
|
impl FromClean<clean::Path> for Path {
|
||||||
fn from_tcx(path: clean::Path, tcx: TyCtxt<'_>) -> Path {
|
fn from_clean(path: clean::Path, renderer: &JsonRenderer<'_>) -> Path {
|
||||||
Path {
|
Path {
|
||||||
name: path.whole_name(),
|
name: path.whole_name(),
|
||||||
id: id_from_item_default(path.def_id().into(), tcx),
|
id: renderer.id_from_item_default(path.def_id().into()),
|
||||||
args: path.segments.last().map(|args| Box::new(args.clone().args.into_tcx(tcx))),
|
args: path.segments.last().map(|args| Box::new(args.clone().args.into_json(renderer))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Term> for Term {
|
impl FromClean<clean::Term> for Term {
|
||||||
fn from_tcx(term: clean::Term, tcx: TyCtxt<'_>) -> Term {
|
fn from_clean(term: clean::Term, renderer: &JsonRenderer<'_>) -> Term {
|
||||||
match term {
|
match term {
|
||||||
clean::Term::Type(ty) => Term::Type(FromWithTcx::from_tcx(ty, tcx)),
|
clean::Term::Type(ty) => Term::Type(ty.into_json(renderer)),
|
||||||
clean::Term::Constant(c) => Term::Constant(FromWithTcx::from_tcx(c, tcx)),
|
clean::Term::Constant(c) => Term::Constant(c.into_json(renderer)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::BareFunctionDecl> for FunctionPointer {
|
impl FromClean<clean::BareFunctionDecl> for FunctionPointer {
|
||||||
fn from_tcx(bare_decl: clean::BareFunctionDecl, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(bare_decl: clean::BareFunctionDecl, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
let clean::BareFunctionDecl { safety, generic_params, decl, abi } = bare_decl;
|
let clean::BareFunctionDecl { safety, generic_params, decl, abi } = bare_decl;
|
||||||
FunctionPointer {
|
FunctionPointer {
|
||||||
header: FunctionHeader {
|
header: FunctionHeader {
|
||||||
@ -647,29 +646,30 @@ impl FromWithTcx<clean::BareFunctionDecl> for FunctionPointer {
|
|||||||
is_async: false,
|
is_async: false,
|
||||||
abi: convert_abi(abi),
|
abi: convert_abi(abi),
|
||||||
},
|
},
|
||||||
generic_params: generic_params.into_tcx(tcx),
|
generic_params: generic_params.into_json(renderer),
|
||||||
sig: decl.into_tcx(tcx),
|
sig: decl.into_json(renderer),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::FnDecl> for FunctionSignature {
|
impl FromClean<clean::FnDecl> for FunctionSignature {
|
||||||
fn from_tcx(decl: clean::FnDecl, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(decl: clean::FnDecl, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
let clean::FnDecl { inputs, output, c_variadic } = decl;
|
let clean::FnDecl { inputs, output, c_variadic } = decl;
|
||||||
FunctionSignature {
|
FunctionSignature {
|
||||||
inputs: inputs
|
inputs: inputs
|
||||||
.values
|
.values
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|arg| (arg.name.to_string(), arg.type_.into_tcx(tcx)))
|
.map(|arg| (arg.name.to_string(), arg.type_.into_json(renderer)))
|
||||||
.collect(),
|
.collect(),
|
||||||
output: if output.is_unit() { None } else { Some(output.into_tcx(tcx)) },
|
output: if output.is_unit() { None } else { Some(output.into_json(renderer)) },
|
||||||
is_c_variadic: c_variadic,
|
is_c_variadic: c_variadic,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Trait> for Trait {
|
impl FromClean<clean::Trait> for Trait {
|
||||||
fn from_tcx(trait_: clean::Trait, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(trait_: clean::Trait, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
|
let tcx = renderer.tcx;
|
||||||
let is_auto = trait_.is_auto(tcx);
|
let is_auto = trait_.is_auto(tcx);
|
||||||
let is_unsafe = trait_.safety(tcx) == rustc_hir::Safety::Unsafe;
|
let is_unsafe = trait_.safety(tcx) == rustc_hir::Safety::Unsafe;
|
||||||
let is_object_safe = trait_.is_object_safe(tcx);
|
let is_object_safe = trait_.is_object_safe(tcx);
|
||||||
@ -678,26 +678,29 @@ impl FromWithTcx<clean::Trait> for Trait {
|
|||||||
is_auto,
|
is_auto,
|
||||||
is_unsafe,
|
is_unsafe,
|
||||||
is_object_safe,
|
is_object_safe,
|
||||||
items: ids(items, tcx),
|
items: renderer.ids(items),
|
||||||
generics: generics.into_tcx(tcx),
|
generics: generics.into_json(renderer),
|
||||||
bounds: bounds.into_tcx(tcx),
|
bounds: bounds.into_json(renderer),
|
||||||
implementations: Vec::new(), // Added in JsonRenderer::item
|
implementations: Vec::new(), // Added in JsonRenderer::item
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::PolyTrait> for PolyTrait {
|
impl FromClean<clean::PolyTrait> for PolyTrait {
|
||||||
fn from_tcx(
|
fn from_clean(
|
||||||
clean::PolyTrait { trait_, generic_params }: clean::PolyTrait,
|
clean::PolyTrait { trait_, generic_params }: clean::PolyTrait,
|
||||||
tcx: TyCtxt<'_>,
|
renderer: &JsonRenderer<'_>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
PolyTrait { trait_: trait_.into_tcx(tcx), generic_params: generic_params.into_tcx(tcx) }
|
PolyTrait {
|
||||||
|
trait_: trait_.into_json(renderer),
|
||||||
|
generic_params: generic_params.into_json(renderer),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Impl> for Impl {
|
impl FromClean<clean::Impl> for Impl {
|
||||||
fn from_tcx(impl_: clean::Impl, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(impl_: clean::Impl, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
let provided_trait_methods = impl_.provided_trait_methods(tcx);
|
let provided_trait_methods = impl_.provided_trait_methods(renderer.tcx);
|
||||||
let clean::Impl { safety, generics, trait_, for_, items, polarity, kind } = impl_;
|
let clean::Impl { safety, generics, trait_, for_, items, polarity, kind } = impl_;
|
||||||
// FIXME: use something like ImplKind in JSON?
|
// FIXME: use something like ImplKind in JSON?
|
||||||
let (is_synthetic, blanket_impl) = match kind {
|
let (is_synthetic, blanket_impl) = match kind {
|
||||||
@ -711,17 +714,17 @@ impl FromWithTcx<clean::Impl> for Impl {
|
|||||||
};
|
};
|
||||||
Impl {
|
Impl {
|
||||||
is_unsafe: safety == rustc_hir::Safety::Unsafe,
|
is_unsafe: safety == rustc_hir::Safety::Unsafe,
|
||||||
generics: generics.into_tcx(tcx),
|
generics: generics.into_json(renderer),
|
||||||
provided_trait_methods: provided_trait_methods
|
provided_trait_methods: provided_trait_methods
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|x| x.to_string())
|
.map(|x| x.to_string())
|
||||||
.collect(),
|
.collect(),
|
||||||
trait_: trait_.map(|path| path.into_tcx(tcx)),
|
trait_: trait_.map(|path| path.into_json(renderer)),
|
||||||
for_: for_.into_tcx(tcx),
|
for_: for_.into_json(renderer),
|
||||||
items: ids(items, tcx),
|
items: renderer.ids(items),
|
||||||
is_negative,
|
is_negative,
|
||||||
is_synthetic,
|
is_synthetic,
|
||||||
blanket_impl: blanket_impl.map(|x| x.into_tcx(tcx)),
|
blanket_impl: blanket_impl.map(|x| x.into_json(renderer)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -730,42 +733,42 @@ pub(crate) fn from_function(
|
|||||||
function: Box<clean::Function>,
|
function: Box<clean::Function>,
|
||||||
has_body: bool,
|
has_body: bool,
|
||||||
header: rustc_hir::FnHeader,
|
header: rustc_hir::FnHeader,
|
||||||
tcx: TyCtxt<'_>,
|
renderer: &JsonRenderer<'_>,
|
||||||
) -> Function {
|
) -> Function {
|
||||||
let clean::Function { decl, generics } = *function;
|
let clean::Function { decl, generics } = *function;
|
||||||
Function {
|
Function {
|
||||||
sig: decl.into_tcx(tcx),
|
sig: decl.into_json(renderer),
|
||||||
generics: generics.into_tcx(tcx),
|
generics: generics.into_json(renderer),
|
||||||
header: from_fn_header(&header),
|
header: from_fn_header(&header),
|
||||||
has_body,
|
has_body,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Enum> for Enum {
|
impl FromClean<clean::Enum> for Enum {
|
||||||
fn from_tcx(enum_: clean::Enum, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(enum_: clean::Enum, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
let has_stripped_variants = enum_.has_stripped_entries();
|
let has_stripped_variants = enum_.has_stripped_entries();
|
||||||
let clean::Enum { variants, generics } = enum_;
|
let clean::Enum { variants, generics } = enum_;
|
||||||
Enum {
|
Enum {
|
||||||
generics: generics.into_tcx(tcx),
|
generics: generics.into_json(renderer),
|
||||||
has_stripped_variants,
|
has_stripped_variants,
|
||||||
variants: ids(variants, tcx),
|
variants: renderer.ids(variants),
|
||||||
impls: Vec::new(), // Added in JsonRenderer::item
|
impls: Vec::new(), // Added in JsonRenderer::item
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Variant> for Variant {
|
impl FromClean<clean::Variant> for Variant {
|
||||||
fn from_tcx(variant: clean::Variant, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(variant: clean::Variant, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
use clean::VariantKind::*;
|
use clean::VariantKind::*;
|
||||||
|
|
||||||
let discriminant = variant.discriminant.map(|d| d.into_tcx(tcx));
|
let discriminant = variant.discriminant.map(|d| d.into_json(renderer));
|
||||||
|
|
||||||
let kind = match variant.kind {
|
let kind = match variant.kind {
|
||||||
CLike => VariantKind::Plain,
|
CLike => VariantKind::Plain,
|
||||||
Tuple(fields) => VariantKind::Tuple(ids_keeping_stripped(fields, tcx)),
|
Tuple(fields) => VariantKind::Tuple(renderer.ids_keeping_stripped(fields)),
|
||||||
Struct(s) => VariantKind::Struct {
|
Struct(s) => VariantKind::Struct {
|
||||||
has_stripped_fields: s.has_stripped_entries(),
|
has_stripped_fields: s.has_stripped_entries(),
|
||||||
fields: ids(s.fields, tcx),
|
fields: renderer.ids(s.fields),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -773,8 +776,9 @@ impl FromWithTcx<clean::Variant> for Variant {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Discriminant> for Discriminant {
|
impl FromClean<clean::Discriminant> for Discriminant {
|
||||||
fn from_tcx(disr: clean::Discriminant, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(disr: clean::Discriminant, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
|
let tcx = renderer.tcx;
|
||||||
Discriminant {
|
Discriminant {
|
||||||
// expr is only none if going through the inlining path, which gets
|
// expr is only none if going through the inlining path, which gets
|
||||||
// `rustc_middle` types, not `rustc_hir`, but because JSON never inlines
|
// `rustc_middle` types, not `rustc_hir`, but because JSON never inlines
|
||||||
@ -785,8 +789,8 @@ impl FromWithTcx<clean::Discriminant> for Discriminant {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Import> for Use {
|
impl FromClean<clean::Import> for Use {
|
||||||
fn from_tcx(import: clean::Import, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(import: clean::Import, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
use clean::ImportKind::*;
|
use clean::ImportKind::*;
|
||||||
let (name, is_glob) = match import.kind {
|
let (name, is_glob) = match import.kind {
|
||||||
Simple(s) => (s.to_string(), false),
|
Simple(s) => (s.to_string(), false),
|
||||||
@ -798,14 +802,14 @@ impl FromWithTcx<clean::Import> for Use {
|
|||||||
Use {
|
Use {
|
||||||
source: import.source.path.whole_name(),
|
source: import.source.path.whole_name(),
|
||||||
name,
|
name,
|
||||||
id: import.source.did.map(ItemId::from).map(|i| id_from_item_default(i, tcx)),
|
id: import.source.did.map(ItemId::from).map(|i| renderer.id_from_item_default(i)),
|
||||||
is_glob,
|
is_glob,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::ProcMacro> for ProcMacro {
|
impl FromClean<clean::ProcMacro> for ProcMacro {
|
||||||
fn from_tcx(mac: clean::ProcMacro, _tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(mac: clean::ProcMacro, _renderer: &JsonRenderer<'_>) -> Self {
|
||||||
ProcMacro {
|
ProcMacro {
|
||||||
kind: from_macro_kind(mac.kind),
|
kind: from_macro_kind(mac.kind),
|
||||||
helpers: mac.helpers.iter().map(|x| x.to_string()).collect(),
|
helpers: mac.helpers.iter().map(|x| x.to_string()).collect(),
|
||||||
@ -822,17 +826,18 @@ pub(crate) fn from_macro_kind(kind: rustc_span::hygiene::MacroKind) -> MacroKind
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<Box<clean::TypeAlias>> for TypeAlias {
|
impl FromClean<Box<clean::TypeAlias>> for TypeAlias {
|
||||||
fn from_tcx(type_alias: Box<clean::TypeAlias>, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(type_alias: Box<clean::TypeAlias>, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
let clean::TypeAlias { type_, generics, item_type: _, inner_type: _ } = *type_alias;
|
let clean::TypeAlias { type_, generics, item_type: _, inner_type: _ } = *type_alias;
|
||||||
TypeAlias { type_: type_.into_tcx(tcx), generics: generics.into_tcx(tcx) }
|
TypeAlias { type_: type_.into_json(renderer), generics: generics.into_json(renderer) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromWithTcx<clean::Static> for Static {
|
impl FromClean<clean::Static> for Static {
|
||||||
fn from_tcx(stat: clean::Static, tcx: TyCtxt<'_>) -> Self {
|
fn from_clean(stat: clean::Static, renderer: &JsonRenderer<'_>) -> Self {
|
||||||
|
let tcx = renderer.tcx;
|
||||||
Static {
|
Static {
|
||||||
type_: (*stat.type_).into_tcx(tcx),
|
type_: (*stat.type_).into_json(renderer),
|
||||||
is_mutable: stat.mutability == ast::Mutability::Mut,
|
is_mutable: stat.mutability == ast::Mutability::Mut,
|
||||||
expr: stat
|
expr: stat
|
||||||
.expr
|
.expr
|
||||||
@@ -842,14 +847,17 @@ impl FromWithTcx<clean::Static> for Static {
     }
 }
 
-impl FromWithTcx<clean::TraitAlias> for TraitAlias {
-    fn from_tcx(alias: clean::TraitAlias, tcx: TyCtxt<'_>) -> Self {
-        TraitAlias { generics: alias.generics.into_tcx(tcx), params: alias.bounds.into_tcx(tcx) }
+impl FromClean<clean::TraitAlias> for TraitAlias {
+    fn from_clean(alias: clean::TraitAlias, renderer: &JsonRenderer<'_>) -> Self {
+        TraitAlias {
+            generics: alias.generics.into_json(renderer),
+            params: alias.bounds.into_json(renderer),
+        }
     }
 }
 
-impl FromWithTcx<ItemType> for ItemKind {
-    fn from_tcx(kind: ItemType, _tcx: TyCtxt<'_>) -> Self {
+impl FromClean<ItemType> for ItemKind {
+    fn from_clean(kind: ItemType, _renderer: &JsonRenderer<'_>) -> Self {
         use ItemType::*;
         match kind {
             Module => ItemKind::Module,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn ids(items: impl IntoIterator<Item = clean::Item>, tcx: TyCtxt<'_>) -> Vec<Id> {
|
|
||||||
items
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| !x.is_stripped() && !x.is_keyword())
|
|
||||||
.map(|i| id_from_item(&i, tcx))
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ids_keeping_stripped(
|
|
||||||
items: impl IntoIterator<Item = clean::Item>,
|
|
||||||
tcx: TyCtxt<'_>,
|
|
||||||
) -> Vec<Option<Id>> {
|
|
||||||
items
|
|
||||||
.into_iter()
|
|
||||||
.map(
|
|
||||||
|i| {
|
|
||||||
if !i.is_stripped() && !i.is_keyword() { Some(id_from_item(&i, tcx)) } else { None }
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
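For context, the hunks above rename `FromWithTcx`/`from_tcx` to `FromClean`/`from_clean` and thread the whole `JsonRenderer` through each conversion instead of a bare `TyCtxt`, so conversions can reach the renderer's id interner. Below is a minimal, self-contained sketch of that trait pattern; `Renderer`, `CleanStatic`, and `JsonStatic` are stand-in types for illustration, not the real rustdoc types.

// Sketch of the conversion-trait pattern: `FromClean` carries a renderer
// context, and a blanket `IntoJson` impl provides the `x.into_json(renderer)`
// call sites seen throughout the diff.
struct Renderer {
    crate_name: String,
}

trait FromClean<T> {
    fn from_clean(value: T, renderer: &Renderer) -> Self;
}

trait IntoJson<T> {
    fn into_json(self, renderer: &Renderer) -> T;
}

// Blanket impl, mirroring std's From/Into pairing: anything convertible
// *from* a clean type is convertible *into* its JSON type.
impl<T, U> IntoJson<U> for T
where
    U: FromClean<T>,
{
    fn into_json(self, renderer: &Renderer) -> U {
        U::from_clean(self, renderer)
    }
}

struct CleanStatic {
    name: String,
}

struct JsonStatic {
    qualified_name: String,
}

impl FromClean<CleanStatic> for JsonStatic {
    fn from_clean(s: CleanStatic, renderer: &Renderer) -> Self {
        JsonStatic { qualified_name: format!("{}::{}", renderer.crate_name, s.name) }
    }
}

fn main() {
    let renderer = Renderer { crate_name: "demo".to_string() };
    let s = CleanStatic { name: "ANSWER".to_string() };
    let json: JsonStatic = s.into_json(&renderer);
    assert_eq!(json.qualified_name, "demo::ANSWER");
}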
@@ -16,6 +16,7 @@ use std::rc::Rc;
 use rustc_hir::def_id::{DefId, DefIdSet};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
+use rustc_span::Symbol;
 use rustc_span::def_id::LOCAL_CRATE;
 use rustdoc_json_types as types;
 // It's important to use the FxHashMap from rustdoc_json_types here, instead of
@@ -31,9 +32,17 @@ use crate::docfs::PathError;
 use crate::error::Error;
 use crate::formats::FormatRenderer;
 use crate::formats::cache::Cache;
-use crate::json::conversions::{IntoWithTcx, id_from_item, id_from_item_default};
+use crate::json::conversions::IntoJson;
 use crate::{clean, try_err};
 
+#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+struct FullItemId {
+    def_id: DefId,
+    name: Option<Symbol>,
+    /// Used to distinguish imports of different items with the same name
+    extra: Option<types::Id>,
+}
+
 #[derive(Clone)]
 pub(crate) struct JsonRenderer<'tcx> {
     tcx: TyCtxt<'tcx>,
@@ -46,6 +55,7 @@ pub(crate) struct JsonRenderer<'tcx> {
     out_dir: Option<PathBuf>,
     cache: Rc<Cache>,
     imported_items: DefIdSet,
+    id_interner: Rc<RefCell<FxHashMap<(FullItemId, Option<FullItemId>), types::Id>>>,
 }
 
 impl<'tcx> JsonRenderer<'tcx> {
@@ -63,7 +73,7 @@ impl<'tcx> JsonRenderer<'tcx> {
                 .map(|i| {
                     let item = &i.impl_item;
                     self.item(item.clone()).unwrap();
-                    id_from_item(&item, self.tcx)
+                    self.id_from_item(&item)
                 })
                 .collect()
         })
@@ -94,7 +104,7 @@ impl<'tcx> JsonRenderer<'tcx> {
 
                         if item.item_id.is_local() || is_primitive_impl {
                             self.item(item.clone()).unwrap();
-                            Some(id_from_item(&item, self.tcx))
+                            Some(self.id_from_item(&item))
                         } else {
                             None
                         }
@@ -145,6 +155,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
                 out_dir: if options.output_to_stdout { None } else { Some(options.output) },
                 cache: Rc::new(cache),
                 imported_items,
+                id_interner: Default::default(),
             },
             krate,
         ))
@@ -243,7 +254,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
 
         debug!("Constructing Output");
         let output_crate = types::Crate {
-            root: types::Id(format!("0:0:{}", e.name(self.tcx).as_u32())),
+            root: self.id_from_item_default(e.def_id().into()),
            crate_version: self.cache.crate_version.clone(),
             includes_private: self.cache.document_private,
             index,
@@ -253,10 +264,10 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
                 .iter()
                 .chain(&self.cache.external_paths)
                 .map(|(&k, &(ref path, kind))| {
-                    (id_from_item_default(k.into(), self.tcx), types::ItemSummary {
+                    (self.id_from_item_default(k.into()), types::ItemSummary {
                         crate_id: k.krate.as_u32(),
                         path: path.iter().map(|s| s.to_string()).collect(),
-                        kind: kind.into_tcx(self.tcx),
+                        kind: kind.into_json(self),
                     })
                 })
                 .collect(),
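The new `id_interner` field is what lets the JSON output use small integer ids: each distinct `(FullItemId, Option<FullItemId>)` key is assigned the next dense `u32` the first time it is seen. Here is a simplified, self-contained sketch of that interning scheme; `ItemKey` is a hypothetical stand-in for the real key type, and this is not the actual rustdoc implementation.

// Sketch of dense-id interning behind a shared reference. Interior
// mutability (`RefCell`) matches how the renderer is threaded immutably
// through the conversion traits while still growing the table.
use std::cell::RefCell;
use std::collections::HashMap;

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
struct ItemKey {
    def_index: u32,
    name: Option<String>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Id(u32);

#[derive(Default)]
struct Interner {
    map: RefCell<HashMap<ItemKey, Id>>,
}

impl Interner {
    // Returns the existing Id for a key, or assigns the next dense u32.
    fn intern(&self, key: ItemKey) -> Id {
        let mut map = self.map.borrow_mut();
        let next = Id(map.len() as u32);
        *map.entry(key).or_insert(next)
    }
}

fn main() {
    let interner = Interner::default();
    let a = interner.intern(ItemKey { def_index: 7, name: Some("foo".into()) });
    let b = interner.intern(ItemKey { def_index: 7, name: Some("foo".into()) });
    let c = interner.intern(ItemKey { def_index: 8, name: None });
    assert_eq!(a, b); // same key, same Id
    assert_ne!(a, c); // different key, fresh Id
}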
@@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize};
 /// This integer is incremented with every breaking change to the API,
 /// and is returned along with the JSON blob as [`Crate::format_version`].
 /// Consuming code should assert that this value matches the format version(s) that it supports.
-pub const FORMAT_VERSION: u32 = 34;
+pub const FORMAT_VERSION: u32 = 35;
 
 /// The root of the emitted JSON blob.
 ///
@@ -296,9 +296,9 @@ pub enum AssocItemConstraintKind {
 /// Rustdoc makes no guarantees about the inner value of Id's. Applications
 /// should treat them as opaque keys to lookup items, and avoid attempting
 /// to parse them, or otherwise depend on any implementation details.
-#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
 // FIXME(aDotInTheVoid): Consider making this non-public in rustdoc-types.
-pub struct Id(pub String);
+pub struct Id(pub u32);
 
 /// The fundamental kind of an item. Unlike [`ItemEnum`], this does not carry any aditional info.
 ///
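Since `Id` changed representation and `FORMAT_VERSION` was bumped to 35, consumers should do exactly what the doc comment asks: check the format version and treat ids as opaque lookup keys. A sketch of that consumer-side check, assuming a Cargo dependency on the out-of-tree `rustdoc-types` crate (which mirrors `rustdoc-json-types`) plus `serde_json`; the JSON path is hypothetical.

// Load a rustdoc JSON blob, verify its format version, and look up the
// root item through its opaque Id.
use rustdoc_types::{Crate, FORMAT_VERSION};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let json = std::fs::read_to_string("target/doc/demo_crate.json")?;
    let krate: Crate = serde_json::from_str(&json)?;

    // Reject mismatched format versions instead of silently misreading data.
    assert_eq!(
        krate.format_version, FORMAT_VERSION,
        "rustdoc JSON format version mismatch"
    );

    // Ids are opaque keys: use them only to index into `index`/`paths`,
    // never parse their contents (now a plain integer wrapper anyway).
    let root = &krate.index[&krate.root];
    println!("root module: {:?}", root.name);
    Ok(())
}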
@@ -418,7 +418,7 @@ impl<'a> Validator<'a> {
         } else if !self.missing_ids.contains(id) {
             self.missing_ids.insert(id);
 
-            let sels = json_find::find_selector(&self.krate_json, &Value::String(id.0.clone()));
+            let sels = json_find::find_selector(&self.krate_json, &Value::Number(id.0.into()));
             assert_ne!(sels.len(), 0);
 
             self.fail(id, ErrorKind::NotFound(sels))
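This one-line change follows directly from the new representation: to report where a dangling id occurs, the validator now searches the serialized crate for a JSON number rather than a JSON string. As an illustration only (this is not the real `json_find` implementation), a minimal recursive search over `serde_json::Value` looks like this:

// Collect dotted paths to every occurrence of `needle` in a JSON tree.
use serde_json::{json, Value};

fn find_paths(haystack: &Value, needle: &Value, path: &mut Vec<String>, out: &mut Vec<String>) {
    if haystack == needle {
        out.push(path.join("."));
    }
    match haystack {
        Value::Array(items) => {
            for (i, item) in items.iter().enumerate() {
                path.push(i.to_string());
                find_paths(item, needle, path, out);
                path.pop();
            }
        }
        Value::Object(map) => {
            for (key, item) in map {
                path.push(key.clone());
                find_paths(item, needle, path, out);
                path.pop();
            }
        }
        _ => {}
    }
}

fn main() {
    let krate = json!({ "root": 0, "index": { "0": { "links": { "Not Found": 1 } } } });
    let mut out = Vec::new();
    // With integer ids, the needle is a Value::Number, not a Value::String.
    find_paths(&krate, &Value::Number(1.into()), &mut Vec::new(), &mut out);
    assert_eq!(out, vec!["index.0.links.Not Found".to_string()]);
}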
@@ -15,24 +15,20 @@ fn check(krate: &Crate, errs: &[Error]) {
     assert_eq!(errs, &validator.errs[..]);
 }
 
-fn id(s: &str) -> Id {
-    Id(s.to_owned())
-}
-
 #[test]
 fn errors_on_missing_links() {
     let k = Crate {
-        root: id("0"),
+        root: Id(0),
         crate_version: None,
         includes_private: false,
-        index: FxHashMap::from_iter([(id("0"), Item {
+        index: FxHashMap::from_iter([(Id(0), Item {
             name: Some("root".to_owned()),
-            id: id(""),
+            id: Id(0),
             crate_id: 0,
             span: None,
             visibility: Visibility::Public,
             docs: None,
-            links: FxHashMap::from_iter([("Not Found".to_owned(), id("1"))]),
+            links: FxHashMap::from_iter([("Not Found".to_owned(), Id(1))]),
             attrs: vec![],
             deprecation: None,
             inner: ItemEnum::Module(Module { is_crate: true, items: vec![], is_stripped: false }),
@@ -49,7 +45,7 @@ fn errors_on_missing_links() {
             SelectorPart::Field("links".to_owned()),
             SelectorPart::Field("Not Found".to_owned()),
         ]]),
-        id: id("1"),
+        id: Id(1),
     }]);
 }
 
@@ -58,28 +54,28 @@ fn errors_on_missing_links() {
 #[test]
 fn errors_on_local_in_paths_and_not_index() {
     let krate = Crate {
-        root: id("0:0:1572"),
+        root: Id(0),
         crate_version: None,
         includes_private: false,
         index: FxHashMap::from_iter([
-            (id("0:0:1572"), Item {
-                id: id("0:0:1572"),
+            (Id(0), Item {
+                id: Id(0),
                 crate_id: 0,
                 name: Some("microcore".to_owned()),
                 span: None,
                 visibility: Visibility::Public,
                 docs: None,
-                links: FxHashMap::from_iter([(("prim@i32".to_owned(), id("0:1:1571")))]),
+                links: FxHashMap::from_iter([(("prim@i32".to_owned(), Id(2)))]),
                 attrs: Vec::new(),
                 deprecation: None,
                 inner: ItemEnum::Module(Module {
                     is_crate: true,
-                    items: vec![id("0:1:717")],
+                    items: vec![Id(1)],
                     is_stripped: false,
                 }),
             }),
-            (id("0:1:717"), Item {
-                id: id("0:1:717"),
+            (Id(1), Item {
+                id: Id(1),
                 crate_id: 0,
                 name: Some("i32".to_owned()),
                 span: None,
@@ -91,7 +87,7 @@ fn errors_on_local_in_paths_and_not_index() {
                 inner: ItemEnum::Primitive(Primitive { name: "i32".to_owned(), impls: vec![] }),
             }),
         ]),
-        paths: FxHashMap::from_iter([(id("0:1:1571"), ItemSummary {
+        paths: FxHashMap::from_iter([(Id(2), ItemSummary {
             crate_id: 0,
             path: vec!["microcore".to_owned(), "i32".to_owned()],
             kind: ItemKind::Primitive,
@@ -101,7 +97,7 @@ fn errors_on_local_in_paths_and_not_index() {
     };
 
     check(&krate, &[Error {
-        id: id("0:1:1571"),
+        id: Id(2),
         kind: ErrorKind::Custom("Id for local item in `paths` but not in `index`".to_owned()),
     }]);
 }
@@ -110,11 +106,11 @@ fn errors_on_local_in_paths_and_not_index() {
 #[should_panic = "LOCAL_CRATE_ID is wrong"]
 fn checks_local_crate_id_is_correct() {
     let krate = Crate {
-        root: id("root"),
+        root: Id(0),
         crate_version: None,
         includes_private: false,
-        index: FxHashMap::from_iter([(id("root"), Item {
-            id: id("root"),
+        index: FxHashMap::from_iter([(Id(0), Item {
+            id: Id(0),
             crate_id: LOCAL_CRATE_ID.wrapping_add(1),
             name: Some("irrelavent".to_owned()),
             span: None,