Mirror of https://github.com/rust-lang/rust.git
Auto merge of #109056 - matthiaskrgr:rollup-9trny1z, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

- #108651 (Forbid the use of `#[target_feature]` on `main`)
- #109009 (rustdoc: use restricted Damerau-Levenshtein distance for search)
- #109026 (Introduce `Rc::into_inner`, as a parallel to `Arc::into_inner`)
- #109029 (Gate usages of `dyn*` and const closures in macros)
- #109031 (Rename `config.toml.example` to `config.example.toml`)
- #109032 (Use `TyCtxt::trait_solver_next` in some places)
- #109047 (typo)
- #109052 (Add eslint check for rustdoc-gui tester)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Commit 7b4f48927d

.gitattributes (vendored)
@@ -9,7 +9,6 @@
 src/etc/installer/gfx/* binary
 src/vendor/** -text
 Cargo.lock linguist-generated=false
-config.toml.example linguist-language=TOML

 # Older git versions try to fix line endings on images and fonts, this prevents it.
 *.png binary
@@ -16,7 +16,7 @@ Files: compiler/*
 Cargo.lock
 Cargo.toml
 CODE_OF_CONDUCT.md
-config.toml.example
+config.example.toml
 configure
 CONTRIBUTING.md
 COPYRIGHT
@@ -99,7 +99,7 @@ See [the rustc-dev-guide for more info][sysllvm].
 The Rust build system uses a file named `config.toml` in the root of the
 source tree to determine various configuration settings for the build.
 Set up the defaults intended for distros to get started. You can see a full
-list of options in `config.toml.example`.
+list of options in `config.example.toml`.

 ```sh
 printf 'profile = "user" \nchangelog-seen = 2 \n' > config.toml
@@ -337,9 +337,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             ast::TyKind::Never => {
                 gate_feature_post!(&self, never_type, ty.span, "the `!` type is experimental");
             }
-            ast::TyKind::TraitObject(_, ast::TraitObjectSyntax::DynStar, ..) => {
-                gate_feature_post!(&self, dyn_star, ty.span, "dyn* trait objects are unstable");
-            }
             _ => {}
         }
         visit::walk_ty(self, ty)
@@ -425,14 +422,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             ast::ExprKind::TryBlock(_) => {
                 gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental");
             }
-            ast::ExprKind::Closure(box ast::Closure { constness: ast::Const::Yes(_), .. }) => {
-                gate_feature_post!(
-                    &self,
-                    const_closures,
-                    e.span,
-                    "const closures are experimental"
-                );
-            }
             _ => {}
         }
         visit::walk_expr(self, e)
@@ -594,6 +583,8 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
     gate_all!(inline_const_pat, "inline-const in pattern position is experimental");
     gate_all!(associated_const_equality, "associated const equality is incomplete");
     gate_all!(yeet_expr, "`do yeet` expression is experimental");
+    gate_all!(dyn_star, "`dyn*` trait objects are experimental");
+    gate_all!(const_closures, "const closures are experimental");

     // All uses of `gate_all!` below this point were added in #65742,
     // and subsequently disabled (with the non-early gating readded).
@@ -242,6 +242,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: DefId) -> CodegenFnAttrs {
                 // Note that this is also allowed if `actually_rustdoc` so
                 // if a target is documenting some wasm-specific code then
                 // it's not spuriously denied.
+                //
+                // This exception needs to be kept in sync with allowing
+                // `#[target_feature]` on `main` and `start`.
             } else if !tcx.features().target_feature_11 {
                 let mut err = feature_err(
                     &tcx.sess.parse_sess,
@@ -128,9 +128,14 @@ hir_analysis_where_clause_on_main = `main` function is not allowed to have a `where` clause
 hir_analysis_track_caller_on_main = `main` function is not allowed to be `#[track_caller]`
     .suggestion = remove this annotation

+hir_analysis_target_feature_on_main = `main` function is not allowed to have `#[target_feature]`
+
 hir_analysis_start_not_track_caller = `start` is not allowed to be `#[track_caller]`
     .label = `start` is not allowed to be `#[track_caller]`

+hir_analysis_start_not_target_feature = `start` is not allowed to have `#[target_feature]`
+    .label = `start` is not allowed to have `#[target_feature]`
+
 hir_analysis_start_not_async = `start` is not allowed to be `async`
     .label = `start` is not allowed to be `async`

@@ -327,6 +327,14 @@ pub(crate) struct TrackCallerOnMain {
     pub annotated: Span,
 }

+#[derive(Diagnostic)]
+#[diag(hir_analysis_target_feature_on_main)]
+pub(crate) struct TargetFeatureOnMain {
+    #[primary_span]
+    #[label(hir_analysis_target_feature_on_main)]
+    pub main: Span,
+}
+
 #[derive(Diagnostic)]
 #[diag(hir_analysis_start_not_track_caller)]
 pub(crate) struct StartTrackCaller {
@@ -336,6 +344,15 @@ pub(crate) struct StartTrackCaller {
     pub start: Span,
 }

+#[derive(Diagnostic)]
+#[diag(hir_analysis_start_not_target_feature)]
+pub(crate) struct StartTargetFeature {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub start: Span,
+}
+
 #[derive(Diagnostic)]
 #[diag(hir_analysis_start_not_async, code = "E0752")]
 pub(crate) struct StartAsync {
@@ -283,6 +283,15 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) {
         error = true;
     }

+    if !tcx.codegen_fn_attrs(main_def_id).target_features.is_empty()
+        // Calling functions with `#[target_feature]` is not unsafe on WASM, see #84988
+        && !tcx.sess.target.is_like_wasm
+        && !tcx.sess.opts.actually_rustdoc
+    {
+        tcx.sess.emit_err(errors::TargetFeatureOnMain { main: main_span });
+        error = true;
+    }
+
     if error {
         return;
     }
|
|||||||
});
|
});
|
||||||
error = true;
|
error = true;
|
||||||
}
|
}
|
||||||
|
if attr.has_name(sym::target_feature)
|
||||||
|
// Calling functions with `#[target_feature]` is
|
||||||
|
// not unsafe on WASM, see #84988
|
||||||
|
&& !tcx.sess.target.is_like_wasm
|
||||||
|
&& !tcx.sess.opts.actually_rustdoc
|
||||||
|
{
|
||||||
|
tcx.sess.emit_err(errors::StartTargetFeature {
|
||||||
|
span: attr.span,
|
||||||
|
start: start_span,
|
||||||
|
});
|
||||||
|
error = true;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if error {
|
if error {
|
||||||
|
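To illustrate the entry-point checks added by #108651, here is a hypothetical nightly snippet (not part of this diff) that the compiler is now expected to reject on non-WASM targets; the feature name and target feature below are chosen only for the example:

```rust
// Illustrative only: with `target_feature_11` enabled, a safe `main` can carry
// `#[target_feature]`, and the new check is expected to report
// "`main` function is not allowed to have `#[target_feature]`".
#![feature(target_feature_11)]

#[target_feature(enable = "avx2")] // x86_64-specific feature, for illustration
fn main() {}
```

As the `is_like_wasm` / `actually_rustdoc` conditions above show, the check is deliberately skipped on wasm-like targets and when building documentation.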
@@ -2105,7 +2105,7 @@ impl<'a> Parser<'a> {
             ClosureBinder::NotPresent
         };

-        let constness = self.parse_closure_constness(Case::Sensitive);
+        let constness = self.parse_closure_constness();

         let movability =
             if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
@@ -1196,9 +1196,13 @@ impl<'a> Parser<'a> {
         self.parse_constness_(case, false)
     }

-    /// Parses constness for closures
-    fn parse_closure_constness(&mut self, case: Case) -> Const {
-        self.parse_constness_(case, true)
+    /// Parses constness for closures (case sensitive, feature-gated)
+    fn parse_closure_constness(&mut self) -> Const {
+        let constness = self.parse_constness_(Case::Sensitive, true);
+        if let Const::Yes(span) = constness {
+            self.sess.gated_spans.gate(sym::const_closures, span);
+        }
+        constness
     }

     fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
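A hedged illustration of what the new `parse_closure_constness` gating catches (hypothetical code, not part of this diff): because the span is now recorded in `gated_spans` during parsing, a `const` closure produced by a macro also requires the feature gate:

```rust
// Illustrative only: without `#![feature(const_closures)]` this is expected to
// fail with "const closures are experimental", even though the `const` token
// only appears inside a macro expansion.
macro_rules! make_closure {
    () => { const || () };
}

fn main() {
    let _f = make_closure!();
}
```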
@@ -624,10 +624,12 @@ impl<'a> Parser<'a> {
     ///
     /// Note that this does *not* parse bare trait objects.
     fn parse_dyn_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
+        let lo = self.token.span;
         self.bump(); // `dyn`

         // parse dyn* types
         let syntax = if self.eat(&TokenKind::BinOp(token::Star)) {
+            self.sess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
             TraitObjectSyntax::DynStar
         } else {
             TraitObjectSyntax::Dyn
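Similarly for `dyn*` (hypothetical snippet, assuming a nightly compiler): since the gate is now recorded in the parser itself, a macro-expanded `dyn*` type is rejected without `#![feature(dyn_star)]`:

```rust
// Illustrative only: expected to fail with the `dyn_star` feature-gate error,
// even though the `dyn*` syntax comes from a macro expansion.
macro_rules! debug_ty {
    () => { dyn* std::fmt::Debug };
}

type Alias = debug_ty!();

fn main() {}
```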
@@ -1,6 +1,5 @@
 use rustc_middle::traits::solve::{Certainty, Goal, MaybeCause};
 use rustc_middle::ty;
-use rustc_session::config::TraitSolver;

 use crate::infer::canonical::OriginalQueryValues;
 use crate::infer::InferCtxt;
@@ -80,13 +79,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
             _ => obligation.param_env.without_const(),
         };

-        if self.tcx.sess.opts.unstable_opts.trait_solver != TraitSolver::Next {
-            let c_pred = self.canonicalize_query_keep_static(
-                param_env.and(obligation.predicate),
-                &mut _orig_values,
-            );
-            self.tcx.at(obligation.cause.span()).evaluate_obligation(c_pred)
-        } else {
+        if self.tcx.trait_solver_next() {
             self.probe(|snapshot| {
                 if let Ok((_, certainty)) =
                     self.evaluate_root_goal(Goal::new(self.tcx, param_env, obligation.predicate))
@@ -111,6 +104,12 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
                     Ok(EvaluationResult::EvaluatedToErr)
                 }
             })
+        } else {
+            let c_pred = self.canonicalize_query_keep_static(
+                param_env.and(obligation.predicate),
+                &mut _orig_values,
+            );
+            self.tcx.at(obligation.cause.span()).evaluate_obligation(c_pred)
         }
     }

@@ -50,7 +50,6 @@ use rustc_middle::ty::relate::TypeRelation;
 use rustc_middle::ty::SubstsRef;
 use rustc_middle::ty::{self, EarlyBinder, PolyProjectionPredicate, ToPolyTraitRef, ToPredicate};
 use rustc_middle::ty::{Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
-use rustc_session::config::TraitSolver;
 use rustc_span::symbol::sym;

 use std::cell::{Cell, RefCell};
@@ -545,13 +544,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         obligation: &PredicateObligation<'tcx>,
     ) -> Result<EvaluationResult, OverflowError> {
         self.evaluation_probe(|this| {
-            if this.tcx().sess.opts.unstable_opts.trait_solver != TraitSolver::Next {
+            if this.tcx().trait_solver_next() {
+                this.evaluate_predicates_recursively_in_new_solver([obligation.clone()])
+            } else {
                 this.evaluate_predicate_recursively(
                     TraitObligationStackList::empty(&ProvisionalEvaluationCache::default()),
                     obligation.clone(),
                 )
-            } else {
-                this.evaluate_predicates_recursively_in_new_solver([obligation.clone()])
             }
         })
     }
@@ -591,7 +590,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
     where
         I: IntoIterator<Item = PredicateObligation<'tcx>> + std::fmt::Debug,
     {
-        if self.tcx().sess.opts.unstable_opts.trait_solver != TraitSolver::Next {
+        if self.tcx().trait_solver_next() {
+            self.evaluate_predicates_recursively_in_new_solver(predicates)
+        } else {
             let mut result = EvaluatedToOk;
             for obligation in predicates {
                 let eval = self.evaluate_predicate_recursively(stack, obligation.clone())?;
@@ -604,8 +605,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 }
             }
             Ok(result)
-        } else {
-            self.evaluate_predicates_recursively_in_new_solver(predicates)
         }
     }

@@ -167,7 +167,7 @@ pub enum TyKind<I: Interner> {
     /// lifetimes bound by the witness itself.
     ///
     /// This variant is only using when `drop_tracking_mir` is set.
-    /// This contains the `DefId` and the `SubstRef` of the generator.
+    /// This contains the `DefId` and the `SubstsRef` of the generator.
     /// The actual witness types are computed on MIR by the `mir_generator_witnesses` query.
     ///
     /// Looking at the following example, the witness for this generator
@@ -681,6 +681,24 @@ impl<T> Rc<T> {
             Err(this)
         }
     }
+
+    /// Returns the inner value, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// If `Rc::into_inner` is called on every clone of this `Rc`,
+    /// it is guaranteed that exactly one of the calls returns the inner value.
+    /// This means in particular that the inner value is not dropped.
+    ///
+    /// This is equivalent to `Rc::try_unwrap(...).ok()`. (Note that these are not equivalent for
+    /// `Arc`, due to race conditions that do not apply to `Rc`.)
+    #[inline]
+    #[unstable(feature = "rc_into_inner", issue = "106894")]
+    pub fn into_inner(this: Self) -> Option<T> {
+        Rc::try_unwrap(this).ok()
+    }
 }

 impl<T> Rc<[T]> {
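A minimal usage sketch of the new API (hypothetical user code; at the time of this change it requires the nightly `rc_into_inner` feature named in the attribute above):

```rust
#![feature(rc_into_inner)]

use std::rc::Rc;

fn main() {
    let a = Rc::new(String::from("hello"));
    let b = Rc::clone(&a);

    // Two strong references exist, so the first call returns None and merely
    // drops one reference; the second call is the last one and gets the value.
    assert_eq!(Rc::into_inner(a), None);
    assert_eq!(Rc::into_inner(b), Some(String::from("hello")));
}
```

The tests added in the next hunk exercise the same guarantees, including the case with an outstanding weak reference.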
@@ -151,6 +151,21 @@ fn try_unwrap() {
     assert_eq!(Rc::try_unwrap(x), Ok(5));
 }
+
+#[test]
+fn into_inner() {
+    let x = Rc::new(3);
+    assert_eq!(Rc::into_inner(x), Some(3));
+
+    let x = Rc::new(4);
+    let y = Rc::clone(&x);
+    assert_eq!(Rc::into_inner(x), None);
+    assert_eq!(Rc::into_inner(y), Some(4));
+
+    let x = Rc::new(5);
+    let _w = Rc::downgrade(&x);
+    assert_eq!(Rc::into_inner(x), Some(5));
+}

 #[test]
 fn into_from_raw() {
     let x = Rc::new(Box::new("hello"));
@@ -185,7 +185,7 @@ Some general areas that you may be interested in modifying are:
 If you make a major change, please remember to:

 + Update `VERSION` in `src/bootstrap/main.rs`.
-* Update `changelog-seen = N` in `config.toml.example`.
+* Update `changelog-seen = N` in `config.example.toml`.
 * Add an entry in `src/bootstrap/CHANGELOG.md`.

 A 'major change' includes
@@ -44,8 +44,8 @@ fn main() {
     if suggest_setup {
         println!("warning: you have not made a `config.toml`");
         println!(
-            "help: consider running `./x.py setup` or copying `config.toml.example` by running \
-            `cp config.toml.example config.toml`"
+            "help: consider running `./x.py setup` or copying `config.example.toml` by running \
+            `cp config.example.toml config.toml`"
         );
     } else if let Some(suggestion) = &changelog_suggestion {
         println!("{}", suggestion);
|
|||||||
if suggest_setup {
|
if suggest_setup {
|
||||||
println!("warning: you have not made a `config.toml`");
|
println!("warning: you have not made a `config.toml`");
|
||||||
println!(
|
println!(
|
||||||
"help: consider running `./x.py setup` or copying `config.toml.example` by running \
|
"help: consider running `./x.py setup` or copying `config.example.toml` by running \
|
||||||
`cp config.toml.example config.toml`"
|
`cp config.example.toml config.toml`"
|
||||||
);
|
);
|
||||||
} else if let Some(suggestion) = &changelog_suggestion {
|
} else if let Some(suggestion) = &changelog_suggestion {
|
||||||
println!("{}", suggestion);
|
println!("{}", suggestion);
|
||||||
|
@@ -55,7 +55,7 @@ pub enum DryRun {
 /// Note that this structure is not decoded directly into, but rather it is
 /// filled out from the decoded forms of the structs below. For documentation
 /// each field, see the corresponding fields in
-/// `config.toml.example`.
+/// `config.example.toml`.
 #[derive(Default)]
 #[cfg_attr(test, derive(Clone))]
 pub struct Config {
|
|||||||
|
|
||||||
impl SplitDebuginfo {
|
impl SplitDebuginfo {
|
||||||
/// Returns the default `-Csplit-debuginfo` value for the current target. See the comment for
|
/// Returns the default `-Csplit-debuginfo` value for the current target. See the comment for
|
||||||
/// `rust.split-debuginfo` in `config.toml.example`.
|
/// `rust.split-debuginfo` in `config.example.toml`.
|
||||||
fn default_for_platform(target: &str) -> Self {
|
fn default_for_platform(target: &str) -> Self {
|
||||||
if target.contains("apple") {
|
if target.contains("apple") {
|
||||||
SplitDebuginfo::Unpacked
|
SplitDebuginfo::Unpacked
|
||||||
|
@@ -194,7 +194,7 @@ if '--help' in sys.argv or '-h' in sys.argv:
     print('')
     print('This configure script is a thin configuration shim over the true')
     print('configuration system, `config.toml`. You can explore the comments')
-    print('in `config.toml.example` next to this configure script to see')
+    print('in `config.example.toml` next to this configure script to see')
     print('more information about what each option is. Additionally you can')
     print('pass `--set` as an argument to set arbitrary key/value pairs')
     print('in the TOML configuration if desired')
@@ -367,7 +367,7 @@ for key in known_args:

 set('build.configure-args', sys.argv[1:])

-# "Parse" the `config.toml.example` file into the various sections, and we'll
+# "Parse" the `config.example.toml` file into the various sections, and we'll
 # use this as a template of a `config.toml` to write out which preserves
 # all the various comments and whatnot.
 #
@@ -380,7 +380,7 @@ section_order = [None]
 targets = {}
 top_level_keys = []

-for line in open(rust_dir + '/config.toml.example').read().split("\n"):
+for line in open(rust_dir + '/config.example.toml').read().split("\n"):
     if cur_section == None:
         if line.count('=') == 1:
             top_level_key = line.split('=')[0]
@@ -967,7 +967,7 @@ impl Step for PlainSourceTarball {
             "RELEASES.md",
             "configure",
             "x.py",
-            "config.toml.example",
+            "config.example.toml",
             "Cargo.toml",
             "Cargo.lock",
         ];
@@ -286,7 +286,7 @@ impl Step for Llvm {
             (true, true) => "RelWithDebInfo",
         };

-        // NOTE: remember to also update `config.toml.example` when changing the
+        // NOTE: remember to also update `config.example.toml` when changing the
         // defaults!
         let llvm_targets = match &builder.config.llvm_targets {
             Some(s) => s,
@@ -53,4 +53,5 @@ ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \
            # Runs checks to ensure that there are no ES5 issues in our JS code.
            es-check es6 ../src/librustdoc/html/static/js/*.js && \
            eslint -c ../src/librustdoc/html/static/.eslintrc.js ../src/librustdoc/html/static/js/*.js && \
-           eslint -c ../src/tools/rustdoc-js/.eslintrc.js ../src/tools/rustdoc-js/tester.js
+           eslint -c ../src/tools/rustdoc-js/.eslintrc.js ../src/tools/rustdoc-js/tester.js && \
+           eslint -c ../src/tools/rustdoc-gui/.eslintrc.js ../src/tools/rustdoc-gui/tester.js
@@ -31,7 +31,7 @@ Rust's source-based code coverage requires the Rust "profiler runtime". Without

 The Rust `nightly` distribution channel includes the profiler runtime, by default.

-> **Important**: If you are building the Rust compiler from the source distribution, the profiler runtime is _not_ enabled in the default `config.toml.example`. Edit your `config.toml` file and ensure the `profiler` feature is set it to `true` (either under the `[build]` section, or under the settings for an individual `[target.<triple>]`):
+> **Important**: If you are building the Rust compiler from the source distribution, the profiler runtime is _not_ enabled in the default `config.example.toml`. Edit your `config.toml` file and ensure the `profiler` feature is set it to `true` (either under the `[build]` section, or under the settings for an individual `[target.<triple>]`):
 >
 > ```toml
 > # Build the profiler runtime (required when compiling with options that depend
@@ -76,39 +76,111 @@ function printTab(nb) {
 }

 /**
- * A function to compute the Levenshtein distance between two strings
- * Licensed under the Creative Commons Attribution-ShareAlike 3.0 Unported
- * Full License can be found at http://creativecommons.org/licenses/by-sa/3.0/legalcode
- * This code is an unmodified version of the code written by Marco de Wit
- * and was found at https://stackoverflow.com/a/18514751/745719
+ * The [edit distance] is a metric for measuring the difference between two strings.
+ *
+ * [edit distance]: https://en.wikipedia.org/wiki/Edit_distance
 */
-const levenshtein_row2 = [];
-function levenshtein(s1, s2) {
-    if (s1 === s2) {
-        return 0;
-    }
-    const s1_len = s1.length, s2_len = s2.length;
-    if (s1_len && s2_len) {
-        let i1 = 0, i2 = 0, a, b, c, c2;
-        const row = levenshtein_row2;
-        while (i1 < s1_len) {
-            row[i1] = ++i1;
-        }
-        while (i2 < s2_len) {
-            c2 = s2.charCodeAt(i2);
-            a = i2;
-            ++i2;
-            b = i2;
-            for (i1 = 0; i1 < s1_len; ++i1) {
-                c = a + (s1.charCodeAt(i1) !== c2 ? 1 : 0);
-                a = row[i1];
-                b = b < a ? (b < c ? b + 1 : c) : (a < c ? a + 1 : c);
-                row[i1] = b;
-            }
-        }
-        return b;
-    }
-    return s1_len + s2_len;
+
+/*
+ * This function was translated, mostly line-for-line, from
+ * https://github.com/rust-lang/rust/blob/ff4b772f805ec1e/compiler/rustc_span/src/edit_distance.rs
+ *
+ * The current implementation is the restricted Damerau-Levenshtein algorithm. It is restricted
+ * because it does not permit modifying characters that have already been transposed. The specific
+ * algorithm should not matter to the caller of the methods, which is why it is not noted in the
+ * documentation.
+ */
+const editDistanceState = {
+    current: [],
+    prev: [],
+    prevPrev: [],
+    calculate: function calculate(a, b, limit) {
+        // Ensure that `b` is the shorter string, minimizing memory use.
+        if (a.length < b.length) {
+            const aTmp = a;
+            a = b;
+            b = aTmp;
+        }
+
+        const minDist = a.length - b.length;
+        // If we know the limit will be exceeded, we can return early.
+        if (minDist > limit) {
+            return limit + 1;
+        }
+
+        // Strip common prefix.
+        // We know that `b` is the shorter string, so we don't need to check
+        // `a.length`.
+        while (b.length > 0 && b[0] === a[0]) {
+            a = a.substring(1);
+            b = b.substring(1);
+        }
+        // Strip common suffix.
+        while (b.length > 0 && b[b.length - 1] === a[a.length - 1]) {
+            a = a.substring(0, a.length - 1);
+            b = b.substring(0, b.length - 1);
+        }
+
+        // If either string is empty, the distance is the length of the other.
+        // We know that `b` is the shorter string, so we don't need to check `a`.
+        if (b.length === 0) {
+            return minDist;
+        }
+
+        const aLength = a.length;
+        const bLength = b.length;
+
+        for (let i = 0; i <= bLength; ++i) {
+            this.current[i] = 0;
+            this.prev[i] = i;
+            this.prevPrev[i] = Number.MAX_VALUE;
+        }
+
+        // row by row
+        for (let i = 1; i <= aLength; ++i) {
+            this.current[0] = i;
+            const aIdx = i - 1;
+
+            // column by column
+            for (let j = 1; j <= bLength; ++j) {
+                const bIdx = j - 1;
+
+                // There is no cost to substitute a character with itself.
+                const substitutionCost = a[aIdx] === b[bIdx] ? 0 : 1;
+
+                this.current[j] = Math.min(
+                    // deletion
+                    this.prev[j] + 1,
+                    // insertion
+                    this.current[j - 1] + 1,
+                    // substitution
+                    this.prev[j - 1] + substitutionCost
+                );
+
+                if ((i > 1) && (j > 1) && (a[aIdx] === b[bIdx - 1]) && (a[aIdx - 1] === b[bIdx])) {
+                    // transposition
+                    this.current[j] = Math.min(
+                        this.current[j],
+                        this.prevPrev[j - 2] + 1
+                    );
+                }
+            }
+
+            // Rotate the buffers, reusing the memory
+            const prevPrevTmp = this.prevPrev;
+            this.prevPrev = this.prev;
+            this.prev = this.current;
+            this.current = prevPrevTmp;
+        }
+
+        // `prev` because we already rotated the buffers.
+        const distance = this.prev[bLength];
+        return distance <= limit ? distance : (limit + 1);
+    },
+};
+
+function editDistance(a, b, limit) {
+    return editDistanceState.calculate(a, b, limit);
 }

 function initSearch(rawSearchIndex) {
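For reference, here is a self-contained Rust sketch of the restricted Damerau-Levenshtein ("optimal string alignment") metric described in the comment above. It is an illustrative re-implementation only, not the actual `rustc_span` code or the JavaScript added in this hunk:

```rust
// Illustrative sketch of the restricted Damerau-Levenshtein distance: like
// Levenshtein, but an adjacent transposition counts as a single edit.
fn osa_distance(a: &str, b: &str) -> usize {
    let a: Vec<char> = a.chars().collect();
    let b: Vec<char> = b.chars().collect();
    let (m, n) = (a.len(), b.len());
    // d[i][j] = distance between a[..i] and b[..j]
    let mut d = vec![vec![0usize; n + 1]; m + 1];
    for i in 0..=m {
        d[i][0] = i;
    }
    for j in 0..=n {
        d[0][j] = j;
    }
    for i in 1..=m {
        for j in 1..=n {
            let substitution_cost = if a[i - 1] == b[j - 1] { 0 } else { 1 };
            d[i][j] = (d[i - 1][j] + 1) // deletion
                .min(d[i][j - 1] + 1) // insertion
                .min(d[i - 1][j - 1] + substitution_cost); // substitution
            // Restricted transposition: only adjacent, not-yet-modified pairs.
            if i > 1 && j > 1 && a[i - 1] == b[j - 2] && a[i - 2] == b[j - 1] {
                d[i][j] = d[i][j].min(d[i - 2][j - 2] + 1);
            }
        }
    }
    d[m][n]
}

fn main() {
    // A transposed pair is one edit here, where plain Levenshtein needs two.
    assert_eq!(osa_distance("claer", "clear"), 1);
    assert_eq!(osa_distance("kitten", "sitting"), 3);
}
```

The production code additionally threads a `limit` through the computation and returns `limit + 1` when the distance exceeds it, which is what lets the search code bail out early.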
@@ -802,7 +874,7 @@ function initSearch(rawSearchIndex) {
        for (const result of results) {
            if (result.id > -1) {
                const obj = searchIndex[result.id];
-                obj.lev = result.lev;
+                obj.dist = result.dist;
                const res = buildHrefAndPath(obj);
                obj.displayPath = pathSplitter(res[0]);
                obj.fullPath = obj.displayPath + obj.name;
|
|||||||
|
|
||||||
// Sort by distance in the path part, if specified
|
// Sort by distance in the path part, if specified
|
||||||
// (less changes required to match means higher rankings)
|
// (less changes required to match means higher rankings)
|
||||||
a = aaa.path_lev;
|
a = aaa.path_dist;
|
||||||
b = bbb.path_lev;
|
b = bbb.path_dist;
|
||||||
if (a !== b) {
|
if (a !== b) {
|
||||||
return a - b;
|
return a - b;
|
||||||
}
|
}
|
||||||
@@ -875,8 +947,8 @@ function initSearch(rawSearchIndex) {

            // Sort by distance in the name part, the last part of the path
            // (less changes required to match means higher rankings)
-            a = (aaa.lev);
-            b = (bbb.lev);
+            a = (aaa.dist);
+            b = (bbb.dist);
            if (a !== b) {
                return a - b;
            }
@@ -968,19 +1040,20 @@ function initSearch(rawSearchIndex) {

    /**
     * This function checks if the object (`row`) generics match the given type (`elem`)
-     * generics. If there are no generics on `row`, `defaultLev` is returned.
+     * generics. If there are no generics on `row`, `defaultDistance` is returned.
     *
     * @param {Row} row - The object to check.
     * @param {QueryElement} elem - The element from the parsed query.
-     * @param {integer} defaultLev - This is the value to return in case there are no generics.
+     * @param {integer} defaultDistance - This is the value to return in case there are no
+     *                                    generics.
     *
-     * @return {integer} - Returns the best match (if any) or `maxLevDistance + 1`.
+     * @return {integer} - Returns the best match (if any) or `maxEditDistance + 1`.
     */
-    function checkGenerics(row, elem, defaultLev, maxLevDistance) {
+    function checkGenerics(row, elem, defaultDistance, maxEditDistance) {
        if (row.generics.length === 0) {
-            return elem.generics.length === 0 ? defaultLev : maxLevDistance + 1;
+            return elem.generics.length === 0 ? defaultDistance : maxEditDistance + 1;
        } else if (row.generics.length > 0 && row.generics[0].name === null) {
-            return checkGenerics(row.generics[0], elem, defaultLev, maxLevDistance);
+            return checkGenerics(row.generics[0], elem, defaultDistance, maxEditDistance);
        }
        // The names match, but we need to be sure that all generics kinda
        // match as well.
|||||||
elem_name = entry.name;
|
elem_name = entry.name;
|
||||||
if (elem_name === "") {
|
if (elem_name === "") {
|
||||||
// Pure generic, needs to check into it.
|
// Pure generic, needs to check into it.
|
||||||
if (checkGenerics(entry, elem, maxLevDistance + 1, maxLevDistance) !== 0) {
|
if (checkGenerics(entry, elem, maxEditDistance + 1, maxEditDistance)
|
||||||
return maxLevDistance + 1;
|
!== 0) {
|
||||||
|
return maxEditDistance + 1;
|
||||||
}
|
}
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -1019,7 +1093,7 @@ function initSearch(rawSearchIndex) {
                }
            }
            if (match === null) {
-                return maxLevDistance + 1;
+                return maxEditDistance + 1;
            }
            elems[match] -= 1;
            if (elems[match] === 0) {
@@ -1028,7 +1102,7 @@ function initSearch(rawSearchIndex) {
            }
            return 0;
        }
-        return maxLevDistance + 1;
+        return maxEditDistance + 1;
    }

    /**
@@ -1038,17 +1112,17 @@ function initSearch(rawSearchIndex) {
     * @param {Row} row
     * @param {QueryElement} elem - The element from the parsed query.
     *
-     * @return {integer} - Returns a Levenshtein distance to the best match.
+     * @return {integer} - Returns an edit distance to the best match.
     */
-    function checkIfInGenerics(row, elem, maxLevDistance) {
-        let lev = maxLevDistance + 1;
+    function checkIfInGenerics(row, elem, maxEditDistance) {
+        let dist = maxEditDistance + 1;
        for (const entry of row.generics) {
-            lev = Math.min(checkType(entry, elem, true, maxLevDistance), lev);
-            if (lev === 0) {
+            dist = Math.min(checkType(entry, elem, true, maxEditDistance), dist);
+            if (dist === 0) {
                break;
            }
        }
-        return lev;
+        return dist;
    }

    /**
@@ -1059,21 +1133,21 @@ function initSearch(rawSearchIndex) {
     * @param {QueryElement} elem - The element from the parsed query.
     * @param {boolean} literalSearch
     *
-     * @return {integer} - Returns a Levenshtein distance to the best match. If there is
-     * no match, returns `maxLevDistance + 1`.
+     * @return {integer} - Returns an edit distance to the best match. If there is
+     * no match, returns `maxEditDistance + 1`.
     */
-    function checkType(row, elem, literalSearch, maxLevDistance) {
+    function checkType(row, elem, literalSearch, maxEditDistance) {
        if (row.name === null) {
            // This is a pure "generic" search, no need to run other checks.
            if (row.generics.length > 0) {
-                return checkIfInGenerics(row, elem, maxLevDistance);
+                return checkIfInGenerics(row, elem, maxEditDistance);
            }
-            return maxLevDistance + 1;
+            return maxEditDistance + 1;
        }

-        let lev = levenshtein(row.name, elem.name);
+        let dist = editDistance(row.name, elem.name, maxEditDistance);
        if (literalSearch) {
-            if (lev !== 0) {
+            if (dist !== 0) {
                // The name didn't match, let's try to check if the generics do.
                if (elem.generics.length === 0) {
                    const checkGeneric = row.generics.length > 0;
@@ -1082,44 +1156,44 @@ function initSearch(rawSearchIndex) {
                        return 0;
                    }
                }
-                return maxLevDistance + 1;
+                return maxEditDistance + 1;
            } else if (elem.generics.length > 0) {
-                return checkGenerics(row, elem, maxLevDistance + 1, maxLevDistance);
+                return checkGenerics(row, elem, maxEditDistance + 1, maxEditDistance);
            }
            return 0;
        } else if (row.generics.length > 0) {
            if (elem.generics.length === 0) {
-                if (lev === 0) {
+                if (dist === 0) {
                    return 0;
                }
                // The name didn't match so we now check if the type we're looking for is inside
                // the generics!
-                lev = Math.min(lev, checkIfInGenerics(row, elem, maxLevDistance));
-                return lev;
-            } else if (lev > maxLevDistance) {
+                dist = Math.min(dist, checkIfInGenerics(row, elem, maxEditDistance));
+                return dist;
+            } else if (dist > maxEditDistance) {
                // So our item's name doesn't match at all and has generics.
                //
                // Maybe it's present in a sub generic? For example "f<A<B<C>>>()", if we're
                // looking for "B<C>", we'll need to go down.
-                return checkIfInGenerics(row, elem, maxLevDistance);
+                return checkIfInGenerics(row, elem, maxEditDistance);
            } else {
                // At this point, the name kinda match and we have generics to check, so
                // let's go!
-                const tmp_lev = checkGenerics(row, elem, lev, maxLevDistance);
-                if (tmp_lev > maxLevDistance) {
-                    return maxLevDistance + 1;
+                const tmp_dist = checkGenerics(row, elem, dist, maxEditDistance);
+                if (tmp_dist > maxEditDistance) {
+                    return maxEditDistance + 1;
                }
                // We compute the median value of both checks and return it.
-                return (tmp_lev + lev) / 2;
+                return (tmp_dist + dist) / 2;
            }
        } else if (elem.generics.length > 0) {
            // In this case, we were expecting generics but there isn't so we simply reject this
            // one.
-            return maxLevDistance + 1;
+            return maxEditDistance + 1;
        }
        // No generics on our query or on the target type so we can return without doing
        // anything else.
-        return lev;
+        return dist;
    }

    /**
@@ -1129,27 +1203,27 @@ function initSearch(rawSearchIndex) {
     * @param {QueryElement} elem - The element from the parsed query.
     * @param {integer} typeFilter
     *
-     * @return {integer} - Returns a Levenshtein distance to the best match. If there is no
-     * match, returns `maxLevDistance + 1`.
+     * @return {integer} - Returns an edit distance to the best match. If there is no
+     * match, returns `maxEditDistance + 1`.
     */
-    function findArg(row, elem, typeFilter, maxLevDistance) {
-        let lev = maxLevDistance + 1;
+    function findArg(row, elem, typeFilter, maxEditDistance) {
+        let dist = maxEditDistance + 1;

        if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
            for (const input of row.type.inputs) {
                if (!typePassesFilter(typeFilter, input.ty)) {
                    continue;
                }
-                lev = Math.min(
-                    lev,
-                    checkType(input, elem, parsedQuery.literalSearch, maxLevDistance)
+                dist = Math.min(
+                    dist,
+                    checkType(input, elem, parsedQuery.literalSearch, maxEditDistance)
                );
-                if (lev === 0) {
+                if (dist === 0) {
                    return 0;
                }
            }
        }
-        return parsedQuery.literalSearch ? maxLevDistance + 1 : lev;
+        return parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
    }

    /**
@@ -1159,11 +1233,11 @@ function initSearch(rawSearchIndex) {
     * @param {QueryElement} elem - The element from the parsed query.
     * @param {integer} typeFilter
     *
-     * @return {integer} - Returns a Levenshtein distance to the best match. If there is no
-     * match, returns `maxLevDistance + 1`.
+     * @return {integer} - Returns an edit distance to the best match. If there is no
+     * match, returns `maxEditDistance + 1`.
     */
-    function checkReturned(row, elem, typeFilter, maxLevDistance) {
-        let lev = maxLevDistance + 1;
+    function checkReturned(row, elem, typeFilter, maxEditDistance) {
+        let dist = maxEditDistance + 1;

        if (row && row.type && row.type.output.length > 0) {
            const ret = row.type.output;
@@ -1171,23 +1245,23 @@ function initSearch(rawSearchIndex) {
                if (!typePassesFilter(typeFilter, ret_ty.ty)) {
                    continue;
                }
-                lev = Math.min(
-                    lev,
-                    checkType(ret_ty, elem, parsedQuery.literalSearch, maxLevDistance)
+                dist = Math.min(
+                    dist,
+                    checkType(ret_ty, elem, parsedQuery.literalSearch, maxEditDistance)
                );
-                if (lev === 0) {
+                if (dist === 0) {
                    return 0;
                }
            }
        }
-        return parsedQuery.literalSearch ? maxLevDistance + 1 : lev;
+        return parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
    }

-    function checkPath(contains, ty, maxLevDistance) {
+    function checkPath(contains, ty, maxEditDistance) {
        if (contains.length === 0) {
            return 0;
        }
-        let ret_lev = maxLevDistance + 1;
+        let ret_dist = maxEditDistance + 1;
        const path = ty.path.split("::");

        if (ty.parent && ty.parent.name) {
@@ -1197,27 +1271,27 @@ function initSearch(rawSearchIndex) {
        const length = path.length;
        const clength = contains.length;
        if (clength > length) {
-            return maxLevDistance + 1;
+            return maxEditDistance + 1;
        }
        for (let i = 0; i < length; ++i) {
            if (i + clength > length) {
                break;
            }
-            let lev_total = 0;
+            let dist_total = 0;
            let aborted = false;
            for (let x = 0; x < clength; ++x) {
-                const lev = levenshtein(path[i + x], contains[x]);
-                if (lev > maxLevDistance) {
+                const dist = editDistance(path[i + x], contains[x], maxEditDistance);
+                if (dist > maxEditDistance) {
                    aborted = true;
                    break;
                }
-                lev_total += lev;
+                dist_total += dist;
            }
            if (!aborted) {
-                ret_lev = Math.min(ret_lev, Math.round(lev_total / clength));
+                ret_dist = Math.min(ret_dist, Math.round(dist_total / clength));
            }
        }
-        return ret_lev;
+        return ret_dist;
    }

    function typePassesFilter(filter, type) {
@@ -1312,31 +1386,31 @@ function initSearch(rawSearchIndex) {
     * This function adds the given result into the provided `results` map if it matches the
     * following condition:
     *
-     * * If it is a "literal search" (`parsedQuery.literalSearch`), then `lev` must be 0.
-     * * If it is not a "literal search", `lev` must be <= `maxLevDistance`.
+     * * If it is a "literal search" (`parsedQuery.literalSearch`), then `dist` must be 0.
+     * * If it is not a "literal search", `dist` must be <= `maxEditDistance`.
     *
     * The `results` map contains information which will be used to sort the search results:
     *
     * * `fullId` is a `string`` used as the key of the object we use for the `results` map.
     * * `id` is the index in both `searchWords` and `searchIndex` arrays for this element.
     * * `index` is an `integer`` used to sort by the position of the word in the item's name.
-     * * `lev` is the main metric used to sort the search results.
-     * * `path_lev` is zero if a single-component search query is used, otherwise it's the
+     * * `dist` is the main metric used to sort the search results.
+     * * `path_dist` is zero if a single-component search query is used, otherwise it's the
     *   distance computed for everything other than the last path component.
     *
     * @param {Results} results
     * @param {string} fullId
     * @param {integer} id
     * @param {integer} index
-     * @param {integer} lev
-     * @param {integer} path_lev
+     * @param {integer} dist
+     * @param {integer} path_dist
     */
-    function addIntoResults(results, fullId, id, index, lev, path_lev, maxLevDistance) {
-        const inBounds = lev <= maxLevDistance || index !== -1;
-        if (lev === 0 || (!parsedQuery.literalSearch && inBounds)) {
+    function addIntoResults(results, fullId, id, index, dist, path_dist, maxEditDistance) {
+        const inBounds = dist <= maxEditDistance || index !== -1;
+        if (dist === 0 || (!parsedQuery.literalSearch && inBounds)) {
            if (results[fullId] !== undefined) {
                const result = results[fullId];
-                if (result.dontValidate || result.lev <= lev) {
+                if (result.dontValidate || result.dist <= dist) {
                    return;
                }
            }
@@ -1344,8 +1418,8 @@ function initSearch(rawSearchIndex) {
                id: id,
                index: index,
                dontValidate: parsedQuery.literalSearch,
-                lev: lev,
-                path_lev: path_lev,
+                dist: dist,
+                path_dist: path_dist,
            };
        }
    }
@@ -1354,7 +1428,7 @@ function initSearch(rawSearchIndex) {
     * This function is called in case the query is only one element (with or without generics).
     * This element will be compared to arguments' and returned values' items and also to items.
     *
-     * Other important thing to note: since there is only one element, we use levenshtein
+     * Other important thing to note: since there is only one element, we use edit
     * distance for name comparisons.
     *
     * @param {Row} row
|
|||||||
results_others,
|
results_others,
|
||||||
results_in_args,
|
results_in_args,
|
||||||
results_returned,
|
results_returned,
|
||||||
maxLevDistance
|
maxEditDistance
|
||||||
) {
|
) {
|
||||||
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
|
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
let lev, index = -1, path_lev = 0;
|
let dist, index = -1, path_dist = 0;
|
||||||
const fullId = row.id;
|
const fullId = row.id;
|
||||||
const searchWord = searchWords[pos];
|
const searchWord = searchWords[pos];
|
||||||
|
|
||||||
const in_args = findArg(row, elem, parsedQuery.typeFilter, maxLevDistance);
|
const in_args = findArg(row, elem, parsedQuery.typeFilter, maxEditDistance);
|
||||||
const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxLevDistance);
|
const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxEditDistance);
|
||||||
|
|
||||||
// path_lev is 0 because no parent path information is currently stored
|
// path_dist is 0 because no parent path information is currently stored
|
||||||
// in the search index
|
// in the search index
|
||||||
addIntoResults(results_in_args, fullId, pos, -1, in_args, 0, maxLevDistance);
|
addIntoResults(results_in_args, fullId, pos, -1, in_args, 0, maxEditDistance);
|
||||||
addIntoResults(results_returned, fullId, pos, -1, returned, 0, maxLevDistance);
|
addIntoResults(results_returned, fullId, pos, -1, returned, 0, maxEditDistance);
|
||||||
|
|
||||||
if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
|
if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
|
||||||
return;
|
return;
|
||||||
@@ -1411,34 +1485,34 @@ function initSearch(rawSearchIndex) {
        // No need to check anything else if it's a "pure" generics search.
        if (elem.name.length === 0) {
            if (row.type !== null) {
-                lev = checkGenerics(row.type, elem, maxLevDistance + 1, maxLevDistance);
-                // path_lev is 0 because we know it's empty
-                addIntoResults(results_others, fullId, pos, index, lev, 0, maxLevDistance);
+                dist = checkGenerics(row.type, elem, maxEditDistance + 1, maxEditDistance);
+                // path_dist is 0 because we know it's empty
+                addIntoResults(results_others, fullId, pos, index, dist, 0, maxEditDistance);
            }
            return;
        }

        if (elem.fullPath.length > 1) {
-            path_lev = checkPath(elem.pathWithoutLast, row, maxLevDistance);
-            if (path_lev > maxLevDistance) {
+            path_dist = checkPath(elem.pathWithoutLast, row, maxEditDistance);
+            if (path_dist > maxEditDistance) {
                return;
            }
        }

        if (parsedQuery.literalSearch) {
            if (searchWord === elem.name) {
-                addIntoResults(results_others, fullId, pos, index, 0, path_lev);
+                addIntoResults(results_others, fullId, pos, index, 0, path_dist);
            }
            return;
        }

-        lev = levenshtein(searchWord, elem.pathLast);
+        dist = editDistance(searchWord, elem.pathLast, maxEditDistance);

-        if (index === -1 && lev + path_lev > maxLevDistance) {
+        if (index === -1 && dist + path_dist > maxEditDistance) {
            return;
        }

-        addIntoResults(results_others, fullId, pos, index, lev, path_lev, maxLevDistance);
+        addIntoResults(results_others, fullId, pos, index, dist, path_dist, maxEditDistance);
    }

    /**
@@ -1450,22 +1524,22 @@ function initSearch(rawSearchIndex) {
     * @param {integer} pos - Position in the `searchIndex`.
     * @param {Object} results
     */
-    function handleArgs(row, pos, results, maxLevDistance) {
+    function handleArgs(row, pos, results, maxEditDistance) {
        if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
            return;
        }

-        let totalLev = 0;
-        let nbLev = 0;
+        let totalDist = 0;
+        let nbDist = 0;

        // If the result is too "bad", we return false and it ends this search.
        function checkArgs(elems, callback) {
            for (const elem of elems) {
                // There is more than one parameter to the query so all checks should be "exact"
-                const lev = callback(row, elem, NO_TYPE_FILTER, maxLevDistance);
-                if (lev <= 1) {
-                    nbLev += 1;
-                    totalLev += lev;
+                const dist = callback(row, elem, NO_TYPE_FILTER, maxEditDistance);
+                if (dist <= 1) {
+                    nbDist += 1;
+                    totalDist += dist;
                } else {
                    return false;
                }
@ -1479,11 +1553,11 @@ function initSearch(rawSearchIndex) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (nbLev === 0) {
|
if (nbDist === 0) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const lev = Math.round(totalLev / nbLev);
|
const dist = Math.round(totalDist / nbDist);
|
||||||
addIntoResults(results, row.id, pos, 0, lev, 0, maxLevDistance);
|
addIntoResults(results, row.id, pos, 0, dist, 0, maxEditDistance);
|
||||||
}
|
}
|
||||||
|
|
||||||
function innerRunQuery() {
|
function innerRunQuery() {
|
||||||
@ -1496,7 +1570,7 @@ function initSearch(rawSearchIndex) {
|
|||||||
for (const elem of parsedQuery.returned) {
|
for (const elem of parsedQuery.returned) {
|
||||||
queryLen += elem.name.length;
|
queryLen += elem.name.length;
|
||||||
}
|
}
|
||||||
const maxLevDistance = Math.floor(queryLen / 3);
|
const maxEditDistance = Math.floor(queryLen / 3);
|
||||||
|
|
||||||
if (parsedQuery.foundElems === 1) {
|
if (parsedQuery.foundElems === 1) {
|
||||||
if (parsedQuery.elems.length === 1) {
|
if (parsedQuery.elems.length === 1) {
|
||||||
@ -1511,7 +1585,7 @@ function initSearch(rawSearchIndex) {
|
|||||||
results_others,
|
results_others,
|
||||||
results_in_args,
|
results_in_args,
|
||||||
results_returned,
|
results_returned,
|
||||||
maxLevDistance
|
maxEditDistance
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} else if (parsedQuery.returned.length === 1) {
|
} else if (parsedQuery.returned.length === 1) {
|
||||||
@ -1523,14 +1597,14 @@ function initSearch(rawSearchIndex) {
|
|||||||
row,
|
row,
|
||||||
elem,
|
elem,
|
||||||
parsedQuery.typeFilter,
|
parsedQuery.typeFilter,
|
||||||
maxLevDistance
|
maxEditDistance
|
||||||
);
|
);
|
||||||
addIntoResults(results_others, row.id, i, -1, in_returned, maxLevDistance);
|
addIntoResults(results_others, row.id, i, -1, in_returned, maxEditDistance);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if (parsedQuery.foundElems > 0) {
|
} else if (parsedQuery.foundElems > 0) {
|
||||||
for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
|
for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
|
||||||
handleArgs(searchIndex[i], i, results_others, maxLevDistance);
|
handleArgs(searchIndex[i], i, results_others, maxEditDistance);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1568,7 +1642,7 @@ function initSearch(rawSearchIndex) {
|
|||||||
*
|
*
|
||||||
* @return {boolean} - Whether the result is valid or not
|
* @return {boolean} - Whether the result is valid or not
|
||||||
*/
|
*/
|
||||||
function validateResult(name, path, keys, parent, maxLevDistance) {
|
function validateResult(name, path, keys, parent, maxEditDistance) {
|
||||||
if (!keys || !keys.length) {
|
if (!keys || !keys.length) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@ -1582,8 +1656,8 @@ function initSearch(rawSearchIndex) {
|
|||||||
// next if there is a parent, check for exact parent match
|
// next if there is a parent, check for exact parent match
|
||||||
(parent !== undefined && parent.name !== undefined &&
|
(parent !== undefined && parent.name !== undefined &&
|
||||||
parent.name.toLowerCase().indexOf(key) > -1) ||
|
parent.name.toLowerCase().indexOf(key) > -1) ||
|
||||||
// lastly check to see if the name was a levenshtein match
|
// lastly check to see if the name was an editDistance match
|
||||||
levenshtein(name, key) <= maxLevDistance)) {
|
editDistance(name, key, maxEditDistance) <= maxEditDistance)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
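The hunks above swap rustdoc's plain Levenshtein helper for an `editDistance(a, b, limit)` call capped by `maxEditDistance` (a third of the query length, per `innerRunQuery`). The helper's body is outside these hunks; the snippet below is only a rough sketch of a restricted Damerau-Levenshtein (optimal string alignment) distance with a comparable signature, to illustrate what the renamed function computes. The name `editDistanceSketch` and the length-difference early exit are illustrative assumptions, not code from this commit.

    // Illustrative sketch only, not the rustdoc implementation: restricted
    // Damerau-Levenshtein (optimal string alignment) distance with a cap.
    function editDistanceSketch(a, b, limit) {
        // If the lengths already differ by more than the cap, the distance
        // cannot be within the cap either.
        if (Math.abs(a.length - b.length) > limit) {
            return limit + 1;
        }
        let prev2 = [];
        let prev = [];
        for (let j = 0; j <= b.length; ++j) {
            prev[j] = j;
        }
        for (let i = 1; i <= a.length; ++i) {
            const cur = [i];
            for (let j = 1; j <= b.length; ++j) {
                const subst = a[i - 1] === b[j - 1] ? 0 : 1;
                cur[j] = Math.min(prev[j] + 1, cur[j - 1] + 1, prev[j - 1] + subst);
                // Adjacent transposition: the "restricted Damerau" part.
                if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
                    cur[j] = Math.min(cur[j], prev2[j - 2] + 1);
                }
            }
            prev2 = prev;
            prev = cur;
        }
        return prev[b.length];
    }

Under such a metric, `editDistanceSketch("prinltn", "println", 2)` is 1 (a single adjacent transposition) rather than the plain-Levenshtein 2, which is what the new `tests/rustdoc-js-std/println-typo.js` test further down relies on to rank `println` first for the misspelling `prinltn`.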
src/tools/rustdoc-gui/.eslintrc.js (new file, 96 lines)
@@ -0,0 +1,96 @@
+module.exports = {
+    "env": {
+        "browser": true,
+        "node": true,
+        "es6": true
+    },
+    "extends": "eslint:recommended",
+    "parserOptions": {
+        "ecmaVersion": 2018,
+        "sourceType": "module"
+    },
+    "rules": {
+        "linebreak-style": [
+            "error",
+            "unix"
+        ],
+        "semi": [
+            "error",
+            "always"
+        ],
+        "quotes": [
+            "error",
+            "double"
+        ],
+        "linebreak-style": [
+            "error",
+            "unix"
+        ],
+        "no-trailing-spaces": "error",
+        "no-var": ["error"],
+        "prefer-const": ["error"],
+        "prefer-arrow-callback": ["error"],
+        "brace-style": [
+            "error",
+            "1tbs",
+            { "allowSingleLine": false }
+        ],
+        "keyword-spacing": [
+            "error",
+            { "before": true, "after": true }
+        ],
+        "arrow-spacing": [
+            "error",
+            { "before": true, "after": true }
+        ],
+        "key-spacing": [
+            "error",
+            { "beforeColon": false, "afterColon": true, "mode": "strict" }
+        ],
+        "func-call-spacing": ["error", "never"],
+        "space-infix-ops": "error",
+        "space-before-function-paren": ["error", "never"],
+        "space-before-blocks": "error",
+        "comma-dangle": ["error", "always-multiline"],
+        "comma-style": ["error", "last"],
+        "max-len": ["error", { "code": 100, "tabWidth": 4 }],
+        "eol-last": ["error", "always"],
+        "arrow-parens": ["error", "as-needed"],
+        "no-unused-vars": [
+            "error",
+            {
+                "argsIgnorePattern": "^_",
+                "varsIgnorePattern": "^_"
+            }
+        ],
+        "eqeqeq": "error",
+        "no-const-assign": "error",
+        "no-debugger": "error",
+        "no-dupe-args": "error",
+        "no-dupe-else-if": "error",
+        "no-dupe-keys": "error",
+        "no-duplicate-case": "error",
+        "no-ex-assign": "error",
+        "no-fallthrough": "error",
+        "no-invalid-regexp": "error",
+        "no-import-assign": "error",
+        "no-self-compare": "error",
+        "no-template-curly-in-string": "error",
+        "block-scoped-var": "error",
+        "guard-for-in": "error",
+        "no-alert": "error",
+        "no-confusing-arrow": "error",
+        "no-div-regex": "error",
+        "no-floating-decimal": "error",
+        "no-implicit-globals": "error",
+        "no-implied-eval": "error",
+        "no-label-var": "error",
+        "no-lonely-if": "error",
+        "no-mixed-operators": "error",
+        "no-multi-assign": "error",
+        "no-return-assign": "error",
+        "no-script-url": "error",
+        "no-sequences": "error",
+        "no-div-regex": "error",
+    }
+};
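As a rough, hypothetical illustration (not part of the commit), code that the configuration above would accept looks like this: double quotes, mandatory semicolons, `const` instead of `var`, strict equality, and arrow callbacks without parentheses around a single parameter.

    // Illustrative only: style accepted by the new rustdoc-gui ESLint config.
    const path = require("path");

    const files = ["basic.goml", "notes.txt", "search.goml"];
    const gomlFiles = files.filter(file => path.extname(file) === ".goml");
    gomlFiles.forEach(file => {
        console.log("selected test: " + file);
    });

The tester.js changes in the hunks that follow (single quotes to double quotes, `let` to `const`, `==` to `===`, and dropping the unused `code` parameter) are exactly the kind of mechanical fixes these rules flag.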
@@ -6,8 +6,8 @@

 const fs = require("fs");
 const path = require("path");
-const os = require('os');
+const os = require("os");
-const {Options, runTest} = require('browser-ui-test');
+const {Options, runTest} = require("browser-ui-test");

 // If a test fails or errors, we will retry it two more times in case it was a flaky failure.
 const NB_RETRY = 3;
@@ -200,7 +200,7 @@ async function main(argv) {
 const framework_options = new Options();
 try {
 // This is more convenient that setting fields one by one.
-let args = [
+const args = [
 "--variable", "DOC_PATH", opts["doc_folder"], "--enable-fail-on-js-error",
 "--allow-file-access-from-files",
 ];
@@ -234,7 +234,7 @@ async function main(argv) {
 } else {
 files = opts["files"];
 }
-files = files.filter(file => path.extname(file) == ".goml");
+files = files.filter(file => path.extname(file) === ".goml");
 if (files.length === 0) {
 console.error("rustdoc-gui: No test selected");
 process.exit(2);
@@ -259,7 +259,7 @@ async function main(argv) {

 // We catch this "event" to display a nicer message in case of unexpected exit (because of a
 // missing `--no-sandbox`).
-const exitHandling = (code) => {
+const exitHandling = () => {
 if (!opts["no_sandbox"]) {
 console.log("");
 console.log(
@@ -268,10 +268,10 @@ async function main(argv) {
 console.log("");
 }
 };
-process.on('exit', exitHandling);
+process.on("exit", exitHandling);

 const originalFilesLen = files.length;
-let results = createEmptyResults();
+const results = createEmptyResults();
 const status_bar = char_printer(files.length);

 let new_results;
@@ -281,7 +281,7 @@ async function main(argv) {
 Array.prototype.push.apply(results.successful, new_results.successful);
 // We generate the new list of files with the previously failing tests.
 files = Array.prototype.concat(new_results.failed, new_results.errored).map(
-f => f['file_name']);
+f => f["file_name"]);
 if (files.length > originalFilesLen / 2) {
 // If we have too many failing tests, it's very likely not flaky failures anymore so
 // no need to retry.
tests/rustdoc-js-std/println-typo.js (new file, 12 lines)
@@ -0,0 +1,12 @@
+// exact-check
+
+const QUERY = 'prinltn';
+const FILTER_CRATE = 'std';
+
+const EXPECTED = {
+    'others': [
+        { 'path': 'std', 'name': 'println' },
+        { 'path': 'std', 'name': 'print' },
+        { 'path': 'std', 'name': 'eprintln' },
+    ],
+};
@@ -7,7 +7,7 @@
 use std::arch::asm;

 #[target_feature(enable = "avx")]
-fn main() {
+fn foo() {
 unsafe {
 asm!(
 "/* {} */",
@@ -15,3 +15,5 @@ fn main() {
 );
 }
 }
+
+fn main() {}
@@ -3,7 +3,7 @@
 /// dyn* is not necessarily the final surface syntax (if we have one at all),
 /// but for now we will support it to aid in writing tests independently.
 pub fn dyn_star_parameter(_: &dyn* Send) {
-//~^ dyn* trait objects are unstable
+//~^ `dyn*` trait objects are experimental
 }

 fn main() {}
@@ -1,8 +1,8 @@
-error[E0658]: dyn* trait objects are unstable
+error[E0658]: `dyn*` trait objects are experimental
   --> $DIR/feature-gate-dyn_star.rs:5:31
 |
 LL | pub fn dyn_star_parameter(_: &dyn* Send) {
-| ^^^^^^^^^
+| ^^^^
 |
 = note: see issue #102425 <https://github.com/rust-lang/rust/issues/102425> for more information
 = help: add `#![feature(dyn_star)]` to the crate attributes to enable
tests/ui/dyn-star/gated-span.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
+macro_rules! t {
+    ($t:ty) => {}
+}
+
+t!(dyn* Send);
+//~^ ERROR `dyn*` trait objects are experimental
+
+fn main() {}
tests/ui/dyn-star/gated-span.stderr (new file, 12 lines)
@@ -0,0 +1,12 @@
+error[E0658]: `dyn*` trait objects are experimental
+  --> $DIR/gated-span.rs:5:4
+|
+LL | t!(dyn* Send);
+| ^^^^
+|
+= note: see issue #102425 <https://github.com/rust-lang/rust/issues/102425> for more information
+= help: add `#![feature(dyn_star)]` to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
@@ -4,8 +4,8 @@ fn make_dyn_star() {
 let i = 42usize;
 let dyn_i: dyn* Debug = i as dyn* Debug;
 //~^ ERROR casting `usize` as `dyn* Debug` is invalid
-//~| ERROR dyn* trait objects are unstable
+//~| ERROR `dyn*` trait objects are experimental
-//~| ERROR dyn* trait objects are unstable
+//~| ERROR `dyn*` trait objects are experimental
 }

 fn main() {
@@ -1,17 +1,17 @@
-error[E0658]: dyn* trait objects are unstable
+error[E0658]: `dyn*` trait objects are experimental
   --> $DIR/no-explicit-dyn-star-cast.rs:5:16
 |
 LL | let dyn_i: dyn* Debug = i as dyn* Debug;
-| ^^^^^^^^^^
+| ^^^^
 |
 = note: see issue #102425 <https://github.com/rust-lang/rust/issues/102425> for more information
 = help: add `#![feature(dyn_star)]` to the crate attributes to enable

-error[E0658]: dyn* trait objects are unstable
+error[E0658]: `dyn*` trait objects are experimental
   --> $DIR/no-explicit-dyn-star-cast.rs:5:34
 |
 LL | let dyn_i: dyn* Debug = i as dyn* Debug;
-| ^^^^^^^^^^
+| ^^^^
 |
 = note: see issue #102425 <https://github.com/rust-lang/rust/issues/102425> for more information
 = help: add `#![feature(dyn_star)]` to the crate attributes to enable
@@ -1,5 +1,13 @@
 // gate-test-const_closures

 fn main() {
 (const || {})();
 //~^ ERROR: const closures are experimental
 }
+
+macro_rules! e {
+    ($e:expr) => {}
+}
+
+e!((const || {}));
+//~^ ERROR const closures are experimental
@@ -1,12 +1,21 @@
 error[E0658]: const closures are experimental
-  --> $DIR/gate.rs:3:6
+  --> $DIR/gate.rs:4:6
 |
 LL | (const || {})();
-| ^^^^^^^^^^^
+| ^^^^^
 |
 = note: see issue #106003 <https://github.com/rust-lang/rust/issues/106003> for more information
 = help: add `#![feature(const_closures)]` to the crate attributes to enable

-error: aborting due to previous error
+error[E0658]: const closures are experimental
+  --> $DIR/gate.rs:12:5
+|
+LL | e!((const || {}));
+| ^^^^^
+|
+= note: see issue #106003 <https://github.com/rust-lang/rust/issues/106003> for more information
+= help: add `#![feature(const_closures)]` to the crate attributes to enable
+
+error: aborting due to 2 previous errors

 For more information about this error, try `rustc --explain E0658`.
@@ -0,0 +1,7 @@
+// only-x86_64
+
+#![feature(target_feature_11)]
+
+#[target_feature(enable = "avx2")]
+fn main() {}
+//~^ ERROR `main` function is not allowed to have `#[target_feature]`
@@ -0,0 +1,8 @@
+error: `main` function is not allowed to have `#[target_feature]`
+  --> $DIR/issue-108645-target-feature-on-main.rs:6:1
+|
+LL | fn main() {}
+| ^^^^^^^^^ `main` function is not allowed to have `#[target_feature]`
+|
+error: aborting due to previous error
+
@@ -0,0 +1,9 @@
+// only-x86_64
+
+#![feature(start)]
+#![feature(target_feature_11)]
+
+#[start]
+#[target_feature(enable = "avx2")]
+//~^ ERROR `start` is not allowed to have `#[target_feature]`
+fn start(_argc: isize, _argv: *const *const u8) -> isize { 0 }
@@ -0,0 +1,11 @@
+error: `start` is not allowed to have `#[target_feature]`
+  --> $DIR/issue-108645-target-feature-on-start.rs:7:1
+|
+LL | #[target_feature(enable = "avx2")]
+| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL |
+LL | fn start(_argc: isize, _argv: *const *const u8) -> isize { 0 }
+| -------------------------------------------------------- `start` is not allowed to have `#[target_feature]`
+|
+error: aborting due to previous error
+
@@ -185,7 +185,7 @@ trigger_files = [
 "src/tools/x",
 "configure",
 "Cargo.toml",
-"config.toml.example",
+"config.example.toml",
 "src/stage0.json"
 ]
