Auto merge of #57520 - alexreg:tidy-copyright-lint, r=Mark-Simulacrum
Add lint for copyright headers to 'tidy' tool
r? @Mark-Simulacrum CC @centril
This commit is contained in: commit 6599946272
@@ -1 +1 @@
-Subproject commit 74d81d80052cb88925f0e73b12fbd0b73ab7b5a0
+Subproject commit 0e9061cbaf95adfb9f3ed36c6cef4c046f282e86
@@ -1 +1 @@
-Subproject commit 60077efda319c95a89fe39609803c5433567adbf
+Subproject commit 1c775a1dc5e29bc44b36604b510d6196d98077fa
@@ -1,5 +1,4 @@
-//
-// Original implementation taken from rust-memchr
+// Original implementation taken from rust-memchr.
 // Copyright 2015 Andrew Gallant, bluss and Nicolas Koch
 
 use cmp;
@@ -8,13 +7,13 @@ use mem;
 const LO_U64: u64 = 0x0101010101010101;
 const HI_U64: u64 = 0x8080808080808080;
 
-// use truncation
+// Use truncation.
 const LO_USIZE: usize = LO_U64 as usize;
 const HI_USIZE: usize = HI_U64 as usize;
 
-/// Return `true` if `x` contains any zero byte.
+/// Returns whether `x` contains any zero byte.
 ///
-/// From *Matters Computational*, J. Arndt
+/// From *Matters Computational*, J. Arndt:
 ///
 /// "The idea is to subtract one from each of the bytes and then look for
 /// bytes where the borrow propagated all the way to the most significant
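The bit trick referenced in the doc comment above is easier to see in isolation. The following is a minimal, self-contained sketch of the technique (it mirrors the constants and helpers named in the diff, but it is an illustration, not the libcore source; the `main` demo values are made up):

    // Sketch of the zero-byte trick from "Matters Computational" (J. Arndt).
    const LO: u64 = 0x0101010101010101;
    const HI: u64 = 0x8080808080808080;

    /// Returns whether `x` contains any zero byte: subtracting 1 from every byte
    /// borrows only out of bytes that were zero, and `!x & HI` discards bytes
    /// whose high bit was already set, so the result is nonzero iff some byte is 0.
    fn contains_zero_byte(x: u64) -> bool {
        x.wrapping_sub(LO) & !x & HI != 0
    }

    /// Spreads a byte across the word, so `word ^ repeat_byte(b)` has a zero byte
    /// exactly where `word` contains `b`.
    fn repeat_byte(b: u8) -> u64 {
        (b as u64) * (u64::MAX / 255)
    }

    fn main() {
        let word = u64::from_le_bytes(*b"abcdefgh");
        assert!(contains_zero_byte(word ^ repeat_byte(b'd')));
        assert!(!contains_zero_byte(word ^ repeat_byte(b'z')));
    }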
@@ -36,7 +35,7 @@ fn repeat_byte(b: u8) -> usize {
 (b as usize) * (::usize::MAX / 255)
 }
 
-/// Return the first index matching the byte `x` in `text`.
+/// Returns the first index matching the byte `x` in `text`.
 pub fn memchr(x: u8, text: &[u8]) -> Option<usize> {
 // Scan for a single byte value by reading two `usize` words at a time.
 //
@@ -77,18 +76,18 @@ pub fn memchr(x: u8, text: &[u8]) -> Option<usize> {
 }
 }
 
-// find the byte after the point the body loop stopped
+// Find the byte after the point the body loop stopped.
 text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)
 }
 
-/// Return the last index matching the byte `x` in `text`.
+/// Returns the last index matching the byte `x` in `text`.
 pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
 // Scan for a single byte value by reading two `usize` words at a time.
 //
-// Split `text` in three parts
-// - unaligned tail, after the last word aligned address in text
-// - body, scan by 2 words at a time
-// - the first remaining bytes, < 2 word size
+// Split `text` in three parts:
+// - unaligned tail, after the last word aligned address in text,
+// - body, scanned by 2 words at a time,
+// - the first remaining bytes, < 2 word size.
 let len = text.len();
 let ptr = text.as_ptr();
 type Chunk = usize;
@@ -105,7 +104,7 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
 return Some(offset + index);
 }
 
-// search the body of the text, make sure we don't cross min_aligned_offset.
+// Search the body of the text, make sure we don't cross min_aligned_offset.
 // offset is always aligned, so just testing `>` is sufficient and avoids possible
 // overflow.
 let repeated_x = repeat_byte(x);
@@ -116,7 +115,7 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
 let u = *(ptr.offset(offset as isize - 2 * chunk_bytes as isize) as *const Chunk);
 let v = *(ptr.offset(offset as isize - chunk_bytes as isize) as *const Chunk);
 
-// break if there is a matching byte
+// Break if there is a matching byte.
 let zu = contains_zero_byte(u ^ repeated_x);
 let zv = contains_zero_byte(v ^ repeated_x);
 if zu || zv {
@@ -126,6 +125,6 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
 offset -= 2 * chunk_bytes;
 }
 
-// find the byte before the point the body loop stopped
+// Find the byte before the point the body loop stopped.
 text[..offset].iter().rposition(|elt| *elt == x)
 }
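For readers skimming the diff, the first-/last-index contract documented above can be illustrated with plain iterator code; this is only a sketch of the semantics (the optimized word-at-a-time implementation is what the file actually contains):

    // Naive reference versions of the documented behaviour, for illustration only.
    fn memchr_naive(x: u8, text: &[u8]) -> Option<usize> {
        text.iter().position(|&b| b == x)
    }

    fn memrchr_naive(x: u8, text: &[u8]) -> Option<usize> {
        text.iter().rposition(|&b| b == x)
    }

    fn main() {
        let haystack = b"abracadabra";
        assert_eq!(memchr_naive(b'a', haystack), Some(0));   // first index
        assert_eq!(memrchr_naive(b'a', haystack), Some(10)); // last index
        assert_eq!(memchr_naive(b'z', haystack), None);
    }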
@@ -1,4 +1,4 @@
-// Rust JSON serialization library
+// Rust JSON serialization library.
 // Copyright (c) 2011 Google Inc.
 
 #![forbid(non_camel_case_types)]
@@ -1,5 +1,4 @@
-//
-// Original implementation taken from rust-memchr
+// Original implementation taken from rust-memchr.
 // Copyright 2015 Andrew Gallant, bluss and Nicolas Koch
 
 /// A safe interface to `memchr`.
@@ -1,4 +1,4 @@
-// Copyright (c) 2016-2017 Nuxi (https://nuxi.nl/) and contributors.
+// Copyright (c) 2016-2017 Nuxi <https://nuxi.nl/> and contributors.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions
@@ -1,5 +1,4 @@
-//
-// Original implementation taken from rust-memchr
+// Original implementation taken from rust-memchr.
 // Copyright 2015 Andrew Gallant, bluss and Nicolas Koch
 
 pub use core::slice::memchr::{memchr, memrchr};
@@ -1,5 +1,4 @@
-//
-// Original implementation taken from rust-memchr
+// Original implementation taken from rust-memchr.
 // Copyright 2015 Andrew Gallant, bluss and Nicolas Koch
 
 pub fn memchr(needle: u8, haystack: &[u8]) -> Option<usize> {
@@ -1,6 +1,5 @@
-//
-// Original implementation taken from rust-memchr
+// Original implementation taken from rust-memchr.
 // Copyright 2015 Andrew Gallant, bluss and Nicolas Koch
 
-// Fallback memchr is fastest on windows
+// Fallback memchr is fastest on Windows.
 pub use core::slice::memchr::{memchr, memrchr};
@@ -7,7 +7,7 @@
 //! The format of the JSON output should be considered *unstable*. For now the
 //! structs at the end of this file (Diagnostic*) specify the error format.
 
-// FIXME spec the JSON output properly.
+// FIXME: spec the JSON output properly.
 
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
@@ -1,13 +1,3 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 // force-host
 
 #![feature(plugin_registrar)]
@@ -1,13 +1,3 @@
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 #![deny(missing_docs)] //~ ERROR
 
 pub struct Foo; //~ ERROR
@@ -1,5 +1,5 @@
 error: missing documentation for crate
-  --> $DIR/deny-missing-docs-crate.rs:11:1
+  --> $DIR/deny-missing-docs-crate.rs:1:1
   |
 LL | / #![deny(missing_docs)] //~ ERROR
 LL | |
@@ -7,13 +7,13 @@ LL | | pub struct Foo; //~ ERROR
   | |_______________^
   |
 note: lint level defined here
-  --> $DIR/deny-missing-docs-crate.rs:11:9
+  --> $DIR/deny-missing-docs-crate.rs:1:9
   |
 LL | #![deny(missing_docs)] //~ ERROR
   | ^^^^^^^^^^^^
 
 error: missing documentation for a struct
-  --> $DIR/deny-missing-docs-crate.rs:13:1
+  --> $DIR/deny-missing-docs-crate.rs:3:1
   |
 LL | pub struct Foo; //~ ERROR
   | ^^^^^^^^^^^^^^^
@@ -1,13 +1,3 @@
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 //! foo
 
 #![deny(missing_docs)]
@@ -1,11 +1,11 @@
 error: missing documentation for macro
-  --> $DIR/deny-missing-docs-macro.rs:16:1
+  --> $DIR/deny-missing-docs-macro.rs:6:1
   |
 LL | macro_rules! foo { //~ ERROR
   | ^^^^^^^^^^^^^^^^
   |
 note: lint level defined here
-  --> $DIR/deny-missing-docs-macro.rs:13:9
+  --> $DIR/deny-missing-docs-macro.rs:3:9
   |
 LL | #![deny(missing_docs)]
   | ^^^^^^^^^^^^
@@ -19,7 +19,6 @@
 // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 // SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-
 //! This crate exports a macro `enum_from_primitive!` that wraps an
 //! `enum` declaration and automatically adds an implementation of
 //! `num::FromPrimitive` (reexported here), to allow conversion from
@@ -52,7 +51,6 @@
 //! }
 //! ```
 
-
 pub mod num_traits {
 pub trait FromPrimitive: Sized {
 fn from_i64(n: i64) -> Option<Self>;
@@ -207,4 +205,3 @@ macro_rules! enum_from_primitive {
 enum_from_primitive_impl! { $name, $( $( $variant )+ )+ }
 };
 }
-
@@ -1,13 +1,3 @@
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 #![crate_name = "foo"]
 
 // compile-flags: -Z unstable-options --disable-per-crate-search
@@ -1,13 +1,3 @@
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
 enum Foo {
 Bar(i32),
 Baz { i: i32 },
@@ -1,5 +1,5 @@
 error: enum variants on type aliases are experimental
-  --> $DIR/feature-gate-type_alias_enum_variants.rs:19:13
+  --> $DIR/feature-gate-type_alias_enum_variants.rs:9:13
   |
 LL | let t = Alias::Bar(0);
   | ^^^^^^^^^^
@@ -7,7 +7,7 @@ LL | let t = Alias::Bar(0);
   = help: add `#![feature(type_alias_enum_variants)]` to the crate attributes to enable
 
 error: enum variants on type aliases are experimental
-  --> $DIR/feature-gate-type_alias_enum_variants.rs:21:13
+  --> $DIR/feature-gate-type_alias_enum_variants.rs:11:13
   |
 LL | let t = Alias::Baz { i: 0 };
   | ^^^^^^^^^^
@@ -15,7 +15,7 @@ LL | let t = Alias::Baz { i: 0 };
   = help: add `#![feature(type_alias_enum_variants)]` to the crate attributes to enable
 
 error: enum variants on type aliases are experimental
-  --> $DIR/feature-gate-type_alias_enum_variants.rs:24:9
+  --> $DIR/feature-gate-type_alias_enum_variants.rs:14:9
   |
 LL | Alias::Bar(_i) => {}
   | ^^^^^^^^^^^^^^
@@ -23,7 +23,7 @@ LL | Alias::Bar(_i) => {}
   = help: add `#![feature(type_alias_enum_variants)]` to the crate attributes to enable
 
 error: enum variants on type aliases are experimental
-  --> $DIR/feature-gate-type_alias_enum_variants.rs:26:9
+  --> $DIR/feature-gate-type_alias_enum_variants.rs:16:9
   |
 LL | Alias::Baz { i: _i } => {}
   | ^^^^^^^^^^
@@ -2,12 +2,12 @@
 //! by accident.
 //!
 //! In the past we've accidentally checked in test binaries and such which add a
-//! huge amount of bloat to the git history, so it's good to just ensure we
-//! don't do that again :)
+//! huge amount of bloat to the Git history, so it's good to just ensure we
+//! don't do that again.
 
 use std::path::Path;
 
-// All files are executable on Windows, so just check on Unix
+// All files are executable on Windows, so just check on Unix.
 #[cfg(windows)]
 pub fn check(_path: &Path, _bad: &mut bool) {}
 
@@ -13,7 +13,7 @@ pub fn check(path: &Path, bad: &mut bool) {
 return
 }
 for entry in t!(path.read_dir(), path).map(|e| t!(e)) {
-// Look for `Cargo.toml` with a sibling `src/lib.rs` or `lib.rs`
+// Look for `Cargo.toml` with a sibling `src/lib.rs` or `lib.rs`.
 if entry.file_name().to_str() == Some("Cargo.toml") {
 if path.join("src/lib.rs").is_file() {
 verify(&entry.path(), &path.join("src/lib.rs"), bad)
@@ -27,8 +27,8 @@ pub fn check(path: &Path, bad: &mut bool) {
 }
 }
 
-// Verify that the dependencies in Cargo.toml at `tomlfile` are sync'd with the
-// `extern crate` annotations in the lib.rs at `libfile`.
+/// Verifies that the dependencies in Cargo.toml at `tomlfile` are synced with
+/// the `extern crate` annotations in the lib.rs at `libfile`.
 fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
 let toml = t!(fs::read_to_string(&tomlfile));
 let librs = t!(fs::read_to_string(&libfile));
@@ -37,14 +37,16 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
 return
 }
 
-// "Poor man's TOML parser", just assume we use one syntax for now
+// "Poor man's TOML parser" -- just assume we use one syntax for now.
 //
 // We just look for:
 //
-// [dependencies]
-// name = ...
-// name2 = ...
-// name3 = ...
+// ````
+// [dependencies]
+// name = ...
+// name2 = ...
+// name3 = ...
+// ```
 //
 // If we encounter a line starting with `[` then we assume it's the end of
 // the dependency section and bail out.
@@ -63,14 +65,14 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
 continue
 }
 
-// Don't worry about depending on core/std but not saying `extern crate
-// core/std`, that's intentional.
+// Don't worry about depending on core/std while not writing `extern crate
+// core/std` -- that's intentional.
 if krate == "core" || krate == "std" {
 continue
 }
 
-// This is intentional, this dependency just makes the crate available
-// for others later on. Cover cases
+// This is intentional -- this dependency just makes the crate available
+// for others later on.
 let whitelisted = krate.starts_with("panic");
 if toml.contains("name = \"std\"") && whitelisted {
 continue
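As an illustration of the "poor man's TOML parser" strategy described in the comment above, a stand-alone sketch could look like the following (the function name and the sample string are invented for the example; this is not the tidy code itself):

    // Collect dependency names from a `[dependencies]` section, stopping at the
    // next `[...]` header -- the line-scan approach the comment describes.
    fn dependency_names(toml: &str) -> Vec<String> {
        let mut names = Vec::new();
        let mut in_deps = false;
        for line in toml.lines() {
            let line = line.trim();
            if line == "[dependencies]" {
                in_deps = true;
            } else if line.starts_with('[') {
                // Any other section header ends the dependency block.
                in_deps = false;
            } else if in_deps {
                if let Some(name) = line.split('=').next() {
                    let name = name.trim();
                    if !name.is_empty() {
                        names.push(name.to_string());
                    }
                }
            }
        }
        names
    }

    fn main() {
        let toml = "[package]\nname = \"demo\"\n\n[dependencies]\nserde = \"1\"\nlog = \"0.4\"\n\n[dev-dependencies]\nrand = \"0.5\"\n";
        assert_eq!(dependency_names(toml), vec!["serde", "log"]);
    }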
@@ -1,4 +1,4 @@
-//! Check license of third-party deps by inspecting vendor
+//! Checks the licenses of third-party dependencies by inspecting vendors.
 
 use std::collections::{BTreeSet, HashSet, HashMap};
 use std::fs;
@@ -21,7 +21,7 @@ const LICENSES: &[&str] = &[
 /// These are exceptions to Rust's permissive licensing policy, and
 /// should be considered bugs. Exceptions are only allowed in Rust
 /// tooling. It is _crucial_ that no exception crates be dependencies
-/// of the Rust runtime (std / test).
+/// of the Rust runtime (std/test).
 const EXCEPTIONS: &[&str] = &[
 "mdbook", // MPL2, mdbook
 "openssl", // BSD+advertising clause, cargo, mdbook
@@ -39,11 +39,11 @@ const EXCEPTIONS: &[&str] = &[
 "colored", // MPL-2.0, rustfmt
 "ordslice", // Apache-2.0, rls
 "cloudabi", // BSD-2-Clause, (rls -> crossbeam-channel 0.2 -> rand 0.5)
-"ryu", // Apache-2.0, rls/cargo/... (b/c of serde)
+"ryu", // Apache-2.0, rls/cargo/... (because of serde)
 "bytesize", // Apache-2.0, cargo
 "im-rc", // MPL-2.0+, cargo
 "adler32", // BSD-3-Clause AND Zlib, cargo dep that isn't used
-"fortanix-sgx-abi", // MPL-2.0+, libstd but only for sgx target
+"fortanix-sgx-abi", // MPL-2.0+, libstd but only for `sgx` target
 ];
 
 /// Which crates to check against the whitelist?
@@ -156,7 +156,7 @@ const WHITELIST: &[Crate] = &[
 Crate("wincolor"),
 ];
 
-// Some types for Serde to deserialize the output of `cargo metadata` to...
+// Some types for Serde to deserialize the output of `cargo metadata` to.
 
 #[derive(Deserialize)]
 struct Output {
@@ -174,9 +174,9 @@ struct ResolveNode {
 dependencies: Vec<String>,
 }
 
-/// A unique identifier for a crate
+/// A unique identifier for a crate.
 #[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]
-struct Crate<'a>(&'a str); // (name,)
+struct Crate<'a>(&'a str); // (name)
 
 #[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]
 struct CrateVersion<'a>(&'a str, &'a str); // (name, version)
@@ -188,7 +188,7 @@ impl<'a> Crate<'a> {
 }
 
 impl<'a> CrateVersion<'a> {
-/// Returns the struct and whether or not the dep is in-tree
+/// Returns the struct and whether or not the dependency is in-tree.
 pub fn from_str(s: &'a str) -> (Self, bool) {
 let mut parts = s.split(' ');
 let name = parts.next().unwrap();
@@ -215,7 +215,7 @@ impl<'a> From<CrateVersion<'a>> for Crate<'a> {
 ///
 /// Specifically, this checks that the license is correct.
 pub fn check(path: &Path, bad: &mut bool) {
-// Check licences
+// Check licences.
 let path = path.join("../vendor");
 assert!(path.exists(), "vendor directory missing");
 let mut saw_dir = false;
@@ -223,7 +223,7 @@ pub fn check(path: &Path, bad: &mut bool) {
 saw_dir = true;
 let dir = t!(dir);
 
-// skip our exceptions
+// Skip our exceptions.
 let is_exception = EXCEPTIONS.iter().any(|exception| {
 dir.path()
 .to_str()
@@ -240,18 +240,18 @@ pub fn check(path: &Path, bad: &mut bool) {
 assert!(saw_dir, "no vendored source");
 }
 
-/// Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check
-/// failed.
+/// Checks the dependency of `WHITELIST_CRATES` at the given path. Changes `bad` to `true` if a
+/// check failed.
 ///
-/// Specifically, this checks that the dependencies are on the WHITELIST.
+/// Specifically, this checks that the dependencies are on the `WHITELIST`.
 pub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {
-// Get dependencies from cargo metadata
+// Get dependencies from Cargo metadata.
 let resolve = get_deps(path, cargo);
 
-// Get the whitelist into a convenient form
+// Get the whitelist in a convenient form.
 let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();
 
-// Check dependencies
+// Check dependencies.
 let mut visited = BTreeSet::new();
 let mut unapproved = BTreeSet::new();
 for &krate in WHITELIST_CRATES.iter() {
@@ -308,9 +308,9 @@ fn extract_license(line: &str) -> String {
 }
 }
 
-/// Get the dependencies of the crate at the given path using `cargo metadata`.
+/// Gets the dependencies of the crate at the given path using `cargo metadata`.
 fn get_deps(path: &Path, cargo: &Path) -> Resolve {
-// Run `cargo metadata` to get the set of dependencies
+// Run `cargo metadata` to get the set of dependencies.
 let output = Command::new(cargo)
 .arg("metadata")
 .arg("--format-version")
@@ -335,25 +335,25 @@ fn check_crate_whitelist<'a, 'b>(
 krate: CrateVersion<'a>,
 must_be_on_whitelist: bool,
 ) -> BTreeSet<Crate<'a>> {
-// Will contain bad deps
+// This will contain bad deps.
 let mut unapproved = BTreeSet::new();
 
-// Check if we have already visited this crate
+// Check if we have already visited this crate.
 if visited.contains(&krate) {
 return unapproved;
 }
 
 visited.insert(krate);
 
-// If this path is in-tree, we don't require it to be on the whitelist
+// If this path is in-tree, we don't require it to be on the whitelist.
 if must_be_on_whitelist {
-// If this dependency is not on the WHITELIST, add to bad set
+// If this dependency is not on `WHITELIST`, add to bad set.
 if !whitelist.contains(&krate.into()) {
 unapproved.insert(krate.into());
 }
 }
 
-// Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)
+// Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!).
 let to_check = resolve
 .nodes
 .iter()
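The depth-first walk mentioned above ("Do a DFS in the crate graph...") can be sketched independently of tidy's `cargo metadata` types. Everything below (the `HashMap` graph representation, the names, the demo data) is an assumption made purely for illustration:

    use std::collections::{BTreeSet, HashMap};

    // DFS over a dependency DAG, collecting crates that are not on the whitelist.
    // The `visited` set is enough because the graph has no cycles.
    fn check_whitelist<'a>(
        graph: &HashMap<&'a str, Vec<&'a str>>,
        whitelist: &BTreeSet<&'a str>,
        krate: &'a str,
        visited: &mut BTreeSet<&'a str>,
        unapproved: &mut BTreeSet<&'a str>,
    ) {
        if !visited.insert(krate) {
            return; // Already checked this crate.
        }
        if !whitelist.contains(krate) {
            unapproved.insert(krate);
        }
        for &dep in graph.get(krate).into_iter().flatten() {
            check_whitelist(graph, whitelist, dep, visited, unapproved);
        }
    }

    fn main() {
        let graph: HashMap<_, _> = vec![
            ("rustc", vec!["log", "leftpad"]),
            ("log", vec![]),
            ("leftpad", vec![]),
        ].into_iter().collect();
        let whitelist: BTreeSet<_> = ["rustc", "log"].iter().cloned().collect();
        let (mut visited, mut unapproved) = (BTreeSet::new(), BTreeSet::new());
        check_whitelist(&graph, &whitelist, "rustc", &mut visited, &mut unapproved);
        assert_eq!(unapproved.into_iter().collect::<Vec<_>>(), vec!["leftpad"]);
    }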
@@ -372,9 +372,10 @@ fn check_crate_whitelist<'a, 'b>(
 
 fn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) {
 const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[
-// These two crates take quite a long time to build, let's not let two
-// versions of them accidentally sneak into our dependency graph to
-// ensure we keep our CI times under control
+// These two crates take quite a long time to build, so don't allow two versions of them
+// to accidentally sneak into our dependency graph, in order to ensure we keep our CI times
+// under control.
+
 // "cargo", // FIXME(#53005)
 "rustc-ap-syntax",
 ];
@@ -22,11 +22,13 @@ pub fn check(path: &Path, bad: &mut bool) {
 contents.truncate(0);
 t!(t!(File::open(file)).read_to_string(&mut contents));
 
-// In the register_long_diagnostics! macro, entries look like this:
+// In the `register_long_diagnostics!` macro, entries look like this:
 //
+// ```
 // EXXXX: r##"
 // <Long diagnostic message>
 // "##,
+// ```
 //
 // and these long messages often have error codes themselves inside
 // them, but we don't want to report duplicates in these cases. This
@@ -1,33 +1,32 @@
-// ! Check for external package sources. Allow only vendorable packages.
+//! Check for external package sources. Allow only vendorable packages.
 
 use std::fs;
 use std::path::Path;
 
-/// List of whitelisted sources for packages
+/// List of whitelisted sources for packages.
 const WHITELISTED_SOURCES: &[&str] = &[
 "\"registry+https://github.com/rust-lang/crates.io-index\"",
 ];
 
-/// check for external package sources
+/// Checks for external package sources.
 pub fn check(path: &Path, bad: &mut bool) {
-// Cargo.lock of rust (tidy runs inside src/)
+// `Cargo.lock` of rust (tidy runs inside `src/`).
 let path = path.join("../Cargo.lock");
 
-// open and read the whole file
+// Open and read the whole file.
 let cargo_lock = t!(fs::read_to_string(&path));
 
-// process each line
+// Process each line.
 for line in cargo_lock.lines() {
-
-// consider only source entries
+// Consider only source entries.
 if ! line.starts_with("source = ") {
 continue;
 }
 
-// extract source value
+// Extract source value.
 let source = line.splitn(2, '=').nth(1).unwrap().trim();
 
-// ensure source is whitelisted
+// Ensure source is whitelisted.
 if !WHITELISTED_SOURCES.contains(&&*source) {
 println!("invalid source: {}", source);
 *bad = true;
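For context, the lines this check inspects come from `Cargo.lock`; the sketch below restates the whitelist test on a couple of made-up example lines (illustrative only, not the tidy implementation):

    // A crates.io registry source passes; anything else (e.g. a git source) is rejected.
    const WHITELISTED_SOURCES: &[&str] = &[
        "\"registry+https://github.com/rust-lang/crates.io-index\"",
    ];

    fn source_is_whitelisted(line: &str) -> Option<bool> {
        if !line.starts_with("source = ") {
            return None; // Not a source entry.
        }
        let source = line.splitn(2, '=').nth(1).unwrap().trim();
        Some(WHITELISTED_SOURCES.contains(&source))
    }

    fn main() {
        let ok = "source = \"registry+https://github.com/rust-lang/crates.io-index\"";
        let bad = "source = \"git+https://github.com/example/some-crate\"";
        assert_eq!(source_is_whitelisted(ok), Some(true));
        assert_eq!(source_is_whitelisted(bad), Some(false));
        assert_eq!(source_is_whitelisted("name = \"serde\""), None);
    }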
@@ -1,12 +1,12 @@
-//! Tidy check to ensure that unstable features are all in order
+//! Tidy check to ensure that unstable features are all in order.
 //!
 //! This check will ensure properties like:
 //!
-//! * All stability attributes look reasonably well formed
-//! * The set of library features is disjoint from the set of language features
-//! * Library features have at most one stability level
-//! * Library features have at most one `since` value
-//! * All unstable lang features have tests to ensure they are actually unstable
+//! * All stability attributes look reasonably well formed.
+//! * The set of library features is disjoint from the set of language features.
+//! * Library features have at most one stability level.
+//! * Library features have at most one `since` value.
+//! * All unstable lang features have tests to ensure they are actually unstable.
 
 use std::collections::HashMap;
 use std::fmt;
@@ -172,8 +172,8 @@ fn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {
 pub fn collect_lang_features(base_src_path: &Path, bad: &mut bool) -> Features {
 let contents = t!(fs::read_to_string(base_src_path.join("libsyntax/feature_gate.rs")));
 
-// we allow rustc-internal features to omit a tracking issue.
-// these features must be marked with `// rustc internal` in its own group.
+// We allow rustc-internal features to omit a tracking issue.
+// These features must be marked with a `// rustc internal` in its own group.
 let mut next_feature_is_rustc_internal = false;
 
 contents.lines().zip(1..)
@@ -327,7 +327,7 @@ fn map_lib_features(base_src_path: &Path,
 }
 becoming_feature = None;
 if line.contains("rustc_const_unstable(") {
-// const fn features are handled specially
+// `const fn` features are handled specially.
 let feature_name = match find_attr_val(line, "feature") {
 Some(name) => name,
 None => err!("malformed stability attribute"),
@@ -337,9 +337,8 @@ fn map_lib_features(base_src_path: &Path,
 since: "None".to_owned(),
 has_gate_test: false,
 // FIXME(#57563): #57563 is now used as a common tracking issue,
-// although we would like to have specific tracking
-// issues for each `rustc_const_unstable` in the
-// future.
+// although we would like to have specific tracking issues for each
+// `rustc_const_unstable` in the future.
 tracking_issue: Some(57563),
 };
 mf(Ok((feature_name, feature)), file, i + 1);
@@ -1,4 +1,4 @@
-//! Library used by tidy and other tools
+//! Library used by tidy and other tools.
 //!
 //! This library contains the tidy lints and exposes it
 //! to be used by tools.
@@ -1,4 +1,4 @@
-//! Tidy checks source code in this repository
+//! Tidy checks source code in this repository.
 //!
 //! This program runs all of the various tidy checks for style, cleanliness,
 //! etc. This is run by default on `make check` and as part of the auto
@@ -1,4 +1,4 @@
-//! Tidy check to enforce rules about platform-specific code in std
+//! Tidy check to enforce rules about platform-specific code in std.
 //!
 //! This is intended to maintain existing standards of code
 //! organization in hopes that the standard library will continue to
@@ -15,15 +15,15 @@
 //! Following are the basic rules, though there are currently
 //! exceptions:
 //!
-//! - core may not have platform-specific code
-//! - libpanic_abort may have platform-specific code
-//! - libpanic_unwind may have platform-specific code
-//! - libunwind may have platform-specific code
-//! - other crates in the std facade may not
-//! - std may have platform-specific code in the following places
-//! - sys/unix/
-//! - sys/windows/
-//! - os/
+//! - core may not have platform-specific code.
+//! - libpanic_abort may have platform-specific code.
+//! - libpanic_unwind may have platform-specific code.
+//! - libunwind may have platform-specific code.
+//! - other crates in the std facade may not.
+//! - std may have platform-specific code in the following places:
+//! - `sys/unix/`
+//! - `sys/windows/`
+//! - `os/`
 //!
 //! `std/sys_common` should _not_ contain platform-specific code.
 //! Finally, because std contains tests with platform-specific
@@ -36,7 +36,7 @@ use std::io::Read;
 use std::path::Path;
 use std::iter::Iterator;
 
-// Paths that may contain platform-specific code
+// Paths that may contain platform-specific code.
 const EXCEPTION_PATHS: &[&str] = &[
 // std crates
 "src/libpanic_abort",
@@ -54,10 +54,10 @@ const EXCEPTION_PATHS: &[&str] = &[
 "src/libstd/f64.rs",
 "src/libstd/sys_common/mod.rs",
 "src/libstd/sys_common/net.rs",
-"src/libterm", // Not sure how to make this crate portable, but test needs it
-"src/libtest", // Probably should defer to unstable std::sys APIs
+"src/libterm", // Not sure how to make this crate portable, but test crate needs it.
+"src/libtest", // Probably should defer to unstable `std::sys` APIs.
 
-// std testing crates, ok for now at least
+// std testing crates, okay for now at least
 "src/libcore/tests",
 "src/liballoc/tests/lib.rs",
 
@@ -79,7 +79,7 @@ const EXCEPTION_PATHS: &[&str] = &[
 
 pub fn check(path: &Path, bad: &mut bool) {
 let mut contents = String::new();
-// Sanity check that the complex parsing here works
+// Sanity check that the complex parsing here works.
 let mut saw_target_arch = false;
 let mut saw_cfg_bang = false;
 super::walk(path, &mut super::filter_dirs, &mut |file| {
@@ -104,7 +104,7 @@ fn check_cfgs(contents: &mut String, file: &Path,
 // For now it's ok to have platform-specific code after 'mod tests'.
 let mod_tests_idx = find_test_mod(contents);
 let contents = &contents[..mod_tests_idx];
-// Pull out all "cfg(...)" and "cfg!(...)" strings
+// Pull out all `cfg(...)` and `cfg!(...)` strings.
 let cfgs = parse_cfgs(contents);
 
 let mut line_numbers: Option<Vec<usize>> = None;
@@ -121,7 +121,7 @@ fn check_cfgs(contents: &mut String, file: &Path,
 };
 
 for (idx, cfg) in cfgs {
-// Sanity check that the parsing here works
+// Sanity check that the parsing here works.
 if !*saw_target_arch && cfg.contains("target_arch") { *saw_target_arch = true }
 if !*saw_cfg_bang && cfg.contains("cfg!") { *saw_cfg_bang = true }
 
@@ -153,7 +153,7 @@ fn check_cfgs(contents: &mut String, file: &Path,
 
 fn find_test_mod(contents: &str) -> usize {
 if let Some(mod_tests_idx) = contents.find("mod tests") {
-// Also capture a previous line indicating "mod tests" in cfg-ed out
+// Also capture a previous line indicating that "mod tests" is cfg'd out.
 let prev_newline_idx = contents[..mod_tests_idx].rfind('\n').unwrap_or(mod_tests_idx);
 let prev_newline_idx = contents[..prev_newline_idx].rfind('\n');
 if let Some(nl) = prev_newline_idx {
@@ -176,7 +176,7 @@ fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> {
 let candidate_cfgs = contents.match_indices("cfg");
 let candidate_cfg_idxs = candidate_cfgs.map(|(i, _)| i);
 // This is puling out the indexes of all "cfg" strings
-// that appear to be tokens succeeded by a paren.
+// that appear to be tokens followed by a parenthesis.
 let cfgs = candidate_cfg_idxs.filter(|i| {
 let pre_idx = i.saturating_sub(*i);
 let succeeds_non_ident = !contents.as_bytes().get(pre_idx)
@@ -2,12 +2,12 @@
 //!
 //! Example checks are:
 //!
-//! * No lines over 100 characters
-//! * No tabs
-//! * No trailing whitespace
-//! * No CR characters
-//! * No `TODO` or `XXX` directives
-//! * No unexplained ` ```ignore ` or ` ```rust,ignore ` doc tests
+//! * No lines over 100 characters.
+//! * No tabs.
+//! * No trailing whitespace.
+//! * No CR characters.
+//! * No `TODO` or `XXX` directives.
+//! * No unexplained ` ```ignore ` or ` ```rust,ignore ` doc tests.
 //!
 //! A number of these checks can be opted-out of with various directives like
 //! `// ignore-tidy-linelength`.
@@ -34,15 +34,17 @@ C++ code used llvm_unreachable, which triggers undefined behavior
 when executed when assertions are disabled.
 Use llvm::report_fatal_error for increased robustness.";
 
-/// Parser states for line_is_url.
+/// Parser states for `line_is_url`.
 #[derive(PartialEq)]
 #[allow(non_camel_case_types)]
-enum LIUState { EXP_COMMENT_START,
-EXP_LINK_LABEL_OR_URL,
-EXP_URL,
-EXP_END }
+enum LIUState {
+EXP_COMMENT_START,
+EXP_LINK_LABEL_OR_URL,
+EXP_URL,
+EXP_END,
+}
 
-/// True if LINE appears to be a line comment containing an URL,
+/// Returns whether `line` appears to be a line comment containing an URL,
 /// possibly with a Markdown link label in front, and nothing else.
 /// The Markdown link label, if present, may not contain whitespace.
 /// Lines of this form are allowed to be overlength, because Markdown
@@ -77,7 +79,7 @@ fn line_is_url(line: &str) -> bool {
 state == EXP_END
 }
 
-/// True if LINE is allowed to be longer than the normal limit.
+/// Returns whether `line` is allowed to be longer than the normal limit.
 /// Currently there is only one exception, for long URLs, but more
 /// may be added in the future.
 fn long_line_is_ok(line: &str) -> bool {
@@ -109,6 +111,7 @@ pub fn check(path: &Path, bad: &mut bool) {
 let skip_tab = contents.contains("ignore-tidy-tab");
 let skip_length = contents.contains("ignore-tidy-linelength");
 let skip_end_whitespace = contents.contains("ignore-tidy-end-whitespace");
+let skip_copyright = contents.contains("ignore-tidy-copyright");
 let mut trailing_new_lines = 0;
 for (i, line) in contents.split('\n').enumerate() {
 let mut err = |msg: &str| {
@@ -118,13 +121,13 @@ pub fn check(path: &Path, bad: &mut bool) {
 && !long_line_is_ok(line) {
 err(&format!("line longer than {} chars", COLS));
 }
-if line.contains('\t') && !skip_tab {
+if !skip_tab && line.contains('\t') {
 err("tab character");
 }
 if !skip_end_whitespace && (line.ends_with(' ') || line.ends_with('\t')) {
 err("trailing whitespace");
 }
-if line.contains('\r') && !skip_cr {
+if !skip_cr && line.contains('\r') {
 err("CR character");
 }
 if filename != "style.rs" {
@@ -135,6 +138,13 @@ pub fn check(path: &Path, bad: &mut bool) {
 err("XXX is deprecated; use FIXME")
 }
 }
+if !skip_copyright && (line.starts_with("// Copyright") ||
+line.starts_with("# Copyright") ||
+line.starts_with("Copyright"))
+&& (line.contains("Rust Developers") ||
+line.contains("Rust Project Developers")) {
+err("copyright notices attributed to the Rust Project Developers are deprecated");
+}
 if line.ends_with("```ignore") || line.ends_with("```rust,ignore") {
 err(UNEXPLAINED_IGNORE_DOCTEST_INFO);
 }
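The heart of the new lint added above is a single predicate over each line; restated as a stand-alone function with illustrative inputs (the `skip` flag plays the role of the `ignore-tidy-copyright` opt-out seen earlier in the hunk):

    // Stand-alone restatement of the copyright check from the diff, for illustration.
    fn is_deprecated_copyright(line: &str, skip: bool) -> bool {
        !skip
            && (line.starts_with("// Copyright")
                || line.starts_with("# Copyright")
                || line.starts_with("Copyright"))
            && (line.contains("Rust Developers") || line.contains("Rust Project Developers"))
    }

    fn main() {
        // Flagged: the old boilerplate header attributed to the Rust Project Developers.
        assert!(is_deprecated_copyright(
            "// Copyright 2018 The Rust Project Developers. See the COPYRIGHT",
            false,
        ));
        // Not flagged: third-party notices such as the rust-memchr one stay untouched.
        assert!(!is_deprecated_copyright(
            "// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch",
            false,
        ));
    }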
@@ -10,14 +10,16 @@ pub fn check(path: &Path, bad: &mut bool) {
 &mut |file_path| {
 if let Some(ext) = file_path.extension() {
 if ext == "stderr" || ext == "stdout" {
-// Test output filenames have the format:
+// Test output filenames have one of the formats:
+// ```
 // $testname.stderr
 // $testname.$mode.stderr
 // $testname.$revision.stderr
 // $testname.$revision.$mode.stderr
+// ```
 //
 // For now, just make sure that there is a corresponding
-// $testname.rs file.
+// `$testname.rs` file.
 let testname = file_path
 .file_name()
 .unwrap()
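As an aside, recovering `$testname` from any of the output filename patterns listed above amounts to taking everything before the first `.`; a minimal sketch under that assumption (not the tidy implementation):

    // Map a test output filename back to the `$testname.rs` source file it should
    // correspond to, per the naming scheme quoted in the comment above.
    fn expected_source(output_name: &str) -> String {
        let testname = output_name.split('.').next().unwrap_or(output_name);
        format!("{}.rs", testname)
    }

    fn main() {
        assert_eq!(expected_source("foo.stderr"), "foo.rs");
        assert_eq!(expected_source("foo.nll.stderr"), "foo.rs");
        assert_eq!(expected_source("foo.rev1.nll.stdout"), "foo.rs");
    }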
@@ -11,22 +11,24 @@ pub const LANG_FEATURES_DIR: &str = "language-features";
 
 pub const LIB_FEATURES_DIR: &str = "library-features";
 
-/// Build the path to the Unstable Book source directory from the Rust 'src' directory
+/// Builds the path to the Unstable Book source directory from the Rust 'src' directory.
 pub fn unstable_book_path(base_src_path: &path::Path) -> path::PathBuf {
 base_src_path.join(PATH_STR)
 }
 
-/// Directory where the features are documented within the Unstable Book source directory
+/// Builds the path to the directory where the features are documented within the Unstable Book
+/// source directory.
 pub fn unstable_book_lang_features_path(base_src_path: &path::Path) -> path::PathBuf {
 unstable_book_path(base_src_path).join(LANG_FEATURES_DIR)
 }
 
-/// Directory where the features are documented within the Unstable Book source directory
+/// Builds the path to the directory where the features are documented within the Unstable Book
+/// source directory.
 pub fn unstable_book_lib_features_path(base_src_path: &path::Path) -> path::PathBuf {
 unstable_book_path(base_src_path).join(LIB_FEATURES_DIR)
 }
 
-/// Test to determine if DirEntry is a file
+/// Tests whether `DirEntry` is a file.
 fn dir_entry_is_file(dir_entry: &fs::DirEntry) -> bool {
 dir_entry
 .file_type()
@@ -34,7 +36,7 @@ fn dir_entry_is_file(dir_entry: &fs::DirEntry) -> bool {
 .is_file()
 }
 
-/// Retrieve names of all unstable features
+/// Retrieves names of all unstable features.
 pub fn collect_unstable_feature_names(features: &Features) -> BTreeSet<String> {
 features
 .iter()
@@ -56,24 +58,23 @@ pub fn collect_unstable_book_section_file_names(dir: &path::Path) -> BTreeSet<St
 
 /// Retrieve file names of all library feature sections in the Unstable Book with:
 ///
-/// * hyphens replaced by underscores
-/// * the markdown suffix ('.md') removed
+/// * hyphens replaced by underscores,
+/// * the markdown suffix ('.md') removed.
 fn collect_unstable_book_lang_features_section_file_names(base_src_path: &path::Path)
 -> BTreeSet<String> {
 collect_unstable_book_section_file_names(&unstable_book_lang_features_path(base_src_path))
 }
 
-/// Retrieve file names of all language feature sections in the Unstable Book with:
+/// Retrieves file names of all language feature sections in the Unstable Book with:
 ///
-/// * hyphens replaced by underscores
-/// * the markdown suffix ('.md') removed
+/// * hyphens replaced by underscores,
+/// * the markdown suffix ('.md') removed.
 fn collect_unstable_book_lib_features_section_file_names(base_src_path: &path::Path)
 -> BTreeSet<String> {
 collect_unstable_book_section_file_names(&unstable_book_lib_features_path(base_src_path))
 }
 
 pub fn check(path: &path::Path, bad: &mut bool) {
-
 // Library features
 
 let lang_features = collect_lang_features(path, bad);
@@ -100,7 +101,7 @@ pub fn check(path: &path::Path, bad: &mut bool) {
 let unstable_book_lang_features_section_file_names =
 collect_unstable_book_lang_features_section_file_names(path);
 
-// Check for Unstable Book sections that don't have a corresponding unstable feature
+// Check for Unstable Book sections that don't have a corresponding unstable feature.
 for feature_name in &unstable_book_lang_features_section_file_names -
 &unstable_lang_feature_names {
 tidy_error!(bad,
@@ -109,8 +110,8 @@ pub fn check(path: &path::Path, bad: &mut bool) {
 feature_name)
 }
 
-// List unstable features that don't have Unstable Book sections
-// Remove the comment marker if you want the list printed
+// List unstable features that don't have Unstable Book sections.
+// Remove the comment marker if you want the list printed.
 /*
 println!("Lib features without unstable book sections:");
 for feature_name in &unstable_lang_feature_names -