mirror of
https://github.com/rust-lang/rust.git
synced 2024-11-25 16:24:46 +00:00
Revert "Auto merge of #89709 - clemenswasser:apply_clippy_suggestions_2, r=petrochenkov"
The PR had some unforeseen perf regressions that are not as easy to find. Revert the PR for now. This reverts commit 6ae8912a3e, reversing changes made to 86d6d2b738.
This commit is contained in:
parent
72d66064e7
commit
4457014398
@ -389,7 +389,6 @@ impl<S: Semantics> fmt::Display for IeeeFloat<S> {
|
|||||||
let _: Loss = sig::shift_right(&mut sig, &mut exp, trailing_zeros as usize);
|
let _: Loss = sig::shift_right(&mut sig, &mut exp, trailing_zeros as usize);
|
||||||
|
|
||||||
// Change the exponent from 2^e to 10^e.
|
// Change the exponent from 2^e to 10^e.
|
||||||
#[allow(clippy::comparison_chain)]
|
|
||||||
if exp == 0 {
|
if exp == 0 {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
} else if exp > 0 {
|
} else if exp > 0 {
|
||||||
@ -2527,7 +2526,6 @@ mod sig {
|
|||||||
if *a_sign ^ b_sign {
|
if *a_sign ^ b_sign {
|
||||||
let (reverse, loss);
|
let (reverse, loss);
|
||||||
|
|
||||||
#[allow(clippy::comparison_chain)]
|
|
||||||
if bits == 0 {
|
if bits == 0 {
|
||||||
reverse = cmp(a_sig, b_sig) == Ordering::Less;
|
reverse = cmp(a_sig, b_sig) == Ordering::Less;
|
||||||
loss = Loss::ExactlyZero;
|
loss = Loss::ExactlyZero;
|
||||||
|
@ -14,7 +14,7 @@ const BASE_64: &[u8; MAX_BASE as usize] =
|
|||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn push_str(mut n: u128, base: usize, output: &mut String) {
|
pub fn push_str(mut n: u128, base: usize, output: &mut String) {
|
||||||
debug_assert!((2..=MAX_BASE).contains(&base));
|
debug_assert!(base >= 2 && base <= MAX_BASE);
|
||||||
let mut s = [0u8; 128];
|
let mut s = [0u8; 128];
|
||||||
let mut index = 0;
|
let mut index = 0;
|
||||||
|
|
||||||
|
@ -206,11 +206,17 @@ impl<N: Debug, E: Debug> Graph<N, E> {
|
|||||||
AdjacentEdges { graph: self, direction, next: first_edge }
|
AdjacentEdges { graph: self, direction, next: first_edge }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
|
pub fn successor_nodes<'a>(
|
||||||
|
&'a self,
|
||||||
|
source: NodeIndex,
|
||||||
|
) -> impl Iterator<Item = NodeIndex> + 'a {
|
||||||
self.outgoing_edges(source).targets()
|
self.outgoing_edges(source).targets()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
|
pub fn predecessor_nodes<'a>(
|
||||||
|
&'a self,
|
||||||
|
target: NodeIndex,
|
||||||
|
) -> impl Iterator<Item = NodeIndex> + 'a {
|
||||||
self.incoming_edges(target).sources()
|
self.incoming_edges(target).sources()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -48,7 +48,7 @@ fn post_order_walk<G: DirectedGraph + WithSuccessors + WithNumNodes>(
|
|||||||
let node = frame.node;
|
let node = frame.node;
|
||||||
visited[node] = true;
|
visited[node] = true;
|
||||||
|
|
||||||
for successor in frame.iter.by_ref() {
|
while let Some(successor) = frame.iter.next() {
|
||||||
if !visited[successor] {
|
if !visited[successor] {
|
||||||
stack.push(PostOrderFrame { node: successor, iter: graph.successors(successor) });
|
stack.push(PostOrderFrame { node: successor, iter: graph.successors(successor) });
|
||||||
continue 'recurse;
|
continue 'recurse;
|
||||||
@ -112,7 +112,7 @@ where
|
|||||||
/// This is equivalent to just invoke `next` repeatedly until
|
/// This is equivalent to just invoke `next` repeatedly until
|
||||||
/// you get a `None` result.
|
/// you get a `None` result.
|
||||||
pub fn complete_search(&mut self) {
|
pub fn complete_search(&mut self) {
|
||||||
for _ in self {}
|
while let Some(_) = self.next() {}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns true if node has been visited thus far.
|
/// Returns true if node has been visited thus far.
|
||||||
|
@ -390,7 +390,7 @@ impl<O: ForestObligation> ObligationForest<O> {
|
|||||||
.map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) })
|
.map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) })
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
self.compress(|_| unreachable!());
|
self.compress(|_| assert!(false));
|
||||||
errors
|
errors
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -612,7 +612,7 @@ impl<O: ForestObligation> ObligationForest<O> {
|
|||||||
fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
|
fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
|
||||||
let orig_nodes_len = self.nodes.len();
|
let orig_nodes_len = self.nodes.len();
|
||||||
let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec);
|
let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec);
|
||||||
assert!(node_rewrites.is_empty());
|
debug_assert!(node_rewrites.is_empty());
|
||||||
node_rewrites.extend(0..orig_nodes_len);
|
node_rewrites.extend(0..orig_nodes_len);
|
||||||
let mut dead_nodes = 0;
|
let mut dead_nodes = 0;
|
||||||
|
|
||||||
@ -623,13 +623,13 @@ impl<O: ForestObligation> ObligationForest<O> {
|
|||||||
// self.nodes[0..index - dead_nodes] are the first remaining nodes
|
// self.nodes[0..index - dead_nodes] are the first remaining nodes
|
||||||
// self.nodes[index - dead_nodes..index] are all dead
|
// self.nodes[index - dead_nodes..index] are all dead
|
||||||
// self.nodes[index..] are unchanged
|
// self.nodes[index..] are unchanged
|
||||||
for (index, node_rewrite) in node_rewrites.iter_mut().enumerate() {
|
for index in 0..orig_nodes_len {
|
||||||
let node = &self.nodes[index];
|
let node = &self.nodes[index];
|
||||||
match node.state.get() {
|
match node.state.get() {
|
||||||
NodeState::Pending | NodeState::Waiting => {
|
NodeState::Pending | NodeState::Waiting => {
|
||||||
if dead_nodes > 0 {
|
if dead_nodes > 0 {
|
||||||
self.nodes.swap(index, index - dead_nodes);
|
self.nodes.swap(index, index - dead_nodes);
|
||||||
*node_rewrite -= dead_nodes;
|
node_rewrites[index] -= dead_nodes;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
NodeState::Done => {
|
NodeState::Done => {
|
||||||
@ -646,7 +646,7 @@ impl<O: ForestObligation> ObligationForest<O> {
|
|||||||
}
|
}
|
||||||
// Extract the success stories.
|
// Extract the success stories.
|
||||||
outcome_cb(&node.obligation);
|
outcome_cb(&node.obligation);
|
||||||
*node_rewrite = orig_nodes_len;
|
node_rewrites[index] = orig_nodes_len;
|
||||||
dead_nodes += 1;
|
dead_nodes += 1;
|
||||||
}
|
}
|
||||||
NodeState::Error => {
|
NodeState::Error => {
|
||||||
@ -655,7 +655,7 @@ impl<O: ForestObligation> ObligationForest<O> {
|
|||||||
// check against.
|
// check against.
|
||||||
self.active_cache.remove(&node.obligation.as_cache_key());
|
self.active_cache.remove(&node.obligation.as_cache_key());
|
||||||
self.insert_into_error_cache(index);
|
self.insert_into_error_cache(index);
|
||||||
*node_rewrite = orig_nodes_len;
|
node_rewrites[index] = orig_nodes_len;
|
||||||
dead_nodes += 1;
|
dead_nodes += 1;
|
||||||
}
|
}
|
||||||
NodeState::Success => unreachable!(),
|
NodeState::Success => unreachable!(),
|
||||||
|
@ -205,10 +205,10 @@ impl<K: Ord, V> SortedMap<K, V> {
|
|||||||
R: RangeBounds<K>,
|
R: RangeBounds<K>,
|
||||||
{
|
{
|
||||||
let start = match range.start_bound() {
|
let start = match range.start_bound() {
|
||||||
Bound::Included(k) => match self.lookup_index_for(k) {
|
Bound::Included(ref k) => match self.lookup_index_for(k) {
|
||||||
Ok(index) | Err(index) => index,
|
Ok(index) | Err(index) => index,
|
||||||
},
|
},
|
||||||
Bound::Excluded(k) => match self.lookup_index_for(k) {
|
Bound::Excluded(ref k) => match self.lookup_index_for(k) {
|
||||||
Ok(index) => index + 1,
|
Ok(index) => index + 1,
|
||||||
Err(index) => index,
|
Err(index) => index,
|
||||||
},
|
},
|
||||||
@ -216,11 +216,11 @@ impl<K: Ord, V> SortedMap<K, V> {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let end = match range.end_bound() {
|
let end = match range.end_bound() {
|
||||||
Bound::Included(k) => match self.lookup_index_for(k) {
|
Bound::Included(ref k) => match self.lookup_index_for(k) {
|
||||||
Ok(index) => index + 1,
|
Ok(index) => index + 1,
|
||||||
Err(index) => index,
|
Err(index) => index,
|
||||||
},
|
},
|
||||||
Bound::Excluded(k) => match self.lookup_index_for(k) {
|
Bound::Excluded(ref k) => match self.lookup_index_for(k) {
|
||||||
Ok(index) | Err(index) => index,
|
Ok(index) | Err(index) => index,
|
||||||
},
|
},
|
||||||
Bound::Unbounded => self.data.len(),
|
Bound::Unbounded => self.data.len(),
|
||||||
|
@ -75,7 +75,7 @@ impl<I: Idx, K: Ord, V> SortedIndexMultiMap<I, K, V> {
|
|||||||
///
|
///
|
||||||
/// If there are multiple items that are equivalent to `key`, they will be yielded in
|
/// If there are multiple items that are equivalent to `key`, they will be yielded in
|
||||||
/// insertion order.
|
/// insertion order.
|
||||||
pub fn get_by_key(&self, key: K) -> impl Iterator<Item = &V> {
|
pub fn get_by_key(&'a self, key: K) -> impl 'a + Iterator<Item = &'a V> {
|
||||||
self.get_by_key_enumerated(key).map(|(_, v)| v)
|
self.get_by_key_enumerated(key).map(|(_, v)| v)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -84,7 +84,7 @@ impl<I: Idx, K: Ord, V> SortedIndexMultiMap<I, K, V> {
|
|||||||
///
|
///
|
||||||
/// If there are multiple items that are equivalent to `key`, they will be yielded in
|
/// If there are multiple items that are equivalent to `key`, they will be yielded in
|
||||||
/// insertion order.
|
/// insertion order.
|
||||||
pub fn get_by_key_enumerated(&self, key: K) -> impl Iterator<Item = (I, &V)> {
|
pub fn get_by_key_enumerated(&'a self, key: K) -> impl '_ + Iterator<Item = (I, &V)> {
|
||||||
let lower_bound = self.idx_sorted_by_item_key.partition_point(|&i| self.items[i].0 < key);
|
let lower_bound = self.idx_sorted_by_item_key.partition_point(|&i| self.items[i].0 < key);
|
||||||
self.idx_sorted_by_item_key[lower_bound..].iter().map_while(move |&i| {
|
self.idx_sorted_by_item_key[lower_bound..].iter().map_while(move |&i| {
|
||||||
let (k, v) = &self.items[i];
|
let (k, v) = &self.items[i];
|
||||||
|
@ -257,7 +257,11 @@ impl<K: Eq + Hash, V> SsoHashMap<K, V> {
|
|||||||
pub fn remove(&mut self, key: &K) -> Option<V> {
|
pub fn remove(&mut self, key: &K) -> Option<V> {
|
||||||
match self {
|
match self {
|
||||||
SsoHashMap::Array(array) => {
|
SsoHashMap::Array(array) => {
|
||||||
array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index).1)
|
if let Some(index) = array.iter().position(|(k, _v)| k == key) {
|
||||||
|
Some(array.swap_remove(index).1)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
}
|
}
|
||||||
SsoHashMap::Map(map) => map.remove(key),
|
SsoHashMap::Map(map) => map.remove(key),
|
||||||
}
|
}
|
||||||
@ -268,7 +272,11 @@ impl<K: Eq + Hash, V> SsoHashMap<K, V> {
|
|||||||
pub fn remove_entry(&mut self, key: &K) -> Option<(K, V)> {
|
pub fn remove_entry(&mut self, key: &K) -> Option<(K, V)> {
|
||||||
match self {
|
match self {
|
||||||
SsoHashMap::Array(array) => {
|
SsoHashMap::Array(array) => {
|
||||||
array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index))
|
if let Some(index) = array.iter().position(|(k, _v)| k == key) {
|
||||||
|
Some(array.swap_remove(index))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
}
|
}
|
||||||
SsoHashMap::Map(map) => map.remove_entry(key),
|
SsoHashMap::Map(map) => map.remove_entry(key),
|
||||||
}
|
}
|
||||||
@ -415,14 +423,14 @@ impl<K, V> IntoIterator for SsoHashMap<K, V> {
|
|||||||
|
|
||||||
/// adapts Item of array reference iterator to Item of hashmap reference iterator.
|
/// adapts Item of array reference iterator to Item of hashmap reference iterator.
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
fn adapt_array_ref_it<K, V>(pair: &(K, V)) -> (&K, &V) {
|
fn adapt_array_ref_it<K, V>(pair: &'a (K, V)) -> (&'a K, &'a V) {
|
||||||
let (a, b) = pair;
|
let (a, b) = pair;
|
||||||
(a, b)
|
(a, b)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// adapts Item of array mut reference iterator to Item of hashmap mut reference iterator.
|
/// adapts Item of array mut reference iterator to Item of hashmap mut reference iterator.
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
fn adapt_array_mut_it<K, V>(pair: &mut (K, V)) -> (&K, &mut V) {
|
fn adapt_array_mut_it<K, V>(pair: &'a mut (K, V)) -> (&'a K, &'a mut V) {
|
||||||
let (a, b) = pair;
|
let (a, b) = pair;
|
||||||
(a, b)
|
(a, b)
|
||||||
}
|
}
|
||||||
|
@ -75,7 +75,7 @@ impl<T> SsoHashSet<T> {
|
|||||||
/// An iterator visiting all elements in arbitrary order.
|
/// An iterator visiting all elements in arbitrary order.
|
||||||
/// The iterator element type is `&'a T`.
|
/// The iterator element type is `&'a T`.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn iter(&self) -> impl Iterator<Item = &T> {
|
pub fn iter(&'a self) -> impl Iterator<Item = &'a T> {
|
||||||
self.into_iter()
|
self.into_iter()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -229,14 +229,14 @@ impl<CTX> HashStable<CTX> for ::std::num::NonZeroUsize {
|
|||||||
|
|
||||||
impl<CTX> HashStable<CTX> for f32 {
|
impl<CTX> HashStable<CTX> for f32 {
|
||||||
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
|
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
|
||||||
let val: u32 = self.to_bits();
|
let val: u32 = unsafe { ::std::mem::transmute(*self) };
|
||||||
val.hash_stable(ctx, hasher);
|
val.hash_stable(ctx, hasher);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<CTX> HashStable<CTX> for f64 {
|
impl<CTX> HashStable<CTX> for f64 {
|
||||||
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
|
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
|
||||||
let val: u64 = self.to_bits();
|
let val: u64 = unsafe { ::std::mem::transmute(*self) };
|
||||||
val.hash_stable(ctx, hasher);
|
val.hash_stable(ctx, hasher);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -5,7 +5,6 @@ const RED_ZONE: usize = 100 * 1024; // 100k
|
|||||||
|
|
||||||
// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
|
// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
|
||||||
// on. This flag has performance relevant characteristics. Don't set it too high.
|
// on. This flag has performance relevant characteristics. Don't set it too high.
|
||||||
#[allow(clippy::identity_op)]
|
|
||||||
const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
|
const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
|
||||||
|
|
||||||
/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
|
/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
|
||||||
|
@ -34,7 +34,7 @@ impl<T> Steal<T> {
|
|||||||
#[track_caller]
|
#[track_caller]
|
||||||
pub fn borrow(&self) -> MappedReadGuard<'_, T> {
|
pub fn borrow(&self) -> MappedReadGuard<'_, T> {
|
||||||
let borrow = self.value.borrow();
|
let borrow = self.value.borrow();
|
||||||
if borrow.is_none() {
|
if let None = &*borrow {
|
||||||
panic!("attempted to read from stolen value: {}", std::any::type_name::<T>());
|
panic!("attempted to read from stolen value: {}", std::any::type_name::<T>());
|
||||||
}
|
}
|
||||||
ReadGuard::map(borrow, |opt| opt.as_ref().unwrap())
|
ReadGuard::map(borrow, |opt| opt.as_ref().unwrap())
|
||||||
|
@ -48,7 +48,7 @@ impl<T: PartialEq> TinyList<T> {
|
|||||||
#[inline]
|
#[inline]
|
||||||
pub fn contains(&self, data: &T) -> bool {
|
pub fn contains(&self, data: &T) -> bool {
|
||||||
let mut elem = self.head.as_ref();
|
let mut elem = self.head.as_ref();
|
||||||
while let Some(e) = elem {
|
while let Some(ref e) = elem {
|
||||||
if &e.data == data {
|
if &e.data == data {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
@ -2,8 +2,8 @@ use rustc_index::vec::{Idx, IndexVec};
|
|||||||
|
|
||||||
pub fn iter<Ls>(
|
pub fn iter<Ls>(
|
||||||
first: Option<Ls::LinkIndex>,
|
first: Option<Ls::LinkIndex>,
|
||||||
links: &Ls,
|
links: &'a Ls,
|
||||||
) -> impl Iterator<Item = Ls::LinkIndex> + '_
|
) -> impl Iterator<Item = Ls::LinkIndex> + 'a
|
||||||
where
|
where
|
||||||
Ls: Links,
|
Ls: Links,
|
||||||
{
|
{
|
||||||
|
@ -512,7 +512,7 @@ impl<'a> LabelText<'a> {
|
|||||||
pub fn to_dot_string(&self) -> String {
|
pub fn to_dot_string(&self) -> String {
|
||||||
match *self {
|
match *self {
|
||||||
LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
|
LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
|
||||||
EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(s)),
|
EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
|
||||||
HtmlStr(ref s) => format!("<{}>", s),
|
HtmlStr(ref s) => format!("<{}>", s),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -990,8 +990,9 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> {
|
|||||||
pub fn insert_all_into_row(&mut self, row: R) {
|
pub fn insert_all_into_row(&mut self, row: R) {
|
||||||
assert!(row.index() < self.num_rows);
|
assert!(row.index() < self.num_rows);
|
||||||
let (start, end) = self.range(row);
|
let (start, end) = self.range(row);
|
||||||
for word in self.words[start..end].iter_mut() {
|
let words = &mut self.words[..];
|
||||||
*word = !0;
|
for index in start..end {
|
||||||
|
words[index] = !0;
|
||||||
}
|
}
|
||||||
self.clear_excess_bits(row);
|
self.clear_excess_bits(row);
|
||||||
}
|
}
|
||||||
@ -1143,7 +1144,7 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
|
|||||||
|
|
||||||
/// Iterates through all the columns set to true in a given row of
|
/// Iterates through all the columns set to true in a given row of
|
||||||
/// the matrix.
|
/// the matrix.
|
||||||
pub fn iter(&self, row: R) -> impl Iterator<Item = C> + '_ {
|
pub fn iter<'a>(&'a self, row: R) -> impl Iterator<Item = C> + 'a {
|
||||||
self.row(row).into_iter().flat_map(|r| r.iter())
|
self.row(row).into_iter().flat_map(|r| r.iter())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -634,15 +634,18 @@ impl<I: Idx, T> IndexVec<I, T> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn drain<R: RangeBounds<usize>>(&mut self, range: R) -> impl Iterator<Item = T> + '_ {
|
pub fn drain<'a, R: RangeBounds<usize>>(
|
||||||
|
&'a mut self,
|
||||||
|
range: R,
|
||||||
|
) -> impl Iterator<Item = T> + 'a {
|
||||||
self.raw.drain(range)
|
self.raw.drain(range)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn drain_enumerated<R: RangeBounds<usize>>(
|
pub fn drain_enumerated<'a, R: RangeBounds<usize>>(
|
||||||
&mut self,
|
&'a mut self,
|
||||||
range: R,
|
range: R,
|
||||||
) -> impl Iterator<Item = (I, T)> + '_ {
|
) -> impl Iterator<Item = (I, T)> + 'a {
|
||||||
self.raw.drain(range).enumerate().map(|(n, t)| (I::new(n), t))
|
self.raw.drain(range).enumerate().map(|(n, t)| (I::new(n), t))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -68,10 +68,11 @@ pub enum EscapeError {
|
|||||||
impl EscapeError {
|
impl EscapeError {
|
||||||
/// Returns true for actual errors, as opposed to warnings.
|
/// Returns true for actual errors, as opposed to warnings.
|
||||||
pub fn is_fatal(&self) -> bool {
|
pub fn is_fatal(&self) -> bool {
|
||||||
!matches!(
|
match self {
|
||||||
self,
|
EscapeError::UnskippedWhitespaceWarning => false,
|
||||||
EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning
|
EscapeError::MultipleSkippedLinesWarning => false,
|
||||||
)
|
_ => true,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -329,7 +330,7 @@ where
|
|||||||
callback(start..end, Err(EscapeError::MultipleSkippedLinesWarning));
|
callback(start..end, Err(EscapeError::MultipleSkippedLinesWarning));
|
||||||
}
|
}
|
||||||
let tail = &tail[first_non_space..];
|
let tail = &tail[first_non_space..];
|
||||||
if let Some(c) = tail.chars().next() {
|
if let Some(c) = tail.chars().nth(0) {
|
||||||
// For error reporting, we would like the span to contain the character that was not
|
// For error reporting, we would like the span to contain the character that was not
|
||||||
// skipped. The +1 is necessary to account for the leading \ that started the escape.
|
// skipped. The +1 is necessary to account for the leading \ that started the escape.
|
||||||
let end = start + first_non_space + c.len_utf8() + 1;
|
let end = start + first_non_space + c.len_utf8() + 1;
|
||||||
|
@ -24,9 +24,11 @@ fn parse_attributes(field: &syn::Field) -> Attributes {
|
|||||||
}
|
}
|
||||||
if meta.path().is_ident("project") {
|
if meta.path().is_ident("project") {
|
||||||
if let Meta::List(list) = meta {
|
if let Meta::List(list) = meta {
|
||||||
if let Some(NestedMeta::Meta(meta)) = list.nested.iter().next() {
|
if let Some(nested) = list.nested.iter().next() {
|
||||||
attrs.project = meta.path().get_ident().cloned();
|
if let NestedMeta::Meta(meta) = nested {
|
||||||
any_attr = true;
|
attrs.project = meta.path().get_ident().cloned();
|
||||||
|
any_attr = true;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -349,14 +349,14 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
|
|||||||
) -> Result<proc_macro2::TokenStream, SessionDiagnosticDeriveError> {
|
) -> Result<proc_macro2::TokenStream, SessionDiagnosticDeriveError> {
|
||||||
let field_binding = &info.binding.binding;
|
let field_binding = &info.binding.binding;
|
||||||
|
|
||||||
let option_ty = option_inner_ty(info.ty);
|
let option_ty = option_inner_ty(&info.ty);
|
||||||
|
|
||||||
let generated_code = self.generate_non_option_field_code(
|
let generated_code = self.generate_non_option_field_code(
|
||||||
attr,
|
attr,
|
||||||
FieldInfo {
|
FieldInfo {
|
||||||
vis: info.vis,
|
vis: info.vis,
|
||||||
binding: info.binding,
|
binding: info.binding,
|
||||||
ty: option_ty.unwrap_or(info.ty),
|
ty: option_ty.unwrap_or(&info.ty),
|
||||||
span: info.span,
|
span: info.span,
|
||||||
},
|
},
|
||||||
)?;
|
)?;
|
||||||
@ -388,7 +388,7 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
|
|||||||
let formatted_str = self.build_format(&s.value(), attr.span());
|
let formatted_str = self.build_format(&s.value(), attr.span());
|
||||||
match name {
|
match name {
|
||||||
"message" => {
|
"message" => {
|
||||||
if type_matches_path(info.ty, &["rustc_span", "Span"]) {
|
if type_matches_path(&info.ty, &["rustc_span", "Span"]) {
|
||||||
quote! {
|
quote! {
|
||||||
#diag.set_span(*#field_binding);
|
#diag.set_span(*#field_binding);
|
||||||
#diag.set_primary_message(#formatted_str);
|
#diag.set_primary_message(#formatted_str);
|
||||||
@ -401,7 +401,7 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
"label" => {
|
"label" => {
|
||||||
if type_matches_path(info.ty, &["rustc_span", "Span"]) {
|
if type_matches_path(&info.ty, &["rustc_span", "Span"]) {
|
||||||
quote! {
|
quote! {
|
||||||
#diag.span_label(*#field_binding, #formatted_str);
|
#diag.span_label(*#field_binding, #formatted_str);
|
||||||
}
|
}
|
||||||
|
@ -500,8 +500,8 @@ impl<D: Decoder, const N: usize> Decodable<D> for [u8; N] {
|
|||||||
d.read_seq(|d, len| {
|
d.read_seq(|d, len| {
|
||||||
assert!(len == N);
|
assert!(len == N);
|
||||||
let mut v = [0u8; N];
|
let mut v = [0u8; N];
|
||||||
for x in &mut v {
|
for i in 0..len {
|
||||||
*x = d.read_seq_elt(|d| Decodable::decode(d))?;
|
v[i] = d.read_seq_elt(|d| Decodable::decode(d))?;
|
||||||
}
|
}
|
||||||
Ok(v)
|
Ok(v)
|
||||||
})
|
})
|
||||||
|
@ -31,10 +31,8 @@ impl<'a> Part<'a> {
|
|||||||
} else {
|
} else {
|
||||||
3
|
3
|
||||||
}
|
}
|
||||||
} else if v < 10_000 {
|
|
||||||
4
|
|
||||||
} else {
|
} else {
|
||||||
5
|
if v < 10_000 { 4 } else { 5 }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Part::Copy(buf) => buf.len(),
|
Part::Copy(buf) => buf.len(),
|
||||||
|
Loading…
Reference in New Issue
Block a user