rustc_metadata: replace LazySeq<T> with Lazy<[T]>.

Eduard-Mihai Burtescu 2019-05-22 13:30:07 +03:00
parent 7858dc237d
commit e7ceaa9748
5 changed files with 302 additions and 297 deletions
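For orientation before the per-file hunks: the change collapses the old pair `Lazy<T>` / `LazySeq<T>` into a single `Lazy<T: ?Sized>` whose extra metadata depends on `T` (nothing for sized values, an element count for slices). Below is a minimal, self-contained sketch of that shape — toy definitions for illustration only, not the rustc ones; the real versions in schema.rs further down also carry `min_size` estimates, a variance-dodging `Meta` parameter, and (de)serialization impls.

```rust
use std::marker::PhantomData;

// What extra data a `Lazy<T>` has to carry besides its position.
trait LazyMeta {
    type Meta: Copy + 'static;
}

// Sized values need nothing extra...
impl<T> LazyMeta for T {
    type Meta = ();
}

// ...while slices carry their element count.
impl<T> LazyMeta for [T] {
    type Meta = usize;
}

// One reference type covers both of the old ones:
// `Lazy<String>` plays the old `Lazy<T>` role, `Lazy<[u32]>` the old `LazySeq<T>` role.
struct Lazy<T: ?Sized + LazyMeta> {
    position: usize,
    meta: T::Meta,
    _marker: PhantomData<T>,
}

impl<T: ?Sized + LazyMeta> Lazy<T> {
    fn from_position_and_meta(position: usize, meta: T::Meta) -> Self {
        Lazy { position, meta, _marker: PhantomData }
    }
}

impl<T: LazyMeta<Meta = ()>> Lazy<T> {
    fn from_position(position: usize) -> Self {
        Lazy::from_position_and_meta(position, ())
    }
}

impl<T> Lazy<[T]> {
    fn empty() -> Self {
        Lazy::from_position_and_meta(0, 0)
    }
}

fn main() {
    let name: Lazy<String> = Lazy::from_position(100); // one value at byte 100
    let args: Lazy<[u32]> = Lazy::from_position_and_meta(200, 3); // 3 values at byte 200
    let none: Lazy<[u32]> = Lazy::empty();
    assert_eq!((name.position, args.meta, none.meta), (100, 3, 0));
}
```

Call sites change accordingly: `self.lazy_seq(iter)` and `LazySeq::empty()` become `self.lazy(iter)` and `Lazy::empty()`, as seen throughout encoder.rs below.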

src/librustc_metadata/cstore.rs

@ -68,9 +68,9 @@ pub struct CrateMetadata {
pub alloc_decoding_state: AllocDecodingState,
// NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this
// lifetime is only used behind `Lazy` / `LazySeq`, and therefore
// acts like an universal (`for<'tcx>`), that is paired up with
// whichever `TyCtxt` is being used to decode those values.
// lifetime is only used behind `Lazy`, and therefore acts like a
// universal (`for<'tcx>`), that is paired up with whichever `TyCtxt`
// is being used to decode those values.
pub root: schema::CrateRoot<'static>,
/// For each definition in this crate, we encode a key. When the
@ -80,7 +80,7 @@ pub struct CrateMetadata {
/// compilation support.
pub def_path_table: Lrc<DefPathTable>,
pub trait_impls: FxHashMap<(u32, DefIndex), schema::LazySeq<DefIndex>>,
pub trait_impls: FxHashMap<(u32, DefIndex), schema::Lazy<[DefIndex]>>,
pub dep_kind: Lock<DepKind>,
pub source: CrateSource,

src/librustc_metadata/decoder.rs

@ -134,14 +134,14 @@ impl<'a, 'tcx, T: Decodable> Lazy<T> {
}
}
impl<'a: 'x, 'tcx: 'x, 'x, T: Decodable> LazySeq<T> {
impl<'a: 'x, 'tcx: 'x, 'x, T: Decodable> Lazy<[T]> {
pub fn decode<M: Metadata<'a, 'tcx>>(
self,
meta: M,
) -> impl ExactSizeIterator<Item = T> + Captures<'a> + Captures<'tcx> + 'x {
let mut dcx = meta.decoder(self.position);
dcx.lazy_state = LazyState::NodeStart(self.position);
(0..self.len).map(move |_| T::decode(&mut dcx).unwrap())
(0..self.meta).map(move |_| T::decode(&mut dcx).unwrap())
}
}
@ -154,10 +154,14 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
self.cdata.expect("missing CrateMetadata in DecodeContext")
}
fn read_lazy_distance(&mut self, min_size: usize) -> Result<usize, <Self as Decoder>::Error> {
fn read_lazy_with_meta<T: ?Sized + LazyMeta>(
&mut self,
meta: T::Meta,
) -> Result<Lazy<T>, <Self as Decoder>::Error> {
let min_size = T::min_size(meta);
let distance = self.read_usize()?;
let position = match self.lazy_state {
LazyState::NoNode => bug!("read_lazy_distance: outside of a metadata node"),
LazyState::NoNode => bug!("read_lazy_with_meta: outside of a metadata node"),
LazyState::NodeStart(start) => {
assert!(distance + min_size <= start);
start - distance - min_size
@ -165,7 +169,7 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
LazyState::Previous(last_min_end) => last_min_end + distance,
};
self.lazy_state = LazyState::Previous(position + min_size);
Ok(position)
Ok(Lazy::from_position_and_meta(position, meta))
}
}
@ -230,19 +234,18 @@ impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> {
impl<'a, 'tcx, T> SpecializedDecoder<Lazy<T>> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<Lazy<T>, Self::Error> {
Ok(Lazy::with_position(self.read_lazy_distance(Lazy::<T>::min_size())?))
self.read_lazy_with_meta(())
}
}
impl<'a, 'tcx, T> SpecializedDecoder<LazySeq<T>> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<LazySeq<T>, Self::Error> {
impl<'a, 'tcx, T> SpecializedDecoder<Lazy<[T]>> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<Lazy<[T]>, Self::Error> {
let len = self.read_usize()?;
let position = if len == 0 {
0
if len == 0 {
Ok(Lazy::empty())
} else {
self.read_lazy_distance(LazySeq::<T>::min_size(len))?
};
Ok(LazySeq::with_position_and_length(position, len))
self.read_lazy_with_meta(len)
}
}
}
@ -378,7 +381,7 @@ impl<'tcx> MetadataBlob {
}
pub fn get_rustc_version(&self) -> String {
Lazy::with_position(METADATA_HEADER.len() + 4).decode(self)
Lazy::<String>::from_position(METADATA_HEADER.len() + 4).decode(self)
}
pub fn get_root(&self) -> CrateRoot<'tcx> {
@ -387,7 +390,7 @@ impl<'tcx> MetadataBlob {
let pos = (((slice[offset + 0] as u32) << 24) | ((slice[offset + 1] as u32) << 16) |
((slice[offset + 2] as u32) << 8) |
((slice[offset + 3] as u32) << 0)) as usize;
Lazy::with_position(pos).decode(self)
Lazy::<CrateRoot<'tcx>>::from_position(pos).decode(self)
}
pub fn list_crate_metadata(&self,
@ -1140,7 +1143,7 @@ impl<'a, 'tcx> CrateMetadata {
EntryKind::Fn(data) |
EntryKind::ForeignFn(data) => data.decode(self).arg_names,
EntryKind::Method(data) => data.decode(self).fn_data.arg_names,
_ => LazySeq::empty(),
_ => Lazy::empty(),
};
arg_names.decode(self).collect()
}

src/librustc_metadata/encoder.rs

@ -98,17 +98,17 @@ impl<'tcx> Encoder for EncodeContext<'tcx> {
impl<'tcx, T> SpecializedEncoder<Lazy<T>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, lazy: &Lazy<T>) -> Result<(), Self::Error> {
self.emit_lazy_distance(lazy.position, Lazy::<T>::min_size())
self.emit_lazy_distance(*lazy)
}
}
impl<'tcx, T> SpecializedEncoder<LazySeq<T>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, seq: &LazySeq<T>) -> Result<(), Self::Error> {
self.emit_usize(seq.len)?;
if seq.len == 0 {
impl<'tcx, T> SpecializedEncoder<Lazy<[T]>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, lazy: &Lazy<[T]>) -> Result<(), Self::Error> {
self.emit_usize(lazy.meta)?;
if lazy.meta == 0 {
return Ok(());
}
self.emit_lazy_distance(seq.position, LazySeq::<T>::min_size(seq.len))
self.emit_lazy_distance(*lazy)
}
}
@ -239,21 +239,38 @@ impl<'tcx> TyEncoder for EncodeContext<'tcx> {
}
}
impl<'tcx> EncodeContext<'tcx> {
fn emit_node<F: FnOnce(&mut Self, usize) -> R, R>(&mut self, f: F) -> R {
assert_eq!(self.lazy_state, LazyState::NoNode);
let pos = self.position();
self.lazy_state = LazyState::NodeStart(pos);
let r = f(self, pos);
self.lazy_state = LazyState::NoNode;
r
}
/// Helper trait to allow overloading `EncodeContext::lazy` for iterators.
trait EncodeContentsForLazy<T: ?Sized + LazyMeta> {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) -> T::Meta;
}
fn emit_lazy_distance(&mut self,
position: usize,
min_size: usize)
-> Result<(), <Self as Encoder>::Error> {
let min_end = position + min_size;
impl<T: Encodable> EncodeContentsForLazy<T> for &T {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) {
self.encode(ecx).unwrap()
}
}
impl<T: Encodable> EncodeContentsForLazy<T> for T {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) {
self.encode(ecx).unwrap()
}
}
impl<I, T> EncodeContentsForLazy<[T]> for I
where I: IntoIterator,
I::Item: EncodeContentsForLazy<T>,
{
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) -> usize {
self.into_iter().map(|value| value.encode_contents_for_lazy(ecx)).count()
}
}
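The three impls above are what let the new `lazy` method below accept an owned value, a reference, or any iterator, and get back the right `Meta` (`()` or a length). Here is a simplified, runnable sketch of the same overloading trick — the meta type is a plain type parameter instead of `T::Meta`, and the "encoding" just appends bytes to a `Vec`, so none of this is the actual rustc API.

```rust
// Toy "encoder": the output is just a byte buffer.
struct Ctx {
    out: Vec<u8>,
}

// Anything that can write itself into the buffer and report its metadata.
trait EncodeContents<Meta> {
    fn encode_contents(self, ctx: &mut Ctx) -> Meta;
}

// A single value, owned or borrowed, has no extra metadata.
impl EncodeContents<()> for u32 {
    fn encode_contents(self, ctx: &mut Ctx) {
        ctx.out.extend_from_slice(&self.to_le_bytes());
    }
}
impl EncodeContents<()> for &u32 {
    fn encode_contents(self, ctx: &mut Ctx) {
        (*self).encode_contents(ctx)
    }
}

// Any iterator of such values reports how many elements it wrote.
impl<I> EncodeContents<usize> for I
where
    I: IntoIterator,
    I::Item: EncodeContents<()>,
{
    fn encode_contents(self, ctx: &mut Ctx) -> usize {
        self.into_iter().map(|v| v.encode_contents(ctx)).count()
    }
}

impl Ctx {
    // One entry point for all three call shapes, like `EncodeContext::lazy` below:
    // note where the value starts, write it, and return (position, meta).
    fn lazy<M>(&mut self, value: impl EncodeContents<M>) -> (usize, M) {
        let pos = self.out.len();
        let meta = value.encode_contents(self);
        (pos, meta)
    }
}

fn main() {
    let mut ctx = Ctx { out: Vec::new() };
    let (p1, ()) = ctx.lazy(7u32);                               // owned value
    let (p2, ()) = ctx.lazy(&8u32);                              // reference
    let (p3, len): (usize, usize) = ctx.lazy(vec![1u32, 2, 3]);  // iterator => element count
    assert_eq!((p1, p2, p3, len), (0, 4, 8, 3));
}
```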
impl<'tcx> EncodeContext<'tcx> {
fn emit_lazy_distance<T: ?Sized + LazyMeta>(
&mut self,
lazy: Lazy<T>,
) -> Result<(), <Self as Encoder>::Error> {
let min_end = lazy.position + T::min_size(lazy.meta);
let distance = match self.lazy_state {
LazyState::NoNode => bug!("emit_lazy_distance: outside of a metadata node"),
LazyState::NodeStart(start) => {
@ -262,48 +279,31 @@ impl<'tcx> EncodeContext<'tcx> {
}
LazyState::Previous(last_min_end) => {
assert!(
last_min_end <= position,
last_min_end <= lazy.position,
"make sure that the calls to `lazy*` \
are in the same order as the metadata fields",
);
position - last_min_end
lazy.position - last_min_end
}
};
self.lazy_state = LazyState::Previous(min_end);
self.emit_usize(distance)
}
pub fn lazy<T: Encodable>(&mut self, value: &T) -> Lazy<T> {
self.emit_node(|ecx, pos| {
value.encode(ecx).unwrap();
fn lazy<T: ?Sized + LazyMeta>(
&mut self,
value: impl EncodeContentsForLazy<T>,
) -> Lazy<T> {
let pos = self.position();
assert!(pos + Lazy::<T>::min_size() <= ecx.position());
Lazy::with_position(pos)
})
}
assert_eq!(self.lazy_state, LazyState::NoNode);
self.lazy_state = LazyState::NodeStart(pos);
let meta = value.encode_contents_for_lazy(self);
self.lazy_state = LazyState::NoNode;
pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = T>,
T: Encodable
{
self.emit_node(|ecx, pos| {
let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count();
assert!(pos + <T>::min_size(meta) <= self.position());
assert!(pos + LazySeq::<T>::min_size(len) <= ecx.position());
LazySeq::with_position_and_length(pos, len)
})
}
pub fn lazy_seq_ref<'b, I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = &'b T>,
T: 'b + Encodable
{
self.emit_node(|ecx, pos| {
let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count();
assert!(pos + LazySeq::<T>::min_size(len) <= ecx.position());
LazySeq::with_position_and_length(pos, len)
})
Lazy::from_position_and_meta(pos, meta)
}
/// Emit the data for a `DefId` to the metadata. The function to
@ -320,7 +320,7 @@ impl<'tcx> EncodeContext<'tcx> {
assert!(id.is_local());
let entry = op(self, data);
let entry = self.lazy(&entry);
let entry = self.lazy(entry);
self.entries_index.record(id, entry);
}
@ -341,7 +341,7 @@ impl<'tcx> EncodeContext<'tcx> {
self.lazy(definitions.def_path_table())
}
fn encode_source_map(&mut self) -> LazySeq<syntax_pos::SourceFile> {
fn encode_source_map(&mut self) -> Lazy<[syntax_pos::SourceFile]> {
let source_map = self.tcx.sess.source_map();
let all_source_files = source_map.files();
@ -380,7 +380,7 @@ impl<'tcx> EncodeContext<'tcx> {
})
.collect::<Vec<_>>();
self.lazy_seq_ref(adapted.iter().map(|rc| &**rc))
self.lazy(adapted.iter().map(|rc| &**rc))
}
fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
@ -463,7 +463,7 @@ impl<'tcx> EncodeContext<'tcx> {
}
n = new_n;
}
self.lazy_seq(interpret_alloc_index)
self.lazy(interpret_alloc_index)
};
@ -482,8 +482,7 @@ impl<'tcx> EncodeContext<'tcx> {
let has_global_allocator = *tcx.sess.has_global_allocator.get();
let has_panic_handler = *tcx.sess.has_panic_handler.try_get().unwrap_or(&false);
let root = self.lazy(&CrateRoot {
let root = self.lazy(CrateRoot {
name: tcx.crate_name(LOCAL_CRATE),
extra_filename: tcx.sess.opts.cg.extra_filename.clone(),
triple: tcx.sess.opts.target_triple.clone(),
@ -562,17 +561,17 @@ impl<'tcx> EncodeContext<'tcx> {
}
impl EncodeContext<'tcx> {
fn encode_variances_of(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
fn encode_variances_of(&mut self, def_id: DefId) -> Lazy<[ty::Variance]> {
debug!("EncodeContext::encode_variances_of({:?})", def_id);
let tcx = self.tcx;
self.lazy_seq_ref(&tcx.variances_of(def_id)[..])
self.lazy(&tcx.variances_of(def_id)[..])
}
fn encode_item_type(&mut self, def_id: DefId) -> Lazy<Ty<'tcx>> {
let tcx = self.tcx;
let ty = tcx.type_of(def_id);
debug!("EncodeContext::encode_item_type({:?}) => {:?}", def_id, ty);
self.lazy(&ty)
self.lazy(ty)
}
fn encode_enum_variant_info(
@ -601,11 +600,11 @@ impl EncodeContext<'tcx> {
let enum_vis = &tcx.hir().expect_item(enum_id).vis;
Entry {
kind: EntryKind::Variant(self.lazy(&data)),
visibility: self.lazy(&ty::Visibility::from_hir(enum_vis, enum_id, tcx)),
span: self.lazy(&tcx.def_span(def_id)),
kind: EntryKind::Variant(self.lazy(data)),
visibility: self.lazy(ty::Visibility::from_hir(enum_vis, enum_id, tcx)),
span: self.lazy(tcx.def_span(def_id)),
attributes: self.encode_attributes(&tcx.get_attrs(def_id)),
children: self.lazy_seq(variant.fields.iter().map(|f| {
children: self.lazy(variant.fields.iter().map(|f| {
assert!(f.did.is_local());
f.did.index
})),
@ -613,11 +612,11 @@ impl EncodeContext<'tcx> {
deprecation: self.encode_deprecation(def_id),
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: if variant.ctor_kind == CtorKind::Fn {
self.encode_variances_of(def_id)
} else {
LazySeq::empty()
Lazy::empty()
},
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),
@ -642,7 +641,7 @@ impl EncodeContext<'tcx> {
discr: variant.discr,
ctor: Some(def_id.index),
ctor_sig: if variant.ctor_kind == CtorKind::Fn {
Some(self.lazy(&tcx.fn_sig(def_id)))
Some(self.lazy(tcx.fn_sig(def_id)))
} else {
None
}
@ -658,20 +657,20 @@ impl EncodeContext<'tcx> {
}
Entry {
kind: EntryKind::Variant(self.lazy(&data)),
visibility: self.lazy(&ctor_vis),
span: self.lazy(&tcx.def_span(def_id)),
attributes: LazySeq::empty(),
children: LazySeq::empty(),
kind: EntryKind::Variant(self.lazy(data)),
visibility: self.lazy(ctor_vis),
span: self.lazy(tcx.def_span(def_id)),
attributes: Lazy::empty(),
children: Lazy::empty(),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: if variant.ctor_kind == CtorKind::Fn {
self.encode_variances_of(def_id)
} else {
LazySeq::empty()
Lazy::empty()
},
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),
@ -691,25 +690,25 @@ impl EncodeContext<'tcx> {
let data = ModData {
reexports: match tcx.module_exports(def_id) {
Some(exports) => self.lazy_seq_ref(exports),
_ => LazySeq::empty(),
Some(exports) => self.lazy(exports),
_ => Lazy::empty(),
},
};
Entry {
kind: EntryKind::Mod(self.lazy(&data)),
visibility: self.lazy(&ty::Visibility::from_hir(vis, id, tcx)),
span: self.lazy(&tcx.def_span(def_id)),
kind: EntryKind::Mod(self.lazy(data)),
visibility: self.lazy(ty::Visibility::from_hir(vis, id, tcx)),
span: self.lazy(tcx.def_span(def_id)),
attributes: self.encode_attributes(attrs),
children: self.lazy_seq(md.item_ids.iter().map(|item_id| {
children: self.lazy(md.item_ids.iter().map(|item_id| {
tcx.hir().local_def_id(item_id.id).index
})),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
ty: None,
inherent_impls: LazySeq::empty(),
variances: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: Lazy::empty(),
generics: None,
predicates: None,
predicates_defined_on: None,
@ -734,16 +733,16 @@ impl EncodeContext<'tcx> {
Entry {
kind: EntryKind::Field,
visibility: self.lazy(&field.vis),
span: self.lazy(&tcx.def_span(def_id)),
visibility: self.lazy(field.vis),
span: self.lazy(tcx.def_span(def_id)),
attributes: self.encode_attributes(&variant_data.fields()[field_index].attrs),
children: LazySeq::empty(),
children: Lazy::empty(),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
variances: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: Lazy::empty(),
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),
predicates_defined_on: None,
@ -763,7 +762,7 @@ impl EncodeContext<'tcx> {
discr: variant.discr,
ctor: Some(def_id.index),
ctor_sig: if variant.ctor_kind == CtorKind::Fn {
Some(self.lazy(&tcx.fn_sig(def_id)))
Some(self.lazy(tcx.fn_sig(def_id)))
} else {
None
}
@ -789,20 +788,20 @@ impl EncodeContext<'tcx> {
let repr_options = get_repr_options(tcx, adt_def_id);
Entry {
kind: EntryKind::Struct(self.lazy(&data), repr_options),
visibility: self.lazy(&ctor_vis),
span: self.lazy(&tcx.def_span(def_id)),
attributes: LazySeq::empty(),
children: LazySeq::empty(),
kind: EntryKind::Struct(self.lazy(data), repr_options),
visibility: self.lazy(ctor_vis),
span: self.lazy(tcx.def_span(def_id)),
attributes: Lazy::empty(),
children: Lazy::empty(),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: if variant.ctor_kind == CtorKind::Fn {
self.encode_variances_of(def_id)
} else {
LazySeq::empty()
Lazy::empty()
},
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),
@ -821,13 +820,13 @@ impl EncodeContext<'tcx> {
fn encode_predicates(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
debug!("EncodeContext::encode_predicates({:?})", def_id);
let tcx = self.tcx;
self.lazy(&tcx.predicates_of(def_id))
self.lazy(&*tcx.predicates_of(def_id))
}
fn encode_predicates_defined_on(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
debug!("EncodeContext::encode_predicates_defined_on({:?})", def_id);
let tcx = self.tcx;
self.lazy(&tcx.predicates_defined_on(def_id))
self.lazy(&*tcx.predicates_defined_on(def_id))
}
fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
@ -858,7 +857,7 @@ impl EncodeContext<'tcx> {
let rendered =
hir::print::to_string(self.tcx.hir(), |s| s.print_trait_item(ast_item));
let rendered_const = self.lazy(&RenderedConst(rendered));
let rendered_const = self.lazy(RenderedConst(rendered));
EntryKind::AssocConst(container, const_qualif, rendered_const)
}
@ -875,12 +874,12 @@ impl EncodeContext<'tcx> {
FnData {
constness: hir::Constness::NotConst,
arg_names,
sig: self.lazy(&tcx.fn_sig(def_id)),
sig: self.lazy(tcx.fn_sig(def_id)),
}
} else {
bug!()
};
EntryKind::Method(self.lazy(&MethodData {
EntryKind::Method(self.lazy(MethodData {
fn_data,
container,
has_self: trait_item.method_has_self_argument,
@ -892,10 +891,10 @@ impl EncodeContext<'tcx> {
Entry {
kind,
visibility: self.lazy(&trait_item.vis),
span: self.lazy(&ast_item.span),
visibility: self.lazy(trait_item.vis),
span: self.lazy(ast_item.span),
attributes: self.encode_attributes(&ast_item.attrs),
children: LazySeq::empty(),
children: Lazy::empty(),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
@ -913,11 +912,11 @@ impl EncodeContext<'tcx> {
}
ty::AssocKind::OpaqueTy => unreachable!(),
},
inherent_impls: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: if trait_item.kind == ty::AssocKind::Method {
self.encode_variances_of(def_id)
} else {
LazySeq::empty()
Lazy::empty()
},
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),
@ -971,12 +970,12 @@ impl EncodeContext<'tcx> {
FnData {
constness: sig.header.constness,
arg_names: self.encode_fn_arg_names_for_body(body),
sig: self.lazy(&tcx.fn_sig(def_id)),
sig: self.lazy(tcx.fn_sig(def_id)),
}
} else {
bug!()
};
EntryKind::Method(self.lazy(&MethodData {
EntryKind::Method(self.lazy(MethodData {
fn_data,
container,
has_self: impl_item.method_has_self_argument,
@ -1004,19 +1003,19 @@ impl EncodeContext<'tcx> {
Entry {
kind,
visibility: self.lazy(&impl_item.vis),
span: self.lazy(&ast_item.span),
visibility: self.lazy(impl_item.vis),
span: self.lazy(ast_item.span),
attributes: self.encode_attributes(&ast_item.attrs),
children: LazySeq::empty(),
children: Lazy::empty(),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: if impl_item.kind == ty::AssocKind::Method {
self.encode_variances_of(def_id)
} else {
LazySeq::empty()
Lazy::empty()
},
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),
@ -1027,10 +1026,10 @@ impl EncodeContext<'tcx> {
}
fn encode_fn_arg_names_for_body(&mut self, body_id: hir::BodyId)
-> LazySeq<ast::Name> {
-> Lazy<[ast::Name]> {
self.tcx.dep_graph.with_ignore(|| {
let body = self.tcx.hir().body(body_id);
self.lazy_seq(body.arguments.iter().map(|arg| {
self.lazy(body.arguments.iter().map(|arg| {
match arg.pat.node {
PatKind::Binding(_, _, ident, _) => ident.name,
_ => kw::Invalid,
@ -1039,28 +1038,28 @@ impl EncodeContext<'tcx> {
})
}
fn encode_fn_arg_names(&mut self, param_names: &[ast::Ident]) -> LazySeq<ast::Name> {
self.lazy_seq(param_names.iter().map(|ident| ident.name))
fn encode_fn_arg_names(&mut self, param_names: &[ast::Ident]) -> Lazy<[ast::Name]> {
self.lazy(param_names.iter().map(|ident| ident.name))
}
fn encode_optimized_mir(&mut self, def_id: DefId) -> Option<Lazy<mir::Body<'tcx>>> {
debug!("EntryBuilder::encode_mir({:?})", def_id);
if self.tcx.mir_keys(LOCAL_CRATE).contains(&def_id) {
let mir = self.tcx.optimized_mir(def_id);
Some(self.lazy(&mir))
Some(self.lazy(mir))
} else {
None
}
}
// Encodes the inherent implementations of a structure, enumeration, or trait.
fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq<DefIndex> {
fn encode_inherent_implementations(&mut self, def_id: DefId) -> Lazy<[DefIndex]> {
debug!("EncodeContext::encode_inherent_implementations({:?})", def_id);
let implementations = self.tcx.inherent_impls(def_id);
if implementations.is_empty() {
LazySeq::empty()
Lazy::empty()
} else {
self.lazy_seq(implementations.iter().map(|&def_id| {
self.lazy(implementations.iter().map(|&def_id| {
assert!(def_id.is_local());
def_id.index
}))
@ -1074,7 +1073,7 @@ impl EncodeContext<'tcx> {
fn encode_deprecation(&mut self, def_id: DefId) -> Option<Lazy<attr::Deprecation>> {
debug!("EncodeContext::encode_deprecation({:?})", def_id);
self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr))
self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(depr))
}
fn encode_rendered_const_for_body(&mut self, body_id: hir::BodyId) -> Lazy<RenderedConst> {
@ -1103,10 +1102,10 @@ impl EncodeContext<'tcx> {
let data = FnData {
constness: header.constness,
arg_names: self.encode_fn_arg_names_for_body(body),
sig: self.lazy(&tcx.fn_sig(def_id)),
sig: self.lazy(tcx.fn_sig(def_id)),
};
EntryKind::Fn(self.lazy(&data))
EntryKind::Fn(self.lazy(data))
}
hir::ItemKind::Mod(ref m) => {
return self.encode_info_for_mod((item.hir_id, m, &item.attrs, &item.vis));
@ -1127,7 +1126,7 @@ impl EncodeContext<'tcx> {
let repr_options = get_repr_options(tcx, def_id);
EntryKind::Struct(self.lazy(&VariantData {
EntryKind::Struct(self.lazy(VariantData {
ctor_kind: variant.ctor_kind,
discr: variant.discr,
ctor,
@ -1138,7 +1137,7 @@ impl EncodeContext<'tcx> {
let variant = tcx.adt_def(def_id).non_enum_variant();
let repr_options = get_repr_options(tcx, def_id);
EntryKind::Union(self.lazy(&VariantData {
EntryKind::Union(self.lazy(VariantData {
ctor_kind: variant.ctor_kind,
discr: variant.discr,
ctor: None,
@ -1175,10 +1174,10 @@ impl EncodeContext<'tcx> {
defaultness,
parent_impl: parent,
coerce_unsized_info,
trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)),
trait_ref: trait_ref.map(|trait_ref| self.lazy(trait_ref)),
};
EntryKind::Impl(self.lazy(&data))
EntryKind::Impl(self.lazy(data))
}
hir::ItemKind::Trait(..) => {
let trait_def = tcx.trait_def(def_id);
@ -1187,17 +1186,17 @@ impl EncodeContext<'tcx> {
paren_sugar: trait_def.paren_sugar,
has_auto_impl: tcx.trait_is_auto(def_id),
is_marker: trait_def.is_marker,
super_predicates: self.lazy(&tcx.super_predicates_of(def_id)),
super_predicates: self.lazy(&*tcx.super_predicates_of(def_id)),
};
EntryKind::Trait(self.lazy(&data))
EntryKind::Trait(self.lazy(data))
}
hir::ItemKind::TraitAlias(..) => {
let data = TraitAliasData {
super_predicates: self.lazy(&tcx.super_predicates_of(def_id)),
super_predicates: self.lazy(&*tcx.super_predicates_of(def_id)),
};
EntryKind::TraitAlias(self.lazy(&data))
EntryKind::TraitAlias(self.lazy(data))
}
hir::ItemKind::ExternCrate(_) |
hir::ItemKind::Use(..) => bug!("cannot encode info for item {:?}", item),
@ -1205,19 +1204,19 @@ impl EncodeContext<'tcx> {
Entry {
kind,
visibility: self.lazy(&ty::Visibility::from_hir(&item.vis, item.hir_id, tcx)),
span: self.lazy(&item.span),
visibility: self.lazy(ty::Visibility::from_hir(&item.vis, item.hir_id, tcx)),
span: self.lazy(item.span),
attributes: self.encode_attributes(&item.attrs),
children: match item.node {
hir::ItemKind::ForeignMod(ref fm) => {
self.lazy_seq(fm.items
self.lazy(fm.items
.iter()
.map(|foreign_item| tcx.hir().local_def_id(
foreign_item.hir_id).index))
}
hir::ItemKind::Enum(..) => {
let def = self.tcx.adt_def(def_id);
self.lazy_seq(def.variants.iter().map(|v| {
self.lazy(def.variants.iter().map(|v| {
assert!(v.def_id.is_local());
v.def_id.index
}))
@ -1225,19 +1224,19 @@ impl EncodeContext<'tcx> {
hir::ItemKind::Struct(..) |
hir::ItemKind::Union(..) => {
let def = self.tcx.adt_def(def_id);
self.lazy_seq(def.non_enum_variant().fields.iter().map(|f| {
self.lazy(def.non_enum_variant().fields.iter().map(|f| {
assert!(f.did.is_local());
f.did.index
}))
}
hir::ItemKind::Impl(..) |
hir::ItemKind::Trait(..) => {
self.lazy_seq(tcx.associated_item_def_ids(def_id).iter().map(|&def_id| {
self.lazy(tcx.associated_item_def_ids(def_id).iter().map(|&def_id| {
assert!(def_id.is_local());
def_id.index
}))
}
_ => LazySeq::empty(),
_ => Lazy::empty(),
},
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
@ -1260,7 +1259,7 @@ impl EncodeContext<'tcx> {
hir::ItemKind::Struct(..) |
hir::ItemKind::Union(..) |
hir::ItemKind::Fn(..) => self.encode_variances_of(def_id),
_ => LazySeq::empty(),
_ => Lazy::empty(),
},
generics: match item.node {
hir::ItemKind::Static(..) |
@ -1333,20 +1332,20 @@ impl EncodeContext<'tcx> {
use syntax::print::pprust;
let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
Entry {
kind: EntryKind::MacroDef(self.lazy(&MacroDef {
kind: EntryKind::MacroDef(self.lazy(MacroDef {
body: pprust::tokens_to_string(macro_def.body.clone()),
legacy: macro_def.legacy,
})),
visibility: self.lazy(&ty::Visibility::Public),
span: self.lazy(&macro_def.span),
visibility: self.lazy(ty::Visibility::Public),
span: self.lazy(macro_def.span),
attributes: self.encode_attributes(&macro_def.attrs),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
children: LazySeq::empty(),
children: Lazy::empty(),
ty: None,
inherent_impls: LazySeq::empty(),
variances: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: Lazy::empty(),
generics: None,
predicates: None,
predicates_defined_on: None,
@ -1363,15 +1362,15 @@ impl EncodeContext<'tcx> {
let tcx = self.tcx;
Entry {
kind: entry_kind,
visibility: self.lazy(&ty::Visibility::Public),
span: self.lazy(&tcx.def_span(def_id)),
attributes: LazySeq::empty(),
children: LazySeq::empty(),
visibility: self.lazy(ty::Visibility::Public),
span: self.lazy(tcx.def_span(def_id)),
attributes: Lazy::empty(),
children: Lazy::empty(),
stability: None,
deprecation: None,
ty: if encode_type { Some(self.encode_item_type(def_id)) } else { None },
inherent_impls: LazySeq::empty(),
variances: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: Lazy::empty(),
generics: None,
predicates: None,
predicates_defined_on: None,
@ -1408,13 +1407,13 @@ impl EncodeContext<'tcx> {
let data = GeneratorData {
layout: layout.clone(),
};
EntryKind::Generator(self.lazy(&data))
EntryKind::Generator(self.lazy(data))
}
ty::Closure(def_id, substs) => {
let sig = substs.closure_sig(def_id, self.tcx);
let data = ClosureData { sig: self.lazy(&sig) };
EntryKind::Closure(self.lazy(&data))
let data = ClosureData { sig: self.lazy(sig) };
EntryKind::Closure(self.lazy(data))
}
_ => bug!("closure that is neither generator nor closure")
@ -1422,16 +1421,16 @@ impl EncodeContext<'tcx> {
Entry {
kind,
visibility: self.lazy(&ty::Visibility::Public),
span: self.lazy(&tcx.def_span(def_id)),
visibility: self.lazy(ty::Visibility::Public),
span: self.lazy(tcx.def_span(def_id)),
attributes: self.encode_attributes(&tcx.get_attrs(def_id)),
children: LazySeq::empty(),
children: Lazy::empty(),
stability: None,
deprecation: None,
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
variances: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: Lazy::empty(),
generics: Some(self.encode_generics(def_id)),
predicates: None,
predicates_defined_on: None,
@ -1450,16 +1449,16 @@ impl EncodeContext<'tcx> {
Entry {
kind: EntryKind::Const(self.const_qualif(mir, body_id), const_data),
visibility: self.lazy(&ty::Visibility::Public),
span: self.lazy(&tcx.def_span(def_id)),
attributes: LazySeq::empty(),
children: LazySeq::empty(),
visibility: self.lazy(ty::Visibility::Public),
span: self.lazy(tcx.def_span(def_id)),
attributes: Lazy::empty(),
children: Lazy::empty(),
stability: None,
deprecation: None,
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
variances: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: Lazy::empty(),
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),
predicates_defined_on: None,
@ -1468,37 +1467,37 @@ impl EncodeContext<'tcx> {
}
}
fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> LazySeq<ast::Attribute> {
self.lazy_seq_ref(attrs)
fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> Lazy<[ast::Attribute]> {
self.lazy(attrs)
}
fn encode_native_libraries(&mut self) -> LazySeq<NativeLibrary> {
fn encode_native_libraries(&mut self) -> Lazy<[NativeLibrary]> {
let used_libraries = self.tcx.native_libraries(LOCAL_CRATE);
self.lazy_seq(used_libraries.iter().cloned())
self.lazy(used_libraries.iter().cloned())
}
fn encode_foreign_modules(&mut self) -> LazySeq<ForeignModule> {
fn encode_foreign_modules(&mut self) -> Lazy<[ForeignModule]> {
let foreign_modules = self.tcx.foreign_modules(LOCAL_CRATE);
self.lazy_seq(foreign_modules.iter().cloned())
self.lazy(foreign_modules.iter().cloned())
}
fn encode_proc_macros(&mut self) -> Option<LazySeq<DefIndex>> {
fn encode_proc_macros(&mut self) -> Option<Lazy<[DefIndex]>> {
let is_proc_macro = self.tcx.sess.crate_types.borrow().contains(&CrateType::ProcMacro);
if is_proc_macro {
let proc_macros: Vec<_> = self.tcx.hir().krate().items.values().filter_map(|item| {
let tcx = self.tcx;
Some(self.lazy(tcx.hir().krate().items.values().filter_map(|item| {
if item.attrs.iter().any(|attr| is_proc_macro_attr(attr)) {
Some(item.hir_id.owner)
} else {
None
}
}).collect();
Some(self.lazy_seq(proc_macros))
})))
} else {
None
}
}
fn encode_crate_deps(&mut self) -> LazySeq<CrateDep> {
fn encode_crate_deps(&mut self) -> Lazy<[CrateDep]> {
let crates = self.tcx.crates();
let mut deps = crates
@ -1529,20 +1528,20 @@ impl EncodeContext<'tcx> {
// the assumption that they are numbered 1 to n.
// FIXME (#2166): This is not nearly enough to support correct versioning
// but is enough to get transitive crate dependencies working.
self.lazy_seq_ref(deps.iter().map(|&(_, ref dep)| dep))
self.lazy(deps.iter().map(|&(_, ref dep)| dep))
}
fn encode_lib_features(&mut self) -> LazySeq<(ast::Name, Option<ast::Name>)> {
fn encode_lib_features(&mut self) -> Lazy<[(ast::Name, Option<ast::Name>)]> {
let tcx = self.tcx;
let lib_features = tcx.lib_features();
self.lazy_seq(lib_features.to_vec())
self.lazy(lib_features.to_vec())
}
fn encode_lang_items(&mut self) -> LazySeq<(DefIndex, usize)> {
fn encode_lang_items(&mut self) -> Lazy<[(DefIndex, usize)]> {
let tcx = self.tcx;
let lang_items = tcx.lang_items();
let lang_items = lang_items.items().iter();
self.lazy_seq(lang_items.enumerate().filter_map(|(i, &opt_def_id)| {
self.lazy(lang_items.enumerate().filter_map(|(i, &opt_def_id)| {
if let Some(def_id) = opt_def_id {
if def_id.is_local() {
return Some((def_id.index, i));
@ -1552,13 +1551,13 @@ impl EncodeContext<'tcx> {
}))
}
fn encode_lang_items_missing(&mut self) -> LazySeq<lang_items::LangItem> {
fn encode_lang_items_missing(&mut self) -> Lazy<[lang_items::LangItem]> {
let tcx = self.tcx;
self.lazy_seq_ref(&tcx.lang_items().missing)
self.lazy(&tcx.lang_items().missing)
}
/// Encodes an index, mapping each trait to its (local) implementations.
fn encode_impls(&mut self) -> LazySeq<TraitImpls> {
fn encode_impls(&mut self) -> Lazy<[TraitImpls]> {
debug!("EncodeContext::encode_impls()");
let tcx = self.tcx;
let mut visitor = ImplVisitor {
@ -1584,12 +1583,12 @@ impl EncodeContext<'tcx> {
TraitImpls {
trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
impls: self.lazy_seq_ref(&impls),
impls: self.lazy(&impls),
}
})
.collect();
self.lazy_seq_ref(&all_impls)
self.lazy(&all_impls)
}
// Encodes all symbols exported from this crate into the metadata.
@ -1600,12 +1599,12 @@ impl EncodeContext<'tcx> {
// definition (as that's not defined in this crate).
fn encode_exported_symbols(&mut self,
exported_symbols: &[(ExportedSymbol<'tcx>, SymbolExportLevel)])
-> LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)> {
-> Lazy<[(ExportedSymbol<'tcx>, SymbolExportLevel)]> {
// The metadata symbol name is special. It should not show up in
// downstream crates.
let metadata_symbol_name = SymbolName::new(&metadata_symbol_name(self.tcx));
self.lazy_seq(exported_symbols
self.lazy(exported_symbols
.iter()
.filter(|&&(ref exported_symbol, _)| {
match *exported_symbol {
@ -1618,10 +1617,10 @@ impl EncodeContext<'tcx> {
.cloned())
}
fn encode_dylib_dependency_formats(&mut self) -> LazySeq<Option<LinkagePreference>> {
fn encode_dylib_dependency_formats(&mut self) -> Lazy<[Option<LinkagePreference>]> {
match self.tcx.sess.dependency_formats.borrow().get(&config::CrateType::Dylib) {
Some(arr) => {
self.lazy_seq(arr.iter().map(|slot| {
self.lazy(arr.iter().map(|slot| {
match *slot {
Linkage::NotLinked |
Linkage::IncludedFromDylib => None,
@ -1631,7 +1630,7 @@ impl EncodeContext<'tcx> {
}
}))
}
None => LazySeq::empty(),
None => Lazy::empty(),
}
}
@ -1647,9 +1646,9 @@ impl EncodeContext<'tcx> {
let data = FnData {
constness: hir::Constness::NotConst,
arg_names: self.encode_fn_arg_names(names),
sig: self.lazy(&tcx.fn_sig(def_id)),
sig: self.lazy(tcx.fn_sig(def_id)),
};
EntryKind::ForeignFn(self.lazy(&data))
EntryKind::ForeignFn(self.lazy(data))
}
hir::ForeignItemKind::Static(_, hir::MutMutable) => EntryKind::ForeignMutStatic,
hir::ForeignItemKind::Static(_, hir::MutImmutable) => EntryKind::ForeignImmStatic,
@ -1658,18 +1657,18 @@ impl EncodeContext<'tcx> {
Entry {
kind,
visibility: self.lazy(&ty::Visibility::from_hir(&nitem.vis, nitem.hir_id, tcx)),
span: self.lazy(&nitem.span),
visibility: self.lazy(ty::Visibility::from_hir(&nitem.vis, nitem.hir_id, tcx)),
span: self.lazy(nitem.span),
attributes: self.encode_attributes(&nitem.attrs),
children: LazySeq::empty(),
children: Lazy::empty(),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
ty: Some(self.encode_item_type(def_id)),
inherent_impls: LazySeq::empty(),
inherent_impls: Lazy::empty(),
variances: match nitem.node {
hir::ForeignItemKind::Fn(..) => self.encode_variances_of(def_id),
_ => LazySeq::empty(),
_ => Lazy::empty(),
},
generics: Some(self.encode_generics(def_id)),
predicates: Some(self.encode_predicates(def_id)),

src/librustc_metadata/index.rs

@ -108,18 +108,18 @@ impl Index<'tcx> {
position.write_to_bytes_at(positions, array_index)
}
pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Self> {
pub fn write_index(&self, buf: &mut Encoder) -> Lazy<[Self]> {
let pos = buf.position();
// First we write the length of the lower range ...
buf.emit_raw_bytes(&(self.positions.len() as u32 / 4).to_le_bytes());
// ... then the values.
buf.emit_raw_bytes(&self.positions);
LazySeq::with_position_and_length(pos as usize, self.positions.len() / 4 + 1)
Lazy::from_position_and_meta(pos as usize, self.positions.len() / 4 + 1)
}
}
impl LazySeq<Index<'tcx>> {
impl Lazy<[Index<'tcx>]> {
/// Given the metadata, extract out the offset of a particular
/// DefIndex (if any).
#[inline(never)]
@ -127,7 +127,7 @@ impl LazySeq<Index<'tcx>> {
let bytes = &bytes[self.position..];
debug!("Index::lookup: index={:?} len={:?}",
def_index,
self.len);
self.meta);
let position = u32::read_from_bytes_at(bytes, 1 + def_index.index());
if position == u32::MAX {
@ -135,7 +135,7 @@ impl LazySeq<Index<'tcx>> {
None
} else {
debug!("Index::lookup: position={:?}", position);
Some(Lazy::with_position(position as usize))
Some(Lazy::from_position(position as usize))
}
}
}
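Reading `write_index` and `lookup` together, the entries index appears to be a flat table of 4-byte positions, one slot per `DefIndex`, preceded by a length slot, with `u32::MAX` standing for "no entry". Below is a small sketch of a reader under those assumptions; little-endian `u32`s is an assumption taken from the `to_le_bytes` call in `write_index`, and this is not the rustc `FixedSizeEncoding` code.

```rust
// Toy reader for the positions table written by `write_index` above.
fn read_u32_at(bytes: &[u8], slot: usize) -> u32 {
    let b = &bytes[slot * 4..slot * 4 + 4];
    u32::from_le_bytes([b[0], b[1], b[2], b[3]])
}

fn lookup(table: &[u8], def_index: usize) -> Option<usize> {
    // Slot 0 holds the table length, so entry i lives in slot 1 + i.
    let position = read_u32_at(table, 1 + def_index);
    if position == u32::MAX { None } else { Some(position as usize) }
}

fn main() {
    // Length slot = 2 entries, entry 0 at byte 17, entry 1 absent.
    let mut table = Vec::new();
    table.extend_from_slice(&2u32.to_le_bytes());
    table.extend_from_slice(&17u32.to_le_bytes());
    table.extend_from_slice(&u32::MAX.to_le_bytes());
    assert_eq!(lookup(&table, 0), Some(17));
    assert_eq!(lookup(&table, 1), None);
}
```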

src/librustc_metadata/schema.rs

@ -41,6 +41,33 @@ pub const METADATA_VERSION: u8 = 4;
pub const METADATA_HEADER: &[u8; 12] =
&[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];
/// Additional metadata for a `Lazy<T>` where `T` may not be `Sized`,
/// e.g. for `Lazy<[T]>`, this is the length (count of `T` values).
pub trait LazyMeta {
type Meta: Copy + 'static;
/// Returns the minimum encoded size.
// FIXME(eddyb) Give better estimates for certain types.
fn min_size(meta: Self::Meta) -> usize;
}
impl<T> LazyMeta for T {
type Meta = ();
fn min_size(_: ()) -> usize {
assert_ne!(std::mem::size_of::<T>(), 0);
1
}
}
impl<T> LazyMeta for [T] {
type Meta = usize;
fn min_size(len: usize) -> usize {
len * T::min_size(())
}
}
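To make the two `min_size` rules above concrete, here is a tiny hedged illustration (plain functions, not the trait impls): any sized value reserves at least 1 byte, and `Lazy<[T]>` with `len` elements reserves at least `len` bytes, which is why a zero-length slice can skip the distance encoding entirely.

```rust
// Hypothetical mirror of the two `min_size` rules above, just to make the numbers concrete.
fn min_size_sized() -> usize { 1 }
fn min_size_slice(len: usize) -> usize { len * min_size_sized() }

fn main() {
    assert_eq!(min_size_sized(), 1);   // e.g. Lazy<Span>: at least 1 byte
    assert_eq!(min_size_slice(4), 4);  // e.g. Lazy<[u32]> with 4 elements: at least 4 bytes
    // min_size_slice(0) == 0, which is why the encoder and decoder special-case
    // empty slices (`Lazy::empty()`) and emit no distance for them at all.
}
```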
/// A value of type T referred to by its absolute position
/// in the metadata, and which can be decoded lazily.
///
@ -56,40 +83,8 @@ pub const METADATA_HEADER: &[u8; 12] =
/// Distances start at 1, as 0-byte nodes are invalid.
/// Also invalid are nodes being referred in a different
/// order than they were encoded in.
#[must_use]
pub struct Lazy<T> {
pub position: usize,
_marker: PhantomData<T>,
}
impl<T> Lazy<T> {
pub fn with_position(position: usize) -> Lazy<T> {
Lazy {
position,
_marker: PhantomData,
}
}
/// Returns the minimum encoded size of a value of type `T`.
// FIXME(eddyb) Give better estimates for certain types.
pub fn min_size() -> usize {
1
}
}
impl<T> Copy for Lazy<T> {}
impl<T> Clone for Lazy<T> {
fn clone(&self) -> Self {
*self
}
}
impl<T> rustc_serialize::UseSpecializedEncodable for Lazy<T> {}
impl<T> rustc_serialize::UseSpecializedDecodable for Lazy<T> {}
/// A sequence of type T referred to by its absolute position
/// in the metadata and length, and which can be decoded lazily.
/// The sequence is a single node for the purposes of `Lazy`.
///
/// # Sequences (`Lazy<[T]>`)
///
/// Unlike `Lazy<Vec<T>>`, the length is encoded next to the
/// position, not at the position, which means that the length
@ -100,54 +95,62 @@ impl<T> rustc_serialize::UseSpecializedDecodable for Lazy<T> {}
/// the minimal distance the length of the sequence, i.e.
/// it's assumed there's no 0-byte element in the sequence.
#[must_use]
pub struct LazySeq<T> {
pub len: usize,
// FIXME(#59875) the `Meta` parameter only exists to dodge
// invariance wrt `T` (coming from the `meta: T::Meta` field).
pub struct Lazy<T, Meta = <T as LazyMeta>::Meta>
where T: ?Sized + LazyMeta<Meta = Meta>,
Meta: 'static + Copy,
{
pub position: usize,
pub meta: Meta,
_marker: PhantomData<T>,
}
impl<T> LazySeq<T> {
pub fn empty() -> LazySeq<T> {
LazySeq::with_position_and_length(0, 0)
}
pub fn with_position_and_length(position: usize, len: usize) -> LazySeq<T> {
LazySeq {
len,
impl<T: ?Sized + LazyMeta> Lazy<T> {
pub fn from_position_and_meta(position: usize, meta: T::Meta) -> Lazy<T> {
Lazy {
position,
meta,
_marker: PhantomData,
}
}
}
/// Returns the minimum encoded size of `length` values of type `T`.
pub fn min_size(length: usize) -> usize {
length
impl<T> Lazy<T> {
pub fn from_position(position: usize) -> Lazy<T> {
Lazy::from_position_and_meta(position, ())
}
}
impl<T> Copy for LazySeq<T> {}
impl<T> Clone for LazySeq<T> {
impl<T> Lazy<[T]> {
pub fn empty() -> Lazy<[T]> {
Lazy::from_position_and_meta(0, 0)
}
}
impl<T: ?Sized + LazyMeta> Copy for Lazy<T> {}
impl<T: ?Sized + LazyMeta> Clone for Lazy<T> {
fn clone(&self) -> Self {
*self
}
}
impl<T> rustc_serialize::UseSpecializedEncodable for LazySeq<T> {}
impl<T> rustc_serialize::UseSpecializedDecodable for LazySeq<T> {}
impl<T: ?Sized + LazyMeta> rustc_serialize::UseSpecializedEncodable for Lazy<T> {}
impl<T: ?Sized + LazyMeta> rustc_serialize::UseSpecializedDecodable for Lazy<T> {}
/// Encoding / decoding state for `Lazy` and `LazySeq`.
/// Encoding / decoding state for `Lazy`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum LazyState {
/// Outside of a metadata node.
NoNode,
/// Inside a metadata node, and before any `Lazy` or `LazySeq`.
/// Inside a metadata node, and before any `Lazy`.
/// The position is that of the node itself.
NodeStart(usize),
/// Inside a metadata node, with a previous `Lazy` or `LazySeq`.
/// Inside a metadata node, with a previous `Lazy`.
/// The position is a conservative estimate of where that
/// previous `Lazy` / `LazySeq` would end (see their comments).
/// previous `Lazy` would end (see their comments).
Previous(usize),
}
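Below is a toy round-trip of the distance bookkeeping this state machine supports, following the arithmetic visible in `read_lazy_with_meta` (decoder.rs above) and `emit_lazy_distance` (encoder.rs). The `NodeStart` arm of the encoder is partly cut off by the hunk boundary, so the sketch assumes it is the exact inverse of the decoder's `start - distance - min_size`; the `NoNode` variant (a `bug!` in rustc) is omitted.

```rust
// Position bookkeeping only; the byte buffer itself is not modelled.
enum LazyState {
    NodeStart(usize),
    Previous(usize),
}

/// Encoder side: turn an absolute position into a relative distance.
fn emit(state: LazyState, position: usize, min_size: usize) -> (usize, LazyState) {
    let min_end = position + min_size;
    let distance = match state {
        LazyState::NodeStart(start) => {
            assert!(min_end <= start);
            start - min_end
        }
        LazyState::Previous(last_min_end) => {
            // "same order as the metadata fields"
            assert!(last_min_end <= position);
            position - last_min_end
        }
    };
    (distance, LazyState::Previous(min_end))
}

/// Decoder side: turn the distance back into the absolute position.
fn read(state: LazyState, distance: usize, min_size: usize) -> (usize, LazyState) {
    let position = match state {
        LazyState::NodeStart(start) => start - distance - min_size,
        LazyState::Previous(last_min_end) => last_min_end + distance,
    };
    (position, LazyState::Previous(position + min_size))
}

fn main() {
    // A node starting at byte 100 references data at 40 (min_size 1) and 60 (min_size 3).
    let (d1, enc) = emit(LazyState::NodeStart(100), 40, 1);
    let (d2, _) = emit(enc, 60, 3);

    let (p1, dec) = read(LazyState::NodeStart(100), d1, 1);
    let (p2, _) = read(dec, d2, 3);
    assert_eq!((p1, p2), (40, 60));
}
```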
@ -167,24 +170,24 @@ pub struct CrateRoot<'tcx> {
pub proc_macro_decls_static: Option<DefIndex>,
pub proc_macro_stability: Option<attr::Stability>,
pub crate_deps: LazySeq<CrateDep>,
pub dylib_dependency_formats: LazySeq<Option<LinkagePreference>>,
pub lib_features: LazySeq<(Symbol, Option<Symbol>)>,
pub lang_items: LazySeq<(DefIndex, usize)>,
pub lang_items_missing: LazySeq<lang_items::LangItem>,
pub native_libraries: LazySeq<NativeLibrary>,
pub foreign_modules: LazySeq<ForeignModule>,
pub source_map: LazySeq<syntax_pos::SourceFile>,
pub crate_deps: Lazy<[CrateDep]>,
pub dylib_dependency_formats: Lazy<[Option<LinkagePreference>]>,
pub lib_features: Lazy<[(Symbol, Option<Symbol>)]>,
pub lang_items: Lazy<[(DefIndex, usize)]>,
pub lang_items_missing: Lazy<[lang_items::LangItem]>,
pub native_libraries: Lazy<[NativeLibrary]>,
pub foreign_modules: Lazy<[ForeignModule]>,
pub source_map: Lazy<[syntax_pos::SourceFile]>,
pub def_path_table: Lazy<hir::map::definitions::DefPathTable>,
pub impls: LazySeq<TraitImpls>,
pub exported_symbols: LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)>,
pub interpret_alloc_index: LazySeq<u32>,
pub impls: Lazy<[TraitImpls]>,
pub exported_symbols: Lazy<[(ExportedSymbol<'tcx>, SymbolExportLevel)]>,
pub interpret_alloc_index: Lazy<[u32]>,
pub entries_index: LazySeq<index::Index<'tcx>>,
pub entries_index: Lazy<[index::Index<'tcx>]>,
/// The DefIndex's of any proc macros declared by
/// this crate
pub proc_macro_data: Option<LazySeq<DefIndex>>,
pub proc_macro_data: Option<Lazy<[DefIndex]>>,
pub compiler_builtins: bool,
pub needs_allocator: bool,
@ -207,7 +210,7 @@ pub struct CrateDep {
#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitImpls {
pub trait_id: (u32, DefIndex),
pub impls: LazySeq<DefIndex>,
pub impls: Lazy<[DefIndex]>,
}
#[derive(RustcEncodable, RustcDecodable)]
@ -215,14 +218,14 @@ pub struct Entry<'tcx> {
pub kind: EntryKind<'tcx>,
pub visibility: Lazy<ty::Visibility>,
pub span: Lazy<Span>,
pub attributes: LazySeq<ast::Attribute>,
pub children: LazySeq<DefIndex>,
pub attributes: Lazy<[ast::Attribute]>,
pub children: Lazy<[DefIndex]>,
pub stability: Option<Lazy<attr::Stability>>,
pub deprecation: Option<Lazy<attr::Deprecation>>,
pub ty: Option<Lazy<Ty<'tcx>>>,
pub inherent_impls: LazySeq<DefIndex>,
pub variances: LazySeq<ty::Variance>,
pub inherent_impls: Lazy<[DefIndex]>,
pub variances: Lazy<[ty::Variance]>,
pub generics: Option<Lazy<ty::Generics>>,
pub predicates: Option<Lazy<ty::GenericPredicates<'tcx>>>,
pub predicates_defined_on: Option<Lazy<ty::GenericPredicates<'tcx>>>,
@ -278,7 +281,7 @@ pub struct RenderedConst(pub String);
#[derive(RustcEncodable, RustcDecodable)]
pub struct ModData {
pub reexports: LazySeq<def::Export<hir::HirId>>,
pub reexports: Lazy<[def::Export<hir::HirId>]>,
}
#[derive(RustcEncodable, RustcDecodable)]
@ -290,7 +293,7 @@ pub struct MacroDef {
#[derive(RustcEncodable, RustcDecodable)]
pub struct FnData<'tcx> {
pub constness: hir::Constness,
pub arg_names: LazySeq<ast::Name>,
pub arg_names: Lazy<[ast::Name]>,
pub sig: Lazy<ty::PolyFnSig<'tcx>>,
}