make serialization of source file thread-local

This commit is contained in:
Moritz Hölting 2025-02-19 09:32:00 +01:00
parent a6a5e42b6b
commit b8303689db
1 changed file with 63 additions and 47 deletions

View File

@ -14,13 +14,15 @@ use serde::{
use crate::base::source_file::SourceFile; use crate::base::source_file::SourceFile;
static DEDUPLICATE_SOURCE_FILES: LazyLock<RwLock<bool>> = LazyLock::new(|| RwLock::new(false)); thread_local! {
static DEDUPLICATE_SOURCE_FILES: LazyLock<RwLock<bool>> = LazyLock::new(|| RwLock::new(false));
static SERIALIZE_DATA: LazyLock<Mutex<SerializeData>> = static SERIALIZE_DATA: LazyLock<Mutex<SerializeData>> =
LazyLock::new(|| Mutex::new(SerializeData::default())); LazyLock::new(|| Mutex::new(SerializeData::default()));
static DESERIALIZE_DATA: LazyLock<RwLock<Option<DeserializeData>>> = static DESERIALIZE_DATA: LazyLock<RwLock<Option<DeserializeData>>> =
LazyLock::new(|| RwLock::new(None)); LazyLock::new(|| RwLock::new(None));
}
/// Wrapper to remove duplicate source file data during (de-)serialization /// Wrapper to remove duplicate source file data during (de-)serialization
#[expect(clippy::module_name_repetitions)] #[expect(clippy::module_name_repetitions)]
@ -35,20 +37,27 @@ where
where where
S: serde::Serializer, S: serde::Serializer,
{ {
*DEDUPLICATE_SOURCE_FILES.write().unwrap() = true; DEDUPLICATE_SOURCE_FILES.with(|d| *d.write().unwrap() = true);
SERIALIZE_DATA.lock().unwrap().clear(); SERIALIZE_DATA.with(|d| d.lock().unwrap().clear());
// hold guard so no other can serialize at the same time in same thread
let s = DEDUPLICATE_SOURCE_FILES.with(|d| {
let guard = d.read().unwrap();
let mut serialized_data = flexbuffers::FlexbufferSerializer::new(); let mut serialized_data = flexbuffers::FlexbufferSerializer::new();
self.0 self.0
.serialize(&mut serialized_data) .serialize(&mut serialized_data)
.map_err(|_| serde::ser::Error::custom("could not buffer serialization"))?; .map_err(|_| serde::ser::Error::custom("could not buffer serialization"))?;
drop(serialized_data); drop(serialized_data);
let mut s = serializer.serialize_struct("SerdeWrapper", 3)?; let mut s = serializer.serialize_struct("SerdeWrapper", 3)?;
s.serialize_field(
"source_files", SERIALIZE_DATA.with(|d| {
&SERIALIZE_DATA.lock().unwrap().id_to_source_file, s.serialize_field("source_files", &d.lock().unwrap().id_to_source_file)
)?; })?;
s.serialize_field("data", &self.0)?; s.serialize_field("data", &self.0)?;
*DEDUPLICATE_SOURCE_FILES.write().unwrap() = false; drop(guard);
Ok(s)
})?;
DEDUPLICATE_SOURCE_FILES.with(|d| *d.write().unwrap() = false);
s.end() s.end()
} }
} }
@ -87,11 +96,13 @@ where
let source_files: BTreeMap<u64, SourceFile> = seq let source_files: BTreeMap<u64, SourceFile> = seq
.next_element()? .next_element()?
.ok_or_else(|| de::Error::invalid_length(0, &self))?; .ok_or_else(|| de::Error::invalid_length(0, &self))?;
*DESERIALIZE_DATA.write().unwrap() = Some(DeserializeData { DESERIALIZE_DATA.with(|d| {
*d.write().unwrap() = Some(DeserializeData {
id_to_source_file: source_files id_to_source_file: source_files
.into_iter() .into_iter()
.map(|(k, v)| (k, Arc::new(v))) .map(|(k, v)| (k, Arc::new(v)))
.collect(), .collect(),
})
}); });
let data = seq let data = seq
.next_element()? .next_element()?
@ -113,12 +124,14 @@ where
if data.is_some() { if data.is_some() {
return Err(de::Error::duplicate_field("data")); return Err(de::Error::duplicate_field("data"));
} }
*DESERIALIZE_DATA.write().unwrap() = DESERIALIZE_DATA.with(|d| {
*d.write().unwrap() =
source_files.as_ref().map(|source_files| DeserializeData { source_files.as_ref().map(|source_files| DeserializeData {
id_to_source_file: source_files id_to_source_file: source_files
.iter() .iter()
.map(|(&k, v)| (k, Arc::new(v.clone()))) .map(|(&k, v)| (k, Arc::new(v.clone())))
.collect(), .collect(),
})
}); });
data = Some(map.next_value()?); data = Some(map.next_value()?);
} }
@ -136,14 +149,14 @@ where
} }
} }
*DEDUPLICATE_SOURCE_FILES.write().unwrap() = true; DEDUPLICATE_SOURCE_FILES.with(|d| *d.write().unwrap() = true);
*DESERIALIZE_DATA.write().unwrap() = None; DESERIALIZE_DATA.with(|d| *d.write().unwrap() = None);
let res = deserializer.deserialize_struct( let res = deserializer.deserialize_struct(
"SerdeWrapper", "SerdeWrapper",
&["source_files", "data"], &["source_files", "data"],
WrapperVisitor(PhantomData::<T>), WrapperVisitor(PhantomData::<T>),
); );
*DEDUPLICATE_SOURCE_FILES.write().unwrap() = false; DEDUPLICATE_SOURCE_FILES.with(|d| *d.write().unwrap() = false);
res res
} }
@ -200,9 +213,11 @@ pub mod source_file {
where where
S: serde::Serializer, S: serde::Serializer,
{ {
if *DEDUPLICATE_SOURCE_FILES.read().unwrap() { if DEDUPLICATE_SOURCE_FILES.with(|d| *d.read().unwrap()) {
let mut data = SERIALIZE_DATA.lock().unwrap(); SERIALIZE_DATA.with(|d| {
let mut data = d.lock().unwrap();
serializer.serialize_u64(data.get_id_of(this)) serializer.serialize_u64(data.get_id_of(this))
})
} else { } else {
this.as_ref().serialize(serializer) this.as_ref().serialize(serializer)
} }
@ -212,17 +227,18 @@ pub mod source_file {
where where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
{ {
if *DEDUPLICATE_SOURCE_FILES.read().unwrap() { if DEDUPLICATE_SOURCE_FILES.with(|d| *d.read().unwrap()) {
let id = u64::deserialize(deserializer)?; let id = u64::deserialize(deserializer)?;
Ok(DESERIALIZE_DATA Ok(DESERIALIZE_DATA.with(|d| {
.read() d.read()
.unwrap() .unwrap()
.as_ref() .as_ref()
.ok_or_else(|| de::Error::custom("SourceFiles do not have been loaded yet"))? .ok_or_else(|| de::Error::custom("SourceFiles do not have been loaded yet"))?
.id_to_source_file .id_to_source_file
.get(&id) .get(&id)
.map(Arc::clone) .map(Arc::clone)
.ok_or_else(|| serde::de::Error::custom("invalid source_file id"))?) .ok_or_else(|| serde::de::Error::custom("invalid source_file id"))
}))?
} else { } else {
Ok(Arc::new(SourceFile::deserialize(deserializer)?)) Ok(Arc::new(SourceFile::deserialize(deserializer)?))
} }