Compare commits

...

6 Commits

Author SHA1 Message Date
Moritz Hölting 2d2ecb5129 update changelog and make flexbuffers dependency optional 2025-02-15 10:36:16 +01:00
Moritz Hölting af5717b10c change order of serialization of Span by serializing to temporary buffer 2025-02-15 10:32:33 +01:00
Moritz Hölting 4f787665c1 implement custom deserialize (requires opposite order of data and source_files than what is serialized) 2025-02-15 10:32:33 +01:00
Moritz Hölting ba8ffa0d86 implement custom serialize for Span 2025-02-15 10:32:33 +01:00
Moritz Hölting 8223dccc24 fix compilation errors depending on feature selection and update dependency version 2025-01-17 12:27:25 +01:00
Moritz Hölting 6179bebbf0 remove unnecessary RwLocks in Transpiler 2024-11-15 10:42:52 +01:00
11 changed files with 1412 additions and 48 deletions

.gitignore vendored (1 change)

@@ -1,2 +1 @@
 /target
-/Cargo.lock

CHANGELOG.md

@@ -11,6 +11,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Changed
 
+- Option to deduplicate source files during serialization when using `SerdeWrapper`
 
 ### Removed
 
 ## [0.1.0] - 2024-10-01
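For context, the new option works by wrapping the value before it reaches the serializer. A minimal sketch, assuming the crate is consumed as `shulkerscript` and that `serde_json` is available downstream (`ast` and `Ast` are hypothetical placeholders for any serializable value containing `Span`s):

```rust
use shulkerscript::SerdeWrapper;

// Serialize: shared `Arc<SourceFile>`s are moved into a `source_files` table
// and each `Span` stores only a small integer id instead of the file contents.
let json = serde_json::to_string(&SerdeWrapper(ast))?;

// Deserialize: the table is read first, then the ids are resolved back into
// shared `Arc<SourceFile>`s.
let SerdeWrapper(restored): SerdeWrapper<Ast> = serde_json::from_str(&json)?;
```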

Cargo.lock generated (new file, 1018 changes)

File diff suppressed because it is too large

Cargo.toml

@@ -18,27 +18,28 @@ license = "MIT OR Apache-2.0"
 default = ["fs_access", "lua", "shulkerbox", "zip"]
 fs_access = ["shulkerbox?/fs_access"]
 lua = ["dep:mlua"]
-serde = ["dep:serde", "shulkerbox?/serde"]
-shulkerbox = ["dep:shulkerbox"]
+serde = ["dep:serde", "dep:flexbuffers", "shulkerbox?/serde"]
+shulkerbox = ["dep:shulkerbox", "dep:chksum-md5"]
 zip = ["shulkerbox?/zip"]
 
 [target.'cfg(target_arch = "wasm32")'.dependencies]
 path-absolutize = { version = "3.1.1", features = ["use_unix_paths_on_wasm"] }
 
 [dependencies]
-chksum-md5 = "0.0.0"
-colored = "2.1.0"
+chksum-md5 = { version = "0.1.0", optional = true }
+colored = "3.0.0"
 derive_more = { version = "1.0.0", default-features = false, features = ["deref", "deref_mut", "from"] }
 enum-as-inner = "0.6.0"
+flexbuffers = { version = "25.2.10", optional = true }
 getset = "0.1.2"
-itertools = "0.13.0"
-mlua = { version = "0.10.0", features = ["lua54", "vendored"], optional = true }
+itertools = "0.14.0"
+mlua = { version = "0.10.2", features = ["lua54", "vendored"], optional = true }
 path-absolutize = "3.1.1"
-pathdiff = "0.2.2"
-serde = { version = "1.0.214", features = ["derive", "rc"], optional = true }
+pathdiff = "0.2.3"
+serde = { version = "1.0.217", features = ["derive", "rc"], optional = true }
 shulkerbox = { version = "0.1.0", default-features = false, optional = true }
 strsim = "0.11.1"
 strum = { version = "0.26.2", features = ["derive"] }
 strum_macros = "0.26.4"
-thiserror = "1.0.65"
-tracing = "0.1.40"
+thiserror = "2.0.11"
+tracing = "0.1.41"

src/base/source_file.rs

@@ -145,7 +145,6 @@ impl SourceFile {
 }
 
 /// Represents a range of characters in a source file.
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[derive(Clone, Getters, CopyGetters)]
 pub struct Span {
     /// Get the start byte index of the span.

src/lib.rs

@@ -12,6 +12,7 @@
 #![warn(missing_docs, clippy::all, clippy::pedantic)]
 #![allow(clippy::missing_panics_doc, clippy::missing_const_for_fn)]
 
+#[cfg(feature = "shulkerbox")]
 pub use shulkerbox;
 
 pub mod base;
@@ -19,6 +20,12 @@ pub mod lexical;
 pub mod syntax;
 pub mod transpile;
 
+#[cfg(feature = "serde")]
+mod serde;
+#[cfg(feature = "serde")]
+#[cfg_attr(feature = "serde", doc(inline))]
+pub use serde::SerdeWrapper;
+
 use std::path::Path;
 
 use base::{source_file::SourceFile, Error, FileProvider, Handler, Result};

src/serde.rs (new file, 330 changes)

@@ -0,0 +1,330 @@
//! Utilities for (de-)serializing

use std::{
    collections::BTreeMap,
    marker::PhantomData,
    sync::{Arc, LazyLock, Mutex, RwLock},
};

use serde::{
    de::{self, Visitor},
    ser::SerializeStruct,
    Deserialize, Serialize,
};

use crate::base::source_file::{SourceFile, Span};

static DEDUPLICATE_SOURCE_FILES: LazyLock<RwLock<bool>> = LazyLock::new(|| RwLock::new(false));
static SERIALIZE_DATA: LazyLock<Mutex<SerializeData>> =
    LazyLock::new(|| Mutex::new(SerializeData::default()));
static DESERIALIZE_DATA: LazyLock<RwLock<Option<DeserializeData>>> =
    LazyLock::new(|| RwLock::new(None));

/// Wrapper to remove duplicate source file data during (de-)serialization
#[derive(Debug)]
pub struct SerdeWrapper<T>(pub T);

impl<T> Serialize for SerdeWrapper<T>
where
    T: Serialize,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        *DEDUPLICATE_SOURCE_FILES.write().unwrap() = true;
        SERIALIZE_DATA.lock().unwrap().clear();
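        // First pass: serialize into a throwaway flexbuffer so that every `Span`
        // registers its `Arc<SourceFile>` in SERIALIZE_DATA. This is why the
        // (now optional) flexbuffers dependency is required by the `serde`
        // feature: the id table must be complete before it is written out as
        // the first field below.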
        let mut serialized_data = flexbuffers::FlexbufferSerializer::new();
        self.0
            .serialize(&mut serialized_data)
            .map_err(|_| serde::ser::Error::custom("could not buffer serialization"))?;
        drop(serialized_data);

        let mut s = serializer.serialize_struct("SerdeWrapper", 3)?;
        s.serialize_field(
            "source_files",
            &SERIALIZE_DATA.lock().unwrap().id_to_source_file,
        )?;
        s.serialize_field("data", &self.0)?;
        *DEDUPLICATE_SOURCE_FILES.write().unwrap() = false;
        s.end()
    }
}

impl<'de, T> Deserialize<'de> for SerdeWrapper<T>
where
    T: Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        #[derive(Deserialize)]
        #[serde(field_identifier, rename_all = "snake_case")]
        enum Field {
            Data,
            SourceFiles,
        }

        struct WrapperVisitor<T>(PhantomData<T>);

        impl<'de, T> Visitor<'de> for WrapperVisitor<T>
        where
            T: Deserialize<'de>,
        {
            type Value = SerdeWrapper<T>;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("struct SerdeWrapper")
            }

            fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
            where
                V: de::SeqAccess<'de>,
            {
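                // Sequence-based formats deliver the fields in declaration
                // order: `source_files` first, then `data`. That is the
                // opposite of the order in which serialization produces them
                // internally (the data is buffered first), and the `Span`
                // deserializer below relies on the table already being
                // present in DESERIALIZE_DATA.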
                let source_files: BTreeMap<usize, SourceFile> = seq
                    .next_element()?
                    .ok_or_else(|| de::Error::invalid_length(0, &self))?;
                *DESERIALIZE_DATA.write().unwrap() = Some(DeserializeData {
                    id_to_source_file: source_files
                        .into_iter()
                        .map(|(k, v)| (k, Arc::new(v)))
                        .collect(),
                });
                let data = seq
                    .next_element()?
                    .ok_or_else(|| de::Error::invalid_length(1, &self))?;

                Ok(SerdeWrapper(data))
            }

            fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
            where
                V: de::MapAccess<'de>,
            {
                let mut source_files: Option<BTreeMap<usize, SourceFile>> = None;
                let mut data = None;
                while let Some(key) = map.next_key()? {
                    match key {
                        Field::Data => {
                            if data.is_some() {
                                return Err(de::Error::duplicate_field("data"));
                            }
                            *DESERIALIZE_DATA.write().unwrap() =
                                source_files.as_ref().map(|source_files| DeserializeData {
                                    id_to_source_file: source_files
                                        .iter()
                                        .map(|(&k, v)| (k, Arc::new(v.clone())))
                                        .collect(),
                                });
                            data = Some(map.next_value()?);
                        }
                        Field::SourceFiles => {
                            if source_files.is_some() {
                                return Err(de::Error::duplicate_field("source_files"));
                            }
                            source_files = Some(map.next_value()?);
                        }
                    }
                }
                let data = data.ok_or_else(|| de::Error::missing_field("data"))?;

                Ok(SerdeWrapper(data))
            }
        }

        *DEDUPLICATE_SOURCE_FILES.write().unwrap() = true;
        *DESERIALIZE_DATA.write().unwrap() = None;
        let res = deserializer.deserialize_struct(
            "SerdeWrapper",
            &["source_files", "data"],
            WrapperVisitor(PhantomData::<T>::default()),
        );
        *DEDUPLICATE_SOURCE_FILES.write().unwrap() = false;
        res
    }
}

/// Internally used for Serialization
#[derive(Debug, Default)]
struct SerializeData {
    id_counter: usize,
    ptr_to_id: BTreeMap<usize, usize>,
    id_to_source_file: BTreeMap<usize, SourceFile>,
}

impl SerializeData {
    fn clear(&mut self) {
        self.id_counter = 0;
        self.id_to_source_file.clear();
        self.ptr_to_id.clear();
    }

    /// Get the id of an already stored [`Arc`], or store it and return a new id
    pub fn get_id_of(&mut self, source_file: &Arc<SourceFile>) -> usize {
        let ptr = Arc::as_ptr(source_file);
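        // The Arc's address serves as identity: every Span that shares the
        // same Arc<SourceFile> maps to the same id, so each file's contents
        // are written to the side table only once.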
        if let Some(&id) = self.ptr_to_id.get(&(ptr as usize)) {
            id
        } else {
            let id = self.id_counter;
            self.id_counter += 1;
            self.ptr_to_id.insert(ptr as usize, id);
            self.id_to_source_file
                .insert(id, Arc::unwrap_or_clone(source_file.to_owned()));
            id
        }
    }
}

impl Serialize for Span {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut s = serializer.serialize_struct("Span", 3)?;
        s.serialize_field("start", &self.start())?;
        s.serialize_field("end", &self.end())?;
        if *DEDUPLICATE_SOURCE_FILES.read().unwrap() {
            let mut data = SERIALIZE_DATA.lock().unwrap();
            s.serialize_field("source_file", &data.get_id_of(self.source_file()))?;
        } else {
            s.serialize_field("source_file", self.source_file())?;
        }
        s.end()
    }
}

#[derive(Debug, Default)]
struct DeserializeData {
    id_to_source_file: BTreeMap<usize, Arc<SourceFile>>,
}

impl<'de> Deserialize<'de> for Span {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        #[derive(Deserialize)]
        #[serde(field_identifier, rename_all = "snake_case")]
        enum Field {
            Start,
            End,
            SourceFile,
        }

        struct SpanVisitor;

        impl<'de> Visitor<'de> for SpanVisitor {
            type Value = Span;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                if *DEDUPLICATE_SOURCE_FILES.read().unwrap() {
                    formatter.write_str("struct Span with deduplicated SourceFiles")
                } else {
                    formatter.write_str("struct Span")
                }
            }

            fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
            where
                V: serde::de::SeqAccess<'de>,
            {
                let start = seq
                    .next_element()?
                    .ok_or_else(|| de::Error::invalid_length(0, &self))?;
                let end = seq
                    .next_element()?
                    .ok_or_else(|| de::Error::invalid_length(1, &self))?;
                let source_file = if *DEDUPLICATE_SOURCE_FILES.read().unwrap() {
                    DESERIALIZE_DATA
                        .read()
                        .unwrap()
                        .as_ref()
                        .ok_or_else(|| {
                            de::Error::custom("SourceFiles have not been loaded yet")
                        })?
                        .id_to_source_file
                        .get(
                            &seq.next_element()?
                                .ok_or_else(|| de::Error::invalid_length(2, &self))?,
                        )
                        .ok_or_else(|| de::Error::custom("invalid source_file id"))?
                        .clone()
                } else {
                    Arc::new(
                        seq.next_element()?
                            .ok_or_else(|| de::Error::invalid_length(2, &self))?,
                    )
                };

                Span::new(source_file, start, end)
                    .ok_or_else(|| de::Error::custom("Invalid data"))
            }

            fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
            where
                V: de::MapAccess<'de>,
            {
                let mut start = None;
                let mut end = None;
                let mut source_file = None;
                while let Some(key) = map.next_key()? {
                    match key {
                        Field::Start => {
                            if start.is_some() {
                                return Err(de::Error::duplicate_field("start"));
                            }
                            start = Some(map.next_value()?);
                        }
                        Field::End => {
                            if end.is_some() {
                                return Err(de::Error::duplicate_field("end"));
                            }
                            end = Some(map.next_value()?);
                        }
                        Field::SourceFile => {
                            if source_file.is_some() {
                                return Err(de::Error::duplicate_field("source_file"));
                            }
                            source_file = if *DEDUPLICATE_SOURCE_FILES.read().unwrap() {
                                Some(
                                    DESERIALIZE_DATA
                                        .read()
                                        .unwrap()
                                        .as_ref()
                                        .ok_or_else(|| {
                                            de::Error::custom(
                                                "SourceFiles have not been loaded yet",
                                            )
                                        })?
                                        .id_to_source_file
                                        .get(&map.next_value()?)
                                        .ok_or_else(|| {
                                            de::Error::custom("invalid source_file id")
                                        })?
                                        .clone(),
                                )
                            } else {
                                Some(Arc::new(map.next_value()?))
                            };
                        }
                    }
                }
                let start = start.ok_or_else(|| de::Error::missing_field("start"))?;
                let end = end.ok_or_else(|| de::Error::missing_field("end"))?;
                let source_file =
                    source_file.ok_or_else(|| de::Error::missing_field("source_file"))?;

                Span::new(source_file, start, end)
                    .ok_or_else(|| de::Error::custom("Invalid data"))
            }
        }

        deserializer.deserialize_struct("Span", &["start", "end", "source_file"], SpanVisitor)
    }
}

src/transpile/error.rs

@@ -13,7 +13,7 @@ use crate::{
     syntax::syntax_tree::expression::Expression,
 };
 
-use super::transpiler::FunctionData;
+use super::FunctionData;
 
 /// Errors that can occur during transpilation.
 #[allow(clippy::module_name_repetitions, missing_docs)]
@@ -44,6 +44,7 @@ pub struct MissingFunctionDeclaration {
 }
 
 impl MissingFunctionDeclaration {
+    #[cfg_attr(not(feature = "shulkerbox"), expect(unused))]
     pub(super) fn from_context(
         identifier_span: Span,
         functions: &BTreeMap<(String, String), FunctionData>,
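For context, `expect` (stabilized in Rust 1.81) behaves like `allow` but additionally warns when the suppressed lint never fires, so the attribute above will flag itself for removal once `from_context` gains a caller in non-`shulkerbox` builds. In isolation:

```rust
// Emits an "unfulfilled lint expectation" warning if `unused` never triggers,
// unlike #[allow(unused)], which would silently become stale.
#[expect(unused)]
fn helper_only_used_behind_a_feature() {}

fn main() {}
```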

src/transpile/lua.rs

@@ -59,9 +59,8 @@ mod enabled {
                 err
             })?;
 
-        self.handle_lua_result(lua_result).map_err(|err| {
+        self.handle_lua_result(lua_result).inspect_err(|err| {
             handler.receive(err.clone());
-            err
         })
     }
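For reference, `Result::inspect_err` (stable since Rust 1.76) runs a closure on a reference to the error and passes the `Result` through unchanged, which is exactly what the previous `map_err(|err| { ...; err })` closure emulated by hand. A standalone illustration:

```rust
fn main() {
    let res: Result<i32, String> = Err("lua execution failed".to_string());

    // Old pattern: the closure must return the error so map_err can rebuild the Err.
    let _ = res.clone().map_err(|err| {
        eprintln!("received: {err}");
        err
    });

    // New pattern: observe the error by reference; the Result passes through untouched.
    let _ = res.inspect_err(|err| eprintln!("received: {err}"));
}
```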

src/transpile/mod.rs

@@ -1,9 +1,14 @@
 //! The transpile module is responsible for transpiling the abstract syntax tree into a data pack.
 
+use std::collections::HashMap;
+
+use crate::{base::source_file::Span, syntax::syntax_tree::statement::Statement};
+
 #[doc(hidden)]
 #[cfg(feature = "shulkerbox")]
 pub mod conversions;
 
 mod error;
 #[doc(inline)]
 #[allow(clippy::module_name_repetitions)]
 pub use error::{TranspileError, TranspileResult};
@@ -11,7 +16,18 @@ pub use error::{TranspileError, TranspileResult};
 pub mod lua;
 
 #[cfg(feature = "shulkerbox")]
 mod transpiler;
-#[doc(inline)]
+#[cfg(feature = "shulkerbox")]
+#[cfg_attr(feature = "shulkerbox", doc(inline))]
 pub use transpiler::Transpiler;
 
+#[cfg(feature = "shulkerbox")]
 mod util;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) struct FunctionData {
+    pub(super) namespace: String,
+    pub(super) identifier_span: Span,
+    pub(super) statements: Vec<Statement>,
+    pub(super) public: bool,
+    pub(super) annotations: HashMap<String, Option<String>>,
+}

src/transpile/transpiler.rs

@@ -4,7 +4,6 @@ use chksum_md5 as md5;
 use std::{
     collections::{BTreeMap, HashMap},
     iter,
-    sync::RwLock,
 };
 
 use shulkerbox::datapack::{self, Command, Datapack, Execute};
@@ -27,25 +26,21 @@ use crate::{
     transpile::error::{ConflictingFunctionNames, MissingFunctionDeclaration},
 };
 
-use super::error::{TranspileError, TranspileResult, UnexpectedExpression};
+use super::{
+    error::{TranspileError, TranspileResult, UnexpectedExpression},
+    FunctionData,
+};
 
 /// A transpiler for `Shulkerscript`.
 #[derive(Debug)]
 pub struct Transpiler {
     datapack: shulkerbox::datapack::Datapack,
     /// Key: (program identifier, function name)
-    functions: RwLock<BTreeMap<(String, String), FunctionData>>,
-    function_locations: RwLock<HashMap<(String, String), (String, bool)>>,
-    aliases: RwLock<HashMap<(String, String), (String, String)>>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(super) struct FunctionData {
-    pub(super) namespace: String,
-    pub(super) identifier_span: Span,
-    pub(super) statements: Vec<Statement>,
-    pub(super) public: bool,
-    pub(super) annotations: HashMap<String, Option<String>>,
+    functions: BTreeMap<(String, String), FunctionData>,
+    /// Key: (program identifier, function name), Value: (function location, public)
+    function_locations: HashMap<(String, String), (String, bool)>,
+    /// Key: alias, Value: target
+    aliases: HashMap<(String, String), (String, String)>,
 }
 
 impl Transpiler {
@@ -54,9 +49,9 @@ impl Transpiler {
     pub fn new(pack_format: u8) -> Self {
         Self {
             datapack: shulkerbox::datapack::Datapack::new(pack_format),
-            functions: RwLock::new(BTreeMap::new()),
-            function_locations: RwLock::new(HashMap::new()),
-            aliases: RwLock::new(HashMap::new()),
+            functions: BTreeMap::new(),
+            function_locations: HashMap::new(),
+            aliases: HashMap::new(),
         }
     }
@@ -85,7 +80,7 @@ impl Transpiler {
         let mut always_transpile_functions = Vec::new();
         {
-            let functions = self.functions.read().unwrap();
+            let functions = &mut self.functions;
             for (_, data) in functions.iter() {
                 let always_transpile_function = data.annotations.contains_key("tick")
                     || data.annotations.contains_key("load")
@@ -148,7 +143,7 @@ impl Transpiler {
             })
             .collect();
         #[allow(clippy::significant_drop_tightening)]
-        self.functions.write().unwrap().insert(
+        self.functions.insert(
             (program_identifier, name),
             FunctionData {
                 namespace: namespace.namespace_name().str_content().to_string(),
@@ -164,7 +159,7 @@ impl Transpiler {
         let import_identifier =
             super::util::calculate_import_identifier(&program_identifier, path);
 
-        let mut aliases = self.aliases.write().unwrap();
+        let aliases = &mut self.aliases;
 
         match import.items() {
             ImportItems::All(_) => todo!("Importing all items is not yet supported."),
@@ -215,12 +210,9 @@ impl Transpiler {
             program_identifier.to_string(),
             identifier_span.str().to_string(),
         );
-        let alias_query = {
-            let aliases = self.aliases.read().unwrap();
-            aliases.get(&program_query).cloned()
-        };
+        let alias_query = self.aliases.get(&program_query).cloned();
         let already_transpiled = {
-            let locations = self.function_locations.read().unwrap();
+            let locations = &self.function_locations;
             locations
                 .get(&program_query)
                 .or_else(|| {
@@ -234,7 +226,7 @@ impl Transpiler {
             tracing::trace!("Function not transpiled yet, transpiling.");
 
             let statements = {
-                let functions = self.functions.read().unwrap();
+                let functions = &self.functions;
                 let function_data = functions
                     .get(&program_query)
                     .or_else(|| {
@@ -246,7 +238,7 @@ impl Transpiler {
                         let error = TranspileError::MissingFunctionDeclaration(
                             MissingFunctionDeclaration::from_context(
                                 identifier_span.clone(),
-                                &functions,
+                                functions,
                             ),
                         );
                         handler.receive(error.clone());
@@ -256,7 +248,7 @@ impl Transpiler {
         };
 
         let commands = self.transpile_function(&statements, program_identifier, handler)?;
-        let functions = self.functions.read().unwrap();
+        let functions = &self.functions;
         let function_data = functions
             .get(&program_query)
             .or_else(|| {
@@ -268,7 +260,7 @@ impl Transpiler {
                 let error = TranspileError::MissingFunctionDeclaration(
                     MissingFunctionDeclaration::from_context(
                         identifier_span.clone(),
-                        &functions,
+                        functions,
                     ),
                 );
                 handler.receive(error.clone());
@@ -314,7 +306,7 @@ impl Transpiler {
             self.datapack.add_load(&function_location);
         }
 
-        self.function_locations.write().unwrap().insert(
+        self.function_locations.insert(
             (
                 program_identifier.to_string(),
                 identifier_span.str().to_string(),
); );
} }
let locations = self.function_locations.read().unwrap(); let locations = &self.function_locations;
locations locations
.get(&program_query) .get(&program_query)
.or_else(|| alias_query.and_then(|q| locations.get(&q).filter(|(_, p)| *p))) .or_else(|| alias_query.and_then(|q| locations.get(&q).filter(|(_, p)| *p)))
@@ -331,7 +323,7 @@ impl Transpiler {
                 let error = TranspileError::MissingFunctionDeclaration(
                     MissingFunctionDeclaration::from_context(
                         identifier_span.clone(),
-                        &self.functions.read().unwrap(),
+                        &self.functions,
                     ),
                 );
                 handler.receive(error.clone());
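For context on commit 6179bebbf0: the transpiler methods already take `&mut self`, so exclusive access is guaranteed by the borrow checker, and the interior mutability (with its `.read()`/`.write().unwrap()` ceremony and lock-poisoning hazard) was dead weight. A minimal standalone sketch of the pattern, with hypothetical names:

```rust
use std::collections::BTreeMap;

struct Transpiler {
    // Plain map instead of RwLock<BTreeMap<...>>.
    functions: BTreeMap<String, u32>,
}

impl Transpiler {
    // `&mut self` already guarantees exclusive access: no lock needed.
    fn declare(&mut self, name: String, data: u32) {
        self.functions.insert(name, data);
    }

    // Shared reads borrow the map directly instead of taking a read guard.
    fn lookup(&self, name: &str) -> Option<&u32> {
        self.functions.get(name)
    }
}
```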