diff options
| author | Thomas Voss <mail@thomasvoss.com> | 2026-03-04 01:20:54 +0100 |
|---|---|---|
| committer | Thomas Voss <mail@thomasvoss.com> | 2026-03-04 01:21:23 +0100 |
| commit | 16bf53da8c17dbf231fe3e4077ac873fbedc8ee9 (patch) | |
| tree | ba71000484574c8f4c63cd002722584e1dc9c963 | |
| parent | 6473caf1faf7b1099fa63fcfcf5e688b59e05199 (diff) | |
Introduce fdata_read!() and fdata_write!()
| -rw-r--r-- | oryxc/src/compiler.rs | 50 |
1 file changed, 26 insertions, 24 deletions
diff --git a/oryxc/src/compiler.rs b/oryxc/src/compiler.rs
index 36c6438..f578375 100644
--- a/oryxc/src/compiler.rs
+++ b/oryxc/src/compiler.rs
@@ -128,14 +128,32 @@ where
     }
 }
 
+macro_rules! fdata_read {
+    ($state:expr, $file:expr, $($field:ident),+ $(,)?) => {
+        #[allow(unused_parens)]
+        let ($($field),+) = {
+            let fdata = $state.files.get(&$file).unwrap();
+            ($(fdata.$field.clone()),+)
+        };
+    };
+}
+
+macro_rules! fdata_write {
+    ($state:expr, $file:expr, $($field:ident),+ $(,)?) => {
+        {
+            let mut fdata = $state.files.get_mut(&$file).unwrap();
+            $(
+                fdata.$field = Arc::from(MaybeUninit::new($field));
+            )+
+        }
+    };
+}
+
 fn emit_errors<T>(state: Arc<CompilerState>, file: FileId, errors: T)
 where
     T: IntoIterator<Item = OryxError>,
 {
-    let (name, buffer) = {
-        let fdata = state.files.get(&file).unwrap();
-        (fdata.name.clone(), fdata.buffer.clone())
-    };
+    fdata_read!(state, file, name, buffer);
     for e in errors.into_iter() {
         e.report(name.as_ref(), buffer.as_ref());
     }
@@ -156,10 +174,7 @@ fn worker_loop(
         if let Some(job) = job {
             match job {
                 Job::Lex { file } => {
-                    let buffer = {
-                        let fdata = state.files.get(&file).unwrap();
-                        fdata.buffer.clone()
-                    };
+                    fdata_read!(state, file, buffer);
                     let tokens = match lexer::tokenize(buffer.as_ref()) {
                         Ok(xs) => xs,
                         Err(e) => {
@@ -175,19 +190,12 @@
                         }
                     }
 
-                    {
-                        let mut fdata = state.files.get_mut(&file).unwrap();
-                        fdata.tokens = Arc::from(MaybeUninit::new(tokens));
-                    }
-
+                    fdata_write!(state, file, tokens);
                     state.njobs.fetch_add(1, Ordering::Relaxed);
                     queue.push(Job::Parse { file });
                 },
                 Job::Parse { file } => {
-                    let tokens = {
-                        let fdata = state.files.get(&file).unwrap();
-                        fdata.tokens.clone()
-                    };
+                    fdata_read!(state, file, tokens);
                     let (ast, extra_data) = match parser::parse(
                         unsafe { tokens.assume_init() }.as_ref(),
                     ) {
@@ -205,13 +213,7 @@
                         }
                     }
 
-                    {
-                        let mut fdata = state.files.get_mut(&file).unwrap();
-                        fdata.ast = Arc::from(MaybeUninit::new(ast));
-                        fdata.extra_data =
-                            Arc::from(MaybeUninit::new(extra_data));
-                    }
-
+                    fdata_write!(state, file, ast, extra_data);
                     state.njobs.fetch_add(1, Ordering::Relaxed);
                     queue.push(Job::ResolveSymbols { file });
                 },