//! Local load-store forwarding pass (O1+).
//!
//! Within a single basic block, if a `load %ptr` immediately follows
//! (with no intervening store to `%ptr` or call that might write `%ptr`)
//! a `store %val, %ptr`, the load is redundant — we already know the
//! value is `%val`. This pass:
//!
//! 1. Tracks the most recently stored value for each address.
//! 2. On a matching `load`, records `load_id → stored_val` in a
//!    substitution table.
//! 3. After scanning the block, rewrites every instruction that uses any
//!    forwarded `load_id` to use `stored_val` instead.
//!
//! The `load` instructions themselves become dead after substitution; the
//! subsequent DCE pass removes them.
//!
//! ### Alias model
//!
//! Uses the Fortran-aware alias oracle:
//! - `MustAlias` store → later load can forward
//! - `MayAlias` store → pending forwarded values are conservatively killed
//! - `NoAlias` store/call arg → pending forwarded values survive
//!
//! ### Invalidation rules (conservative)
//!
//! | Instruction         | Effect on `available` map                    |
//! |---------------------|----------------------------------------------|
//! | `store %v, %ptr`    | kill aliasing entries; record `%ptr → v`     |
//! | `load %ptr`         | Forward if available; otherwise no-op        |
//! | `call / rcall`      | Flush only entries aliasing pointer args     |
//! | Any other           | No-op (reads only)                           |
//!
//! ### Example
//!
//! ```text
//! ; Before LSF
//! store %42, %alloca
//! %w = load %alloca     ; forward → %w is replaced by %42
//! %x = iadd %w, %1      ; becomes iadd %42, %1
//!
//! ; After LSF + DCE
//! store %42, %alloca
//! %x = iadd %42, %1
//! ```

46 use super::alias::{self, may_reach_through_call_arg, AliasResult};
47 use super::pass::Pass;
48 use crate::ir::inst::*;
49 use crate::ir::types::IrType;
50 use crate::ir::walk::{for_each_operand_mut, for_each_terminator_operand_mut};
51 use std::collections::HashMap;
52
/// Local (single-basic-block) load→store forwarding pass.
///
/// Stateless marker type; all work happens in [`Pass::run`], which walks
/// each function independently. See the module docs for the alias model
/// and invalidation rules.
pub struct LocalLsf;

55 impl Pass for LocalLsf {
56 fn name(&self) -> &'static str {
57 "load-store-fwd"
58 }
59
60 fn run(&self, module: &mut Module) -> bool {
61 let mut changed = false;
62 for func in &mut module.functions {
63 changed |= lsf_in_function(func);
64 }
65 changed
66 }
67 }
68
/// Scan each basic block of `func` for store→load forwarding opportunities
/// and apply the resulting value substitutions function-wide.
///
/// Returns `true` if any load was forwarded.
fn lsf_in_function(func: &mut Function) -> bool {
    // One entry per store whose value is still known to be live at `ptr`
    // at the current position of the in-block scan.
    #[derive(Clone, Copy)]
    struct AvailableStore {
        ptr: ValueId,
        val: ValueId,
    }

    // Accumulated substitutions: forwarded load id → stored value id.
    let mut all_rewrites: HashMap<ValueId, ValueId> = HashMap::new();
    let mut changed = false;

    for block in &func.blocks {
        // Availability is purely local — reset at every block boundary,
        // so nothing is ever forwarded across control flow.
        let mut available: Vec<AvailableStore> = Vec::new();

        for inst in &block.insts {
            match &inst.kind {
                InstKind::Store(val, ptr) => {
                    // Canonicalize through earlier forwardings so entries
                    // compare against the same representative values.
                    let eff_ptr = resolve(&all_rewrites, *ptr);
                    let eff_val = resolve(&all_rewrites, *val);
                    // Kill every entry this store may overwrite: only
                    // provably-NoAlias entries survive (May- and MustAlias
                    // are both flushed, per the module's invalidation table).
                    available.retain(|entry| {
                        matches!(alias::query(func, entry.ptr, eff_ptr), AliasResult::NoAlias)
                    });
                    available.push(AvailableStore {
                        ptr: eff_ptr,
                        val: eff_val,
                    });
                }

                InstKind::Load(ptr) => {
                    let eff_ptr = resolve(&all_rewrites, *ptr);
                    // Most recent MustAlias store wins (hence `.rev()`).
                    // The type-equality check keeps e.g. a scalar store
                    // from being forwarded into an aggregate-typed load
                    // of the same address.
                    if let Some(entry) = available.iter().rev().find(|entry| {
                        matches!(
                            alias::query(func, entry.ptr, eff_ptr),
                            AliasResult::MustAlias
                        ) && func.value_type(entry.val).is_some_and(|ty| ty == inst.ty)
                    }) {
                        all_rewrites.insert(inst.id, entry.val);
                        changed = true;
                    }
                }

                InstKind::Call(_, args) | InstKind::RuntimeCall(_, args) => {
                    // Collect the (canonicalized) pointer-typed arguments;
                    // only those can let the callee write memory we track.
                    let pointer_args: Vec<ValueId> = args
                        .iter()
                        .copied()
                        .map(|arg| resolve(&all_rewrites, arg))
                        .filter(|arg| value_is_pointer(func, *arg))
                        .collect();
                    if !pointer_args.is_empty() {
                        // Call boundaries use the coarser
                        // may_reach_through_call_arg predicate: a
                        // callee that gets a pointer can walk to
                        // any offset within the same allocation,
                        // so a precise "different GEP offset →
                        // NoAlias" answer is unsound here.
                        available.retain(|entry| {
                            pointer_args
                                .iter()
                                .all(|arg| !may_reach_through_call_arg(func, entry.ptr, *arg))
                        });
                    }
                }

                _ => {} // Pure computation — doesn't affect memory availability.
            }
        }
    }

    if all_rewrites.is_empty() {
        return false;
    }

    // Apply the substitution across the whole function (both instructions and
    // terminators, including block-param arguments on branches).
    apply_rewrites(func, &all_rewrites);
    changed
}

146 /// Follow a substitution chain to its canonical root.
147 ///
148 /// Handles the case where forwarded loads are themselves forwarded again:
149 /// store %v, %p; %a = load %p; ← forward: a → v
150 /// store %a, %q; %b = load %q; ← forward: b → a, resolved → v
151 fn resolve(rewrites: &HashMap<ValueId, ValueId>, mut v: ValueId) -> ValueId {
152 let mut steps = 0usize;
153 while let Some(&next) = rewrites.get(&v) {
154 v = next;
155 steps += 1;
156 if steps > 64 {
157 break;
158 } // cycle guard (SSA has none, but be safe)
159 }
160 v
161 }
162
163 /// Rewrite all uses of forwarded loads across the entire function.
164 ///
165 /// Uses the same `for_each_operand_mut` / `for_each_terminator_operand_mut`
166 /// helpers as CSE to avoid code duplication.
167 fn apply_rewrites(func: &mut Function, rewrites: &HashMap<ValueId, ValueId>) {
168 let r = |v: &mut ValueId| {
169 let resolved = resolve(rewrites, *v);
170 if resolved != *v {
171 *v = resolved;
172 }
173 };
174 for block in &mut func.blocks {
175 for inst in &mut block.insts {
176 for_each_operand_mut(&mut inst.kind, r);
177 }
178 if let Some(term) = &mut block.terminator {
179 for_each_terminator_operand_mut(term, r);
180 }
181 }
182 }
183
184 fn value_is_pointer(func: &Function, value: ValueId) -> bool {
185 if matches!(func.value_type(value), Some(IrType::Ptr(_))) {
186 return true;
187 }
188 if func
189 .params
190 .iter()
191 .any(|param| param.id == value && matches!(param.ty, IrType::Ptr(_)))
192 {
193 return true;
194 }
195 func.blocks
196 .iter()
197 .flat_map(|block| block.insts.iter())
198 .find(|inst| inst.id == value)
199 .map(|inst| matches!(inst.ty, IrType::Ptr(_)))
200 .unwrap_or(false)
201 }
202
// ---------------------------------------------------------------------------
// Unit tests
// ---------------------------------------------------------------------------

#[cfg(test)]
mod tests {
    use super::*;
    use crate::ir::types::{IntWidth, IrType};
    use crate::lexer::{Position, Span};
    use crate::opt::pass::Pass;

    /// Zero-width span at 0:0 for synthetic test instructions.
    fn dummy_span() -> Span {
        let p = Position { line: 0, col: 0 };
        Span {
            file_id: 0,
            start: p,
            end: p,
        }
    }

    /// Append an instruction to `f`'s entry block and return its result id.
    fn push(f: &mut Function, kind: InstKind, ty: IrType) -> ValueId {
        let id = f.next_value_id();
        let entry = f.entry;
        f.block_mut(entry).insts.push(Inst {
            id,
            kind,
            ty,
            span: dummy_span(),
        });
        id
    }

    /// Build an `i32*` function parameter with the given SSA id and
    /// Fortran-noalias flag.
    fn param(name: &str, id: u32, fortran_noalias: bool) -> Param {
        Param {
            name: name.into(),
            ty: IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
            id: ValueId(id),
            fortran_noalias,
        }
    }

    #[test]
    fn forwards_load_after_store() {
        // store %42, %alloca
        // %w = load %alloca    ← should become %42
        // %x = iadd %w, %1     ← becomes iadd %42, %1
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Void);

        let alloca = push(
            &mut f,
            InstKind::Alloca(IrType::Int(IntWidth::I32)),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let val42 = push(
            &mut f,
            InstKind::ConstInt(42, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        let one = push(
            &mut f,
            InstKind::ConstInt(1, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(val42, alloca), IrType::Void);
        let load = push(&mut f, InstKind::Load(alloca), IrType::Int(IntWidth::I32));
        let add = push(
            &mut f,
            InstKind::IAdd(load, one),
            IrType::Int(IntWidth::I32),
        );
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(add)));
        m.add_function(f);

        let pass = LocalLsf;
        let changed = pass.run(&mut m);
        assert!(changed, "LSF should forward the load");

        // After forwarding, the IAdd should use val42 directly, not load.
        let func = &m.functions[0];
        let insts = &func.block(func.entry).insts;
        let add_inst = insts.iter().find(|i| i.id == add).unwrap();
        assert!(
            matches!(&add_inst.kind, InstKind::IAdd(a, _) if *a == val42),
            "IAdd operand should be forwarded to val42, got {:?}",
            add_inst.kind
        );
    }

    #[test]
    fn does_not_forward_across_intervening_store() {
        // store %42, %alloca
        // store %99, %alloca   ← overwrites the 42
        // %w = load %alloca    ← should forward to %99, not %42
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Void);

        let alloca = push(
            &mut f,
            InstKind::Alloca(IrType::Int(IntWidth::I32)),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let v42 = push(
            &mut f,
            InstKind::ConstInt(42, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        let v99 = push(
            &mut f,
            InstKind::ConstInt(99, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v42, alloca), IrType::Void);
        push(&mut f, InstKind::Store(v99, alloca), IrType::Void);
        let load = push(&mut f, InstKind::Load(alloca), IrType::Int(IntWidth::I32));
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(load)));
        m.add_function(f);

        let pass = LocalLsf;
        let changed = pass.run(&mut m);
        assert!(changed, "LSF should forward to the latest store (99)");

        let func = &m.functions[0];
        let term = func.block(func.entry).terminator.as_ref().unwrap();
        assert!(
            matches!(term, Terminator::Return(Some(v)) if *v == v99),
            "Return should use v99 (the latest store), got {:?}",
            term
        );
    }

    #[test]
    fn does_not_forward_across_call() {
        // store %42, %alloca
        // call @ext()          ← may write %alloca
        // %w = load %alloca    ← must NOT be forwarded
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Void);

        let alloca = push(
            &mut f,
            InstKind::Alloca(IrType::Int(IntWidth::I32)),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let v42 = push(
            &mut f,
            InstKind::ConstInt(42, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v42, alloca), IrType::Void);
        // A call that might write to the alloca.
        push(
            &mut f,
            InstKind::Call(FuncRef::External("ext".into()), vec![alloca]),
            IrType::Void,
        );
        let load = push(&mut f, InstKind::Load(alloca), IrType::Int(IntWidth::I32));
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(load)));
        m.add_function(f);

        let pass = LocalLsf;
        // No forwarding should happen (call killed the available store).
        let changed = pass.run(&mut m);
        assert!(!changed, "LSF must not forward across a call");
    }

    #[test]
    fn no_forwarding_without_prior_store() {
        // %w = load %alloca    ← no prior store — nothing to forward
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Void);

        let alloca = push(
            &mut f,
            InstKind::Alloca(IrType::Int(IntWidth::I32)),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let load = push(&mut f, InstKind::Load(alloca), IrType::Int(IntWidth::I32));
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(load)));
        m.add_function(f);

        let pass = LocalLsf;
        let changed = pass.run(&mut m);
        assert!(!changed, "No forwarding without a prior store");
    }

    #[test]
    fn forwards_load_from_must_alias_gep() {
        // Store through `gep %ptr, [0]` and load through a *different* GEP
        // instruction with the same constant index — the alias oracle must
        // report MustAlias so the load is forwarded to the stored value.
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Int(IntWidth::I32));

        let arr_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I32)), 4);
        let arr_ptr_ty = IrType::Ptr(Box::new(arr_ty.clone()));
        let ptr = push(&mut f, InstKind::Alloca(arr_ty), arr_ptr_ty);
        let zero0 = push(
            &mut f,
            InstKind::ConstInt(0, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        let zero1 = push(
            &mut f,
            InstKind::ConstInt(0, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        let gep0 = push(
            &mut f,
            InstKind::GetElementPtr(ptr, vec![zero0]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
        );
        let gep1 = push(
            &mut f,
            InstKind::GetElementPtr(ptr, vec![zero1]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
        );
        let v42 = push(
            &mut f,
            InstKind::ConstInt(42, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v42, gep0), IrType::Void);
        let load = push(&mut f, InstKind::Load(gep1), IrType::Int(IntWidth::I32));
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(load)));
        m.add_function(f);

        assert!(
            LocalLsf.run(&mut m),
            "LSF should forward across must-alias GEPs"
        );
        let term = m.functions[0]
            .block(m.functions[0].entry)
            .terminator
            .as_ref()
            .unwrap();
        assert!(
            matches!(term, Terminator::Return(Some(v)) if *v == v42),
            "return should use the stored value after forwarding, got {:?}",
            term
        );
    }

    #[test]
    fn does_not_forward_scalar_store_into_aggregate_load() {
        // Store an f64 scalar through an element pointer, then load the
        // whole array through the alloca. Even if the addresses must-alias,
        // the type mismatch (scalar vs aggregate) must block forwarding.
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Void);

        let arr_ty = IrType::Array(
            Box::new(IrType::Float(crate::ir::types::FloatWidth::F64)),
            2,
        );
        let alloca = push(
            &mut f,
            InstKind::Alloca(arr_ty.clone()),
            IrType::Ptr(Box::new(arr_ty.clone())),
        );
        let zero = push(
            &mut f,
            InstKind::ConstInt(0, IntWidth::I64),
            IrType::Int(IntWidth::I64),
        );
        let elem_ptr = push(
            &mut f,
            InstKind::GetElementPtr(alloca, vec![zero]),
            IrType::Ptr(Box::new(IrType::Float(crate::ir::types::FloatWidth::F64))),
        );
        let scalar = push(
            &mut f,
            InstKind::ConstFloat(1.5, crate::ir::types::FloatWidth::F64),
            IrType::Float(crate::ir::types::FloatWidth::F64),
        );
        push(&mut f, InstKind::Store(scalar, elem_ptr), IrType::Void);
        let load = push(&mut f, InstKind::Load(alloca), arr_ty.clone());
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(None));
        m.add_function(f);

        let changed = LocalLsf.run(&mut m);
        assert!(
            !changed,
            "LSF must not forward a scalar store into an aggregate-typed load"
        );

        let func = &m.functions[0];
        let load_inst = func
            .block(func.entry)
            .insts
            .iter()
            .find(|inst| inst.id == load)
            .expect("load should still exist");
        assert!(
            matches!(load_inst.kind, InstKind::Load(ptr) if ptr == alloca),
            "aggregate load should remain untouched, got {:?}",
            load_inst.kind
        );
    }

    #[test]
    fn call_invalidates_store_when_arg_shares_alloca_base_with_entry() {
        // A call receiving `gep %alloca, [0]` can walk to any
        // offset within %alloca; a prior store to `gep %alloca, [1]`
        // must be invalidated even though the precise offsets
        // differ. Mirror of contained_dummy_array_ref: caller
        // stores arr(2)=20, calls touch(arr), then reads arr(2).
        // The read must see the callee's store, not the caller's
        // earlier constant.
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Int(IntWidth::I32));

        let arr_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I32)), 2);
        let alloca = push(
            &mut f,
            InstKind::Alloca(arr_ty.clone()),
            IrType::Ptr(Box::new(arr_ty)),
        );
        let c0 = push(
            &mut f,
            InstKind::ConstInt(0, IntWidth::I64),
            IrType::Int(IntWidth::I64),
        );
        let c1 = push(
            &mut f,
            InstKind::ConstInt(1, IntWidth::I64),
            IrType::Int(IntWidth::I64),
        );
        let g0 = push(
            &mut f,
            InstKind::GetElementPtr(alloca, vec![c0]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let g1 = push(
            &mut f,
            InstKind::GetElementPtr(alloca, vec![c1]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let v20 = push(
            &mut f,
            InstKind::ConstInt(20, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v20, g1), IrType::Void);
        push(
            &mut f,
            InstKind::Call(FuncRef::External("touch".into()), vec![g0]),
            IrType::Void,
        );
        let load = push(&mut f, InstKind::Load(g1), IrType::Int(IntWidth::I32));
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(load)));
        m.add_function(f);

        LocalLsf.run(&mut m);
        let term = m.functions[0]
            .block(m.functions[0].entry)
            .terminator
            .as_ref()
            .unwrap();
        if let Terminator::Return(Some(v)) = term {
            assert_ne!(*v, v20, "LSF forwarded arr[1] load to the caller's pre-call constant across a call that received arr[0]; callee could have walked to arr[1]");
        } else {
            panic!("unexpected terminator: {:?}", term);
        }
    }

    #[test]
    fn chained_gep_through_arg_ptr_invalidates_direct_gep_store() {
        // Mirror of contained_dummy_array_ref at O1.
        //
        //   %alloca = alloca [i32 x 2]   ; caller's arr
        //   %g0 = gep %alloca, [0]       ; arr[0]
        //   %g1 = gep %alloca, [1]       ; arr[1]
        //   store %20, %g1               ; arr[1] = 20 (caller init)
        //   %chain0 = gep %g0, [0]       ; chained arr[0]
        //   store %11, %chain0           ; arr[0] = 11 (inlined touch)
        //   %chain1 = gep %g0, [1]       ; chained arr[1]
        //   store %31, %chain1           ; arr[1] = 31 (inlined touch)
        //   %load = load %g1             ; print arr[1]
        //   return %load                 ; must be 31, not 20
        //
        // If LocalLsf or the alias oracle doesn't see that %chain1
        // aliases %g1 (both are byte-offset 4 from %alloca), the
        // load gets forwarded to %20 and the caller prints garbage.
        let mut m = Module::new("test".into());
        let mut f = Function::new("f".into(), vec![], IrType::Int(IntWidth::I32));

        let arr_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I32)), 2);
        let alloca = push(
            &mut f,
            InstKind::Alloca(arr_ty.clone()),
            IrType::Ptr(Box::new(arr_ty)),
        );
        let c0 = push(
            &mut f,
            InstKind::ConstInt(0, IntWidth::I64),
            IrType::Int(IntWidth::I64),
        );
        let c1 = push(
            &mut f,
            InstKind::ConstInt(1, IntWidth::I64),
            IrType::Int(IntWidth::I64),
        );
        let g0 = push(
            &mut f,
            InstKind::GetElementPtr(alloca, vec![c0]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let g1 = push(
            &mut f,
            InstKind::GetElementPtr(alloca, vec![c1]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );

        let v20 = push(
            &mut f,
            InstKind::ConstInt(20, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v20, g1), IrType::Void);

        let chain0 = push(
            &mut f,
            InstKind::GetElementPtr(g0, vec![c0]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let v11 = push(
            &mut f,
            InstKind::ConstInt(11, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v11, chain0), IrType::Void);

        let chain1 = push(
            &mut f,
            InstKind::GetElementPtr(g0, vec![c1]),
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I32))),
        );
        let v31 = push(
            &mut f,
            InstKind::ConstInt(31, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v31, chain1), IrType::Void);

        let load = push(&mut f, InstKind::Load(g1), IrType::Int(IntWidth::I32));
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(load)));
        m.add_function(f);

        LocalLsf.run(&mut m);
        let term = m.functions[0]
            .block(m.functions[0].entry)
            .terminator
            .as_ref()
            .unwrap();
        // The load must either remain or be forwarded to v31 — never
        // to v20.
        if let Terminator::Return(Some(v)) = term {
            assert_ne!(*v, v20, "LSF forwarded load %g1 to the pre-chained-store value v20, shadowing the chained-GEP aliasing store v31");
        } else {
            panic!("unexpected terminator: {:?}", term);
        }
    }

    #[test]
    fn keeps_store_available_across_noalias_call_arg() {
        // Store to the noalias param %a, then call touch(%b) where %b is a
        // distinct noalias param: the callee cannot reach %a's memory, so
        // the stored value must survive the call and be forwarded.
        let mut m = Module::new("test".into());
        let mut f = Function::new(
            "f".into(),
            vec![param("a", 0, true), param("b", 1, true)],
            IrType::Int(IntWidth::I32),
        );

        let v42 = push(
            &mut f,
            InstKind::ConstInt(42, IntWidth::I32),
            IrType::Int(IntWidth::I32),
        );
        push(&mut f, InstKind::Store(v42, ValueId(0)), IrType::Void);
        push(
            &mut f,
            InstKind::Call(FuncRef::External("touch".into()), vec![ValueId(1)]),
            IrType::Void,
        );
        let load = push(
            &mut f,
            InstKind::Load(ValueId(0)),
            IrType::Int(IntWidth::I32),
        );
        let entry = f.entry;
        f.block_mut(entry).terminator = Some(Terminator::Return(Some(load)));
        m.add_function(f);

        assert!(
            LocalLsf.run(&mut m),
            "LSF should preserve the stored value across a noalias pointer call"
        );
        let term = m.functions[0]
            .block(m.functions[0].entry)
            .terminator
            .as_ref()
            .unwrap();
        assert!(
            matches!(term, Terminator::Return(Some(v)) if *v == v42),
            "return should use the forwarded value across the noalias call, got {:?}",
            term
        );
    }
}
