Rust · 294634 bytes Raw Blame History
1 //! Lowering of Fortran statements (Stmt::*) to IR.
2 //!
3 //! Extracted from `core.rs` in Sprint 11 Stage C. Pure mechanical
4 //! move — behavior unchanged. The dispatcher still matches on the
5 //! 41 Stmt variants; future sub-stages may split per-variant.
6
7 use std::collections::{HashMap, HashSet};
8 use std::io::Write;
9
10 use crate::ast::expr::Expr;
11 use crate::ast::stmt::*;
12 use crate::ir::builder::FuncBuilder;
13 use crate::ir::inst::*;
14 use crate::ir::types::*;
15
16 use super::core::*;
17 use super::ctx::{CharKind, HiddenResultAbi, LocalInfo, LowerCtx};
18 use super::helpers::coerce_to_type;
19
20 pub(crate) fn lower_stmts(b: &mut FuncBuilder, ctx: &mut LowerCtx, stmts: &[SpannedStmt]) {
21 for stmt in stmts {
22 // Labeled statements and labeled CONTINUEs create new basic blocks; they must be
23 // processed even after a branch/goto terminates the current block. All other dead
24 // code (statements after a terminator in an unlabeled position) is skipped.
25 let is_label_creating = matches!(
26 &stmt.node,
27 Stmt::Labeled { .. } | Stmt::Continue { label: Some(_) }
28 );
29 if !is_label_creating && b.func().block(b.current_block()).terminator.is_some() {
30 continue; // dead code — but keep looping so we can find the next label
31 }
32 lower_stmt(b, ctx, stmt);
33 }
34 }
35
36 /// Lower a single statement.
37 pub(crate) fn lower_stmt(b: &mut FuncBuilder, ctx: &mut LowerCtx, stmt: &SpannedStmt) {
38 match &stmt.node {
39 Stmt::Assignment { target, value } => {
40 // F77 statement-function definitions look like
41 // `name(p1, p2, ...) = expr` — sema records the body in a
42 // side table and flips the symbol kind to Function. There
43 // is no IR to emit for the definition itself; call sites
44 // inline-substitute the body in `lower_expr_full`.
45 if let Expr::FunctionCall { callee, .. } = &target.node {
46 if let Expr::Name { name } = &callee.node {
47 if ctx.lookup_statement_function(name).is_some() {
48 return;
49 }
50 }
51 }
52 match &target.node {
53 Expr::Name { name } => {
54 let key = name.to_lowercase();
55 // Defined assignment: INTERFACE ASSIGNMENT(=) covers
56 // cases where the LHS and RHS types differ or the
57 // user defined a custom store semantics. When we
58 // resolve a specific, lower the call and return —
59 // the default type-matched paths below would either
60 // memcpy garbage or fall through silently.
61 if try_defined_assignment(b, ctx, &key, value) {
62 return;
63 }
64 if let Some(info) = ctx.locals.get(&key).cloned() {
65 // Route fixed-size (non-descriptor) array assignments
66 // to lower_array_assign when the RHS is an array
67 // expression — array+scalar broadcasts and array
68 // constructors need element-wise lowering, not the
69 // scalar store fallback below. Skip derived/character
70 // arrays so their specialized paths run instead.
71 if local_is_array_like(&info)
72 && !local_uses_array_descriptor(&info)
73 && info.derived_type.is_none()
74 && info.char_kind == CharKind::None
75 && (matches!(value.node, Expr::ArrayConstructor { .. })
76 || expr_contains_array_constructor(value)
77 || expr_is_transfer_array_call(value))
78 {
79 lower_array_assign(b, ctx, name, &info, value);
80 return;
81 }
82 // `arr = transfer(src, mold, N)` for an
83 // allocatable rank-1 array. The source bits get
84 // memcpy'd into a freshly-allocated descriptor
85 // (handled by `try_lower_transfer_into_array`
86 // inside lower_array_assign). Without this
87 // route, the generic function-result path treated
88 // transfer's bit-cast bytes as a source
89 // descriptor pointer and segfaulted in
90 // `afs_assign_allocatable` on the first character
91 // byte (e.g. 0x6d for 'm' in stdlib_hashmaps).
92 // SIZE may be runtime-evaluated; the descriptor
93 // path inside try_lower_transfer_into_array
94 // lowers it via `lower_expr_ctx_tl`.
95 if local_is_array_like(&info)
96 && local_uses_array_descriptor(&info)
97 && info.derived_type.is_none()
98 && info.char_kind == CharKind::None
99 && expr_is_transfer_array_call_dynamic(value)
100 {
101 lower_array_assign(b, ctx, name, &info, value);
102 return;
103 }
104 if local_is_array_like(&info)
105 && (info.char_kind != CharKind::None
106 || descriptor_backed_runtime_char_array(&info))
107 {
108 lower_array_assign(b, ctx, name, &info, value);
109 return;
110 }
111 match &info.char_kind {
112 CharKind::Fixed(len) => {
113 // Fixed-length character assignment: copy with space padding.
114 // Get source pointer and length from the expression.
115 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
116 if let Some((dest_ptr, dest_len)) = local_char_ptr_and_len(b, &info)
117 {
118 b.call(
119 FuncRef::External("afs_assign_char_fixed".into()),
120 vec![dest_ptr, dest_len, src_ptr, src_len],
121 IrType::Void,
122 );
123 } else {
124 let dest_len = b.const_i64(*len);
125 b.call(
126 FuncRef::External("afs_assign_char_fixed".into()),
127 vec![info.addr, dest_len, src_ptr, src_len],
128 IrType::Void,
129 );
130 }
131 }
132 CharKind::FixedRuntime { len_addr } => {
133 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
134 let (dest_ptr, dest_len) =
135 fixed_runtime_char_ptr_and_len(b, &info, *len_addr);
136 b.call(
137 FuncRef::External("afs_assign_char_fixed".into()),
138 vec![dest_ptr, dest_len, src_ptr, src_len],
139 IrType::Void,
140 );
141 }
142 CharKind::Deferred => {
143 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
144 let desc = string_descriptor_addr(b, &info);
145 if info.is_pointer {
146 let (dest_ptr, dest_len) =
147 load_string_descriptor_substring_view(b, desc);
148 b.call(
149 FuncRef::External("afs_assign_char_fixed".into()),
150 vec![dest_ptr, dest_len, src_ptr, src_len],
151 IrType::Void,
152 );
153 } else {
154 // Deferred-length allocatables keep reallocation semantics.
155 b.call(
156 FuncRef::External("afs_assign_char_deferred".into()),
157 vec![desc, src_ptr, src_len],
158 IrType::Void,
159 );
160 }
161 }
162 CharKind::AssumedLen { len_addr } => {
163 // Assumed-length dummy assignment: use
164 // the hidden-length param as the
165 // destination length.
166 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
167 let outer = b.load(info.addr);
168 let dest_ptr = b.load_typed(
169 outer,
170 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
171 );
172 let dest_len = b.load(*len_addr);
173 b.call(
174 FuncRef::External("afs_assign_char_fixed".into()),
175 vec![dest_ptr, dest_len, src_ptr, src_len],
176 IrType::Void,
177 );
178 }
179 CharKind::None => {
180 if local_fixed_char_allocatable_scalar_len(&info).is_some() {
181 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
182 if let Some((dest_ptr, dest_len)) =
183 char_addr_and_runtime_len(b, target, &ctx.locals)
184 {
185 b.call(
186 FuncRef::External("afs_assign_char_fixed".into()),
187 vec![dest_ptr, dest_len, src_ptr, src_len],
188 IrType::Void,
189 );
190 }
191 } else if local_uses_array_descriptor(&info)
192 && local_declared_rank(&info) == 0
193 && info.derived_type.is_some()
194 && ctx
195 .st
196 .find_symbol_any_scope(&key)
197 .and_then(|sym| sym.type_info.as_ref())
198 .is_some_and(|ti| {
199 matches!(ti, crate::sema::symtab::TypeInfo::Derived(_))
200 })
201 {
202 let desc = array_descriptor_addr(b, &info);
203 let allocated = b.call(
204 FuncRef::External("afs_allocated".into()),
205 vec![desc],
206 IrType::Int(IntWidth::I32),
207 );
208 let zero32 = b.const_i32(0);
209 let needs_alloc = b.icmp(CmpOp::Eq, allocated, zero32);
210 let alloc_bb = b.create_block("scalar_derived_assign_alloc");
211 let copy_bb = b.create_block("scalar_derived_assign_copy");
212 let done_bb = b.create_block("scalar_derived_assign_done");
213 b.cond_branch(
214 needs_alloc,
215 alloc_bb,
216 vec![],
217 copy_bb,
218 vec![],
219 );
220
221 b.set_block(alloc_bb);
222 if let Some(ref tn) = info.derived_type {
223 if let Some(layout) = ctx.type_layouts.get(tn) {
224 let elem_size = b.const_i64(layout.size as i64);
225 let rank_val = b.const_i32(0);
226 let null_ptr = b.const_i64(0);
227 b.call(
228 FuncRef::External("afs_allocate_array".into()),
229 vec![desc, elem_size, rank_val, null_ptr, null_ptr],
230 IrType::Void,
231 );
232 if derived_layout_needs_runtime_initialization(
233 layout,
234 ctx.type_layouts,
235 ) {
236 let base_ptr = b.load_typed(
237 desc,
238 IrType::Ptr(Box::new(IrType::Int(
239 IntWidth::I8,
240 ))),
241 );
242 initialize_derived_storage(
243 b,
244 base_ptr,
245 layout,
246 ctx.type_layouts,
247 );
248 }
249 }
250 }
251 b.branch(copy_bb, vec![]);
252
253 b.set_block(copy_bb);
254 let val = super::expr::lower_expr_ctx_tl(b, ctx, value);
255 let dest = derived_storage_addr(b, &info);
256 if let Some(ref tn) = info.derived_type {
257 emit_derived_value_copy(b, ctx.type_layouts, tn, dest, val);
258 }
259 // Scalar descriptor-backed TYPE allocatables keep their
260 // dynamic type identity in the descriptor sidecar, so
261 // constructor/function-result assignment must restamp the
262 // concrete metadata after copying the value bytes.
263 if let Some(tag) = derived_type_tag_value(
264 b,
265 info.derived_type.as_deref(),
266 ctx.type_layouts,
267 ) {
268 store_array_desc_type_tag(b, desc, tag);
269 }
270 if let Some(lookup) = derived_type_tbp_lookup_value(
271 b,
272 info.derived_type.as_deref(),
273 ctx.type_layouts,
274 ) {
275 store_array_desc_tbp_lookup_ptr(b, desc, lookup);
276 }
277 b.branch(done_bb, vec![]);
278 b.set_block(done_bb);
279 } else if !info.dims.is_empty() || info.allocatable {
280 if try_lower_elemental_array_assign(b, ctx, name, &info, value)
281 {
282 return;
283 }
284 if let Expr::FunctionCall {
285 callee,
286 args: call_args,
287 } = &value.node
288 {
289 // F2018 §9.5.3.3 vector subscript: when the
290 // callee resolves to a local array (not a
291 // function), `x(col)` is gather, not a call.
292 // Route through lower_array_assign so the
293 // scalarization path picks it up.
294 let callee_is_local_array =
295 if let Expr::Name { name: cname } = &callee.node {
296 ctx.locals
297 .get(&cname.to_lowercase())
298 .is_some_and(local_is_array_like)
299 } else {
300 false
301 };
302 // F2018 §16.9: elemental intrinsic applied
303 // to an array actual returns an array. The
304 // direct `lower_expr_ctx_tl` path treats
305 // the call as scalar and emits e.g.
306 // `b.fsqrt(array_descriptor)` — wrong.
307 // Route to lower_array_assign so the
308 // scalarization path expands the elemental
309 // call element-wise.
310 let callee_is_elemental_array_intrinsic =
311 if let Expr::Name { name: cname } = &callee.node {
312 let lname = cname.to_lowercase();
313 let direct_elemental =
314 is_elemental_math_intrinsic(cname)
315 || ctx.elemental_funcs.contains(&lname)
316 || ctx
317 .st
318 .find_symbol_any_scope(&lname)
319 .is_some_and(|s| s.attrs.elemental);
320 let generic_specifics_elemental = !direct_elemental
321 && named_interface_specifics(ctx.st, &lname)
322 .map(|specs| {
323 !specs.is_empty()
324 && specs.iter().all(|s| {
325 ctx.elemental_funcs
326 .contains(&s.to_lowercase())
327 || ctx
328 .st
329 .find_symbol_any_scope(s)
330 .is_some_and(|sym| {
331 sym.attrs.elemental
332 })
333 })
334 })
335 .unwrap_or(false);
336 let is_elemental = direct_elemental
337 || generic_specifics_elemental;
338 is_elemental
339 && call_args.iter().any(|arg| {
340 matches!(
341 &arg.value,
342 crate::ast::expr::SectionSubscript::Element(e)
343 if expr_contains_array_refs(e, &ctx.locals)
344 )
345 })
346 } else {
347 false
348 };
349 // F2018 §16.9: transformational intrinsics that
350 // synthesize a fresh array result (RESHAPE, MATMUL,
351 // TRANSPOSE, SHAPE). Routing through
352 // lower_array_assign lets lower_array_expr_descriptor's
353 // dedicated arms allocate and fill the descriptor
354 // instead of the generic call path emitting
355 // unresolved `_reshape`/`_transpose` externals.
356 let callee_is_transformational_intrinsic =
357 if let Expr::Name { name: cname } = &callee.node {
358 let lname = cname.to_ascii_lowercase();
359 matches!(
360 lname.as_str(),
361 "reshape"
362 | "matmul"
363 | "transpose"
364 | "shape"
365 | "pack"
366 | "spread"
367 // cmplx(re, im, kind) over real
368 // arrays: lower_array_expr_descriptor
369 // has a dedicated arm (afs_array_cmplx).
370 // Without this entry the assignment
371 // routes through lower_expr_ctx_tl,
372 // which calls scalar lower_intrinsic
373 // with null-pointer probes and emits
374 // a single complex(4) const-zero
375 // buffer — wrong shape and wrong kind.
376 | "cmplx"
377 // merge(t, f, mask) over arrays:
378 // lower_array_merge_descriptor
379 // materializes a temp via per-element
380 // select. Without this entry the
381 // FunctionCall arm picks up scalar
382 // intrinsic merge, which emits
383 // `select` on pointer operands and
384 // hands a scalar f64 to the assignment
385 // memcpy as a "source descriptor" —
386 // SEGV on dereference. Surfaced in
387 // stdlib's iterative solvers
388 // (solve_cg/bicgstab/pcg).
389 | "merge"
390 )
391 || (
392 // sum(arr, dim) is rank-N-1: route to
393 // lower_array_assign so the sum-dim arm
394 // in lower_array_expr_descriptor fills
395 // the result descriptor. Plain sum(arr)
396 // is scalar; that arm returns None and
397 // assignment falls through to scalar
398 // broadcast.
399 lname == "sum"
400 && call_args.iter().enumerate().any(|(i, a)| {
401 let kw = a.keyword.as_deref().map(|s| s.to_lowercase());
402 matches!(kw.as_deref(), Some("dim"))
403 || (i == 1 && kw.is_none())
404 })
405 )
406 || (
407 // count(mask, dim) is rank-N-1 integer
408 // array: same routing as sum(arr, dim).
409 // Without this, the scalar logical-
410 // reduction path returns a single i32
411 // total and the array-assign treats it
412 // as a source descriptor, dereferencing
413 // a tiny address (e.g. 0x3) and aborting
414 // in afs_assign_allocatable. Surfaced
415 // in stdlib_stats var_mask_2_*.
416 lname == "count"
417 && call_args.iter().enumerate().any(|(i, a)| {
418 let kw = a.keyword.as_deref().map(|s| s.to_lowercase());
419 matches!(kw.as_deref(), Some("dim"))
420 || (i == 1 && kw.is_none())
421 })
422 )
423 } else {
424 false
425 };
426 // Scalar-returning intrinsic broadcast to a whole array:
427 // `x = ieee_value(1.0, NaN)`, `x = epsilon(1.0)`,
428 // `x = huge(1.0)`. The fall-through path lowers the
429 // call as a scalar then treats the result as a source
430 // descriptor pointer — IR verifier catches "load from
431 // non-pointer fN" on fixed-size dests, and SEGV inside
432 // afs_assign_allocatable on descriptor-backed dests
433 // (stdlib's pinv_s_operator on the linalg-error path
434 // `pinva = ieee_value(1.0_sp, ieee_quiet_nan)`).
435 // Restricted to a known set of always-scalar
436 // intrinsics — extending it broadly mis-routes user
437 // functions that legitimately return arrays.
438 let callee_is_scalar_broadcast_intrinsic =
439 local_is_array_like(&info)
440 && !callee_is_local_array
441 && !callee_is_elemental_array_intrinsic
442 && !callee_is_transformational_intrinsic
443 && {
444 if let Expr::Name { name: cname } = &callee.node {
445 let lk = cname.to_lowercase();
446 matches!(
447 lk.as_str(),
448 "ieee_value"
449 | "epsilon"
450 | "huge"
451 | "tiny"
452 | "radix"
453 | "digits"
454 | "precision"
455 | "range"
456 | "minexponent"
457 | "maxexponent"
458 )
459 } else {
460 false
461 }
462 };
463 if callee_is_local_array
464 || callee_is_elemental_array_intrinsic
465 || callee_is_transformational_intrinsic
466 || callee_is_scalar_broadcast_intrinsic
467 {
468 lower_array_assign(b, ctx, name, &info, value);
469 return;
470 }
471 if let Expr::Name { name: callee_name } = &callee.node {
472 let callee_key = callee_name.to_lowercase();
473 if ctx.alloc_return_funcs.contains(&callee_key) {
474 // sret call. When dest is descriptor-backed we
475 // can let the callee write straight in. When
476 // dest is a fixed-shape stack buffer (e.g.
477 // `real :: r(10)`) `array_descriptor_addr`
478 // returns the buffer itself, but the callee
479 // expects a 384-byte descriptor — handing it
480 // the buffer corrupts the caller frame the
481 // moment the callee touches dims/flags. Allocate
482 // a real descriptor temp, call into it, copy
483 // the bytes back, and deallocate the heap
484 // result.
485 if local_uses_array_descriptor(&info) {
486 let dest_desc = array_descriptor_addr(b, &info);
487 lower_alloc_return_call_into_desc(
488 b,
489 ctx,
490 dest_desc,
491 callee_name,
492 call_args,
493 );
494 } else {
495 let tmp_desc = b.alloca(IrType::Array(
496 Box::new(IrType::Int(IntWidth::I8)),
497 384,
498 ));
499 let zero32 = b.const_i32(0);
500 let sz384 = b.const_i64(384);
501 b.call(
502 FuncRef::External("memset".into()),
503 vec![tmp_desc, zero32, sz384],
504 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
505 );
506 lower_alloc_return_call_into_desc(
507 b,
508 ctx,
509 tmp_desc,
510 callee_name,
511 call_args,
512 );
513 let n = array_total_elems_value(b, &info);
514 let elem_bytes = b.const_i64(
515 ir_scalar_byte_size(&info.ty),
516 );
517 let byte_count = b.imul(n, elem_bytes);
518 let src_base = b.load_typed(
519 tmp_desc,
520 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
521 );
522 b.call(
523 FuncRef::External("memcpy".into()),
524 vec![info.addr, src_base, byte_count],
525 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
526 );
527 let stat = b.alloca(IrType::Int(IntWidth::I32));
528 b.store(zero32, stat);
529 b.call(
530 FuncRef::External("afs_deallocate_array".into()),
531 vec![tmp_desc, stat],
532 IrType::Void,
533 );
534 }
535 } else {
536 // Function returns a temp descriptor. Mirror
537 // the alloc_return path: when dest is a real
538 // descriptor, route through afs_assign_allocatable;
539 // when dest is a fixed-shape buffer, memcpy the
540 // result bytes in.
541 let src_desc = super::expr::lower_expr_ctx_tl(b, ctx, value);
542 if local_uses_array_descriptor(&info) {
543 let dest_desc = array_descriptor_addr(b, &info);
544 b.call(
545 FuncRef::External(
546 "afs_assign_allocatable".into(),
547 ),
548 vec![dest_desc, src_desc],
549 IrType::Void,
550 );
551 } else {
552 let n = array_total_elems_value(b, &info);
553 let elem_bytes = b.const_i64(
554 ir_scalar_byte_size(&info.ty),
555 );
556 let byte_count = b.imul(n, elem_bytes);
557 let src_base = b.load_typed(
558 src_desc,
559 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
560 );
561 b.call(
562 FuncRef::External("memcpy".into()),
563 vec![info.addr, src_base, byte_count],
564 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
565 );
566 }
567 let stat = b.alloca(IrType::Int(IntWidth::I32));
568 let zero32 = b.const_i32(0);
569 b.store(zero32, stat);
570 b.call(
571 FuncRef::External(
572 "afs_deallocate_array".into(),
573 ),
574 vec![src_desc, stat],
575 IrType::Void,
576 );
577 }
578 } else {
579 // Indirect callee: same dest split as above.
580 let src_desc = super::expr::lower_expr_ctx_tl(b, ctx, value);
581 if local_uses_array_descriptor(&info) {
582 let dest_desc = array_descriptor_addr(b, &info);
583 b.call(
584 FuncRef::External("afs_assign_allocatable".into()),
585 vec![dest_desc, src_desc],
586 IrType::Void,
587 );
588 } else {
589 let n = array_total_elems_value(b, &info);
590 let elem_bytes = b.const_i64(
591 ir_scalar_byte_size(&info.ty),
592 );
593 let byte_count = b.imul(n, elem_bytes);
594 let src_base = b.load_typed(
595 src_desc,
596 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
597 );
598 b.call(
599 FuncRef::External("memcpy".into()),
600 vec![info.addr, src_base, byte_count],
601 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
602 );
603 }
604 let stat = b.alloca(IrType::Int(IntWidth::I32));
605 let zero32 = b.const_i32(0);
606 b.store(zero32, stat);
607 b.call(
608 FuncRef::External("afs_deallocate_array".into()),
609 vec![src_desc, stat],
610 IrType::Void,
611 );
612 }
613 } else {
614 lower_array_assign(b, ctx, name, &info, value);
615 }
616 } else if info.derived_type.is_some() {
617 let val = super::expr::lower_expr_ctx_tl(b, ctx, value);
618 let dest = derived_storage_addr(b, &info);
619 if let Some(ref tn) = info.derived_type {
620 emit_derived_value_copy(b, ctx.type_layouts, tn, dest, val);
621 }
622 } else if info.is_pointer {
623 // Plain `=` on a POINTER dereferences:
624 // load the target address out of the
625 // pointer slot, then store through it.
626 let val = super::expr::lower_expr_ctx_tl(b, ctx, value);
627 let coerced = coerce_to_type(b, val, &info.ty);
628 let tgt = b.load_typed(
629 info.addr,
630 IrType::Ptr(Box::new(info.ty.clone())),
631 );
632 b.store(coerced, tgt);
633 } else if is_complex_ty(&info.ty) {
634 // Complex assignment: RHS may evaluate to a
635 // ptr<[fN x 2]> (already a complex buffer)
636 // or to a scalar int/real value (Fortran
637 // permits `c = i` / `c = r` with implicit
638 // promotion). For the scalar case we have
639 // to materialize a fresh [fN x 2] buffer
640 // first — without it we'd memcpy from the
641 // scalar's value treated as a pointer
642 // (LAPACK CGEEV's `work(1)=maxwrk` was
643 // SEGV-ing on this exact path).
644 let raw = super::expr::lower_expr_ctx_tl(b, ctx, value);
645 let src_ty = b.func().value_type(raw);
646 let src = if matches!(&src_ty, Some(t) if is_complex_ty(t)) {
647 raw
648 } else {
649 let fw = complex_float_width(&info.ty);
650 materialize_complex_operand(b, raw, fw)
651 };
652 let bytes = complex_byte_size(&info.ty);
653 let sz = b.const_i64(bytes);
654 if info.by_ref {
655 let dst = b.load(info.addr);
656 b.call(
657 FuncRef::External("memcpy".into()),
658 vec![dst, src, sz],
659 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
660 );
661 } else {
662 b.call(
663 FuncRef::External("memcpy".into()),
664 vec![info.addr, src, sz],
665 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
666 );
667 }
668 } else if info.is_class
669 && info.dims.is_empty()
670 && ctx.st
671 .find_symbol_any_scope(&key)
672 .map(|s| s.attrs.allocatable)
673 .unwrap_or(false)
674 && matches!(value.node, Expr::ComponentAccess { .. })
675 {
676 // Scalar polymorphic allocatable assign:
677 // `class(*), allocatable :: out; out = h%poly_field`
678 // — copy the 384-byte descriptor verbatim
679 // when the RHS is a polymorphic component
680 // access. Avoids the scalar-store
681 // fallback that would truncate the source
682 // descriptor's payload to a single i32.
683 let dst = array_descriptor_addr(b, &info);
684 let src_desc_opt: Option<ValueId> =
685 resolve_component_field_access(
686 b,
687 &ctx.locals,
688 value,
689 ctx.st,
690 ctx.type_layouts,
691 )
692 .map(|(p, _)| p);
693 if let Some(src) = src_desc_opt {
694 let sz = b.const_i64(384);
695 b.call(
696 FuncRef::External("memcpy".into()),
697 vec![dst, src, sz],
698 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
699 );
700 } else {
701 // Fall through if we can't resolve.
702 let val = super::expr::lower_expr_ctx_tl(b, ctx, value);
703 let coerced = coerce_to_type(b, val, &info.ty);
704 let ptr = b.load(info.addr);
705 b.store(coerced, ptr);
706 }
707 } else if info.by_ref {
708 let val = super::expr::lower_expr_ctx_tl(b, ctx, value);
709 let coerced = coerce_to_type(b, val, &info.ty);
710 let ptr = b.load(info.addr);
711 b.store(coerced, ptr);
712 } else {
713 let val = super::expr::lower_expr_ctx_tl(b, ctx, value);
714 let coerced = coerce_to_type(b, val, &info.ty);
715 b.store(coerced, info.addr);
716 }
717 }
718 }
719 }
720 }
721 Expr::FunctionCall { callee, args } => {
722 if let Expr::Name { name } = &callee.node {
723 let akey = name.to_lowercase();
724 if let Some(info) = ctx.locals.get(&akey).cloned() {
725 let is_scalar_fixed_alloc_char =
726 local_fixed_char_allocatable_scalar_len(&info).is_some();
727 // Vector subscript: a([i1, i2, ...]) = scalar
728 // Expand to scalar assignments a(i1) = scalar, etc.
729 if local_is_array_like(&info)
730 && !is_scalar_fixed_alloc_char
731 && args.len() == 1
732 {
733 if let crate::ast::expr::SectionSubscript::Element(idx_expr) =
734 &args[0].value
735 {
736 if let Expr::ArrayConstructor { values: idx_values, .. } =
737 &idx_expr.node
738 {
739 for v in idx_values {
740 let crate::ast::expr::AcValue::Expr(scalar_idx) = v
741 else {
742 continue;
743 };
744 let scalar_target = crate::ast::Spanned::new(
745 Expr::FunctionCall {
746 callee: callee.clone(),
747 args: vec![crate::ast::expr::Argument {
748 keyword: None,
749 value: crate::ast::expr::SectionSubscript::Element(
750 scalar_idx.clone(),
751 ),
752 }],
753 },
754 target.span,
755 );
756 let scalar_stmt = crate::ast::Spanned::new(
757 Stmt::Assignment {
758 target: scalar_target,
759 value: value.clone(),
760 },
761 stmt.span,
762 );
763 lower_stmt(b, ctx, &scalar_stmt);
764 }
765 return;
766 }
767 }
768 }
769 // Vector subscript with array-returning expression
770 // index: `a(falseloc(mask)) = scalar` etc. The
771 // subscript is a single Element whose value is an
772 // expression whose result is an integer array.
773 // Materialize that array as a descriptor, then
774 // loop over its elements as scalar indices.
775 if local_is_array_like(&info)
776 && !is_scalar_fixed_alloc_char
777 && args.len() == 1
778 && info.derived_type.is_none()
779 && info.char_kind == CharKind::None
780 {
781 if let crate::ast::expr::SectionSubscript::Element(idx_expr) =
782 &args[0].value
783 {
784 if !matches!(idx_expr.node, Expr::ArrayConstructor { .. })
785 && expr_returns_array(
786 idx_expr,
787 &ctx.locals,
788 ctx.st,
789 )
790 && lower_dynamic_vector_subscript_assign(
791 b,
792 ctx,
793 &info,
794 idx_expr,
795 value,
796 )
797 {
798 return;
799 }
800 }
801 }
802 if local_is_array_like(&info)
803 && !is_scalar_fixed_alloc_char
804 && is_full_rank1_whole_slice(args)
805 {
806 let whole_view = descriptor_backed_whole_array_view(&info);
807 lower_array_assign(b, ctx, &akey, &whole_view, value);
808 return;
809 }
810 if local_is_array_like(&info)
811 && !is_scalar_fixed_alloc_char
812 && lower_1d_section_assign(b, ctx, &info, args, value)
813 {
814 return;
815 }
816 // Substring LHS: s(lo:hi) = rhs where s is a
817 // scalar character. Compute the target substring
818 // pointer+length, get the RHS as (ptr, len), and
819 // call afs_assign_char_fixed to do the bounded
820 // copy with space-padding.
821 if (info.char_kind != CharKind::None || is_scalar_fixed_alloc_char)
822 && info.dims.is_empty()
823 && args.len() == 1
824 && matches!(
825 args[0].value,
826 crate::ast::expr::SectionSubscript::Range { .. }
827 )
828 {
829 if let crate::ast::expr::SectionSubscript::Range {
830 ref start,
831 ref end,
832 ..
833 } = args[0].value
834 {
835 if let Some((base_ptr, base_len)) =
836 char_addr_and_substring_bound_len(
837 b,
838 callee,
839 &ctx.locals,
840 ctx.st,
841 Some(ctx.type_layouts),
842 )
843 {
844 let (dest_ptr, dest_len) = lower_substring_full(
845 b,
846 &ctx.locals,
847 ctx.st,
848 base_ptr,
849 base_len,
850 start.as_ref(),
851 end.as_ref(),
852 Some(ctx.type_layouts),
853 Some(ctx.internal_funcs),
854 Some(ctx.contained_host_refs),
855 Some(ctx.descriptor_params),
856 );
857 let (src_ptr, src_len) =
858 lower_string_expr_ctx(b, ctx, value);
859 b.call(
860 FuncRef::External("afs_assign_char_fixed".into()),
861 vec![dest_ptr, dest_len, src_ptr, src_len],
862 IrType::Void,
863 );
864 }
865 }
866 } else if !is_scalar_fixed_alloc_char && local_is_array_like(&info) {
867 // Array element assignment: a(i) = val
868 if info.char_kind != CharKind::None
869 || descriptor_backed_runtime_char_array(&info)
870 {
871 lower_char_array_store(
872 b,
873 &ctx.locals,
874 &info,
875 args,
876 value,
877 ctx.st,
878 Some(ctx.type_layouts),
879 );
880 } else {
881 let arr_val = super::expr::lower_expr_ctx(b, ctx, value);
882 if matches!(
883 b.func().value_type(arr_val),
884 Some(IrType::Array(inner, 4096))
885 if matches!(inner.as_ref(), IrType::Int(IntWidth::I8))
886 ) && matches!(info.ty, IrType::Ptr(ref inner) if matches!(inner.as_ref(), IrType::Int(IntWidth::I8)))
887 {
888 eprintln!(
889 "DEBUG suspicious array store target={} dims={:?} alloc={} by_ref={} descriptor={} ty={:?}",
890 name,
891 info.dims,
892 info.allocatable,
893 info.by_ref,
894 info.descriptor_arg,
895 info.ty
896 );
897 }
898 lower_array_store(
899 b,
900 &ctx.locals,
901 &info,
902 args,
903 arr_val,
904 ctx.st,
905 Some(ctx.type_layouts),
906 );
907 }
908 }
909 }
910 } else if let Expr::FunctionCall {
911 callee: inner_callee,
912 args: inner_args,
913 } = &callee.node
914 {
915 if args.len() == 1
916 && matches!(
917 args[0].value,
918 crate::ast::expr::SectionSubscript::Range { .. }
919 )
920 {
921 if let crate::ast::expr::SectionSubscript::Range {
922 ref start,
923 ref end,
924 ..
925 } = args[0].value
926 {
927 if let Expr::Name { name } = &inner_callee.node {
928 let akey = name.to_lowercase();
929 if let Some(info) = ctx.locals.get(&akey).cloned() {
930 if (info.char_kind != CharKind::None
931 || descriptor_backed_runtime_char_array(&info))
932 && local_is_array_like(&info)
933 {
934 if let Some((elem_ptr, elem_len)) =
935 char_array_element_ptr_and_len(
936 b,
937 &ctx.locals,
938 &info,
939 inner_args,
940 ctx.st,
941 Some(ctx.type_layouts),
942 )
943 {
944 let (dest_ptr, dest_len) = lower_substring_full(
945 b,
946 &ctx.locals,
947 ctx.st,
948 elem_ptr,
949 elem_len,
950 start.as_ref(),
951 end.as_ref(),
952 Some(ctx.type_layouts),
953 Some(ctx.internal_funcs),
954 Some(ctx.contained_host_refs),
955 Some(ctx.descriptor_params),
956 );
957 let (src_ptr, src_len) =
958 lower_string_expr_ctx(b, ctx, value);
959 b.call(
960 FuncRef::External(
961 "afs_assign_char_fixed".into(),
962 ),
963 vec![dest_ptr, dest_len, src_ptr, src_len],
964 IrType::Void,
965 );
966 }
967 }
968 }
969 } else if let Expr::ComponentAccess { .. } = &inner_callee.node {
970 if let Some((elem_ptr, elem_len)) =
971 fixed_component_char_array_elem_ptr_and_len(
972 b,
973 &ctx.locals,
974 inner_callee,
975 inner_args,
976 ctx.st,
977 ctx.type_layouts,
978 )
979 {
980 let (dest_ptr, dest_len) = lower_substring_full(
981 b,
982 &ctx.locals,
983 ctx.st,
984 elem_ptr,
985 elem_len,
986 start.as_ref(),
987 end.as_ref(),
988 Some(ctx.type_layouts),
989 Some(ctx.internal_funcs),
990 Some(ctx.contained_host_refs),
991 Some(ctx.descriptor_params),
992 );
993 let (src_ptr, src_len) =
994 lower_string_expr_ctx(b, ctx, value);
995 b.call(
996 FuncRef::External("afs_assign_char_fixed".into()),
997 vec![dest_ptr, dest_len, src_ptr, src_len],
998 IrType::Void,
999 );
1000 return;
1001 }
1002 if let Some(info) = component_intrinsic_local_info(
1003 b,
1004 &ctx.locals,
1005 inner_callee,
1006 ctx.st,
1007 ctx.type_layouts,
1008 ) {
1009 if (info.char_kind != CharKind::None
1010 || descriptor_backed_runtime_char_array(&info))
1011 && local_is_array_like(&info)
1012 {
1013 if let Some((elem_ptr, elem_len)) =
1014 char_array_element_ptr_and_len(
1015 b,
1016 &ctx.locals,
1017 &info,
1018 inner_args,
1019 ctx.st,
1020 Some(ctx.type_layouts),
1021 )
1022 {
1023 let (dest_ptr, dest_len) = lower_substring_full(
1024 b,
1025 &ctx.locals,
1026 ctx.st,
1027 elem_ptr,
1028 elem_len,
1029 start.as_ref(),
1030 end.as_ref(),
1031 Some(ctx.type_layouts),
1032 Some(ctx.internal_funcs),
1033 Some(ctx.contained_host_refs),
1034 Some(ctx.descriptor_params),
1035 );
1036 let (src_ptr, src_len) =
1037 lower_string_expr_ctx(b, ctx, value);
1038 b.call(
1039 FuncRef::External(
1040 "afs_assign_char_fixed".into(),
1041 ),
1042 vec![dest_ptr, dest_len, src_ptr, src_len],
1043 IrType::Void,
1044 );
1045 }
1046 }
1047 }
1048 }
1049 }
1050 }
1051 } else if let Expr::ComponentAccess { .. } = &callee.node {
1052 if let Some((dest_ptr, dest_len)) =
1053 fixed_component_char_array_elem_ptr_and_len(
1054 b,
1055 &ctx.locals,
1056 callee,
1057 args,
1058 ctx.st,
1059 ctx.type_layouts,
1060 )
1061 {
1062 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
1063 b.call(
1064 FuncRef::External("afs_assign_char_fixed".into()),
1065 vec![dest_ptr, dest_len, src_ptr, src_len],
1066 IrType::Void,
1067 );
1068 return;
1069 }
1070 if let Some(info) = component_intrinsic_local_info(
1071 b,
1072 &ctx.locals,
1073 callee,
1074 ctx.st,
1075 ctx.type_layouts,
1076 ) {
1077 if local_is_array_like(&info) && is_full_rank1_whole_slice(args) {
1078 let whole_view = descriptor_backed_whole_array_view(&info);
1079 lower_array_assign(b, ctx, "", &whole_view, value);
1080 return;
1081 }
1082 if local_is_array_like(&info)
1083 && lower_1d_section_assign(b, ctx, &info, args, value)
1084 {
1085 return;
1086 }
1087 if local_is_array_like(&info) {
1088 if info.char_kind != CharKind::None
1089 || descriptor_backed_runtime_char_array(&info)
1090 {
1091 lower_char_array_store(
1092 b,
1093 &ctx.locals,
1094 &info,
1095 args,
1096 value,
1097 ctx.st,
1098 Some(ctx.type_layouts),
1099 );
1100 } else {
1101 let arr_val = super::expr::lower_expr_ctx(b, ctx, value);
1102 lower_array_store(
1103 b,
1104 &ctx.locals,
1105 &info,
1106 args,
1107 arr_val,
1108 ctx.st,
1109 Some(ctx.type_layouts),
1110 );
1111 }
1112 return;
1113 }
1114 }
1115 if args.len() == 1
1116 && matches!(
1117 args[0].value,
1118 crate::ast::expr::SectionSubscript::Range { .. }
1119 )
1120 {
1121 if let crate::ast::expr::SectionSubscript::Range {
1122 ref start,
1123 ref end,
1124 ..
1125 } = args[0].value
1126 {
1127 if let Some((field_ptr, field)) = resolve_component_field_access(
1128 b,
1129 &ctx.locals,
1130 callee,
1131 ctx.st,
1132 ctx.type_layouts,
1133 ) {
1134 match field_char_kind(&field) {
1135 CharKind::Fixed(flen) => {
1136 let (base_ptr, base_len) =
1137 (field_ptr, b.const_i64(flen));
1138 let (dest_ptr, dest_len) = lower_substring_full(
1139 b,
1140 &ctx.locals,
1141 ctx.st,
1142 base_ptr,
1143 base_len,
1144 start.as_ref(),
1145 end.as_ref(),
1146 Some(ctx.type_layouts),
1147 Some(ctx.internal_funcs),
1148 Some(ctx.contained_host_refs),
1149 Some(ctx.descriptor_params),
1150 );
1151 let (src_ptr, src_len) =
1152 lower_string_expr_ctx(b, ctx, value);
1153 b.call(
1154 FuncRef::External("afs_assign_char_fixed".into()),
1155 vec![dest_ptr, dest_len, src_ptr, src_len],
1156 IrType::Void,
1157 );
1158 }
1159 CharKind::Deferred => {
1160 let (base_ptr, base_len) =
1161 load_string_descriptor_substring_view(b, field_ptr);
1162 let (dest_ptr, dest_len) = lower_substring_full(
1163 b,
1164 &ctx.locals,
1165 ctx.st,
1166 base_ptr,
1167 base_len,
1168 start.as_ref(),
1169 end.as_ref(),
1170 Some(ctx.type_layouts),
1171 Some(ctx.internal_funcs),
1172 Some(ctx.contained_host_refs),
1173 Some(ctx.descriptor_params),
1174 );
1175 let (src_ptr, src_len) =
1176 lower_string_expr_ctx(b, ctx, value);
1177 b.call(
1178 FuncRef::External("afs_assign_char_fixed".into()),
1179 vec![dest_ptr, dest_len, src_ptr, src_len],
1180 IrType::Void,
1181 );
1182 }
1183 _ => {}
1184 }
1185 }
1186 }
1187 }
1188 }
1189 }
1190 Expr::ComponentAccess { base, component } => {
1191 // x%field = val (supports chained: x%a%b = val).
1192 if let Some(info) = component_intrinsic_local_info(
1193 b,
1194 &ctx.locals,
1195 target,
1196 ctx.st,
1197 ctx.type_layouts,
1198 ) {
1199 if local_is_array_like(&info) {
1200 lower_array_assign(b, ctx, "", &info, value);
1201 return;
1202 }
1203 }
1204 if let Some((base_addr, type_name)) =
1205 resolve_component_base(b, &ctx.locals, base, ctx.st, ctx.type_layouts)
1206 {
1207 if let Some(layout) = ctx.type_layouts.get(&type_name) {
1208 if let Some(field) = layout.field(component) {
1209 let offset = b.const_i64(field.offset as i64);
1210 let field_ptr =
1211 b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
1212
1213 // Character field: copy string data with space padding.
1214 if let CharKind::Fixed(flen) = field_char_kind(field) {
1215 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
1216 let dest_len = b.const_i64(flen);
1217 b.call(
1218 FuncRef::External("afs_assign_char_fixed".into()),
1219 vec![field_ptr, dest_len, src_ptr, src_len],
1220 IrType::Void,
1221 );
1222 } else if is_deferred_char_component_field(field) {
1223 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
1224 if field.pointer {
1225 let (dest_ptr, dest_len) =
1226 load_string_descriptor_substring_view(b, field_ptr);
1227 b.call(
1228 FuncRef::External("afs_assign_char_fixed".into()),
1229 vec![dest_ptr, dest_len, src_ptr, src_len],
1230 IrType::Void,
1231 );
1232 } else {
1233 b.call(
1234 FuncRef::External("afs_assign_char_deferred".into()),
1235 vec![field_ptr, src_ptr, src_len],
1236 IrType::Void,
1237 );
1238 }
1239 } else if matches!(
1240 field.type_info,
1241 crate::sema::symtab::TypeInfo::Derived(_)
1242 ) && field.allocatable
1243 && field.size == 384
1244 && field.dims.is_empty()
1245 {
1246 let Some(type_name) = field_derived_type_name(field) else {
1247 return;
1248 };
1249 let desc = field_ptr;
1250 let allocated = b.call(
1251 FuncRef::External("afs_allocated".into()),
1252 vec![desc],
1253 IrType::Int(IntWidth::I32),
1254 );
1255 let zero32 = b.const_i32(0);
1256 let needs_alloc = b.icmp(CmpOp::Eq, allocated, zero32);
1257 let alloc_bb =
1258 b.create_block("component_scalar_derived_assign_alloc");
1259 let copy_bb =
1260 b.create_block("component_scalar_derived_assign_copy");
1261 let done_bb =
1262 b.create_block("component_scalar_derived_assign_done");
1263 b.cond_branch(needs_alloc, alloc_bb, vec![], copy_bb, vec![]);
1264
1265 b.set_block(alloc_bb);
1266 if let Some(layout) = ctx.type_layouts.get(&type_name) {
1267 let elem_size = b.const_i64(layout.size as i64);
1268 let rank_val = b.const_i32(0);
1269 let null_ptr = b.const_i64(0);
1270 b.call(
1271 FuncRef::External("afs_allocate_array".into()),
1272 vec![desc, elem_size, rank_val, null_ptr, null_ptr],
1273 IrType::Void,
1274 );
1275 if derived_layout_needs_runtime_initialization(
1276 layout,
1277 ctx.type_layouts,
1278 ) {
1279 let base_ptr = b.load_typed(
1280 desc,
1281 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
1282 );
1283 initialize_derived_storage(
1284 b,
1285 base_ptr,
1286 layout,
1287 ctx.type_layouts,
1288 );
1289 }
1290 }
1291 b.branch(copy_bb, vec![]);
1292
1293 b.set_block(copy_bb);
1294 let src_ptr = super::expr::lower_expr_ctx_tl(b, ctx, value);
1295 let dest_ptr = b.load_typed(
1296 desc,
1297 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
1298 );
1299 emit_derived_value_copy(
1300 b,
1301 ctx.type_layouts,
1302 &type_name,
1303 dest_ptr,
1304 src_ptr,
1305 );
1306 if let Some(tag) = derived_type_tag_value(
1307 b,
1308 Some(type_name.as_str()),
1309 ctx.type_layouts,
1310 ) {
1311 store_array_desc_type_tag(b, desc, tag);
1312 }
1313 if let Some(lookup) = derived_type_tbp_lookup_value(
1314 b,
1315 Some(type_name.as_str()),
1316 ctx.type_layouts,
1317 ) {
1318 store_array_desc_tbp_lookup_ptr(b, desc, lookup);
1319 }
1320 b.branch(done_bb, vec![]);
1321 b.set_block(done_bb);
1322 } else if matches!(
1323 field.type_info,
1324 crate::sema::symtab::TypeInfo::Derived(_)
1325 ) && !is_opaque_c_handle_type(&field.type_info)
1326 && !field.pointer
1327 && !field.allocatable
1328 && field.dims.is_empty()
1329 {
1330 let src_ptr = super::expr::lower_expr_ctx_tl(b, ctx, value);
1331 if let Some(nested_name) = field_derived_type_name(field) {
1332 emit_derived_value_copy(
1333 b,
1334 ctx.type_layouts,
1335 &nested_name,
1336 field_ptr,
1337 src_ptr,
1338 );
1339 }
1340 } else if is_complex_ty(&type_info_to_ir_type(&field.type_info))
1341 && !field.pointer
1342 && !field.allocatable
1343 && field.dims.is_empty()
1344 {
1345 let raw = super::expr::lower_expr_ctx_tl(b, ctx, value);
1346 let field_ir_ty = type_info_to_ir_type(&field.type_info);
1347 let src_ty = b.func().value_type(raw);
1348 let src = if matches!(&src_ty, Some(t) if is_complex_ty(t)) {
1349 raw
1350 } else {
1351 let fw = complex_float_width(&field_ir_ty);
1352 materialize_complex_operand(b, raw, fw)
1353 };
1354 let bytes = complex_byte_size(&field_ir_ty);
1355 let sz = b.const_i64(bytes);
1356 b.call(
1357 FuncRef::External("memcpy".into()),
1358 vec![field_ptr, src, sz],
1359 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
1360 );
1361 } else if matches!(
1362 field.type_info,
1363 crate::sema::symtab::TypeInfo::ClassStar
1364 | crate::sema::symtab::TypeInfo::TypeStar
1365 ) && field.allocatable
1366 && field.dims.is_empty()
1367 {
1368 // Polymorphic component target:
1369 // `derived%poly_field = expr` where
1370 // `poly_field` is class(*), allocatable.
1371 // Memcpy the source descriptor. RHS is
1372 // expected to be a polymorphic local or
1373 // another class(*) component access.
1374 let src_desc_opt: Option<ValueId> =
1375 match &value.node {
1376 Expr::ComponentAccess { .. } => {
1377 resolve_component_field_access(
1378 b,
1379 &ctx.locals,
1380 value,
1381 ctx.st,
1382 ctx.type_layouts,
1383 )
1384 .map(|(p, _)| p)
1385 }
1386 Expr::Name { name } => ctx
1387 .locals
1388 .get(&name.to_lowercase())
1389 .filter(|info| {
1390 info.is_class && info.dims.is_empty()
1391 })
1392 .map(|info| {
1393 array_descriptor_addr(b, info)
1394 }),
1395 _ => None,
1396 };
1397 if let Some(src) = src_desc_opt {
1398 let sz = b.const_i64(384);
1399 b.call(
1400 FuncRef::External("memcpy".into()),
1401 vec![field_ptr, src, sz],
1402 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
1403 );
1404 } else {
1405 // Last-resort: skip the assignment
1406 // rather than emit invalid IR.
1407 // Better than truncating to i32.
1408 }
1409 } else {
1410 let val = super::expr::lower_expr_ctx_tl(b, ctx, value);
1411 let coerced = coerce_to_type(
1412 b,
1413 val,
1414 &type_info_to_ir_type(&field.type_info),
1415 );
1416 b.store(coerced, field_ptr);
1417 }
1418 }
1419 }
1420 }
1421 }
1422 _ => {}
1423 }
1424 }
1425
        Stmt::Print { items, .. } => {
            // PRINT * → unit 6 (stdout).
            // This arm binds only `items`; any format control carried by the
            // variant is discarded via `..` here, so the item list is lowered
            // list-directed through the shared WRITE-items helper.
            let unit = b.const_i32(6);
            lower_write_items(b, ctx, items, unit);
        }
1431
        Stmt::Write { controls, items } => {
            // WRITE(controls) items — lowered to runtime calls. Four shapes are
            // handled below: internal (character-buffer) list-directed, internal
            // formatted, external list-directed, and external formatted.
            // Check for format specifier (second positional control).
            // * means list-directed; a string literal means formatted.
            let fmt_control = controls
                .iter()
                .skip(1)
                .find(|c| c.keyword.is_none()) // positional, not keyword=
                .or_else(|| {
                    controls.iter().find(|c| {
                        c.keyword
                            .as_deref()
                            .map(|k| k.eq_ignore_ascii_case("fmt"))
                            .unwrap_or(false)
                    })
                });

            // Absent format control, or an explicit `*`, means list-directed.
            let is_list_directed = match fmt_control {
                None => true,
                Some(ctrl) => matches!(&ctrl.value.node, Expr::Name { name } if name == "*"),
            };

            // Check for ADVANCE='NO'.
            //
            // `advance_static` is the compile-time bool used by the
            // existing per-item helpers. When `advance=` is a string
            // literal we honor it directly. When it's a non-literal
            // expression (e.g. `advance=optval(adv, 'YES')` from
            // stdlib's write_bitset_unit_64), we cannot decide at
            // compile time, so we keep `advance_static = true`
            // (preserve item lowering's optional newline emit) and
            // separately compute `advance_runtime`, an i32 value the
            // runtime helpers consult to suppress the newline when the
            // expression evaluates to "no" at runtime.
            let advance_ctrl = controls.iter().find(|c| {
                c.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("advance"))
                    .unwrap_or(false)
            });
            let advance_static = advance_ctrl
                .map(|c| {
                    if let Expr::StringLiteral { value, .. } = &c.value.node {
                        !value.eq_ignore_ascii_case("no")
                    } else {
                        true
                    }
                })
                .unwrap_or(true);
            // Runtime advance= evaluation: only materialized when the control is
            // a non-literal expression; `afs_advance_eval` returns the i32 flag.
            let advance_runtime: Option<ValueId> = advance_ctrl.and_then(|c| {
                if matches!(&c.value.node, Expr::StringLiteral { .. }) {
                    None
                } else {
                    let (p, l) = lower_string_expr_with_layouts(
                        b,
                        &ctx.locals,
                        &c.value,
                        ctx.st,
                        Some(ctx.type_layouts),
                    );
                    Some(b.call(
                        FuncRef::External("afs_advance_eval".into()),
                        vec![p, l],
                        IrType::Int(IntWidth::I32),
                    ))
                }
            });
            let advance = advance_static;

            // Optional iostat=ios / iomsg=msg specifiers. The push-based
            // formatted runtime ignored these on previous builds, so a
            // caller's `if (ios /= 0) error_stop` always tripped on the
            // pre-call sentinel — stdlib's savetxt loops exactly that
            // pattern around `write(unit, fmt_, iostat=ios) d(i, :)` and
            // unconditionally error_stops every example_savetxt /
            // example_loadtxt without proper iostat plumbing.
            let null_i64 = b.const_i64(0);
            let null_i8_ptr = b.int_to_ptr(null_i64, IrType::Int(IntWidth::I8));
            let zero_i64 = b.const_i64(0);
            let iostat_ctrl = controls.iter().find(|c| {
                c.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("iostat"))
                    .unwrap_or(false)
            });
            let iomsg_ctrl = controls.iter().find(|c| {
                c.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("iomsg"))
                    .unwrap_or(false)
            });
            // Absent iostat=/iomsg= become null pointers (and zero length) so the
            // runtime can distinguish "not requested" from a real destination.
            let iostat_ptr = iostat_ctrl
                .map(|c| lower_arg_by_ref_ctx(b, ctx, &c.value))
                .unwrap_or(null_i8_ptr);
            let (iomsg_ptr, iomsg_len) = if let Some(c) = iomsg_ctrl {
                lower_string_expr_with_layouts(
                    b,
                    &ctx.locals,
                    &c.value,
                    ctx.st,
                    Some(ctx.type_layouts),
                )
            } else {
                (null_i8_ptr, zero_i64)
            };

            // Internal WRITE: when the first control designates a character
            // buffer rather than a unit number, write into that buffer and
            // return early — the external-unit path below must not run.
            if let Some(ctrl) = controls.first() {
                if let Some((buf_ptr, buf_len)) = internal_io_buffer(b, ctx, ctrl) {
                    if is_list_directed {
                        lower_internal_write_items(b, ctx, items, buf_ptr, buf_len);
                    } else {
                        // unwrap is safe: !is_list_directed implies
                        // fmt_control is Some (see the match above).
                        let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
                            b,
                            &ctx.locals,
                            &fmt_control.unwrap().value,
                            ctx.st,
                            Some(ctx.type_layouts),
                        );
                        b.call(
                            FuncRef::External("afs_fmt_begin_internal_ex".into()),
                            vec![
                                buf_ptr, buf_len, fmt_ptr, fmt_len, iostat_ptr, iomsg_ptr,
                                iomsg_len,
                            ],
                            IrType::Void,
                        );
                        for item in items {
                            lower_fmt_push(b, ctx, item);
                        }
                        let adv = advance_runtime
                            .unwrap_or_else(|| b.const_i32(if advance { 1 } else { 0 }));
                        b.call(
                            FuncRef::External("afs_fmt_end".into()),
                            vec![adv],
                            IrType::Void,
                        );
                    }
                    return;
                }
            }

            // Extract unit (first control). * means stdout (unit 6).
            let unit = if let Some(ctrl) = controls.first() {
                if matches!(&ctrl.value.node, Expr::Name { name } if name == "*") {
                    b.const_i32(6)
                } else {
                    super::expr::lower_expr_ctx(b, ctx, &ctrl.value)
                }
            } else {
                b.const_i32(6)
            };

            if is_list_directed {
                // Wrap the per-item writes in begin/end so the runtime
                // can (a) emit sequential-unformatted record markers,
                // and (b) thread iostat=/iomsg= through. Pass
                // `advance=false` to suppress the per-item helper's
                // unconditional newline emit; we emit our own
                // `afs_write_newline_if` afterwards using the i32 that
                // honors a runtime-evaluated advance= expression
                // (e.g. `advance=optval(adv,'YES')`).
                b.call(
                    FuncRef::External("afs_list_write_begin".into()),
                    vec![unit, iostat_ptr, iomsg_ptr, iomsg_len],
                    IrType::Void,
                );
                lower_write_items_adv(b, ctx, items, unit, false);
                let adv = advance_runtime
                    .unwrap_or_else(|| b.const_i32(if advance { 1 } else { 0 }));
                b.call(
                    FuncRef::External("afs_write_newline_if".into()),
                    vec![unit, adv],
                    IrType::Void,
                );
                b.call(
                    FuncRef::External("afs_list_write_end".into()),
                    vec![unit, adv, iostat_ptr, iomsg_ptr, iomsg_len],
                    IrType::Void,
                );
            } else {
                // Formatted I/O: use push-based API.
                // unwrap is safe: !is_list_directed implies fmt_control is Some.
                let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
                    b,
                    &ctx.locals,
                    &fmt_control.unwrap().value,
                    ctx.st,
                    Some(ctx.type_layouts),
                );
                b.call(
                    FuncRef::External("afs_fmt_begin_ex".into()),
                    vec![unit, fmt_ptr, fmt_len, iostat_ptr, iomsg_ptr, iomsg_len],
                    IrType::Void,
                );

                for item in items {
                    lower_fmt_push(b, ctx, item);
                }

                let adv = advance_runtime
                    .unwrap_or_else(|| b.const_i32(if advance { 1 } else { 0 }));
                b.call(
                    FuncRef::External("afs_fmt_end".into()),
                    vec![adv],
                    IrType::Void,
                );
            }
        }
1638
        Stmt::Call { callee, args } => {
            // CALL statement. Three callee shapes are handled:
            //   1. `call obj%method(args)` — type-bound procedure (possibly
            //      polymorphic dispatch), or a procedure-pointer component;
            //   2. `call name(args)` — elemental/intrinsic/generic/plain
            //      subroutine, or a procedure-pointer variable.
            // Argument marshalling consults per-parameter ABI masks (by-value,
            // descriptor, bind(C) char, pointer, string descriptor, optional),
            // then appends hidden character lengths and host-closure addresses.
            // Handle type-bound procedure calls: call obj%method(args)
            if let Expr::ComponentAccess { base, component } = &callee.node {
                // Polymorphic (class) receiver: runtime dispatch through the
                // object's TBP lookup. Returns Some when it emitted the call.
                if emit_polymorphic_component_bound_dispatch(
                    b,
                    &ctx.locals,
                    ctx.st,
                    Some(ctx.type_layouts),
                    Some(ctx.internal_funcs),
                    Some(ctx.contained_host_refs),
                    Some(ctx.optional_params),
                    Some(ctx.descriptor_params),
                    callee.span,
                    base,
                    component,
                    args,
                    Some(IrType::Void),
                )
                .is_some()
                {
                    return;
                }
                // Statically-typed receiver: resolve the bound procedure from
                // the declared type's layout and emit a direct call.
                if let Some((obj_addr, type_name)) = resolve_component_base_for_method(
                    b,
                    &ctx.locals,
                    base,
                    ctx.st,
                    ctx.type_layouts,
                ) {
                    if let Some(layout) = ctx.type_layouts.get(&type_name) {
                        let candidates = layout.bound_proc_candidates(component);
                        if !candidates.is_empty() {
                            let pass_desc_addr = lower_arg_descriptor(
                                b,
                                &ctx.locals,
                                base,
                                ctx.st,
                                Some(ctx.type_layouts),
                                false,
                            );
                            // Overload resolution by actual-arg types; falls
                            // back to the sole binding, else a hard diagnostic.
                            let bp = resolved_bound_proc_for_call(
                                b,
                                &ctx.locals,
                                ctx.st,
                                layout,
                                component,
                                args,
                                Some(ctx.type_layouts),
                                Some(ctx.internal_funcs),
                                Some(ctx.contained_host_refs),
                                Some(ctx.descriptor_params),
                            )
                            .or_else(|| layout.bound_proc(component))
                            .unwrap_or_else(|| {
                                fail_unmatched_bound_proc_resolution(callee.span, layout, component)
                            });
                            let _ = emit_resolved_bound_proc_call(
                                b,
                                &ctx.locals,
                                ctx.st,
                                Some(ctx.type_layouts),
                                Some(ctx.internal_funcs),
                                Some(ctx.contained_host_refs),
                                Some(ctx.optional_params),
                                Some(ctx.descriptor_params),
                                obj_addr,
                                Some(pass_desc_addr),
                                FuncRef::External(bp.target_name.clone()),
                                bp,
                                args,
                                None,
                                IrType::Void,
                            );
                            return;
                        }
                    }
                }
                // Procedure-pointer component: `call obj%proc_ptr(args)` —
                // indirect call through the stored target; ABI masks are looked
                // up from the pointer's signature key.
                if let Some((target, signature_key)) = procedure_pointer_component_call_target(
                    b,
                    &ctx.locals,
                    callee,
                    ctx.st,
                    ctx.type_layouts,
                ) {
                    let arg_slots = reorder_args_by_keyword_slots(args, &signature_key, ctx.st);
                    let abi_lookup_keys = procedure_abi_lookup_keys(ctx.st, &[&signature_key]);
                    let abi_primary_key = abi_lookup_keys
                        .first()
                        .map(String::as_str)
                        .unwrap_or(signature_key.as_str());
                    // Per-parameter ABI masks, each keyed by the first lookup
                    // key that yields an answer.
                    let value_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_value_arg_mask(ctx.st, k)
                    });
                    let desc_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        cached_param_mask_for_lookup(ctx.st, ctx.descriptor_params, k)
                    });
                    let bind_c_char_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_bind_c_char_arg_mask(ctx.st, k)
                    });
                    let pointer_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_pointer_arg_mask(ctx.st, k)
                    });
                    let class_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_class_arg_mask(ctx.st, k)
                    });
                    let string_desc_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_string_descriptor_arg_mask(ctx.st, k)
                    });
                    let opt_flags = first_procedure_lookup(&abi_lookup_keys, |k| {
                        cached_param_mask_for_lookup(ctx.st, ctx.optional_params, k)
                            .or_else(|| callee_optional_arg_mask(ctx.st, k))
                    });
                    let mut arg_vals: Vec<ValueId> = Vec::with_capacity(arg_slots.len());
                    for (i, slot) in arg_slots.iter().enumerate() {
                        let is_value = value_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_descriptor = desc_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_bind_c_char = bind_c_char_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_pointer = pointer_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_string_descriptor = string_desc_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        // bind(C) char takes precedence over descriptor ABIs.
                        let wants_descriptor = wants_descriptor && !wants_bind_c_char;
                        let wants_polymorphic_descriptor = wants_descriptor
                            && class_mask
                                .as_ref()
                                .map(|mask| mask.get(i).copied().unwrap_or(false))
                                .unwrap_or(false);
                        let wants_string_descriptor =
                            wants_string_descriptor && !wants_bind_c_char;
                        let value = match slot {
                            Some(arg) => match &arg.value {
                                crate::ast::expr::SectionSubscript::Element(e) => {
                                    if is_value && wants_bind_c_char {
                                        lower_bind_c_char_value_arg(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                    } else if is_value {
                                        let raw = super::expr::lower_expr_full(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        );
                                        coerce_value_call_arg(b, ctx.st, abi_primary_key, i, raw)
                                    } else if wants_descriptor {
                                        lower_arg_descriptor(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            wants_polymorphic_descriptor,
                                        )
                                    } else if wants_string_descriptor {
                                        lower_arg_string_descriptor(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                        )
                                    } else if wants_bind_c_char {
                                        lower_bind_c_char_arg_raw(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                    } else if wants_pointer {
                                        lower_pointer_dummy_actual(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                        .unwrap_or_else(|| lower_arg_by_ref_ctx(b, ctx, e))
                                    } else {
                                        lower_arg_by_ref_ctx(b, ctx, e)
                                    }
                                }
                                _ => b.const_i32(0),
                            },
                            None => {
                                missing_optional_call_arg(b, ctx.st, abi_primary_key, i, is_value)
                            }
                        };
                        arg_vals.push(value);
                    }
                    // Trailing OPTIONAL params with no actual: pass null (0) so
                    // PRESENT() reports false in the callee.
                    if let Some(opt_flags) = opt_flags {
                        for flag in opt_flags.iter().skip(arg_vals.len()) {
                            if *flag {
                                arg_vals.push(b.const_i64(0));
                            }
                        }
                    }
                    // Hidden character-length ABI: one trailing i64 per
                    // character(len=*) parameter, in parameter order.
                    if let Some(cls_flags) = first_procedure_lookup(&abi_lookup_keys, |k| {
                        cached_param_mask_for_lookup(ctx.st, ctx.char_len_star_params, k)
                            .or_else(|| callee_char_len_star_mask(ctx.st, k))
                    }) {
                        for (i, flag) in cls_flags.iter().enumerate() {
                            if !*flag || i >= arg_slots.len() {
                                continue;
                            }
                            if let Some(arg) = &arg_slots[i] {
                                if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                                    arg_vals.push(
                                        actual_char_arg_runtime_len(
                                            b,
                                            &ctx.locals,
                                            Some(&ctx.optional_locals),
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                        .unwrap_or_else(|| b.const_i64(0)),
                                    );
                                } else {
                                    arg_vals.push(b.const_i64(0));
                                }
                            } else {
                                arg_vals.push(b.const_i64(0));
                            }
                        }
                    }
                    b.call(FuncRef::Indirect(target), arg_vals, IrType::Void);
                }
            } else if let Expr::Name { name } = &callee.node {
                let key = name.to_lowercase();
                // Elemental subroutine call with array actuals: F2018 §15.8.3
                // requires the call to be evaluated element-wise. Emit a loop
                // that drives one scalar call per element, with copy-in/copy-out
                // through per-iteration scalar temps for each array actual.
                if try_lower_elemental_subroutine_call(b, ctx, name, &key, args, callee.span) {
                    return;
                }
                // Try intrinsic subroutine lowering first.
                if !super::intrinsic_sub::lower_intrinsic_subroutine(b, ctx, &key, args) {
                    // A local procedure-pointer variable named `key` turns this
                    // into an indirect call; its signature key drives the ABI.
                    let procptr_target =
                        procedure_pointer_call_target(b, &ctx.locals, ctx.st, &key);
                    let signature_key = procptr_target
                        .as_ref()
                        .map(|(_, sig_key)| sig_key.clone())
                        .unwrap_or_else(|| key.clone());
                    // Not an intrinsic — general subroutine call.
                    // Keyword-argument reordering (F2003 §12.4.1.2).
                    // `call sub(b=10, a=20)` must bind by name, not
                    // position. reorder_args_by_keyword permutes the
                    // actual-arg list to match the callee's declared
                    // param order; the rest of the call-site code
                    // then runs positionally against that reordered
                    // list.
                    let resolution_arg_vals: Vec<ValueId> = args
                        .iter()
                        .map(|arg| match &arg.value {
                            crate::ast::expr::SectionSubscript::Element(e) => {
                                generic_dispatch_probe_value(
                                    b,
                                    &ctx.locals,
                                    e,
                                    ctx.st,
                                    Some(ctx.type_layouts),
                                    Some(ctx.internal_funcs),
                                    Some(ctx.contained_host_refs),
                                    Some(ctx.descriptor_params),
                                )
                            }
                            _ => b.const_i32(0),
                        })
                        .collect();
                    // Generic SUBROUTINE dispatch: if the callee name
                    // resolves to a NamedInterface symbol, replace it
                    // with the specific matched by the actual argument
                    // types. On failure, emit a diagnostic — the same
                    // rule as generic function-call resolution.
                    let (resolved_name, resolved_key) = if procptr_target.is_some() {
                        (name.clone(), signature_key.clone())
                    } else {
                        resolve_subroutine_call_name(
                            ctx.st,
                            b,
                            Some(&ctx.locals),
                            Some(ctx.type_layouts),
                            Some(ctx.internal_funcs),
                            name,
                            &key,
                            args,
                            &resolution_arg_vals,
                            callee.span,
                        )
                    };
                    let arg_slots = reorder_args_by_keyword_slots(
                        args,
                        if procptr_target.is_some() {
                            &signature_key
                        } else {
                            &resolved_key
                        },
                        ctx.st,
                    );
                    let abi_lookup_keys = procedure_abi_lookup_keys(
                        ctx.st,
                        &[resolved_name.as_str(), &resolved_key, &signature_key, &key],
                    );
                    let abi_primary_key = abi_lookup_keys
                        .first()
                        .map(String::as_str)
                        .unwrap_or(resolved_key.as_str());
                    // Per-parameter ABI masks (same scheme as the
                    // procedure-pointer component branch above).
                    let value_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_value_arg_mask(ctx.st, k)
                    });
                    let desc_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        cached_param_mask_for_lookup(ctx.st, ctx.descriptor_params, k)
                    });
                    let bind_c_char_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_bind_c_char_arg_mask(ctx.st, k)
                    });
                    let pointer_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_pointer_arg_mask(ctx.st, k)
                    });
                    let class_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_class_arg_mask(ctx.st, k)
                    });
                    let string_desc_mask = first_procedure_lookup(&abi_lookup_keys, |k| {
                        callee_string_descriptor_arg_mask(ctx.st, k)
                    });
                    // If the callee has more parameters than provided args, and the
                    // trailing ones are OPTIONAL, pass null pointers so PRESENT() works.
                    let opt_flags = first_procedure_lookup(&abi_lookup_keys, |k| {
                        cached_param_mask_for_lookup(ctx.st, ctx.optional_params, k)
                            .or_else(|| callee_optional_arg_mask(ctx.st, k))
                    });
                    let mut arg_vals: Vec<ValueId> = Vec::with_capacity(arg_slots.len());
                    for (i, slot) in arg_slots.iter().enumerate() {
                        let is_value = value_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_descriptor = desc_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_bind_c_char = bind_c_char_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_pointer = pointer_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        let wants_string_descriptor = string_desc_mask
                            .as_ref()
                            .map(|mask| mask.get(i).copied().unwrap_or(false))
                            .unwrap_or(false);
                        // bind(C) char takes precedence over descriptor ABIs.
                        let wants_descriptor = wants_descriptor && !wants_bind_c_char;
                        let wants_polymorphic_descriptor = wants_descriptor
                            && class_mask
                                .as_ref()
                                .map(|mask| mask.get(i).copied().unwrap_or(false))
                                .unwrap_or(false);
                        let wants_string_descriptor =
                            wants_string_descriptor && !wants_bind_c_char;
                        let value = match slot {
                            Some(arg) => match &arg.value {
                                crate::ast::expr::SectionSubscript::Element(e) => {
                                    if is_value && wants_bind_c_char {
                                        lower_bind_c_char_value_arg(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                    } else if is_value {
                                        let raw = super::expr::lower_expr_full(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        );
                                        coerce_value_call_arg(b, ctx.st, abi_primary_key, i, raw)
                                    } else if wants_descriptor {
                                        lower_arg_descriptor(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            wants_polymorphic_descriptor,
                                        )
                                    } else if wants_string_descriptor {
                                        lower_arg_string_descriptor(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                        )
                                    } else if wants_bind_c_char {
                                        lower_bind_c_char_arg_raw(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                    } else if wants_pointer {
                                        lower_pointer_dummy_actual(
                                            b,
                                            &ctx.locals,
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                        .unwrap_or_else(|| lower_arg_by_ref_ctx(b, ctx, e))
                                    } else {
                                        lower_arg_by_ref_ctx(b, ctx, e)
                                    }
                                }
                                _ => b.const_i32(0),
                            },
                            None => {
                                missing_optional_call_arg(b, ctx.st, abi_primary_key, i, is_value)
                            }
                        };
                        arg_vals.push(value);
                    }
                    if let Some(opt_flags) = opt_flags {
                        for flag in opt_flags.iter().skip(arg_vals.len()) {
                            if *flag {
                                arg_vals.push(b.const_i64(0)); // null → absent
                            }
                        }
                    }
                    // Hidden character-length ABI: for each callee
                    // param that is character(len=*), append the
                    // actual argument's string length as an i64.
                    if let Some(cls_flags) = first_procedure_lookup(&abi_lookup_keys, |k| {
                        cached_param_mask_for_lookup(ctx.st, ctx.char_len_star_params, k)
                            .or_else(|| callee_char_len_star_mask(ctx.st, k))
                    }) {
                        for (i, flag) in cls_flags.iter().enumerate() {
                            if !*flag || i >= arg_slots.len() {
                                continue;
                            }
                            if let Some(arg) = &arg_slots[i] {
                                if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                                    arg_vals.push(
                                        actual_char_arg_runtime_len(
                                            b,
                                            &ctx.locals,
                                            Some(&ctx.optional_locals),
                                            e,
                                            ctx.st,
                                            Some(ctx.type_layouts),
                                            Some(ctx.internal_funcs),
                                            Some(ctx.contained_host_refs),
                                            Some(ctx.descriptor_params),
                                        )
                                        .unwrap_or_else(|| b.const_i64(0)),
                                    );
                                } else {
                                    arg_vals.push(b.const_i64(0));
                                }
                            } else {
                                arg_vals.push(b.const_i64(0));
                            }
                        }
                    }
                    // Host-association closure-passing ABI: if the
                    // callee is a contained procedure, append one
                    // address per host-local variable it reads or
                    // writes. Caller must hold the matching variable
                    // in its own locals — this is guaranteed by the
                    // host-refs analysis that drove the callee
                    // signature, since both caller and callee share
                    // the same enclosing host.
                    if procptr_target.is_none() {
                        append_host_closure_args(b, ctx, &resolved_key, &mut arg_vals);
                    }
                    let func_ref = if let Some((target, _)) = procptr_target {
                        FuncRef::Indirect(target)
                    } else {
                        same_unit_func_ref(
                            ctx.st,
                            b.func().name.as_str(),
                            Some(ctx.internal_funcs),
                            &[&resolved_key],
                            resolved_name,
                        )
                    };
                    b.call(func_ref, arg_vals, IrType::Void);
                }
            }
        }
2181
2182 // ---- Control flow ----
2183 Stmt::IfConstruct {
2184 condition,
2185 then_body,
2186 else_ifs,
2187 else_body,
2188 ..
2189 } => {
2190 lower_if(b, ctx, condition, then_body, else_ifs, else_body);
2191 }
2192
2193 Stmt::IfStmt { condition, action } => {
2194 let bb_then = b.create_block("if_then");
2195 let bb_end = b.create_block("if_end");
2196 lower_condition_branch(b, ctx, condition, bb_then, bb_end);
2197
2198 b.set_block(bb_then);
2199 lower_stmt(b, ctx, action);
2200 if b.func().block(b.current_block()).terminator.is_none() {
2201 b.branch(bb_end, vec![]);
2202 }
2203
2204 b.set_block(bb_end);
2205 }
2206
2207 Stmt::DoLoop {
2208 name,
2209 var,
2210 start,
2211 end,
2212 step,
2213 body,
2214 } => {
2215 lower_do_loop(
2216 b,
2217 ctx,
2218 DoLoopFields {
2219 name,
2220 var,
2221 start,
2222 end,
2223 step,
2224 body,
2225 concurrent: false,
2226 },
2227 );
2228 }
2229
2230 Stmt::DoConcurrent {
2231 name,
2232 controls,
2233 mask,
2234 body,
2235 locality: _,
2236 ..
2237 } => {
2238 lower_do_concurrent(b, ctx, name, controls, mask.as_ref(), body, stmt.span);
2239 }
2240
2241 Stmt::DoWhile {
2242 name,
2243 condition,
2244 body,
2245 } => {
2246 let bb_header = b.create_block("do_while_header");
2247 let bb_body = b.create_block("do_while_body");
2248 let bb_exit = b.create_block("do_while_exit");
2249 b.branch(bb_header, vec![]);
2250
2251 ctx.push_loop(name.clone(), bb_header, bb_exit);
2252
2253 b.set_block(bb_header);
2254 lower_condition_branch(b, ctx, condition, bb_body, bb_exit);
2255
2256 b.set_block(bb_body);
2257 lower_stmts(b, ctx, body);
2258 if b.func().block(b.current_block()).terminator.is_none() {
2259 b.branch(bb_header, vec![]);
2260 }
2261
2262 ctx.pop_loop();
2263 b.set_block(bb_exit);
2264 }
2265
2266 Stmt::SelectCase {
2267 selector, cases, ..
2268 } => {
2269 lower_select_case(b, ctx, selector, cases);
2270 }
2271
        Stmt::WhereConstruct {
            mask,
            body,
            elsewhere,
            ..
        } => {
            // WHERE(mask) body [ELSEWHERE body] END WHERE
            // Collect ALL array names referenced in mask, body, OR
            // elsewhere body. Missing the elsewhere arm caused a
            // silent miscompile: an array reference appearing only
            // in elsewhere (e.g., `where (a > 0) c = a; elsewhere; c
            // = d`) was not scalarized, so `c = d` lowered through
            // the scalar path and silently produced 0.0 instead of
            // d(i).
            let mut array_names: Vec<String> = Vec::new();
            collect_array_names(mask, &ctx.locals, &mut array_names);
            for s in body {
                collect_array_names_stmt(s, &ctx.locals, &mut array_names);
            }
            // NOTE(review): only the FIRST elsewhere clause is consulted
            // here and in the loop below, and its mask (`_emask`) is
            // discarded — masked ELSEWHERE(mask) chains would lower
            // incorrectly. Confirm the front end restricts input to a
            // single bare ELSEWHERE before relying on this path.
            if let Some((_emask, ebody)) = elsewhere.first() {
                for s in ebody {
                    collect_array_names_stmt(s, &ctx.locals, &mut array_names);
                }
            }

            if array_names.is_empty() {
                // No arrays — fall back to scalar IF-THEN-ELSE.
                let cond = super::expr::lower_expr_ctx_tl(b, ctx, mask);
                let bb_then = b.create_block("where_then");
                let bb_else = if !elsewhere.is_empty() {
                    Some(b.create_block("where_else"))
                } else {
                    None
                };
                let bb_end = b.create_block("where_end");
                b.cond_branch(cond, bb_then, vec![], bb_else.unwrap_or(bb_end), vec![]);

                b.set_block(bb_then);
                lower_stmts(b, ctx, body);
                if b.func().block(b.current_block()).terminator.is_none() {
                    b.branch(bb_end, vec![]);
                }
                if let Some(bb_e) = bb_else {
                    b.set_block(bb_e);
                    if let Some((_m, else_body)) = elsewhere.first() {
                        lower_stmts(b, ctx, else_body);
                    }
                    if b.func().block(b.current_block()).terminator.is_none() {
                        b.branch(bb_end, vec![]);
                    }
                }
                b.set_block(bb_end);
                return;
            }

            // Array-level WHERE: iterate over elements.
            // Use the first array to determine the iteration count. For
            // stack arrays `info.addr` is the raw element buffer — calling
            // afs_array_size on that would read garbage out of the rank
            // slot. array_total_elems_value picks the right source: it
            // materialises a descriptor query for descriptor-backed locals
            // and folds dims to a constant for explicit-shape stack arrays.
            let first_arr_name = &array_names[0];
            let first_arr = ctx
                .locals
                .get(first_arr_name)
                .cloned()
                .expect("array must exist");
            let n = array_total_elems_value(b, &first_arr);

            // Get base addresses for all arrays (loaded once outside the loop).
            let mut array_bases: HashMap<String, ValueId> = HashMap::new();
            for arr_name in &array_names {
                if let Some(info) = ctx.locals.get(arr_name) {
                    // Allocatables hold a descriptor: load the data pointer
                    // once here. Stack arrays use the buffer address as-is.
                    let base = if info.allocatable {
                        b.load_typed(info.addr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
                    } else {
                        info.addr
                    };
                    array_bases.insert(arr_name.clone(), base);
                }
            }

            // Loop counter i in [0, n), held in a stack slot.
            let i_addr = b.alloca(IrType::Int(IntWidth::I64));
            let i_zero = b.const_i64(0);
            b.store(i_zero, i_addr);

            let bb_check = b.create_block("where_check");
            let bb_body = b.create_block("where_body");
            let bb_exit = b.create_block("where_exit");
            b.branch(bb_check, vec![]);

            b.set_block(bb_check);
            let i = b.load(i_addr);
            let done = b.icmp(CmpOp::Ge, i, n);
            b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);

            b.set_block(bb_body);
            let i_val = b.load(i_addr);

            // Substitute each array variable with a scalar local bound to element i.
            // Save original locals for restoration.
            let mut saved_locals: Vec<(String, Option<LocalInfo>)> = Vec::new();
            for arr_name in &array_names {
                saved_locals.push((arr_name.clone(), ctx.locals.get(arr_name).cloned()));
                if let Some(orig_info) = ctx.locals.get(arr_name).cloned() {
                    let base = *array_bases.get(arr_name).unwrap();
                    // Compute element address: base + i * elem_bytes.
                    // NOTE(review): element width falls back to 4 bytes for
                    // any type not listed — wider element types (if any can
                    // reach this path) would be indexed incorrectly; confirm.
                    let elem_bytes_val = match &orig_info.ty {
                        IrType::Int(IntWidth::I64) | IrType::Float(FloatWidth::F64) => {
                            b.const_i64(8)
                        }
                        IrType::Int(IntWidth::I16) => b.const_i64(2),
                        IrType::Int(IntWidth::I8) => b.const_i64(1),
                        _ => b.const_i64(4),
                    };
                    let byte_off = b.imul(i_val, elem_bytes_val);
                    let elem_ptr = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
                    // Replace the local with a scalar pointing to this element.
                    ctx.locals.insert(
                        arr_name.clone(),
                        LocalInfo {
                            addr: elem_ptr,
                            ty: orig_info.ty.clone(),
                            dims: vec![],
                            allocatable: false,
                            descriptor_arg: false,
                            by_ref: false,
                            char_kind: CharKind::None,
                            derived_type: None,
                            inline_const: None,
                            is_pointer: false,
                            runtime_dim_upper: vec![],
                            is_class: false,
                            logical_kind: None,
                            last_dim_assumed_size: false,
                        },
                    );
                }
            }

            // Pre-rewrite mask and body: any residual `name(section)`
            // FunctionCall AST node referencing a scalarized array name
            // would, after substitution, dispatch through the user-call
            // path on a scalar local and emit an undefined `bl _name` at
            // link time. Folding it to bare `Name` makes the substituted
            // per-iter scalar binding pick up at the element index.
            // Stdlib pattern: `where (lambda(1:m) > 0.0_sp) sv(1:m) =
            // sqrt(lambda(1:m) * real(n-1, sp))` — both `lambda(1:m)`
            // and `sv(1:m)` are scalarized to `lambda` / `sv` per iter.
            let rewritten_mask = rewrite_scalarized_section_refs(mask, &array_names);
            let rewritten_body: Vec<SpannedStmt> = body
                .iter()
                .map(|s| rewrite_scalarized_section_refs_stmt(s, &array_names))
                .collect();
            let rewritten_else: Vec<SpannedStmt> = elsewhere
                .first()
                .map(|(_m, els)| {
                    els.iter()
                        .map(|s| rewrite_scalarized_section_refs_stmt(s, &array_names))
                        .collect()
                })
                .unwrap_or_default();

            // Evaluate mask with element-level bindings.
            let cond = super::expr::lower_expr_ctx_tl(b, ctx, &rewritten_mask);

            let bb_then = b.create_block("where_then");
            let bb_else = b.create_block("where_else");
            let bb_incr = b.create_block("where_incr");
            b.cond_branch(cond, bb_then, vec![], bb_else, vec![]);

            b.set_block(bb_then);
            lower_stmts(b, ctx, &rewritten_body);
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_incr, vec![]);
            }

            b.set_block(bb_else);
            if !rewritten_else.is_empty() {
                lower_stmts(b, ctx, &rewritten_else);
            }
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_incr, vec![]);
            }

            b.set_block(bb_incr);
            // Restore original locals.
            for (name, orig) in saved_locals {
                if let Some(info) = orig {
                    ctx.locals.insert(name, info);
                } else {
                    ctx.locals.remove(&name);
                }
            }

            // i += 1 and loop back to the bounds check.
            let i_cur = b.load(i_addr);
            let one = b.const_i64(1);
            let next = b.iadd(i_cur, one);
            b.store(next, i_addr);
            b.branch(bb_check, vec![]);

            b.set_block(bb_exit);
        }
2476
        Stmt::WhereStmt { mask, stmt } => {
            // Single-line WHERE: where (cond) assignment.
            // F2018 §10.2.3.2: when the mask is an array-valued logical
            // expression, the assignment runs element-wise under the
            // mask. Reuse the WhereConstruct array-iteration shape: set
            // up per-element bindings for every array referenced in the
            // mask or assignment, evaluate the scalar mask, and run the
            // assignment under it.
            let mut array_names: Vec<String> = Vec::new();
            collect_array_names(mask, &ctx.locals, &mut array_names);
            collect_array_names_stmt(stmt, &ctx.locals, &mut array_names);

            if array_names.is_empty() {
                // No array references — degenerate to a scalar IF.
                let cond = super::expr::lower_expr_ctx_tl(b, ctx, mask);
                let bb_then = b.create_block("where_stmt");
                let bb_end = b.create_block("where_stmt_end");
                b.cond_branch(cond, bb_then, vec![], bb_end, vec![]);
                b.set_block(bb_then);
                lower_stmt(b, ctx, stmt);
                if b.func().block(b.current_block()).terminator.is_none() {
                    b.branch(bb_end, vec![]);
                }
                b.set_block(bb_end);
                return;
            }

            // Iteration count from the first referenced array; see the
            // WhereConstruct arm for why array_total_elems_value is used
            // instead of reading the descriptor directly.
            let first_arr_name = &array_names[0];
            let first_arr = ctx
                .locals
                .get(first_arr_name)
                .cloned()
                .expect("array must exist");
            let n = array_total_elems_value(b, &first_arr);

            // Base addresses loaded once, outside the element loop.
            let mut array_bases: HashMap<String, ValueId> = HashMap::new();
            for arr_name in &array_names {
                if let Some(info) = ctx.locals.get(arr_name) {
                    let base = if info.allocatable {
                        b.load_typed(info.addr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
                    } else {
                        info.addr
                    };
                    array_bases.insert(arr_name.clone(), base);
                }
            }

            // Loop counter i in [0, n), held in a stack slot.
            let i_addr = b.alloca(IrType::Int(IntWidth::I64));
            let i_zero = b.const_i64(0);
            b.store(i_zero, i_addr);

            let bb_check = b.create_block("where_stmt_check");
            let bb_body = b.create_block("where_stmt_body");
            let bb_exit = b.create_block("where_stmt_exit");
            b.branch(bb_check, vec![]);

            b.set_block(bb_check);
            let i = b.load(i_addr);
            let done = b.icmp(CmpOp::Ge, i, n);
            b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);

            b.set_block(bb_body);
            let i_val = b.load(i_addr);

            // Rebind every referenced array name to a scalar LocalInfo
            // pointing at element i; originals are saved and restored at
            // the bottom of the loop body.
            let mut saved_locals: Vec<(String, Option<LocalInfo>)> = Vec::new();
            for arr_name in &array_names {
                saved_locals.push((arr_name.clone(), ctx.locals.get(arr_name).cloned()));
                if let Some(orig_info) = ctx.locals.get(arr_name).cloned() {
                    let base = *array_bases.get(arr_name).unwrap();
                    // NOTE(review): element width falls back to 4 bytes for
                    // any type not listed below — confirm wider element
                    // types cannot reach this path.
                    let elem_bytes_val = match &orig_info.ty {
                        IrType::Int(IntWidth::I64) | IrType::Float(FloatWidth::F64) => {
                            b.const_i64(8)
                        }
                        IrType::Int(IntWidth::I16) => b.const_i64(2),
                        IrType::Int(IntWidth::I8) => b.const_i64(1),
                        _ => b.const_i64(4),
                    };
                    let byte_off = b.imul(i_val, elem_bytes_val);
                    let elem_ptr = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
                    ctx.locals.insert(
                        arr_name.clone(),
                        LocalInfo {
                            addr: elem_ptr,
                            ty: orig_info.ty.clone(),
                            dims: vec![],
                            allocatable: false,
                            descriptor_arg: false,
                            by_ref: false,
                            char_kind: CharKind::None,
                            derived_type: None,
                            inline_const: None,
                            is_pointer: false,
                            runtime_dim_upper: vec![],
                            is_class: false,
                            logical_kind: None,
                            last_dim_assumed_size: false,
                        },
                    );
                }
            }

            // See WhereConstruct: residual `name(section)` calls in the
            // mask/stmt would emit undefined externals after the
            // substitution. Fold them to bare `Name` so the per-iter
            // scalar binding picks up.
            let rewritten_mask = rewrite_scalarized_section_refs(mask, &array_names);
            let rewritten_stmt = rewrite_scalarized_section_refs_stmt(stmt, &array_names);

            let cond = super::expr::lower_expr_ctx_tl(b, ctx, &rewritten_mask);
            let bb_then = b.create_block("where_stmt_then");
            let bb_incr = b.create_block("where_stmt_incr");
            b.cond_branch(cond, bb_then, vec![], bb_incr, vec![]);

            b.set_block(bb_then);
            lower_stmt(b, ctx, &rewritten_stmt);
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_incr, vec![]);
            }

            b.set_block(bb_incr);
            // Restore the saved array bindings before the next iteration.
            for (name, orig) in saved_locals {
                if let Some(info) = orig {
                    ctx.locals.insert(name, info);
                } else {
                    ctx.locals.remove(&name);
                }
            }
            // i += 1 and loop back to the bounds check.
            let i_cur = b.load(i_addr);
            let one = b.const_i64(1);
            let next = b.iadd(i_cur, one);
            b.store(next, i_addr);
            b.branch(bb_check, vec![]);

            b.set_block(bb_exit);
        }
2611
2612 Stmt::ForallConstruct {
2613 specs, mask, body, ..
2614 } => {
2615 // FORALL: nest loops. The body goes inside the innermost loop.
2616 // Build the body statements including optional mask as a closure-like pattern.
2617 // The innermost loop gets the real body; outer loops wrap it.
2618 lower_forall_nested(b, ctx, specs, mask.as_ref(), body);
2619 }
2620
2621 Stmt::ForallStmt { specs, mask, stmt } => {
2622 let body_vec = vec![(**stmt).clone()];
2623 lower_forall_nested(b, ctx, specs, mask.as_ref(), &body_vec);
2624 }
2625
        Stmt::SelectType {
            selector,
            guards,
            assoc_name,
            ..
        } => {
            // SELECT TYPE lowering uses one of two strategies:
            //   1. Dynamic: a scalar CLASS selector backed by a descriptor
            //      — read the runtime type tag and compare per guard.
            //   2. Static: the selector's declared derived type is known —
            //      guards are resolved against that compile-time type
            //      (compares are still emitted, but both tags are constants).
            let bb_end = b.create_block("select_type_end");
            let selector_type =
                operator_expr_type_info(selector, Some(&ctx.locals), ctx.st, Some(ctx.type_layouts));
            let selector_info = associate_alias_local_info(b, ctx, selector);
            // Dynamic path applies only to rank-0 descriptor-backed locals
            // with a derived type — the descriptor carries the type tag.
            let dynamic_class_selector = selector_info.as_ref().filter(|info| {
                info.derived_type.is_some()
                    && local_uses_array_descriptor(info)
                    && local_declared_rank(info) == 0
            });

            if let Some(info) = dynamic_class_selector {
                let desc = array_descriptor_addr(b, info);
                let tag_val = load_array_desc_type_tag(b, desc);
                // CLASS DEFAULT body is emitted after the guard chain,
                // i.e. on the final fall-through when no guard matched.
                let default_body = guards.iter().find_map(|guard| match guard {
                    crate::ast::stmt::TypeGuard::ClassDefault { body } => Some(body),
                    _ => None,
                });
                for guard in guards {
                    match guard {
                        crate::ast::stmt::TypeGuard::TypeIs {
                            type_name: guard_type,
                            body,
                        } => {
                            // TYPE IS: exact tag equality against the
                            // guard type's layout tag.
                            if let Some(guard_layout) = ctx.type_layouts.get(guard_type) {
                                let guard_tag = b.const_i64(guard_layout.type_tag as i64);
                                let matches = b.icmp(CmpOp::Eq, tag_val, guard_tag);
                                let bb_match = b.create_block("type_is_match");
                                let bb_next = b.create_block("type_is_next");
                                b.cond_branch(matches, bb_match, vec![], bb_next, vec![]);

                                b.set_block(bb_match);
                                with_select_type_guard_binding(
                                    b,
                                    ctx,
                                    selector,
                                    assoc_name.as_deref(),
                                    guard_type,
                                    |b, ctx| lower_stmts(b, ctx, body),
                                );
                                if b.func().block(b.current_block()).terminator.is_none() {
                                    b.branch(bb_end, vec![]);
                                }

                                b.set_block(bb_next);
                            }
                        }
                        crate::ast::stmt::TypeGuard::ClassIs {
                            type_name: guard_type,
                            body,
                        } => {
                            // CLASS IS: match the guard type OR any of its
                            // non-abstract extensions — collect all candidate
                            // tags and OR the equality tests together.
                            let mut matching_tags: Vec<u64> = ctx
                                .type_layouts
                                .layouts
                                .values()
                                .filter(|layout| !layout.is_abstract)
                                .filter(|layout| {
                                    is_type_or_extends(&layout.name, guard_type, ctx.type_layouts)
                                })
                                .map(|layout| layout.type_tag)
                                .collect();
                            // Sorted for deterministic IR output across runs.
                            matching_tags.sort_unstable();
                            if !matching_tags.is_empty() {
                                let mut matches = None;
                                for tag in matching_tags {
                                    let tag_val_const = b.const_i64(tag as i64);
                                    let eq = b.icmp(CmpOp::Eq, tag_val, tag_val_const);
                                    matches = Some(match matches {
                                        Some(prev) => b.or(prev, eq),
                                        None => eq,
                                    });
                                }
                                let bb_match = b.create_block("class_is_match");
                                let bb_next = b.create_block("class_is_next");
                                b.cond_branch(
                                    matches.expect("non-empty CLASS IS candidate set"),
                                    bb_match,
                                    vec![],
                                    bb_next,
                                    vec![],
                                );

                                b.set_block(bb_match);
                                with_select_type_guard_binding(
                                    b,
                                    ctx,
                                    selector,
                                    assoc_name.as_deref(),
                                    guard_type,
                                    |b, ctx| lower_stmts(b, ctx, body),
                                );
                                if b.func().block(b.current_block()).terminator.is_none() {
                                    b.branch(bb_end, vec![]);
                                }

                                b.set_block(bb_next);
                            }
                        }
                        crate::ast::stmt::TypeGuard::ClassDefault { .. } => {}
                    }
                }
                if let Some(body) = default_body {
                    lower_stmts(b, ctx, body);
                    if b.func().block(b.current_block()).terminator.is_none() {
                        b.branch(bb_end, vec![]);
                    }
                }
            } else {
                // Polymorphic CLASS selectors that don't meet the dynamic
                // path's criteria are a hard compile error (exits the
                // compiler process).
                if matches!(selector_type, Some(crate::sema::symtab::TypeInfo::Class(_))) {
                    eprintln!(
                        "armfortas: error: {}:{}: SELECT TYPE on polymorphic CLASS(...) selectors is not implemented yet",
                        stmt.span.start.line, stmt.span.start.col
                    );
                    let _ = std::io::stderr().flush();
                    std::process::exit(1);
                }
                // Static path: resolve guards against the selector's
                // declared derived type.
                let static_type = selector_info.as_ref().and_then(|info| info.derived_type.clone());
                if let Some(ref type_name) = static_type {
                    if let Some(layout) = ctx.type_layouts.get(type_name) {
                        let tag_val = b.const_i64(layout.type_tag as i64);
                        let default_body = guards.iter().find_map(|guard| match guard {
                            crate::ast::stmt::TypeGuard::ClassDefault { body } => Some(body),
                            _ => None,
                        });

                        for guard in guards {
                            match guard {
                                crate::ast::stmt::TypeGuard::TypeIs {
                                    type_name: guard_type,
                                    body,
                                } => {
                                    // Both tags are compile-time constants
                                    // here; the emitted compare decides only
                                    // one way at runtime.
                                    if let Some(guard_layout) = ctx.type_layouts.get(guard_type) {
                                        let guard_tag = b.const_i64(guard_layout.type_tag as i64);
                                        let matches = b.icmp(CmpOp::Eq, tag_val, guard_tag);
                                        let bb_match = b.create_block("type_is_match");
                                        let bb_next = b.create_block("type_is_next");
                                        b.cond_branch(matches, bb_match, vec![], bb_next, vec![]);

                                        b.set_block(bb_match);
                                        with_select_type_guard_binding(
                                            b,
                                            ctx,
                                            selector,
                                            assoc_name.as_deref(),
                                            guard_type,
                                            |b, ctx| lower_stmts(b, ctx, body),
                                        );
                                        if b.func().block(b.current_block()).terminator.is_none() {
                                            b.branch(bb_end, vec![]);
                                        }

                                        b.set_block(bb_next);
                                    } else {
                                        // Unknown guard type — skip.
                                        // Fall back to a case-insensitive name
                                        // comparison; on a match the guard is
                                        // taken unconditionally and the chain
                                        // stops (break).
                                        let tag_matches = type_name.eq_ignore_ascii_case(guard_type);
                                        if tag_matches {
                                            with_select_type_guard_binding(
                                                b,
                                                ctx,
                                                selector,
                                                assoc_name.as_deref(),
                                                guard_type,
                                                |b, ctx| lower_stmts(b, ctx, body),
                                            );
                                            if b.func().block(b.current_block()).terminator.is_none() {
                                                b.branch(bb_end, vec![]);
                                            }
                                            break;
                                        }
                                    }
                                }
                                crate::ast::stmt::TypeGuard::ClassIs {
                                    type_name: guard_type,
                                    body,
                                } => {
                                    // CLASS IS matches the type or any extension.
                                    // Check if static type is or extends the guard type.
                                    let is_match =
                                        is_type_or_extends(type_name, guard_type, ctx.type_layouts);
                                    if is_match {
                                        with_select_type_guard_binding(
                                            b,
                                            ctx,
                                            selector,
                                            assoc_name.as_deref(),
                                            guard_type,
                                            |b, ctx| lower_stmts(b, ctx, body),
                                        );
                                        if b.func().block(b.current_block()).terminator.is_none() {
                                            b.branch(bb_end, vec![]);
                                        }
                                        break; // CLASS IS matched, skip remaining guards.
                                    }
                                }
                                crate::ast::stmt::TypeGuard::ClassDefault { .. } => {}
                            }
                        }
                        if let Some(body) = default_body {
                            lower_stmts(b, ctx, body);
                            if b.func().block(b.current_block()).terminator.is_none() {
                                b.branch(bb_end, vec![]);
                            }
                        }
                    }
                }
            }

            // Close whatever block is current and continue after END SELECT.
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_end, vec![]);
            }
            b.set_block(bb_end);
        }
2843
2844 Stmt::Exit { name } => {
2845 if let Some(lp) = ctx.find_loop(name) {
2846 let exit = lp.exit;
2847 b.branch(exit, vec![]);
2848 }
2849 }
2850
2851 Stmt::Cycle { name } => {
2852 if let Some(lp) = ctx.find_loop(name) {
2853 let header = lp.header;
2854 b.branch(header, vec![]);
2855 }
2856 }
2857
        Stmt::Return { .. } => {
            // Explicit RETURN: flush hidden-result conventions, run the
            // implicit deallocation pass, then emit the `ret` that matches
            // this procedure's result ABI.
            if ctx.hidden_result_abi == HiddenResultAbi::StringDescriptor {
                // Character function with a hidden string-descriptor result:
                // copy the local result into the caller's slot first.
                lower_hidden_string_result_copy(b, ctx);
            }
            // For sret-style ABIs the hidden first parameter (ValueId(0))
            // holds the result; exclude it from implicit deallocation so
            // the caller's buffer is not freed.
            let skip = if matches!(
                ctx.hidden_result_abi,
                HiddenResultAbi::ArrayDescriptor | HiddenResultAbi::DerivedAggregate
            ) {
                Some(ValueId(0))
            } else {
                None
            };
            insert_implicit_dealloc(
                b,
                &ctx.locals,
                &ctx.locals,
                ctx.type_layouts,
                ctx.st,
                ctx.internal_funcs,
                Some(ctx.contained_host_refs),
                skip,
            );
            if ctx.hidden_result_abi != HiddenResultAbi::None {
                // sret convention: result was written into the hidden first param.
                b.ret(None);
            } else if let Some(addr) = ctx.result_addr {
                // Function with a named result variable: decide between the
                // pointer-return convention (non-pointer derived-type result)
                // and a plain value load.
                let returns_derived_buffer = ctx
                    .result_name
                    .as_ref()
                    .and_then(|name| ctx.locals.get(name))
                    .map(|info| !info.is_pointer && info.derived_type.is_some())
                    .unwrap_or(false);
                if returns_derived_buffer {
                    // Derived-type function results use the pointer-return
                    // convention; explicit RETURN must mirror the implicit
                    // fallthrough path instead of loading the aggregate bytes.
                    let zero = b.const_i64(0);
                    let byte_ptr = b.gep(addr, vec![zero], IrType::Int(IntWidth::I8));
                    b.ret(Some(byte_ptr));
                } else {
                    // Scalar result: load the value out of the result slot.
                    let rv = b.load(addr);
                    b.ret(Some(rv));
                }
            } else {
                // Subroutine (no result variable): plain void return.
                b.ret_void();
            }
        }
2905
2906 Stmt::Stop { .. } => {
2907 let skip = if matches!(
2908 ctx.hidden_result_abi,
2909 HiddenResultAbi::ArrayDescriptor | HiddenResultAbi::DerivedAggregate
2910 ) {
2911 Some(ValueId(0))
2912 } else {
2913 None
2914 };
2915 insert_implicit_dealloc(
2916 b,
2917 &ctx.locals,
2918 &ctx.locals,
2919 ctx.type_layouts,
2920 ctx.st,
2921 ctx.internal_funcs,
2922 Some(ctx.contained_host_refs),
2923 skip,
2924 );
2925 b.runtime_call(RuntimeFunc::Stop, vec![], IrType::Void);
2926 b.unreachable();
2927 }
        Stmt::ErrorStop { code, .. } => {
            // F2018 §11.4: error stop with a stop-code prints the
            // implementation banner together with the user's code. The
            // earlier lowering threw the code away so all stdlib error
            // diagnostics surfaced as bare "ERROR STOP" — masking real
            // problems such as stdlib_sorting's "work array is too small"
            // and "Allocation of adjoint_array buffer failed". Dispatch
            // to the message or integer entry depending on stop-code type.
            //
            // Lower the stop-code expression BEFORE the implicit dealloc:
            // for an allocatable character stop-code (e.g. stdlib's
            // `error stop err_msg` where err_msg is character(:),
            // allocatable), the dealloc nullifies the descriptor's data
            // pointer, so loading after dealloc gives a null ptr and
            // afs_error_stop_msg falls back to the bare "ERROR STOP"
            // branch. Capturing first preserves the pointer for the
            // call (the buffer remains mapped through process exit).
            //
            // Stop-code representation after lowering:
            //   Msg(ptr, len) — character stop-code (data pointer + length)
            //   Int(v)        — integer stop-code, widened to i64
            //   None          — bare ERROR STOP with no code
            enum StopCode {
                Msg(ValueId, ValueId),
                Int(ValueId),
                None,
            }
            let stop_code = if let Some(code_expr) = code {
                // String literals are treated as character even when the
                // character-expression classifier doesn't flag them.
                let is_char = expr_is_character_expr(
                    b,
                    &ctx.locals,
                    code_expr,
                    ctx.st,
                    Some(ctx.type_layouts),
                ) || matches!(code_expr.node, Expr::StringLiteral { .. });
                if is_char {
                    let (ptr, len) = lower_string_expr_ctx(b, ctx, code_expr);
                    StopCode::Msg(ptr, len)
                } else {
                    // Integer stop-code: sign-extend narrower ints to i64
                    // for the runtime entry; other types pass through.
                    let val = super::expr::lower_expr_ctx(b, ctx, code_expr);
                    let val_ty = b
                        .func()
                        .value_type(val)
                        .unwrap_or(IrType::Int(IntWidth::I64));
                    let widened = match val_ty {
                        IrType::Int(IntWidth::I64) => val,
                        IrType::Int(_) => b.int_extend(val, IntWidth::I64, true),
                        _ => val,
                    };
                    StopCode::Int(widened)
                }
            } else {
                StopCode::None
            };

            // Skip implicit dealloc for character-stop-code error stops.
            // The user-provided message often references a local
            // allocatable string whose buffer would be freed by the
            // dealloc, leaving afs_error_stop_msg reading freed memory
            // (or, if the load order let it run before dealloc, a now-
            // null data pointer). Process exit cleans up the heap
            // anyway. For integer / no-code stops the dealloc still
            // runs to satisfy any non-error cleanup expectations.
            if !matches!(stop_code, StopCode::Msg(..)) {
                // Same sret-parameter exclusion as the RETURN/STOP arms.
                let skip = if matches!(
                    ctx.hidden_result_abi,
                    HiddenResultAbi::ArrayDescriptor | HiddenResultAbi::DerivedAggregate
                ) {
                    Some(ValueId(0))
                } else {
                    None
                };
                insert_implicit_dealloc(
                    b,
                    &ctx.locals,
                    &ctx.locals,
                    ctx.type_layouts,
                    ctx.st,
                    ctx.internal_funcs,
                    Some(ctx.contained_host_refs),
                    skip,
                );
            }

            // Dispatch to the runtime entry that matches the stop-code
            // shape; none of these calls return.
            match stop_code {
                StopCode::Msg(ptr, len) => {
                    b.call(
                        FuncRef::External("afs_error_stop_msg".into()),
                        vec![ptr, len],
                        IrType::Void,
                    );
                }
                StopCode::Int(widened) => {
                    b.call(
                        FuncRef::External("afs_error_stop_int".into()),
                        vec![widened],
                        IrType::Void,
                    );
                }
                StopCode::None => {
                    b.runtime_call(RuntimeFunc::ErrorStop, vec![], IrType::Void);
                }
            }
            b.unreachable();
        }
3028
3029 Stmt::Allocate {
3030 type_spec,
3031 items,
3032 opts,
3033 } => {
3034 let stat_target = super::core::allocate_status_target(b, ctx, opts);
3035 let stat_addr = stat_target.runtime_addr;
3036 // F2018 §9.7.1.3: stat-variable is 0 on success. Pre-zero so
3037 // any item path that doesn't update stat_addr (e.g. scalar
3038 // simple allocates that don't go through a runtime helper)
3039 // leaves the user's stat at SUCCESS rather than the
3040 // uninitialized garbage that previously surfaced as the
3041 // stdlib_bitsets "allocation fault for STRING" miscall.
3042 // Failing item paths still overwrite stat_addr through their
3043 // runtime helpers.
3044 {
3045 let zero_i32 = b.const_i32(0);
3046 b.store(zero_i32, stat_addr);
3047 }
3048 let errmsg_target = allocate_errmsg_target(b, ctx, opts);
3049 let typed_char_len = typed_allocate_char_len(
3050 b,
3051 &ctx.locals,
3052 type_spec.as_ref(),
3053 ctx.st,
3054 Some(ctx.type_layouts),
3055 );
3056 let typed_type_tag =
3057 typed_allocate_type_tag_value(b, type_spec.as_ref(), ctx.type_layouts);
3058 let typed_tbp_lookup =
3059 typed_allocate_tbp_lookup_value(b, type_spec.as_ref(), ctx.type_layouts);
3060 let typed_layout = typed_allocate_layout(type_spec.as_ref(), ctx.type_layouts);
3061 let source_desc = allocate_descriptor_keyword_expr(b, ctx, opts, "source");
3062 let mold_desc = allocate_descriptor_keyword_expr(b, ctx, opts, "mold");
3063 let shape_desc = source_desc.or(mold_desc);
3064 let source_expr = allocate_keyword_expr(opts, "source");
3065 let source_scalar_desc = allocate_scalar_source_descriptor(b, ctx, opts);
3066
3067 for item in items {
3068 let source_char = allocate_char_source_value(b, ctx, opts);
3069 let char_alloc_len = typed_char_len
3070 .or_else(|| source_char.as_ref().map(|(_, len)| *len))
3071 .or_else(|| allocate_char_mold_len(b, ctx, opts));
3072 let component_alloc = match &item.node {
3073 Expr::ComponentAccess { .. } => Some((item, &[][..])),
3074 Expr::FunctionCall { callee, args }
3075 if matches!(callee.node, Expr::ComponentAccess { .. }) =>
3076 {
3077 Some((callee.as_ref(), args.as_slice()))
3078 }
3079 _ => None,
3080 };
3081 if let Some((component_expr, args)) = component_alloc {
3082 if let Some((field_ptr, field)) = resolve_component_field_access(
3083 b,
3084 &ctx.locals,
3085 component_expr,
3086 ctx.st,
3087 ctx.type_layouts,
3088 ) {
3089 if matches!(field_char_kind(&field), CharKind::Deferred) && field.size == 32
3090 {
3091 let Some(len_val) = char_alloc_len else {
3092 eprintln!(
3093 "armfortas: error: {}:{}: deferred-length character ALLOCATE requires a typed length or SOURCE/MOLD support",
3094 stmt.span.start.line, stmt.span.start.col
3095 );
3096 let _ = std::io::stderr().flush();
3097 std::process::exit(1);
3098 };
3099 init_allocated_string_descriptor(b, field_ptr, len_val);
3100 if let Some((src_ptr, src_len)) = source_char {
3101 b.call(
3102 FuncRef::External("afs_assign_char_deferred".into()),
3103 vec![field_ptr, src_ptr, src_len],
3104 IrType::Void,
3105 );
3106 }
3107 continue;
3108 }
3109 if field.size == 384 && (field.allocatable || field.pointer) {
3110 let elem_ty = field_storage_ir_type(&field, ctx.type_layouts);
3111 let rank = args.len();
3112 let field_info = LocalInfo {
3113 addr: field_ptr,
3114 ty: elem_ty.clone(),
3115 dims: vec![],
3116 allocatable: true,
3117 descriptor_arg: false,
3118 by_ref: false,
3119 char_kind: field_char_kind(&field),
3120 derived_type: field_derived_type_name(&field),
3121 inline_const: None,
3122 is_pointer: field.pointer,
3123 runtime_dim_upper: vec![],
3124 is_class: false,
3125 logical_kind: None,
3126 last_dim_assumed_size: false,
3127 };
3128 let source_scalar_layout = if rank == 0 && source_desc.is_none() {
3129 source_expr.and_then(|expr| {
3130 expr_type_layout(expr, None, ctx.st, ctx.type_layouts)
3131 })
3132 } else {
3133 None
3134 };
3135 let source_scalar_type = if rank == 0 && source_desc.is_none() {
3136 source_expr.and_then(|expr| {
3137 expr_derived_type_name(expr, None, ctx.st, ctx.type_layouts)
3138 })
3139 } else {
3140 None
3141 };
3142 let dynamic_layout = source_scalar_layout
3143 .or(typed_layout)
3144 .or_else(|| {
3145 field_info
3146 .derived_type
3147 .as_deref()
3148 .and_then(|type_name| ctx.type_layouts.get(type_name))
3149 });
3150 let scalar_source_copy_plan =
3151 if rank == 0 && source_desc.is_none() {
3152 source_expr.and_then(|expr| {
3153 expr_scalar_alloc_source_copy_plan(
3154 expr,
3155 &ctx.locals,
3156 ctx.st,
3157 ctx.type_layouts,
3158 )
3159 })
3160 } else {
3161 None
3162 };
3163 let array_source_copy_layout = if source_desc.is_some() {
3164 dynamic_layout.filter(|layout| {
3165 derived_layout_needs_deep_copy(layout, ctx.type_layouts)
3166 })
3167 } else {
3168 None
3169 };
3170 let elem_size_bytes = dynamic_layout
3171 .map(|layout| layout.size as i64)
3172 .unwrap_or_else(|| descriptor_element_size_bytes(&field_info));
3173 let es = allocated_array_elem_size(
3174 b,
3175 &field_info,
3176 elem_size_bytes,
3177 char_alloc_len,
3178 );
3179 let one_i64 = b.const_i64(1);
3180 let dim_buf = if rank == 0 {
3181 b.const_i64(0)
3182 } else {
3183 let dim_buf_bytes = (rank * 24) as u64;
3184 let dim_buf = b.alloca(IrType::Array(
3185 Box::new(IrType::Int(IntWidth::I8)),
3186 dim_buf_bytes,
3187 ));
3188 for (i, arg) in args.iter().enumerate() {
3189 let (lo64, up64) = lower_alloc_bounds(b, ctx, &arg.value);
3190 let base = (i * 24) as i64;
3191 let off_lo = b.const_i64(base);
3192 let off_up = b.const_i64(base + 8);
3193 let off_st = b.const_i64(base + 16);
3194 let p_lo =
3195 b.gep(dim_buf, vec![off_lo], IrType::Int(IntWidth::I8));
3196 let p_up =
3197 b.gep(dim_buf, vec![off_up], IrType::Int(IntWidth::I8));
3198 let p_st =
3199 b.gep(dim_buf, vec![off_st], IrType::Int(IntWidth::I8));
3200 b.store(lo64, p_lo);
3201 b.store(up64, p_up);
3202 b.store(one_i64, p_st);
3203 }
3204 dim_buf
3205 };
3206 if rank == 0 {
3207 if let Some(shape_desc) = shape_desc {
3208 b.call(
3209 FuncRef::External("afs_allocate_like".into()),
3210 vec![field_ptr, shape_desc, stat_addr],
3211 IrType::Void,
3212 );
3213 } else {
3214 let rank_val = b.const_i32(0);
3215 b.call(
3216 FuncRef::External("afs_allocate_array".into()),
3217 vec![field_ptr, es, rank_val, dim_buf, stat_addr],
3218 IrType::Void,
3219 );
3220 }
3221 } else {
3222 let rank_val = b.const_i32(rank as i32);
3223 b.call(
3224 FuncRef::External("afs_allocate_array".into()),
3225 vec![field_ptr, es, rank_val, dim_buf, stat_addr],
3226 IrType::Void,
3227 );
3228 }
3229 emit_runtime_errmsg_on_failure(
3230 b,
3231 stat_addr,
3232 errmsg_target.as_ref(),
3233 "ALLOCATE failed",
3234 );
3235 if let Some(source_desc) = source_desc {
3236 emit_allocatable_source_copy_on_success(
3237 b,
3238 stat_addr,
3239 field_ptr,
3240 source_desc,
3241 rank > 0,
3242 array_source_copy_layout,
3243 scalar_source_copy_plan.as_ref(),
3244 ctx.type_layouts,
3245 errmsg_target.as_ref(),
3246 );
3247 } else if rank == 0 {
3248 if let Some(source_desc) = source_scalar_desc {
3249 emit_allocatable_source_copy_on_success(
3250 b,
3251 stat_addr,
3252 field_ptr,
3253 source_desc,
3254 false,
3255 None,
3256 scalar_source_copy_plan.as_ref(),
3257 ctx.type_layouts,
3258 errmsg_target.as_ref(),
3259 );
3260 } else if let Some(source_expr) = source_expr {
3261 if !expr_is_character_expr(
3262 b,
3263 &ctx.locals,
3264 source_expr,
3265 ctx.st,
3266 Some(ctx.type_layouts),
3267 ) {
3268 let dest_base = b.load_typed(
3269 field_ptr,
3270 IrType::Ptr(Box::new(elem_ty.clone())),
3271 );
3272 emit_scalar_allocate_source_init_on_success(
3273 b,
3274 ctx,
3275 stat_addr,
3276 dest_base,
3277 &elem_ty,
3278 source_scalar_type
3279 .as_deref()
3280 .or(field_derived_type_name(&field).as_deref()),
3281 source_expr,
3282 );
3283 }
3284 }
3285 }
3286 if rank == 0 {
3287 let field_type_name = field_derived_type_name(&field);
3288 let type_tag = if source_desc.is_some() {
3289 None
3290 } else if let Some(source_expr) = source_expr {
3291 expr_type_tag_value(
3292 b,
3293 source_expr,
3294 None,
3295 ctx.st,
3296 ctx.type_layouts,
3297 )
3298 .or_else(|| {
3299 static_alloc_target_type_tag_value(
3300 b,
3301 item,
3302 ctx.st,
3303 ctx.type_layouts,
3304 )
3305 })
3306 } else if let Some(tag) = typed_type_tag {
3307 Some(tag)
3308 } else {
3309 derived_type_tag_value(
3310 b,
3311 field_type_name.as_deref(),
3312 ctx.type_layouts,
3313 )
3314 .or_else(|| {
3315 static_alloc_target_type_tag_value(
3316 b,
3317 item,
3318 ctx.st,
3319 ctx.type_layouts,
3320 )
3321 })
3322 };
3323 let tbp_lookup = if source_desc.is_some() {
3324 None
3325 } else if let Some(source_expr) = source_expr {
3326 expr_tbp_lookup_value(
3327 b,
3328 source_expr,
3329 None,
3330 ctx.st,
3331 ctx.type_layouts,
3332 )
3333 .or_else(|| {
3334 static_alloc_target_tbp_lookup_value(
3335 b,
3336 item,
3337 ctx.st,
3338 ctx.type_layouts,
3339 )
3340 })
3341 } else if let Some(ptr) = typed_tbp_lookup {
3342 Some(ptr)
3343 } else {
3344 derived_type_tbp_lookup_value(
3345 b,
3346 field_type_name.as_deref(),
3347 ctx.type_layouts,
3348 )
3349 .or_else(|| {
3350 static_alloc_target_tbp_lookup_value(
3351 b,
3352 item,
3353 ctx.st,
3354 ctx.type_layouts,
3355 )
3356 })
3357 };
3358 emit_scalar_alloc_polymorphic_metadata_on_success(
3359 b,
3360 stat_addr,
3361 field_ptr,
3362 type_tag,
3363 tbp_lookup,
3364 );
3365 if let Some(source_desc) = source_desc {
3366 emit_scalar_alloc_source_descriptor_metadata_on_success(
3367 b,
3368 stat_addr,
3369 field_ptr,
3370 source_desc,
3371 );
3372 } else if let Some(source_desc) = source_scalar_desc {
3373 emit_scalar_alloc_source_descriptor_metadata_on_success(
3374 b,
3375 stat_addr,
3376 field_ptr,
3377 source_desc,
3378 );
3379 }
3380 let copied_from_source = source_desc.is_some()
3381 || source_scalar_desc.is_some()
3382 || source_expr.is_some();
3383 if !copied_from_source {
3384 if let Some(layout) = dynamic_layout {
3385 let base_ptr = b.load_typed(
3386 field_ptr,
3387 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3388 );
3389 if derived_layout_needs_runtime_initialization(
3390 layout,
3391 ctx.type_layouts,
3392 ) {
3393 initialize_derived_storage(
3394 b,
3395 base_ptr,
3396 layout,
3397 ctx.type_layouts,
3398 );
3399 }
3400 }
3401 }
3402 }
3403 continue;
3404 }
3405 if (field.allocatable || field.pointer) && args.is_empty() {
3406 let field_info = LocalInfo {
3407 addr: field_ptr,
3408 ty: field_storage_ir_type(&field, ctx.type_layouts),
3409 dims: vec![],
3410 allocatable: false,
3411 descriptor_arg: false,
3412 by_ref: false,
3413 char_kind: field_char_kind(&field),
3414 derived_type: field_derived_type_name(&field),
3415 inline_const: None,
3416 is_pointer: field.pointer,
3417 runtime_dim_upper: vec![],
3418 is_class: false,
3419 logical_kind: None,
3420 last_dim_assumed_size: false,
3421 };
3422 let elem_size_bytes =
3423 local_storage_size_bytes(&field_info, ctx.type_layouts);
3424 let size_val = b.const_i32(elem_size_bytes as i32);
3425 let ptr = b.runtime_call(
3426 RuntimeFunc::Allocate,
3427 vec![size_val],
3428 IrType::Ptr(Box::new(field_info.ty.clone())),
3429 );
3430 b.store(ptr, field_ptr);
3431 if let Some(type_name) = &field_info.derived_type {
3432 if let Some(layout) = ctx.type_layouts.get(type_name) {
3433 let base_ptr = b.load_typed(
3434 field_ptr,
3435 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3436 );
3437 if derived_layout_needs_runtime_initialization(
3438 layout,
3439 ctx.type_layouts,
3440 ) {
3441 initialize_derived_storage(
3442 b,
3443 base_ptr,
3444 layout,
3445 ctx.type_layouts,
3446 );
3447 }
3448 }
3449 }
3450 continue;
3451 }
3452 }
3453 }
3454 let (base_name, args): (Option<String>, &[crate::ast::expr::Argument]) =
3455 match &item.node {
3456 Expr::FunctionCall { callee, args } => (extract_base_name(callee), args),
3457 Expr::Name { name } => (Some(name.clone()), &[]),
3458 _ => (None, &[]),
3459 };
3460 if let Some(name) = base_name {
3461 if let Some(info) = ctx.locals.get(&name.to_lowercase()).cloned() {
3462 if matches!(info.char_kind, CharKind::Deferred) {
3463 let Some(len_val) = char_alloc_len else {
3464 eprintln!(
3465 "armfortas: error: {}:{}: deferred-length character ALLOCATE requires a typed length or SOURCE/MOLD support",
3466 stmt.span.start.line, stmt.span.start.col
3467 );
3468 let _ = std::io::stderr().flush();
3469 std::process::exit(1);
3470 };
3471 let desc = string_descriptor_addr(b, &info);
3472 init_allocated_string_descriptor(b, desc, len_val);
3473 if let Some((src_ptr, src_len)) = source_char {
3474 b.call(
3475 FuncRef::External("afs_assign_char_deferred".into()),
3476 vec![desc, src_ptr, src_len],
3477 IrType::Void,
3478 );
3479 }
3480 continue;
3481 }
3482 let elem_size_bytes = local_storage_size_bytes(&info, ctx.type_layouts);
3483
3484 if info.allocatable || info.descriptor_arg {
3485 let rank = args.len();
3486 let source_scalar_layout = if rank == 0 && source_desc.is_none() {
3487 source_expr.and_then(|expr| {
3488 expr_type_layout(expr, None, ctx.st, ctx.type_layouts)
3489 })
3490 } else {
3491 None
3492 };
3493 let source_scalar_type = if rank == 0 && source_desc.is_none() {
3494 source_expr.and_then(|expr| {
3495 expr_derived_type_name(expr, None, ctx.st, ctx.type_layouts)
3496 })
3497 } else {
3498 None
3499 };
3500 let dynamic_layout = source_scalar_layout
3501 .or(typed_layout)
3502 .or_else(|| {
3503 info.derived_type
3504 .as_deref()
3505 .and_then(|type_name| ctx.type_layouts.get(type_name))
3506 });
3507 let scalar_source_copy_plan =
3508 if rank == 0 && source_desc.is_none() {
3509 source_expr.and_then(|expr| {
3510 expr_scalar_alloc_source_copy_plan(
3511 expr,
3512 &ctx.locals,
3513 ctx.st,
3514 ctx.type_layouts,
3515 )
3516 })
3517 } else {
3518 None
3519 };
3520 let array_source_copy_layout = if source_desc.is_some() {
3521 dynamic_layout.filter(|layout| {
3522 derived_layout_needs_deep_copy(layout, ctx.type_layouts)
3523 })
3524 } else {
3525 None
3526 };
3527 // Build a stack DimDescriptor[rank] honoring
3528 // each subscript's actual (lower, upper) bounds,
3529 // then call afs_allocate_array. Descriptor-backed
3530 // dummy arrays use the caller-owned descriptor
3531 // rather than the local spill slot that holds its
3532 // address. Scalar allocatables lower as a rank-0
3533 // descriptor allocation.
3534 let es = allocated_array_elem_size(
3535 b,
3536 &info,
3537 dynamic_layout
3538 .map(|layout| layout.size as i64)
3539 .unwrap_or(elem_size_bytes),
3540 char_alloc_len,
3541 );
3542 let desc = array_descriptor_addr(b, &info);
3543 let one_i64 = b.const_i64(1);
3544 let dim_buf = if rank == 0 {
3545 b.const_i64(0)
3546 } else {
3547 let dim_buf_bytes = (rank * 24) as u64;
3548 let dim_buf = b.alloca(IrType::Array(
3549 Box::new(IrType::Int(IntWidth::I8)),
3550 dim_buf_bytes,
3551 ));
3552 for (i, arg) in args.iter().enumerate() {
3553 let (lo64, up64) = lower_alloc_bounds(b, ctx, &arg.value);
3554 let base = (i * 24) as i64;
3555 let off_lo = b.const_i64(base);
3556 let off_up = b.const_i64(base + 8);
3557 let off_st = b.const_i64(base + 16);
3558 let p_lo =
3559 b.gep(dim_buf, vec![off_lo], IrType::Int(IntWidth::I8));
3560 let p_up =
3561 b.gep(dim_buf, vec![off_up], IrType::Int(IntWidth::I8));
3562 let p_st =
3563 b.gep(dim_buf, vec![off_st], IrType::Int(IntWidth::I8));
3564 b.store(lo64, p_lo);
3565 b.store(up64, p_up);
3566 b.store(one_i64, p_st);
3567 }
3568 dim_buf
3569 };
3570 if rank == 0 {
3571 if let Some(shape_desc) = shape_desc {
3572 b.call(
3573 FuncRef::External("afs_allocate_like".into()),
3574 vec![desc, shape_desc, stat_addr],
3575 IrType::Void,
3576 );
3577 } else {
3578 let rank_val = b.const_i32(0);
3579 b.call(
3580 FuncRef::External("afs_allocate_array".into()),
3581 vec![desc, es, rank_val, dim_buf, stat_addr],
3582 IrType::Void,
3583 );
3584 }
3585 } else {
3586 let rank_val = b.const_i32(rank as i32);
3587 b.call(
3588 FuncRef::External("afs_allocate_array".into()),
3589 vec![desc, es, rank_val, dim_buf, stat_addr],
3590 IrType::Void,
3591 );
3592 }
3593 emit_runtime_errmsg_on_failure(
3594 b,
3595 stat_addr,
3596 errmsg_target.as_ref(),
3597 "ALLOCATE failed",
3598 );
3599 if let Some(source_desc) = source_desc {
3600 emit_allocatable_source_copy_on_success(
3601 b,
3602 stat_addr,
3603 desc,
3604 source_desc,
3605 rank > 0,
3606 array_source_copy_layout,
3607 scalar_source_copy_plan.as_ref(),
3608 ctx.type_layouts,
3609 errmsg_target.as_ref(),
3610 );
3611 } else if rank == 0 {
3612 if let Some(source_desc) = source_scalar_desc {
3613 emit_allocatable_source_copy_on_success(
3614 b,
3615 stat_addr,
3616 desc,
3617 source_desc,
3618 false,
3619 None,
3620 scalar_source_copy_plan.as_ref(),
3621 ctx.type_layouts,
3622 errmsg_target.as_ref(),
3623 );
3624 } else if let Some(source_expr) = source_expr {
3625 if !expr_is_character_expr(
3626 b,
3627 &ctx.locals,
3628 source_expr,
3629 ctx.st,
3630 Some(ctx.type_layouts),
3631 ) {
3632 let dest_base = b.load_typed(
3633 desc,
3634 IrType::Ptr(Box::new(info.ty.clone())),
3635 );
3636 emit_scalar_allocate_source_init_on_success(
3637 b,
3638 ctx,
3639 stat_addr,
3640 dest_base,
3641 &info.ty,
3642 source_scalar_type
3643 .as_deref()
3644 .or(info.derived_type.as_deref()),
3645 source_expr,
3646 );
3647 }
3648 }
3649 }
3650 if rank == 0 {
3651 let type_tag = if source_desc.is_some() {
3652 None
3653 } else if let Some(source_expr) = source_expr {
3654 expr_type_tag_value(
3655 b,
3656 source_expr,
3657 None,
3658 ctx.st,
3659 ctx.type_layouts,
3660 )
3661 .or_else(|| {
3662 static_alloc_target_type_tag_value(
3663 b,
3664 item,
3665 ctx.st,
3666 ctx.type_layouts,
3667 )
3668 })
3669 } else if let Some(tag) = typed_type_tag {
3670 Some(tag)
3671 } else {
3672 derived_type_tag_value(
3673 b,
3674 info.derived_type.as_deref(),
3675 ctx.type_layouts,
3676 )
3677 .or_else(|| {
3678 static_alloc_target_type_tag_value(
3679 b,
3680 item,
3681 ctx.st,
3682 ctx.type_layouts,
3683 )
3684 })
3685 };
3686 let tbp_lookup = if source_desc.is_some() {
3687 None
3688 } else if let Some(source_expr) = source_expr {
3689 expr_tbp_lookup_value(
3690 b,
3691 source_expr,
3692 None,
3693 ctx.st,
3694 ctx.type_layouts,
3695 )
3696 .or_else(|| {
3697 static_alloc_target_tbp_lookup_value(
3698 b,
3699 item,
3700 ctx.st,
3701 ctx.type_layouts,
3702 )
3703 })
3704 } else if let Some(ptr) = typed_tbp_lookup {
3705 Some(ptr)
3706 } else {
3707 derived_type_tbp_lookup_value(
3708 b,
3709 info.derived_type.as_deref(),
3710 ctx.type_layouts,
3711 )
3712 .or_else(|| {
3713 static_alloc_target_tbp_lookup_value(
3714 b,
3715 item,
3716 ctx.st,
3717 ctx.type_layouts,
3718 )
3719 })
3720 };
3721 emit_scalar_alloc_polymorphic_metadata_on_success(
3722 b,
3723 stat_addr,
3724 desc,
3725 type_tag,
3726 tbp_lookup,
3727 );
3728 if let Some(source_desc) = source_desc {
3729 emit_scalar_alloc_source_descriptor_metadata_on_success(
3730 b,
3731 stat_addr,
3732 desc,
3733 source_desc,
3734 );
3735 } else if let Some(source_desc) = source_scalar_desc {
3736 emit_scalar_alloc_source_descriptor_metadata_on_success(
3737 b,
3738 stat_addr,
3739 desc,
3740 source_desc,
3741 );
3742 }
3743 let copied_from_source = source_desc.is_some()
3744 || source_scalar_desc.is_some()
3745 || source_expr.is_some();
3746 if !copied_from_source {
3747 if let Some(layout) = dynamic_layout {
3748 let base_ptr = b.load_typed(
3749 desc,
3750 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3751 );
3752 if derived_layout_needs_runtime_initialization(
3753 layout,
3754 ctx.type_layouts,
3755 ) {
3756 initialize_derived_storage(
3757 b,
3758 base_ptr,
3759 layout,
3760 ctx.type_layouts,
3761 );
3762 }
3763 }
3764 }
3765 }
3766 } else {
3767 // Non-allocatable array: old path (shouldn't happen for ALLOCATE).
3768 let size_val = b.const_i32(elem_size_bytes as i32);
3769 let ptr = b.runtime_call(
3770 RuntimeFunc::Allocate,
3771 vec![size_val],
3772 IrType::Ptr(Box::new(info.ty.clone())),
3773 );
3774 let slot = if info.is_pointer && info.by_ref {
3775 b.load_typed(
3776 info.addr,
3777 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3778 )
3779 } else {
3780 info.addr
3781 };
3782 b.store(ptr, slot);
3783 if let Some(type_name) = &info.derived_type {
3784 if let Some(layout) = ctx.type_layouts.get(type_name) {
3785 let base_ptr = b.load_typed(
3786 slot,
3787 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3788 );
3789 if derived_layout_needs_runtime_initialization(
3790 layout,
3791 ctx.type_layouts,
3792 ) {
3793 initialize_derived_storage(
3794 b,
3795 base_ptr,
3796 layout,
3797 ctx.type_layouts,
3798 );
3799 }
3800 }
3801 }
3802 }
3803 }
3804 }
3805 }
3806 super::core::emit_allocate_status_writeback(b, &stat_target);
3807 }
3808
        Stmt::Deallocate { items, opts } => {
            // DEALLOCATE: release each item's storage, routing by storage
            // kind. STAT=/ERRMSG= targets are resolved once up front and
            // shared by every item; the final STAT value is written back to
            // the user variable after the loop.
            let dealloc_stat_target = super::core::allocate_status_target(b, ctx, opts);
            let stat_addr = dealloc_stat_target.runtime_addr;
            let errmsg_target = allocate_errmsg_target(b, ctx, opts);
            for item in items {
                // Component access (obj%field): resolve the field's address
                // and dispatch on the field's declared attributes.
                if let Expr::ComponentAccess { .. } = &item.node {
                    if let Some((field_ptr, field)) = resolve_component_field_access(
                        b,
                        &ctx.locals,
                        item,
                        ctx.st,
                        ctx.type_layouts,
                    ) {
                        // Deferred-length CHARACTER component: the field is a
                        // string descriptor; the runtime frees and resets it.
                        if is_deferred_char_component_field(&field) {
                            b.call(
                                FuncRef::External("afs_dealloc_string".into()),
                                vec![field_ptr],
                                IrType::Void,
                            );
                            continue;
                        }
                        // Allocatable/pointer array component stored as an
                        // inline array descriptor. NOTE(review): 384 is
                        // presumably the byte size of the runtime array
                        // descriptor struct — confirm against the layout code
                        // and consider naming the constant.
                        if field.size == 384 && (field.allocatable || field.pointer) {
                            b.call(
                                FuncRef::External("afs_deallocate_array".into()),
                                vec![field_ptr, stat_addr],
                                IrType::Void,
                            );
                            emit_runtime_errmsg_on_failure(
                                b,
                                stat_addr,
                                errmsg_target.as_ref(),
                                "DEALLOCATE failed",
                            );
                            continue;
                        }
                        // Scalar POINTER component: free the pointee, then
                        // null the slot (see comment below).
                        if field.pointer {
                            let ptr = b.load_typed(
                                field_ptr,
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            b.runtime_call(RuntimeFunc::Deallocate, vec![ptr], IrType::Void);
                            // F2018 §9.7.3.2: deallocating a pointer
                            // disassociates it. Null the slot so a
                            // subsequent `associated()` returns false
                            // and `=> null()`-style sentinels work.
                            // Without this, free_map_entry_pool's
                            // `if (.not. associated(pool)) return`
                            // never fires for re-deallocated pools
                            // and recurses until stack overflow.
                            let null_v = b.const_i64(0);
                            let null_p = b.int_to_ptr(
                                null_v,
                                IrType::Int(IntWidth::I8),
                            );
                            b.store(null_p, field_ptr);
                            continue;
                        }
                    }
                }
                // Plain name (or expression reducible to one): look up the
                // local and dispatch on its LocalInfo flags. Unknown names
                // are silently skipped.
                let base_name = extract_base_name(item);
                if let Some(name) = base_name {
                    if let Some(info) = ctx.locals.get(&name.to_lowercase()) {
                        if matches!(info.char_kind, CharKind::Deferred) {
                            // Deferred-length CHARACTER local: free via its
                            // string descriptor.
                            let desc = string_descriptor_addr(b, info);
                            b.call(
                                FuncRef::External("afs_dealloc_string".into()),
                                vec![desc],
                                IrType::Void,
                            );
                        } else if info.allocatable || info.descriptor_arg {
                            // Allocatable array (or descriptor-backed dummy):
                            // free via its array descriptor with STAT support.
                            let desc = array_descriptor_addr(b, info);
                            b.call(
                                FuncRef::External("afs_deallocate_array".into()),
                                vec![desc, stat_addr],
                                IrType::Void,
                            );
                            emit_runtime_errmsg_on_failure(
                                b,
                                stat_addr,
                                errmsg_target.as_ref(),
                                "DEALLOCATE failed",
                            );
                        } else if info.is_pointer {
                            // Scalar POINTER local. by_ref pointers hold the
                            // slot's address, so dereference once first.
                            let slot = if info.by_ref {
                                b.load(info.addr)
                            } else {
                                info.addr
                            };
                            let ptr = b
                                .load_typed(slot, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                            b.runtime_call(RuntimeFunc::Deallocate, vec![ptr], IrType::Void);
                            // Null the pointer slot per F2018 §9.7.3.2.
                            let null_v = b.const_i64(0);
                            let null_p = b.int_to_ptr(
                                null_v,
                                IrType::Int(IntWidth::I8),
                            );
                            b.store(null_p, slot);
                        } else {
                            // Fallback: treat the slot as holding a raw heap
                            // pointer and free it (legacy non-descriptor path).
                            let ptr = b.load_typed(
                                info.addr,
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            b.runtime_call(RuntimeFunc::Deallocate, vec![ptr], IrType::Void);
                        }
                    }
                }
            }
            // Propagate the runtime STAT value into the user's STAT= variable.
            super::core::emit_allocate_status_writeback(b, &dealloc_stat_target);
        }
3919
        Stmt::Block {
            uses,
            implicit,
            decls,
            body,
            ..
        } => {
            // F2008 BLOCK: declarations are scoped to the body.
            // Save any locals that the BLOCK's decls shadow, run
            // the body, then restore the originals. F2018 §11.1.4
            // also gives the BLOCK its own implicit-typing
            // environment: an `implicit integer (i-n)` here introduces
            // `n` as an integer local even when the enclosing scope
            // is IMPLICIT NONE. Synthesise TypeDecl entries for any
            // name the body references that isn't in ctx.locals and
            // whose first letter falls in a block-local implicit
            // range, then run alloc_decls / init_decls over the
            // combined list.
            //
            // Snapshot the outer scope's key set so that, at END BLOCK,
            // anything introduced during the body (including USE-associated
            // names installed below) can be stripped back out.
            let pre_block_keys: HashSet<String> = ctx.locals.keys().cloned().collect();
            let mut effective_decls: Vec<crate::ast::decl::SpannedDecl> = decls.clone();
            // Flatten the BLOCK's IMPLICIT statements into a
            // first-letter -> TypeSpec map (later specs overwrite earlier
            // ones for overlapping letter ranges).
            let mut implicit_map: std::collections::HashMap<char, crate::ast::decl::TypeSpec> =
                std::collections::HashMap::new();
            for d in implicit {
                if let crate::ast::decl::Decl::ImplicitStmt { specs } = &d.node {
                    for spec in specs {
                        for &(start, end) in &spec.ranges {
                            for letter_byte in start as u8..=end as u8 {
                                let letter = (letter_byte as char).to_ascii_lowercase();
                                implicit_map.insert(letter, spec.type_spec.clone());
                            }
                        }
                    }
                }
            }
            if !implicit_map.is_empty() {
                // Names already covered by an explicit TypeDecl in this BLOCK
                // must not get a synthesized implicit declaration too.
                let mut already_decl: std::collections::HashSet<String> = decls
                    .iter()
                    .flat_map(|d| {
                        if let crate::ast::decl::Decl::TypeDecl { entities, .. } = &d.node {
                            entities
                                .iter()
                                .map(|e| e.name.to_lowercase())
                                .collect::<Vec<_>>()
                        } else {
                            vec![]
                        }
                    })
                    .collect();
                let mut referenced: Vec<String> = Vec::new();
                for s in body {
                    collect_referenced_names(s, &mut referenced);
                }
                // For each referenced name that is neither declared here nor
                // visible in the enclosing scope, synthesize a scalar TypeDecl
                // from the implicit letter map so alloc_decls gives it storage.
                for name in referenced {
                    let key = name.to_lowercase();
                    if already_decl.contains(&key) {
                        continue;
                    }
                    if ctx.locals.contains_key(&key) {
                        continue;
                    }
                    let Some(first) = key.chars().next() else {
                        continue;
                    };
                    let Some(type_spec) = implicit_map.get(&first.to_ascii_lowercase()) else {
                        continue;
                    };
                    // Mark as declared so duplicate references synthesize
                    // only one entry.
                    already_decl.insert(key.clone());
                    let synth = crate::ast::decl::Decl::TypeDecl {
                        type_spec: type_spec.clone(),
                        attrs: Vec::new(),
                        entities: vec![crate::ast::decl::EntityDecl {
                            name: name.clone(),
                            array_spec: None,
                            char_len: None,
                            init: None,
                            ptr_init: None,
                        }],
                    };
                    effective_decls.push(crate::ast::Spanned {
                        node: synth,
                        span: stmt.span,
                    });
                }
            }
            // Every name the BLOCK declares (explicitly or synthesized) —
            // these are the keys that may shadow outer locals.
            let block_keys: Vec<String> = effective_decls
                .iter()
                .flat_map(|d| {
                    if let crate::ast::decl::Decl::TypeDecl { entities, .. } = &d.node {
                        entities
                            .iter()
                            .map(|e| e.name.to_lowercase())
                            .collect::<Vec<_>>()
                    } else {
                        vec![]
                    }
                })
                .collect();
            // Remember the outer binding (if any) for each block key so it
            // can be reinstated at END BLOCK.
            let saved: Vec<(String, Option<LocalInfo>)> = block_keys
                .iter()
                .map(|k| (k.clone(), ctx.locals.get(k).cloned()))
                .collect();
            if !effective_decls.is_empty() {
                // Remove shadowed keys so alloc_decls creates fresh allocas.
                for k in &block_keys {
                    ctx.locals.remove(k);
                }
                super::alloc::alloc_decls(
                    b,
                    &mut ctx.locals,
                    &effective_decls,
                    &HashMap::new(),
                    ctx.type_layouts,
                    &mut Vec::new(),
                    "",
                    ctx.st,
                );
                super::init::init_decls(
                    b,
                    &ctx.locals,
                    &effective_decls,
                    ctx.st,
                    Some(ctx.type_layouts),
                );
            }
            if !uses.is_empty() {
                // Block-local USE statements: install module globals as
                // locals, restricted to names the body actually needs.
                let required_import_names = collect_required_import_names(&effective_decls, body);
                install_globals_as_locals(
                    b,
                    &mut ctx.locals,
                    ctx.globals,
                    uses,
                    Some(&required_import_names),
                    None,
                    ctx.st,
                    &ctx.ambiguous_use_warnings,
                );
            }
            lower_stmts(b, ctx, body);
            // F2018 §7.5.6.3 / §9.7.3.2: at END BLOCK, finalize derived-type
            // locals that have FINAL subroutines and deallocate
            // block-scoped allocatables. Only do this for keys that were
            // newly introduced by the block (not shadowed outer locals).
            // Skipped entirely if the body already terminated the current
            // block (e.g. via RETURN/GOTO) — there is no fallthrough edge
            // to attach cleanup to.
            if b.func().block(b.current_block()).terminator.is_none() {
                let block_only: HashMap<String, LocalInfo> = block_keys
                    .iter()
                    .filter(|k| ctx.locals.contains_key(*k))
                    .filter(|k| !saved.iter().any(|(sk, so)| sk == *k && so.is_some()))
                    .filter_map(|k| ctx.locals.get(k).map(|v| (k.clone(), v.clone())))
                    .collect();
                if !block_only.is_empty() {
                    insert_implicit_dealloc(
                        b,
                        &block_only,
                        &ctx.locals,
                        ctx.type_layouts,
                        ctx.st,
                        ctx.internal_funcs,
                        Some(ctx.contained_host_refs),
                        None,
                    );
                }
            }
            // Restore the outer scope's locals.
            for (k, orig) in saved {
                if let Some(info) = orig {
                    ctx.locals.insert(k, info);
                } else {
                    ctx.locals.remove(&k);
                }
            }
            // Finally drop anything else the body introduced (e.g. names
            // installed by block-local USE) so the outer scope is exactly as
            // it was on entry.
            ctx.locals.retain(|k, _| pre_block_keys.contains(k));
        }
4092
4093 Stmt::Associate { assocs, body, .. } => {
4094 // Associate names are scoped — they only exist within the body.
4095 let added_keys: Vec<String> =
4096 assocs.iter().map(|(name, _)| name.to_lowercase()).collect();
4097
4098 for (name, expr) in assocs {
4099 if let Some(info) = associate_alias_local_info(b, ctx, expr) {
4100 ctx.locals.insert(name.to_lowercase(), info);
4101 continue;
4102 }
4103 let val = super::expr::lower_expr_ctx(b, ctx, expr);
4104 let ty = b
4105 .func()
4106 .value_type(val)
4107 .unwrap_or(IrType::Int(IntWidth::I32));
4108 let addr = b.alloca(ty.clone());
4109 b.store(val, addr);
4110 ctx.locals.insert(
4111 name.to_lowercase(),
4112 LocalInfo {
4113 addr,
4114 ty,
4115 dims: vec![],
4116 allocatable: false,
4117 descriptor_arg: false,
4118 by_ref: false,
4119 char_kind: CharKind::None,
4120 derived_type: None,
4121 inline_const: None,
4122 is_pointer: false,
4123 runtime_dim_upper: vec![],
4124 is_class: false,
4125 logical_kind: None,
4126 last_dim_assumed_size: false,
4127 },
4128 );
4129 }
4130 lower_stmts(b, ctx, body);
4131
4132 // Remove associate names from scope.
4133 for key in &added_keys {
4134 ctx.locals.remove(key);
4135 }
4136 }
4137
4138 Stmt::Continue { label: Some(lbl) } => {
4139 // Labeled CONTINUE: fall through to the label's block.
4140 if let Some(&label_bb) = ctx.label_blocks.get(lbl) {
4141 if b.func().block(b.current_block()).terminator.is_none() {
4142 b.branch(label_bb, vec![]);
4143 }
4144 b.set_block(label_bb);
4145 }
4146 }
4147 Stmt::Continue { label: None } => {} // no-op
4148
4149 Stmt::Goto { label } => {
4150 if let Some(&target_bb) = ctx.label_blocks.get(label) {
4151 b.branch(target_bb, vec![]);
4152 }
4153 }
4154
4155 Stmt::ComputedGoto { labels, selector } => {
4156 // F2018 §11.2.3: `GO TO (l1, l2, ..., ln) expr` evaluates `expr`
4157 // (integer); if 1 <= expr <= n, branches to label expr; otherwise
4158 // falls through to the next statement.
4159 //
4160 // Lower as a chain of (icmp eq expr, k) branches: for each
4161 // k = 1..n create (cond_br to labels[k-1], else fallthrough).
4162 // The fallthrough block is what the next statement sees.
4163 if labels.is_empty() {
4164 // Empty list — purely a fall-through with side effect of
4165 // evaluating the selector. Just lower the expression.
4166 let _ = super::expr::lower_expr_ctx(b, ctx, selector);
4167 return;
4168 }
4169 let sel_raw = super::expr::lower_expr_ctx(b, ctx, selector);
4170 let sel_i32 = match b.func().value_type(sel_raw) {
4171 Some(IrType::Int(IntWidth::I32)) => sel_raw,
4172 Some(IrType::Int(IntWidth::I64)) => b.int_trunc(sel_raw, IntWidth::I32),
4173 Some(IrType::Int(_)) => b.int_extend(sel_raw, IntWidth::I32, true),
4174 _ => sel_raw,
4175 };
4176 for (i, label) in labels.iter().enumerate() {
4177 let Some(&target_bb) = ctx.label_blocks.get(label) else {
4178 continue;
4179 };
4180 let key = (i + 1) as i32;
4181 let key_val = b.const_i32(key);
4182 let matches = b.icmp(CmpOp::Eq, sel_i32, key_val);
4183 let next_check = b.create_block("computed_goto_next");
4184 b.cond_branch(matches, target_bb, vec![], next_check, vec![]);
4185 b.set_block(next_check);
4186 }
4187 // Falling out of the loop, current block is the post-chain
4188 // block — execution continues into whatever statement follows.
4189 }
4190
4191 Stmt::Labeled { label, stmt: inner } => {
4192 // Create an edge from the current block into the label's block (fall-through),
4193 // then switch to the label's block and lower the inner statement.
4194 if let Some(&label_bb) = ctx.label_blocks.get(label) {
4195 if b.func().block(b.current_block()).terminator.is_none() {
4196 b.branch(label_bb, vec![]);
4197 }
4198 b.set_block(label_bb);
4199 }
4200 lower_stmt(b, ctx, inner);
4201 }
4202
4203 Stmt::Open { specs } => {
4204 let unit_spec = specs
4205 .iter()
4206 .find(|s| {
4207 s.keyword
4208 .as_deref()
4209 .map(|k| k.eq_ignore_ascii_case("unit"))
4210 .unwrap_or(false)
4211 })
4212 .or_else(|| specs.iter().find(|s| s.keyword.is_none()));
4213 let newunit_spec = specs.iter().find(|s| {
4214 s.keyword
4215 .as_deref()
4216 .map(|k| k.eq_ignore_ascii_case("newunit"))
4217 .unwrap_or(false)
4218 });
4219 let iostat_spec = specs.iter().find(|s| {
4220 s.keyword
4221 .as_deref()
4222 .map(|k| k.eq_ignore_ascii_case("iostat"))
4223 .unwrap_or(false)
4224 });
4225 let unit = if let Some(s) = unit_spec {
4226 super::expr::lower_expr_ctx(b, ctx, &s.value)
4227 } else if newunit_spec.is_some() {
4228 b.const_i32(0)
4229 } else {
4230 b.const_i32(6)
4231 };
4232
4233 // Find FILE= spec.
4234 let (file_ptr, file_len) = specs
4235 .iter()
4236 .find(|s| {
4237 s.keyword
4238 .as_deref()
4239 .map(|k| k.eq_ignore_ascii_case("file"))
4240 .unwrap_or(false)
4241 })
4242 .map(|s| {
4243 lower_string_expr_with_layouts(
4244 b,
4245 &ctx.locals,
4246 &s.value,
4247 ctx.st,
4248 Some(ctx.type_layouts),
4249 )
4250 })
4251 .unwrap_or_else(|| {
4252 let z = b.const_i64(0);
4253 (z, z)
4254 });
4255
4256 // Find STATUS= spec.
4257 let (status_ptr, status_len) = specs
4258 .iter()
4259 .find(|s| {
4260 s.keyword
4261 .as_deref()
4262 .map(|k| k.eq_ignore_ascii_case("status"))
4263 .unwrap_or(false)
4264 })
4265 .map(|s| {
4266 lower_string_expr_with_layouts(
4267 b,
4268 &ctx.locals,
4269 &s.value,
4270 ctx.st,
4271 Some(ctx.type_layouts),
4272 )
4273 })
4274 .unwrap_or_else(|| {
4275 let z = b.const_i64(0);
4276 (z, z)
4277 });
4278
4279 // Find ACTION= spec.
4280 let (action_ptr, action_len) = specs
4281 .iter()
4282 .find(|s| {
4283 s.keyword
4284 .as_deref()
4285 .map(|k| k.eq_ignore_ascii_case("action"))
4286 .unwrap_or(false)
4287 })
4288 .map(|s| {
4289 lower_string_expr_with_layouts(
4290 b,
4291 &ctx.locals,
4292 &s.value,
4293 ctx.st,
4294 Some(ctx.type_layouts),
4295 )
4296 })
4297 .unwrap_or_else(|| {
4298 let z = b.const_i64(0);
4299 (z, z)
4300 });
4301
4302 // Find ACCESS= spec.
4303 let (access_ptr, access_len) = specs
4304 .iter()
4305 .find(|s| {
4306 s.keyword
4307 .as_deref()
4308 .map(|k| k.eq_ignore_ascii_case("access"))
4309 .unwrap_or(false)
4310 })
4311 .map(|s| {
4312 lower_string_expr_with_layouts(
4313 b,
4314 &ctx.locals,
4315 &s.value,
4316 ctx.st,
4317 Some(ctx.type_layouts),
4318 )
4319 })
4320 .unwrap_or_else(|| {
4321 let z = b.const_i64(0);
4322 (z, z)
4323 });
4324
4325 // Find FORM= spec.
4326 let (form_ptr, form_len) = specs
4327 .iter()
4328 .find(|s| {
4329 s.keyword
4330 .as_deref()
4331 .map(|k| k.eq_ignore_ascii_case("form"))
4332 .unwrap_or(false)
4333 })
4334 .map(|s| {
4335 lower_string_expr_with_layouts(
4336 b,
4337 &ctx.locals,
4338 &s.value,
4339 ctx.st,
4340 Some(ctx.type_layouts),
4341 )
4342 })
4343 .unwrap_or_else(|| {
4344 let z = b.const_i64(0);
4345 (z, z)
4346 });
4347
4348 // Find RECL= spec.
4349 let recl_val = specs
4350 .iter()
4351 .find(|s| {
4352 s.keyword
4353 .as_deref()
4354 .map(|k| k.eq_ignore_ascii_case("recl"))
4355 .unwrap_or(false)
4356 })
4357 .map(|s| super::expr::lower_expr_ctx(b, ctx, &s.value))
4358 .unwrap_or_else(|| b.const_i64(0));
4359
4360 let null = b.const_i64(0);
4361 let unit_i32 = coerce_to_type(b, unit, &IrType::Int(IntWidth::I32));
4362 let recl_i64 = coerce_to_type(b, recl_val, &IrType::Int(IntWidth::I64));
4363
4364 // Check if we have any extended specifiers beyond the basic 7-arg set.
4365 let has_access = specs.iter().any(|s| {
4366 s.keyword
4367 .as_deref()
4368 .map(|k| k.eq_ignore_ascii_case("access"))
4369 .unwrap_or(false)
4370 });
4371 let has_form = specs.iter().any(|s| {
4372 s.keyword
4373 .as_deref()
4374 .map(|k| k.eq_ignore_ascii_case("form"))
4375 .unwrap_or(false)
4376 });
4377 let has_recl = specs.iter().any(|s| {
4378 s.keyword
4379 .as_deref()
4380 .map(|k| k.eq_ignore_ascii_case("recl"))
4381 .unwrap_or(false)
4382 });
4383 let has_position = specs.iter().any(|s| {
4384 s.keyword
4385 .as_deref()
4386 .map(|k| k.eq_ignore_ascii_case("position"))
4387 .unwrap_or(false)
4388 });
4389 let has_iostat = iostat_spec.is_some();
4390 let has_newunit = newunit_spec.is_some();
4391
4392 if !has_access && !has_form && !has_recl && !has_position && !has_iostat && !has_newunit
4393 {
4394 // Simple case: use 7-arg afs_open_simple (unit + 3 string pairs).
4395 b.call(
4396 FuncRef::External("afs_open_simple".into()),
4397 vec![
4398 unit_i32, file_ptr, file_len, status_ptr, status_len, action_ptr,
4399 action_len,
4400 ],
4401 IrType::Void,
4402 );
4403 } else {
4404 // Extended case: build OpenControlBlock on the stack.
4405 // Find POSITION= spec.
4406 let (position_ptr, position_len) = specs
4407 .iter()
4408 .find(|s| {
4409 s.keyword
4410 .as_deref()
4411 .map(|k| k.eq_ignore_ascii_case("position"))
4412 .unwrap_or(false)
4413 })
4414 .map(|s| {
4415 lower_string_expr_with_layouts(
4416 b,
4417 &ctx.locals,
4418 &s.value,
4419 ctx.st,
4420 Some(ctx.type_layouts),
4421 )
4422 })
4423 .unwrap_or_else(|| {
4424 let z = b.const_i64(0);
4425 (z, z)
4426 });
4427
4428 // Layout matches repr(C) OpenControlBlock (128 bytes):
4429 // 0: unit(i32) + 4 pad, 8: filename(ptr), 16: filename_len(i64),
4430 // 24: status(ptr), 32: status_len(i64), 40: action(ptr), 48: action_len(i64),
4431 // 56: access(ptr), 64: access_len(i64), 72: form(ptr), 80: form_len(i64),
4432 // 88: recl(i64), 96: iostat(ptr), 104: newunit(ptr),
4433 // 112: position(ptr), 120: position_len(i64)
4434 let cb_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 128);
4435 let cb = b.alloca(cb_ty);
4436
4437 let store_at = |b: &mut crate::ir::builder::FuncBuilder,
4438 base,
4439 offset: i64,
4440 field_ty: IrType,
4441 val| {
4442 let field_bytes = field_ty.size_bytes() as i64;
4443 debug_assert!(field_bytes > 0 && offset % field_bytes == 0);
4444 let slot = b.const_i64(offset / field_bytes);
4445 let ptr = b.gep(base, vec![slot], field_ty.clone());
4446 let stored = match field_ty {
4447 IrType::Int(_) | IrType::Float(_) | IrType::Bool => {
4448 coerce_to_type(b, val, &field_ty)
4449 }
4450 _ => val,
4451 };
4452 b.store(stored, ptr);
4453 };
4454
4455 let file_ptr_ty = b
4456 .func()
4457 .value_type(file_ptr)
4458 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4459 let status_ptr_ty = b
4460 .func()
4461 .value_type(status_ptr)
4462 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4463 let action_ptr_ty = b
4464 .func()
4465 .value_type(action_ptr)
4466 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4467 let access_ptr_ty = b
4468 .func()
4469 .value_type(access_ptr)
4470 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4471 let form_ptr_ty = b
4472 .func()
4473 .value_type(form_ptr)
4474 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4475 let position_ptr_ty = b
4476 .func()
4477 .value_type(position_ptr)
4478 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4479 let iostat_ptr = iostat_spec
4480 .map(|spec| lower_arg_by_ref_ctx(b, ctx, &spec.value))
4481 .unwrap_or(null);
4482 let newunit_ptr = newunit_spec
4483 .map(|spec| lower_arg_by_ref_ctx(b, ctx, &spec.value))
4484 .unwrap_or(null);
4485 let iostat_ptr_ty = b
4486 .func()
4487 .value_type(iostat_ptr)
4488 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4489 let newunit_ptr_ty = b
4490 .func()
4491 .value_type(newunit_ptr)
4492 .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4493
4494 store_at(b, cb, 0, IrType::Int(IntWidth::I32), unit_i32);
4495 store_at(b, cb, 8, file_ptr_ty, file_ptr);
4496 store_at(b, cb, 16, IrType::Int(IntWidth::I64), file_len);
4497 store_at(b, cb, 24, status_ptr_ty, status_ptr);
4498 store_at(b, cb, 32, IrType::Int(IntWidth::I64), status_len);
4499 store_at(b, cb, 40, action_ptr_ty, action_ptr);
4500 store_at(b, cb, 48, IrType::Int(IntWidth::I64), action_len);
4501 store_at(b, cb, 56, access_ptr_ty, access_ptr);
4502 store_at(b, cb, 64, IrType::Int(IntWidth::I64), access_len);
4503 store_at(b, cb, 72, form_ptr_ty, form_ptr);
4504 store_at(b, cb, 80, IrType::Int(IntWidth::I64), form_len);
4505 store_at(b, cb, 88, IrType::Int(IntWidth::I64), recl_i64);
4506 store_at(b, cb, 96, iostat_ptr_ty, iostat_ptr);
4507 store_at(b, cb, 104, newunit_ptr_ty, newunit_ptr);
4508 store_at(b, cb, 112, position_ptr_ty, position_ptr);
4509 store_at(b, cb, 120, IrType::Int(IntWidth::I64), position_len);
4510
4511 b.call(FuncRef::External("afs_open".into()), vec![cb], IrType::Void);
4512 }
4513 }
4514
4515 Stmt::Close { specs } => {
4516 let unit_spec = specs
4517 .iter()
4518 .find(|s| {
4519 s.keyword
4520 .as_deref()
4521 .map(|k| k.eq_ignore_ascii_case("unit"))
4522 .unwrap_or(false)
4523 })
4524 .or_else(|| specs.iter().find(|s| s.keyword.is_none()));
4525 let iostat_spec = specs.iter().find(|s| {
4526 s.keyword
4527 .as_deref()
4528 .map(|k| k.eq_ignore_ascii_case("iostat"))
4529 .unwrap_or(false)
4530 });
4531 let status_spec = specs.iter().find(|s| {
4532 s.keyword
4533 .as_deref()
4534 .map(|k| k.eq_ignore_ascii_case("status"))
4535 .unwrap_or(false)
4536 });
4537 let unit = if let Some(s) = unit_spec {
4538 super::expr::lower_expr_ctx(b, ctx, &s.value)
4539 } else {
4540 b.const_i32(6)
4541 };
4542 let null = b.const_i64(0);
4543 let unit_i32 = coerce_to_type(b, unit, &IrType::Int(IntWidth::I32));
4544 let iostat_ptr = iostat_spec
4545 .map(|spec| lower_arg_by_ref_ctx(b, ctx, &spec.value))
4546 .unwrap_or(null);
4547 let (status_ptr, status_len) = status_spec
4548 .map(|spec| {
4549 lower_string_expr_with_layouts(
4550 b,
4551 &ctx.locals,
4552 &spec.value,
4553 ctx.st,
4554 Some(ctx.type_layouts),
4555 )
4556 })
4557 .unwrap_or_else(|| (null, null));
4558 b.call(
4559 FuncRef::External("afs_close_ex".into()),
4560 vec![unit_i32, status_ptr, status_len, iostat_ptr],
4561 IrType::Void,
4562 );
4563 }
4564
        Stmt::Read { controls, items } => {
            // `nonadvancing` is the compile-time bool used by the
            // existing per-item helpers; it stays false when the
            // advance= expression is non-literal so the static path
            // calls the advancing helper. `advance_runtime` carries
            // the runtime i32 (0 = no, 1 = yes) for non-literal
            // expressions like `advance=optval(adv,'YES')` from
            // stdlib's read_bitset_unit_64. The char-read helper picks
            // it up via afs_fmt_read_string_dyn.
            let advance_ctrl = controls.iter().find(|c| {
                c.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("advance"))
                    .unwrap_or(false)
            });
            let nonadvancing = advance_ctrl
                .and_then(|c| match &c.value.node {
                    Expr::StringLiteral { value, .. } => Some(value.eq_ignore_ascii_case("no")),
                    Expr::Name { name } => Some(name.eq_ignore_ascii_case("no")),
                    _ => None,
                })
                .unwrap_or(false);
            let advance_runtime: Option<ValueId> = advance_ctrl.and_then(|c| {
                if matches!(&c.value.node, Expr::StringLiteral { .. }) {
                    None
                } else {
                    // Non-literal advance=: evaluate the string at
                    // runtime via afs_advance_eval and carry the i32.
                    let (p, l) = lower_string_expr_with_layouts(
                        b,
                        &ctx.locals,
                        &c.value,
                        ctx.st,
                        Some(ctx.type_layouts),
                    );
                    Some(b.call(
                        FuncRef::External("afs_advance_eval".into()),
                        vec![p, l],
                        IrType::Int(IntWidth::I32),
                    ))
                }
            });
            // ERR= branch target: only literal integer labels are
            // recognized here; any other expression form is ignored.
            let err_label = controls.iter().find_map(|c| {
                if c.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("err"))
                    .unwrap_or(false)
                {
                    match &c.value.node {
                        Expr::IntegerLiteral { text, .. } => text.parse::<u64>().ok(),
                        _ => None,
                    }
                } else {
                    None
                }
            });
            // The format is either the second positional control
            // (READ(unit, fmt)) or an explicit FMT= keyword control.
            let fmt_control = controls
                .iter()
                .skip(1)
                .find(|c| c.keyword.is_none())
                .or_else(|| {
                    controls.iter().find(|c| {
                        c.keyword
                            .as_deref()
                            .map(|k| k.eq_ignore_ascii_case("fmt"))
                            .unwrap_or(false)
                    })
                });

            // No format control at all, or fmt=*, means list-directed.
            let is_list_directed = match fmt_control {
                None => true,
                Some(ctrl) => matches!(&ctrl.value.node, Expr::Name { name } if name == "*"),
            };

            // IOSTAT= destination, lowered by reference when present.
            let explicit_iostat_addr = controls
                .iter()
                .find(|c| {
                    c.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("iostat"))
                        .unwrap_or(false)
                })
                .map(|c| lower_arg_by_ref_ctx(b, ctx, &c.value));

            // ERR= without IOSTAT= still needs somewhere for the
            // runtime to record the status, so synthesize a zeroed
            // scratch i32; with neither, pass a null sentinel.
            let iostat_addr = match (err_label, explicit_iostat_addr) {
                (_, Some(addr)) => addr,
                (Some(_), None) => {
                    let tmp = b.alloca(IrType::Int(IntWidth::I32));
                    let zero = b.const_i32(0);
                    b.store(zero, tmp);
                    tmp
                }
                (None, None) => b.const_i64(0),
            };

            // SIZE= destination (by reference), or a null sentinel.
            let size_addr = controls
                .iter()
                .find(|c| {
                    c.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("size"))
                        .unwrap_or(false)
                })
                .map(|c| lower_arg_by_ref_ctx(b, ctx, &c.value))
                .unwrap_or_else(|| b.const_i64(0));

            // Internal I/O: if the first control designates a character
            // buffer rather than an external unit, read from that
            // buffer and return early.
            if let Some(ctrl) = controls.first() {
                if let Some((buf_ptr, buf_len)) = internal_io_buffer(b, ctx, ctrl) {
                    if is_list_directed {
                        lower_internal_read_items(b, ctx, items, buf_ptr, buf_len, iostat_addr);
                    } else {
                        // fmt_control must be Some here: is_list_directed
                        // is true whenever it is None, so this unwrap
                        // cannot panic.
                        let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
                            b,
                            &ctx.locals,
                            &fmt_control.unwrap().value,
                            ctx.st,
                            Some(ctx.type_layouts),
                        );
                        lower_formatted_internal_read_items(
                            b,
                            ctx,
                            items,
                            buf_ptr,
                            buf_len,
                            fmt_ptr,
                            fmt_len,
                            iostat_addr,
                            size_addr,
                        );
                    }
                    lower_read_err_branch(b, ctx, err_label, iostat_addr);
                    return;
                }
            }

            // Extract unit (first control). * means stdin (unit 5).
            let unit = if let Some(ctrl) = controls.first() {
                if matches!(&ctrl.value.node, Expr::Name { name } if name == "*") {
                    b.const_i32(5)
                } else {
                    super::expr::lower_expr_ctx(b, ctx, &ctrl.value)
                }
            } else {
                b.const_i32(5) // default stdin
            };
            if is_list_directed {
                // Wrap the per-item reads in begin/end so the runtime
                // can slurp a sequential-unformatted record up front
                // and let the typed helpers consume binary bytes.
                // Formatted units pass through (begin only resets
                // iostat). iomsg= isn't yet plumbed on the read side;
                // stick a null pointer for now.
                let null_iomsg = {
                    let z = b.const_i64(0);
                    b.int_to_ptr(z, IrType::Int(IntWidth::I8))
                };
                let zero_len = b.const_i64(0);
                b.call(
                    FuncRef::External("afs_list_read_begin".into()),
                    vec![unit, iostat_addr, null_iomsg, zero_len],
                    IrType::Void,
                );
                lower_list_read_items(b, ctx, items, unit, iostat_addr);
                b.call(
                    FuncRef::External("afs_list_read_end".into()),
                    vec![unit, iostat_addr, null_iomsg, zero_len],
                    IrType::Void,
                );
            } else {
                // fmt_control must be Some here (see is_list_directed).
                let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
                    b,
                    &ctx.locals,
                    &fmt_control.unwrap().value,
                    ctx.st,
                    Some(ctx.type_layouts),
                );
                lower_formatted_read_items_with_runtime_advance(
                    b,
                    ctx,
                    items,
                    unit,
                    fmt_ptr,
                    fmt_len,
                    nonadvancing,
                    advance_runtime,
                    iostat_addr,
                    size_addr,
                );
            }
            // ERR= branches on the iostat value recorded above.
            lower_read_err_branch(b, ctx, err_label, iostat_addr);
        }
4754
4755 Stmt::Inquire { specs, .. } => {
4756 let null = b.const_i64(0);
4757 let zero_len = b.const_i64(0);
4758 let spec_by_keyword = |needle: &str| {
4759 specs.iter().find(|s| {
4760 s.keyword
4761 .as_deref()
4762 .map(|k| k.eq_ignore_ascii_case(needle))
4763 .unwrap_or(false)
4764 })
4765 };
4766 let file_spec = specs.iter().find(|s| {
4767 s.keyword
4768 .as_deref()
4769 .map(|k| k.eq_ignore_ascii_case("file"))
4770 .unwrap_or(false)
4771 });
4772 let unit_spec = specs.iter().find(|s| {
4773 s.keyword
4774 .as_deref()
4775 .map(|k| k.eq_ignore_ascii_case("unit"))
4776 .unwrap_or(false)
4777 });
4778
4779 let lower_ref_spec = |b: &mut FuncBuilder, needle: &str| -> ValueId {
4780 spec_by_keyword(needle)
4781 .map(|spec| lower_arg_by_ref_ctx(b, ctx, &spec.value))
4782 .unwrap_or(null)
4783 };
4784 let lower_string_spec = |b: &mut FuncBuilder, needle: &str| -> (ValueId, ValueId) {
4785 if let Some(spec) = spec_by_keyword(needle) {
4786 lower_string_expr_with_layouts(
4787 b,
4788 &ctx.locals,
4789 &spec.value,
4790 ctx.st,
4791 Some(ctx.type_layouts),
4792 )
4793 } else {
4794 (null, zero_len)
4795 }
4796 };
4797
4798 let exist_addr = lower_ref_spec(b, "exist");
4799 let opened_addr = lower_ref_spec(b, "opened");
4800 let iostat_addr = lower_ref_spec(b, "iostat");
4801 let (name_ptr, name_len) = lower_string_spec(b, "name");
4802 let (access_ptr, access_len) = lower_string_spec(b, "access");
4803 let (form_ptr, form_len) = lower_string_spec(b, "form");
4804 let (action_ptr, action_len) = lower_string_spec(b, "action");
4805 let (read_ptr, read_len) = lower_string_spec(b, "read");
4806 let (write_ptr, write_len) = lower_string_spec(b, "write");
4807 let (readwrite_ptr, readwrite_len) = lower_string_spec(b, "readwrite");
4808 let recl_addr = lower_ref_spec(b, "recl");
4809 let size_spec = spec_by_keyword("size");
4810 let (size_addr, size_storeback) = if let Some(spec) = size_spec {
4811 let dest_addr = lower_arg_by_ref_ctx(b, ctx, &spec.value);
4812 let temp = b.alloca(IrType::Int(IntWidth::I64));
4813 (temp, Some(dest_addr))
4814 } else {
4815 (null, None)
4816 };
4817
4818 if let Some(fs) = file_spec {
4819 let (fptr, flen) = lower_string_expr_with_layouts(
4820 b,
4821 &ctx.locals,
4822 &fs.value,
4823 ctx.st,
4824 Some(ctx.type_layouts),
4825 );
4826 b.call(
4827 FuncRef::External("afs_inquire_file".into()),
4828 vec![
4829 fptr,
4830 flen,
4831 exist_addr,
4832 opened_addr,
4833 iostat_addr,
4834 name_ptr,
4835 name_len,
4836 access_ptr,
4837 access_len,
4838 form_ptr,
4839 form_len,
4840 action_ptr,
4841 action_len,
4842 recl_addr,
4843 size_addr,
4844 read_ptr,
4845 read_len,
4846 write_ptr,
4847 write_len,
4848 readwrite_ptr,
4849 readwrite_len,
4850 ],
4851 IrType::Void,
4852 );
4853 } else if let Some(us) = unit_spec {
4854 let unit_raw = super::expr::lower_expr_ctx(b, ctx, &us.value);
4855 let unit = coerce_to_type(b, unit_raw, &IrType::Int(IntWidth::I32));
4856 b.call(
4857 FuncRef::External("afs_inquire_unit".into()),
4858 vec![
4859 unit,
4860 exist_addr,
4861 opened_addr,
4862 iostat_addr,
4863 name_ptr,
4864 name_len,
4865 access_ptr,
4866 access_len,
4867 form_ptr,
4868 form_len,
4869 action_ptr,
4870 action_len,
4871 recl_addr,
4872 size_addr,
4873 read_ptr,
4874 read_len,
4875 write_ptr,
4876 write_len,
4877 readwrite_ptr,
4878 readwrite_len,
4879 ],
4880 IrType::Void,
4881 );
4882 }
4883 if let Some(dest_addr) = size_storeback {
4884 let size_val = b.load(size_addr);
4885 let dest_ty = match b.func().value_type(dest_addr) {
4886 Some(IrType::Ptr(inner)) => (*inner).clone(),
4887 _ => IrType::Int(IntWidth::I32),
4888 };
4889 let coerced = coerce_to_type(b, size_val, &dest_ty);
4890 b.store(coerced, dest_addr);
4891 }
4892 }
4893
4894 Stmt::Flush { specs } => {
4895 let unit = if let Some(s) = specs.first() {
4896 super::expr::lower_expr_ctx(b, ctx, &s.value)
4897 } else {
4898 b.const_i32(6)
4899 };
4900 let null = b.const_i64(0);
4901 b.call(
4902 FuncRef::External("afs_flush".into()),
4903 vec![unit, null],
4904 IrType::Void,
4905 );
4906 }
4907
4908 Stmt::Rewind { specs } => {
4909 let unit = if let Some(s) = specs.first() {
4910 super::expr::lower_expr_ctx(b, ctx, &s.value)
4911 } else {
4912 b.const_i32(6)
4913 };
4914 let null = b.const_i64(0);
4915 b.call(
4916 FuncRef::External("afs_rewind".into()),
4917 vec![unit, null],
4918 IrType::Void,
4919 );
4920 }
4921
        Stmt::Nullify { items } => {
            // Zero each pointer slot so ASSOCIATED returns false.
            for item in items {
                // Derived-type component target (dt%ptr): zero the
                // component's storage in place via memset.
                if let Some((field_ptr, field)) =
                    resolve_component_field_access(b, &ctx.locals, item, ctx.st, ctx.type_layouts)
                {
                    // Non-pointer components are silently skipped.
                    if !field.pointer {
                        continue;
                    }
                    let zero_byte = b.const_i32(0);
                    let sz = b.const_i64(field.size as i64);
                    b.call(
                        FuncRef::External("memset".into()),
                        vec![field_ptr, zero_byte, sz],
                        IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                    );
                    continue;
                }
                // Otherwise the item must be a plain local name that
                // resolves to a pointer; anything else is skipped.
                let Expr::Name { name } = &item.node else {
                    continue;
                };
                let Some(info) = ctx.locals.get(&name.to_lowercase()) else {
                    continue;
                };
                if !info.is_pointer {
                    continue;
                }
                // Array pointers use the 384-byte descriptor, scalar
                // deferred-length character pointers use a 32-byte
                // string descriptor, and scalar pointers use an 8-byte
                // slot. Pointer dummies passed by reference must write
                // through to the caller-owned slot.
                let size = if matches!(info.char_kind, CharKind::Deferred) {
                    32i64
                } else if info.allocatable || info.descriptor_arg {
                    384i64
                } else {
                    8i64
                };
                // By-ref dummies: the local holds the caller slot's
                // address, so load it and memset through it.
                let slot = if info.by_ref {
                    b.load(info.addr)
                } else {
                    info.addr
                };
                let zero_byte = b.const_i32(0);
                let sz = b.const_i64(size);
                b.call(
                    FuncRef::External("memset".into()),
                    vec![slot, zero_byte, sz],
                    IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                );
            }
        }
4975
4976 Stmt::PointerAssignment { target, value } => {
4977 // `p => q` or `p => x`: rebind the pointer slot `p` to the
4978 // address of the RHS designator. Three shapes:
4979 //
4980 // * scalar + derived-type pointer: slot holds an 8-byte
4981 // pointer, `=>` stores the target's address into it.
4982 // * array pointer: slot holds a 384-byte ArrayDescriptor,
4983 // `=>` materialises a descriptor of the target and
4984 // memcpy's it into the slot.
4985 //
4986 // In both cases the target must be a simple Name for now;
4987 // component-access and slice targets are follow-up work.
4988
4989 // Rank-remapping pointer assignment, F2018 §10.2.2.3:
4990 // `xmat(1:n, 1:nrhs) => x` (1-D `x` reinterpreted as 2-D)
4991 // The LHS is a FunctionCall in the AST whose subscripts are
4992 // Range bounds, not array indices. The previous fall-through
4993 // path treated this as scalar pointer assignment and never
4994 // populated the destination descriptor — `xmat(i,j)` then
4995 // tripped a bounds check against `[1, 0]`. stdlib_linalg's
4996 // solve/chol/eig/inverse/svd/norm all do this; ~25 stdlib
4997 // examples were blocked.
4998 if let Expr::FunctionCall { callee, args } = &target.node {
4999 if let Expr::Name { name: tgt_name } = &callee.node {
5000 let tgt_key = tgt_name.to_lowercase();
5001 let is_remap_target = ctx
5002 .locals
5003 .get(&tgt_key)
5004 .map(|info| {
5005 info.is_pointer && local_uses_array_descriptor(info)
5006 })
5007 .unwrap_or(false);
5008 let all_ranges = !args.is_empty()
5009 && args.iter().all(|a| {
5010 matches!(
5011 a.value,
5012 crate::ast::expr::SectionSubscript::Range { .. }
5013 )
5014 });
5015 if is_remap_target
5016 && all_ranges
5017 && lower_rank_remap_pointer_assignment(
5018 b, ctx, &tgt_key, args, value,
5019 )
5020 {
5021 return;
5022 }
5023 }
5024 }
5025
5026 let component_target =
5027 resolve_component_field_access(b, &ctx.locals, target, ctx.st, ctx.type_layouts)
5028 .filter(|(_, field)| field.pointer);
5029
5030 if let Some((tgt_field_ptr, tgt_field)) = component_target.as_ref() {
5031 if !tgt_field.pointer {
5032 return;
5033 }
5034 if is_deferred_char_component_field(tgt_field) {
5035 if let Expr::FunctionCall { callee, .. } = &value.node {
5036 if let Expr::Name { name } = &callee.node {
5037 if name.eq_ignore_ascii_case("null") {
5038 let zero = b.const_i64(0);
5039 let null = b.int_to_ptr(zero, IrType::Int(IntWidth::I8));
5040 store_string_descriptor_view(b, *tgt_field_ptr, null, zero);
5041 return;
5042 }
5043 }
5044 }
5045 if let Expr::FunctionCall { callee, args } = &value.node {
5046 if let Expr::FunctionCall {
5047 callee: inner_callee,
5048 args: inner_args,
5049 } = &callee.node
5050 {
5051 if let Expr::Name { name: arr_name } = &inner_callee.node {
5052 let akey = arr_name.to_lowercase();
5053 if let Some(info) = ctx.locals.get(&akey) {
5054 if matches!(info.char_kind, CharKind::Fixed(_))
5055 && (!info.dims.is_empty() || info.allocatable)
5056 && args.len() == 1
5057 {
5058 if let crate::ast::expr::SectionSubscript::Range {
5059 ref start,
5060 ref end,
5061 ..
5062 } = args[0].value
5063 {
5064 let elem_slot = lower_array_element_addr(
5065 b,
5066 &ctx.locals,
5067 info,
5068 inner_args,
5069 ctx.st,
5070 Some(ctx.type_layouts),
5071 );
5072 let zero = b.const_i64(0);
5073 let elem_ptr = b.gep(
5074 elem_slot,
5075 vec![zero],
5076 IrType::Int(IntWidth::I8),
5077 );
5078 let elem_len = match info.char_kind {
5079 CharKind::Fixed(n) => b.const_i64(n),
5080 _ => b.const_i64(0),
5081 };
5082 let (ptr, len) = lower_substring_full(
5083 b,
5084 &ctx.locals,
5085 ctx.st,
5086 elem_ptr,
5087 elem_len,
5088 start.as_ref(),
5089 end.as_ref(),
5090 Some(ctx.type_layouts),
5091 Some(ctx.internal_funcs),
5092 Some(ctx.contained_host_refs),
5093 Some(ctx.descriptor_params),
5094 );
5095 store_string_descriptor_view(
5096 b,
5097 *tgt_field_ptr,
5098 ptr,
5099 len,
5100 );
5101 return;
5102 }
5103 }
5104 }
5105 }
5106 }
5107 }
5108 if let Some((src_field_ptr, src_field)) = resolve_component_field_access(
5109 b,
5110 &ctx.locals,
5111 value,
5112 ctx.st,
5113 ctx.type_layouts,
5114 ) {
5115 if is_deferred_char_component_field(&src_field) {
5116 let (ptr, len) = load_string_descriptor_view(b, src_field_ptr);
5117 store_string_descriptor_view(b, *tgt_field_ptr, ptr, len);
5118 return;
5119 }
5120 }
5121 let (ptr, len) = lower_string_expr_ctx(b, ctx, value);
5122 store_string_descriptor_view(b, *tgt_field_ptr, ptr, len);
5123 return;
5124 }
5125 }
5126
5127 let Some(tgt_info) = component_target
5128 .map(|(tgt_field_ptr, tgt_field)| LocalInfo {
5129 addr: tgt_field_ptr,
5130 ty: if tgt_field.pointer
5131 && matches!(
5132 tgt_field.type_info,
5133 crate::sema::symtab::TypeInfo::Derived(_)
5134 )
5135 && tgt_field.size != 384
5136 {
5137 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8)))
5138 } else {
5139 field_storage_ir_type(&tgt_field, ctx.type_layouts)
5140 },
5141 dims: vec![],
5142 allocatable: tgt_field.size == 384
5143 && (tgt_field.allocatable || tgt_field.pointer),
5144 descriptor_arg: false,
5145 by_ref: false,
5146 char_kind: field_char_kind(&tgt_field),
5147 derived_type: field_derived_type_name(&tgt_field),
5148 inline_const: None,
5149 is_pointer: tgt_field.pointer,
5150 runtime_dim_upper: vec![],
5151 is_class: false,
5152 logical_kind: None,
5153 last_dim_assumed_size: false,
5154 })
5155 .or_else(|| {
5156 if let Expr::Name { name: tgt_name } = &target.node {
5157 ctx.locals.get(&tgt_name.to_lowercase()).cloned()
5158 } else {
5159 None
5160 }
5161 })
5162 else {
5163 return;
5164 };
5165 if !tgt_info.is_pointer {
5166 return;
5167 }
5168
5169 if let Expr::FunctionCall { callee, .. } = &value.node {
5170 if let Expr::Name { name } = &callee.node {
5171 if name.eq_ignore_ascii_case("null") {
5172 if matches!(tgt_info.char_kind, CharKind::Deferred) {
5173 let tgt_desc = string_descriptor_addr(b, &tgt_info);
5174 let zero = b.const_i64(0);
5175 let null = b.int_to_ptr(zero, IrType::Int(IntWidth::I8));
5176 store_string_descriptor_view(b, tgt_desc, null, zero);
5177 return;
5178 }
5179 let zero_byte = b.const_i32(0);
5180 let size = if local_uses_array_descriptor(&tgt_info) {
5181 384i64
5182 } else {
5183 8i64
5184 };
5185 let tgt_slot = if local_uses_array_descriptor(&tgt_info) {
5186 array_descriptor_addr(b, &tgt_info)
5187 } else if tgt_info.by_ref {
5188 b.load(tgt_info.addr)
5189 } else {
5190 tgt_info.addr
5191 };
5192 let size_val = b.const_i64(size);
5193 b.call(
5194 FuncRef::External("memset".into()),
5195 vec![tgt_slot, zero_byte, size_val],
5196 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5197 );
5198 return;
5199 }
5200 }
5201 }
5202
5203 if matches!(tgt_info.char_kind, CharKind::Deferred) {
5204 let tgt_desc = string_descriptor_addr(b, &tgt_info);
5205 if let Expr::FunctionCall { callee, .. } = &value.node {
5206 if let Expr::Name { name } = &callee.node {
5207 if name.eq_ignore_ascii_case("null") {
5208 let zero = b.const_i64(0);
5209 let null = b.int_to_ptr(zero, IrType::Int(IntWidth::I8));
5210 store_string_descriptor_view(b, tgt_desc, null, zero);
5211 return;
5212 }
5213 }
5214 }
5215 if let Some((src_field_ptr, src_field)) =
5216 resolve_component_field_access(b, &ctx.locals, value, ctx.st, ctx.type_layouts)
5217 {
5218 if is_deferred_char_component_field(&src_field) {
5219 let (ptr, len) = load_string_descriptor_view(b, src_field_ptr);
5220 store_string_descriptor_view(b, tgt_desc, ptr, len);
5221 return;
5222 }
5223 }
5224 if let Expr::Name { name: src_name } = &value.node {
5225 if let Some(src_info) = ctx.locals.get(&src_name.to_lowercase()) {
5226 if matches!(src_info.char_kind, CharKind::Deferred) {
5227 let src_desc = string_descriptor_addr(b, src_info);
5228 let (ptr, len) = load_string_descriptor_view(b, src_desc);
5229 store_string_descriptor_view(b, tgt_desc, ptr, len);
5230 return;
5231 }
5232 }
5233 }
5234 let (ptr, len) = lower_string_expr_with_layouts(
5235 b,
5236 &ctx.locals,
5237 value,
5238 ctx.st,
5239 Some(ctx.type_layouts),
5240 );
5241 store_string_descriptor_view(b, tgt_desc, ptr, len);
5242 return;
5243 }
5244
5245 // Handle section-RHS: pa => ia(lo:hi). The RHS is a
5246 // FunctionCall{Name(arr), [Range(lo,hi)]}. Build a
5247 // descriptor pointing at arr(lo) with extent hi-lo+1.
5248 if let Expr::FunctionCall {
5249 callee,
5250 args: val_args,
5251 } = &value.node
5252 {
5253 if let Expr::Name { name: arr_name } = &callee.node {
5254 let arr_key = arr_name.to_lowercase();
5255 if let Some(arr_info) = ctx.locals.get(&arr_key).cloned() {
5256 if (!arr_info.dims.is_empty() || arr_info.allocatable)
5257 && val_args.len() == 1
5258 {
5259 if let crate::ast::expr::SectionSubscript::Range {
5260 start,
5261 end,
5262 stride: _,
5263 } = &val_args[0].value
5264 {
5265 let base = array_data_ptr_for_call(b, &arr_info);
5266 let lo = if let Some(se) = start {
5267 let v = super::expr::lower_expr_ctx(b, ctx, se);
5268 match b.func().value_type(v) {
5269 Some(IrType::Int(IntWidth::I64)) => v,
5270 _ => b.int_extend(v, IntWidth::I64, true),
5271 }
5272 } else {
5273 b.const_i64(1)
5274 };
5275 let hi = if let Some(ee) = end {
5276 let v = super::expr::lower_expr_ctx(b, ctx, ee);
5277 match b.func().value_type(v) {
5278 Some(IrType::Int(IntWidth::I64)) => v,
5279 _ => b.int_extend(v, IntWidth::I64, true),
5280 }
5281 } else {
5282 array_total_elems_value(b, &arr_info)
5283 };
5284 // Build a descriptor in the pointer's slot.
5285 let desc = array_descriptor_addr(b, &tgt_info);
5286 let zero32 = b.const_i32(0);
5287 let sz384 = b.const_i64(384);
5288 b.call(
5289 FuncRef::External("memset".into()),
5290 vec![desc, zero32, sz384],
5291 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5292 );
5293 // base_addr: base + (lo - 1) * elem_size
5294 let one = b.const_i64(1);
5295 let lo_0 = b.isub(lo, one);
5296 let elem_bytes = b.const_i64(ir_scalar_byte_size(&arr_info.ty));
5297 let byte_off = b.imul(lo_0, elem_bytes);
5298 let slice_base =
5299 b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
5300 store_byte_aggregate_field(
5301 b,
5302 desc,
5303 0,
5304 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5305 slice_base,
5306 );
5307 store_byte_aggregate_field(
5308 b,
5309 desc,
5310 8,
5311 IrType::Int(IntWidth::I64),
5312 elem_bytes,
5313 );
5314 let rank = b.const_i32(1);
5315 store_byte_aggregate_field(
5316 b,
5317 desc,
5318 16,
5319 IrType::Int(IntWidth::I32),
5320 rank,
5321 );
5322 let flags = b.const_i32(2);
5323 store_byte_aggregate_field(
5324 b,
5325 desc,
5326 20,
5327 IrType::Int(IntWidth::I32),
5328 flags,
5329 );
5330 // dim[0]: lower=1, upper=extent, stride=1
5331 store_byte_aggregate_field(
5332 b,
5333 desc,
5334 24,
5335 IrType::Int(IntWidth::I64),
5336 one,
5337 );
5338 let extent = b.isub(hi, lo);
5339 let extent1 = b.iadd(extent, one);
5340 store_byte_aggregate_field(
5341 b,
5342 desc,
5343 32,
5344 IrType::Int(IntWidth::I64),
5345 extent1,
5346 );
5347 store_byte_aggregate_field(
5348 b,
5349 desc,
5350 40,
5351 IrType::Int(IntWidth::I64),
5352 one,
5353 );
5354 return;
5355 }
5356 }
5357 }
5358 }
5359 }
5360
5361 // Array element target: p => arr(i) — compute the
5362 // element's address via GEP and store into the pointer slot.
5363 if let Expr::FunctionCall {
5364 callee,
5365 args: val_args,
5366 } = &value.node
5367 {
5368 if let Expr::Name { name: arr_name } = &callee.node {
5369 let arr_key = arr_name.to_lowercase();
5370 if let Some(arr_info) = ctx.locals.get(&arr_key).cloned() {
5371 if (!arr_info.dims.is_empty() || arr_info.allocatable)
5372 && val_args.len() == 1
5373 && matches!(
5374 val_args[0].value,
5375 crate::ast::expr::SectionSubscript::Element(_)
5376 )
5377 {
5378 if let crate::ast::expr::SectionSubscript::Element(idx_expr) =
5379 &val_args[0].value
5380 {
5381 let base = array_data_ptr_for_call(b, &arr_info);
5382 let idx = super::expr::lower_expr_ctx(b, ctx, idx_expr);
5383 let idx64 = match b.func().value_type(idx) {
5384 Some(IrType::Int(IntWidth::I64)) => idx,
5385 _ => b.int_extend(idx, IntWidth::I64, true),
5386 };
5387 let one = b.const_i64(1);
5388 let idx0 = b.isub(idx64, one);
5389 let elem_ptr = b.gep(base, vec![idx0], arr_info.ty.clone());
5390 store_scalar_pointer_slot_value(b, &tgt_info, elem_ptr);
5391 return;
5392 }
5393 }
5394 }
5395 }
5396 }
5397
5398 // Component access target: p => dt%field — resolve the
5399 // field's address and store into the pointer slot.
5400 if let Expr::ComponentAccess { base, component } = &value.node {
5401 if let Some((base_addr, type_name)) =
5402 resolve_component_base(b, &ctx.locals, base, ctx.st, ctx.type_layouts)
5403 {
5404 if let Some(layout) = ctx.type_layouts.get(&type_name) {
5405 if let Some(field) = layout.field(component) {
5406 let offset = b.const_i64(field.offset as i64);
5407 let field_ptr =
5408 b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
5409 if is_deferred_char_component_field(field) {
5410 let (ptr, _len) = load_string_descriptor_view(b, field_ptr);
5411 store_scalar_pointer_slot_value(b, &tgt_info, ptr);
5412 return;
5413 }
5414 if field.size == 384 && (field.pointer || field.allocatable) {
5415 if local_uses_array_descriptor(&tgt_info) {
5416 let tgt_desc = array_descriptor_addr(b, &tgt_info);
5417 let size = b.const_i64(384);
5418 b.call(
5419 FuncRef::External("memcpy".into()),
5420 vec![tgt_desc, field_ptr, size],
5421 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5422 );
5423 } else {
5424 let associated = b.load_typed(
5425 field_ptr,
5426 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5427 );
5428 store_scalar_pointer_slot_value(b, &tgt_info, associated);
5429 }
5430 return;
5431 }
5432 if field.pointer {
5433 let slot_value_ty = match &field.type_info {
5434 crate::sema::symtab::TypeInfo::Derived(_) => {
5435 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8)))
5436 }
5437 _ => IrType::Ptr(Box::new(field_storage_ir_type(
5438 field,
5439 ctx.type_layouts,
5440 ))),
5441 };
5442 let associated = b.load_typed(field_ptr, slot_value_ty);
5443 store_scalar_pointer_slot_value(b, &tgt_info, associated);
5444 return;
5445 }
5446 // Cast ptr<i8> → ptr<tgt_ty> via zero-offset GEP
5447 // so the store type matches the pointer slot.
5448 let zero = b.const_i64(0);
5449 let typed_ptr = b.gep(field_ptr, vec![zero], tgt_info.ty.clone());
5450 store_scalar_pointer_slot_value(b, &tgt_info, typed_ptr);
5451 return;
5452 }
5453 }
5454 }
5455 }
5456
5457 if matches!(value.node, Expr::FunctionCall { .. }) {
5458 let addr = super::expr::lower_expr_ctx_tl(b, ctx, value);
5459 if local_uses_array_descriptor(&tgt_info) && tgt_info.dims.is_empty() {
5460 let type_tag = static_expr_type_tag_value(b, value, ctx.st, ctx.type_layouts);
5461 let elem_size = expr_type_layout(value, None, ctx.st, ctx.type_layouts)
5462 .map(|layout| b.const_i64(layout.size as i64));
5463 let tbp_lookup =
5464 static_expr_tbp_lookup_value(b, value, ctx.st, ctx.type_layouts);
5465 let tgt_desc = array_descriptor_addr(b, &tgt_info);
5466 store_scalar_polymorphic_descriptor_view(
5467 b,
5468 tgt_desc,
5469 addr,
5470 elem_size,
5471 type_tag,
5472 tbp_lookup,
5473 );
5474 } else {
5475 store_scalar_pointer_slot_value(b, &tgt_info, addr);
5476 }
5477 return;
5478 }
5479
5480 let Expr::Name { name: src_name } = &value.node else {
5481 return;
5482 };
5483 let src_key = src_name.to_lowercase();
5484 let Some(src_info) = ctx.locals.get(&src_key).cloned() else {
5485 if let Some(sym) = ctx.st.lookup_local_then_any(ctx.proc_scope_id, &src_key) {
5486 if matches!(
5487 sym.kind,
5488 crate::sema::symtab::SymbolKind::Function
5489 | crate::sema::symtab::SymbolKind::Subroutine
5490 ) {
5491 let (link_name, resolved_key) =
5492 resolved_symbol_call_target(ctx.st, &src_key, src_name);
5493 let lowered_name = if ctx.internal_funcs.contains_key(&resolved_key)
5494 || ctx.internal_funcs.contains_key(&src_key)
5495 {
5496 lowered_procedure_symbol_name(
5497 resolved_key.as_str(),
5498 None,
5499 Some(b.func().name.as_str()),
5500 None,
5501 true,
5502 ctx.internal_funcs,
5503 )
5504 } else {
5505 link_name
5506 };
5507 let addr = b.global_addr(
5508 &lowered_name,
5509 procedure_pointer_symbol_addr_elem_type(&tgt_info),
5510 );
5511 store_scalar_pointer_slot_value(b, &tgt_info, addr);
5512 }
5513 }
5514 return;
5515 };
5516
5517 // Array pointer path: materialise a descriptor from the
5518 // target and memcpy 384 bytes into the pointer's slot.
5519 // Both explicit-shape stack arrays and descriptor-backed
5520 // allocatables are supported via array_data_ptr_for_call.
5521 let target_is_array =
5522 !src_info.dims.is_empty() || src_info.allocatable || src_info.descriptor_arg;
5523 if target_is_array {
5524 let src_desc = if local_uses_array_descriptor(&src_info) {
5525 array_descriptor_addr(b, &src_info)
5526 } else {
5527 materialize_array_descriptor_for_info(b, &src_info)
5528 };
5529 if local_uses_array_descriptor(&tgt_info) {
5530 let tgt_desc = array_descriptor_addr(b, &tgt_info);
5531 let size = b.const_i64(384);
5532 b.call(
5533 FuncRef::External("memcpy".into()),
5534 vec![tgt_desc, src_desc, size],
5535 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5536 );
5537 } else if src_info.dims.is_empty() {
5538 let src_base = if local_uses_array_descriptor(&src_info) {
5539 scalar_descriptor_base_addr_raw(b, &src_info)
5540 } else {
5541 array_base_addr(b, &src_info)
5542 };
5543 store_scalar_pointer_slot_value(b, &tgt_info, src_base);
5544 }
5545 return;
5546 }
5547
5548 // Scalar / derived-type pointer path.
5549 let addr = if src_info.is_pointer {
5550 // Copy the current association of another pointer
5551 // (pointer-to-pointer, including derived-type pointer
5552 // chains). For scalar pointers (ty = i32) the stored
5553 // value is Ptr<i32>; for DT pointers (ty = Ptr<i8>)
5554 // the stored value is already Ptr<i8> — wrapping
5555 // again would produce Ptr<Ptr<i8>> and fail the
5556 // verifier. Use ty directly when it's already a
5557 // pointer.
5558 let load_ty = if src_info.ty.is_ptr() {
5559 src_info.ty.clone()
5560 } else {
5561 IrType::Ptr(Box::new(src_info.ty.clone()))
5562 };
5563 if src_info.by_ref {
5564 let caller_slot = b.load_typed(
5565 src_info.addr,
5566 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5567 );
5568 b.load_typed(caller_slot, load_ty)
5569 } else {
5570 b.load_typed(src_info.addr, load_ty)
5571 }
5572 } else if src_info.by_ref {
5573 // Dummy TARGETs and procedure dummies are stored as
5574 // caller-provided addresses inside the local slot.
5575 // Pointer association must load through that slot so
5576 // `p => x` binds to the caller's storage/symbol rather
5577 // than this callee's alloca.
5578 let load_ty = if src_info.ty.is_ptr() {
5579 src_info.ty.clone()
5580 } else {
5581 IrType::Ptr(Box::new(src_info.ty.clone()))
5582 };
5583 b.load_typed(src_info.addr, load_ty)
5584 } else if src_info.derived_type.is_some() {
5585 // Derived-type TARGET. src_info.addr is a
5586 // ptr<[i8 x size]>; the pointer slot expects ptr<i8>.
5587 // A zero-offset GEP with element type i8 produces
5588 // the element-pointer view and round-trips through
5589 // the verifier.
5590 let zero = b.const_i64(0);
5591 b.gep(src_info.addr, vec![zero], IrType::Int(IntWidth::I8))
5592 } else {
5593 // Plain TARGET or ordinary scalar local: the local
5594 // alloca address IS the associated target.
5595 src_info.addr
5596 };
5597 if local_uses_array_descriptor(&tgt_info) && tgt_info.dims.is_empty() {
5598 let type_tag = static_expr_type_tag_value(b, value, ctx.st, ctx.type_layouts);
5599 let elem_size = expr_type_layout(value, None, ctx.st, ctx.type_layouts)
5600 .map(|layout| b.const_i64(layout.size as i64));
5601 let tbp_lookup =
5602 static_expr_tbp_lookup_value(b, value, ctx.st, ctx.type_layouts);
5603 let tgt_desc = array_descriptor_addr(b, &tgt_info);
5604 store_scalar_polymorphic_descriptor_view(
5605 b,
5606 tgt_desc,
5607 addr,
5608 elem_size,
5609 type_tag,
5610 tbp_lookup,
5611 );
5612 } else {
5613 store_scalar_pointer_slot_value(b, &tgt_info, addr);
5614 }
5615 }
5616
        Stmt::SelectRank {
            selector,
            assoc_name,
            guards,
            ..
        } => {
            // SELECT RANK construct: dispatch on the runtime rank of the
            // selector. The rank is read from the selector's array
            // descriptor (i32 field at byte offset 16) when the selector is
            // descriptor-backed; otherwise we fall back to the static
            // declared rank. Each RANK(n) guard becomes a chained compare
            // against that rank value; all guard bodies join at bb_end.
            let bb_end = b.create_block("select_rank_end");
            let selector_info = associate_alias_local_info(b, ctx, selector);
            let runtime_rank: ValueId = if let Some(info) = selector_info.as_ref() {
                if local_uses_array_descriptor(info) {
                    let desc = array_descriptor_addr(b, info);
                    // Descriptor stores rank as i32; widen to i64 so it can
                    // be compared against the const_i64 guard values below.
                    let rank32 = load_array_desc_i32_field(b, desc, 16);
                    b.int_extend(rank32, IntWidth::I64, true)
                } else {
                    // Non-descriptor selector: rank is known statically.
                    b.const_i64(local_declared_rank(info) as i64)
                }
            } else {
                // No local info resolved for the selector: treat as rank 0.
                b.const_i64(0)
            };

            // Install the associate-name as an alias for the selector inside
            // each guard body. Any pre-existing local under the same
            // (lowercased) key is saved here and restored after the
            // construct, so an outer variable of the same name survives.
            let saved_alias = assoc_name.as_ref().and_then(|name| {
                let key = name.to_lowercase();
                ctx.locals.remove(&key)
            });
            if let (Some(name), Some(info)) = (assoc_name.as_ref(), selector_info.as_ref()) {
                ctx.locals.insert(name.to_lowercase(), info.clone());
            }

            // Locate the RANK DEFAULT body (if any) up front; it is lowered
            // after all specific RANK(n) guards have been tested.
            // NOTE(review): RANK(*) bodies are never lowered in this arm —
            // only RankDefault feeds `default_body`, and the guard loop below
            // skips RankStar. Presumably deferred like the other unhandled
            // statements; confirm this is intentional.
            let default_body = guards.iter().find_map(|guard| {
                if let crate::ast::stmt::RankGuard::RankDefault { body } = guard {
                    Some(body)
                } else {
                    None
                }
            });

            // Chain of specific-rank tests:
            //   if rank == n { body; br bb_end } else { fall to next guard }
            for guard in guards {
                use crate::ast::stmt::RankGuard;
                match guard {
                    RankGuard::Rank { rank, body } => {
                        let want = b.const_i64(*rank);
                        let matches = b.icmp(CmpOp::Eq, runtime_rank, want);
                        let bb_match = b.create_block("rank_match");
                        let bb_next = b.create_block("rank_next");
                        b.cond_branch(matches, bb_match, vec![], bb_next, vec![]);
                        b.set_block(bb_match);
                        lower_stmts(b, ctx, body);
                        // A guard body may already end in its own terminator
                        // (RETURN/GOTO); only add the join branch when the
                        // current block is still open.
                        if b.func().block(b.current_block()).terminator.is_none() {
                            b.branch(bb_end, vec![]);
                        }
                        b.set_block(bb_next);
                    }
                    RankGuard::RankStar { .. } | RankGuard::RankDefault { .. } => {
                        // Defaults handled after specific ranks.
                    }
                }
            }
            // Fallthrough position (no specific rank matched): lower the
            // default body when present, then join at bb_end. Either way the
            // open block must be terminated before switching to bb_end.
            if let Some(body) = default_body {
                lower_stmts(b, ctx, body);
                if b.func().block(b.current_block()).terminator.is_none() {
                    b.branch(bb_end, vec![]);
                }
            } else if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_end, vec![]);
            }
            b.set_block(bb_end);

            // Tear down the associate-name alias and restore any shadowed
            // local of the same name (mirrors the save above).
            if let Some(name) = assoc_name.as_ref() {
                let key = name.to_lowercase();
                ctx.locals.remove(&key);
                if let Some(saved) = saved_alias {
                    ctx.locals.insert(key, saved);
                }
            }
        }
5696
5697 _ => {} // remaining statements (FORALL, WHERE, etc.) deferred
5698 }
5699 }
5700